Code Example #1
    def validate(self, req: Request):
        # dispatch on the key type (query string, URL path, or form field),
        # then run that key's own validation on the extracted value
        for key in self.keys:
            typeKey = type(key)
            if typeKey is KeyQuery:
                result = req.query(key.key)
            elif typeKey is KeyPath:
                result = req.path(key.key)
            else:
                result = req.form(key.key)
            valid = key.validate(result)
            if valid is not None:
                return valid
        return None
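
For context, a minimal self-contained sketch of the same dispatch-and-validate pattern. The KeyQuery and Validator classes below are simplified stand-ins for illustration, not the project's actual API:

# Hypothetical, simplified version of the key-based validator above.
class KeyQuery:
    def __init__(self, key, required=True):
        self.key = key
        self.required = required

    def validate(self, value):
        # return an error message, or None when the value is acceptable
        if self.required and value is None:
            return "missing query parameter: %s" % self.key
        return None


class Validator:
    def __init__(self, *keys):
        self.keys = keys

    def validate(self, query_params):
        # the first failing key short-circuits, mirroring validate() above
        for key in self.keys:
            error = key.validate(query_params.get(key.key))
            if error is not None:
                return error
        return None


validator = Validator(KeyQuery("email"), KeyQuery("page", required=False))
print(validator.validate({"page": "2"}))  # missing query parameter: email
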
Code Example #2
File: calendar.py  Project: ChangProg/DiscordMackBot
    def getCalendarMonthly(self, *args, **kwargs):
        # fetch the monthly calendar view from the Moodle web-service API
        month = Request.get(self,
                            function='core_calendar_get_calendar_monthly_view',
                            year='2020',
                            month='9')['weeks']

        data = []

        # keep only assignment and BigBlueButton events
        for week in month:
            for day in week['days']:
                for event in day['events']:
                    if event['modulename'] in ('assign', 'bigbluebuttonbn'):
                        data.append([
                            event['course']['fullname'],
                            event['name'],
                            event['description']
                            if event['description'] != '' else
                            'Descrição não disponível',  # "Description not available"
                            'Aula ao vivo - BigBlueButton'  # "Live class - BigBlueButton"
                            if event['modulename'] == 'bigbluebuttonbn' else
                            'Tarefa para entregar via Moodle',  # "Assignment due via Moodle"
                            # drop the last eight characters of the popover title
                            day['popovertitle'][:len(day['popovertitle']) - 8],
                            # last two chars of the hour field, then the minutes: "HH:MM"
                            event['formattedtime'].split(':')[0][-2:] + ':' +
                            event['formattedtime'].split(':')[1][:2],
                            event['url'],
                        ])

        Export.writeFunc(self, name='events.csv', data=data)
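
The [::-1][0:2][::-1] chain in the original (rewritten as [-2:] above) just takes the last two characters of the hour field; a quick standalone check:

# The reverse/slice/reverse chain equals a plain negative slice.
for formatted in ("9:30:00", "14:05:00"):
    hour = formatted.split(':')[0]
    assert hour[::-1][0:2][::-1] == hour[-2:]
    print(hour[-2:] + ':' + formatted.split(':')[1][:2])  # -> 9:30, then 14:05
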
Code Example #3
    async def _get_html(cls, html, url, **kwargs):
        if html is None and not url:
            raise ValueError("html(url or html_etree) is expected")
        if not html:
            # no raw HTML supplied: fetch it, optionally throttled by a semaphore
            sem = kwargs.pop('sem', None)
            request = Request(url, **kwargs)
            if sem:
                _, response = await request.fetch_callback(sem=sem)
            else:
                response = await request.fetch()
            html = response.html
        return etree.HTML(html)
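
The final line assumes lxml's etree.HTML, which parses an HTML string into an element tree; a standalone illustration:

from lxml import etree  # the etree used by the snippet above

root = etree.HTML("<html><body><p>hello</p></body></html>")
print(root.xpath("//p/text()"))  # ['hello']
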
Code Example #4
    def generate(self):
        """
        Generate a new request arrival.
        """
        if self.is_debug:
            print("\t\tgenerated new request")
        # exponential inter-arrival (alpha) and service (beta) times
        alpha = rng.exponential(scale=1 / self.lambd)
        beta = rng.exponential(scale=1 / self.mu)
        # alpha = random.expovariate(self.lambd)
        # beta = random.expovariate(self.mu)
        self.next_arrival_time += alpha
        self.request_id += 1
        self.generated_count += 1
        return Request(self.request_id, self.next_arrival_time, alpha, beta)
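
For intuition: rng.exponential(scale=1 / self.lambd) draws inter-arrival times with mean 1 / lambd, the standard way to drive a Poisson arrival process. A quick check, assuming rng is a NumPy Generator, as the commented random.expovariate alternatives suggest:

import numpy as np

rng = np.random.default_rng(42)
lambd = 2.0  # arrival rate: two requests per unit time

samples = rng.exponential(scale=1 / lambd, size=100_000)
print(samples.mean())  # close to 0.5 == 1 / lambd
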
Code Example #5
    def __init__(self, mode, lambd, mu, theta, servers_count,
                 core_servers_count, L, H, simulation_time, max_queue_size,
                 is_debug):
        self.lambd = lambd
        self.mu = mu
        self.theta = theta
        self.servers_count = int(servers_count)
        self.core_servers_count = int(core_servers_count)
        self.L = int(L)
        self.H = int(H)
        self.simulation_time = simulation_time
        self.is_debug = is_debug
        self.auto_continue = not self.is_debug
        self.mode = mode

        self.flow = Flow(lambd, mu, is_debug)
        self.queue = Queue(int(max_queue_size), is_debug)

        self.generated_request = Request(-1, 0, 0, 0)

        self.system_state = States.IDLE
        self.prev_system_state = States.IDLE

        self.served_count = 0
        self.served_sum_w = 0
        self.served_sum_wq = 0

        self.servers = []
        self.generated_requests = []

        self.time = 0
        self.prev_time = 0
        self.up_down_time = 0
        self.prev_up_down_time = 0

        self.up_down_count = 0
        self.up_down_mean = 0

        self.state_time = dict.fromkeys(States.get_States_list(States), 0)
        self.state_count = dict.fromkeys(States.get_States_list(States), 0)

        # the first core_servers_count servers are marked as core servers
        for i in range(self.servers_count):
            self.servers.append(Server(i, i < self.core_servers_count, is_debug))
Code Example #6
File: spider.py  Project: Snailwicked/spider_manage
    async def start_master(self):
        # enqueue one request per start URL, then let two workers drain the queue
        for url in self.start_urls:
            request_ins = Request(
                url=url,
                callback=self.parse,
                headers=getattr(self, 'headers', None),
                load_js=getattr(self, 'load_js', False),
                metadata=getattr(self, 'metadata', {}),
                request_config=getattr(self, 'request_config'),
                request_session=getattr(self, 'request_session', None),
                res_type=getattr(self, 'res_type', 'text'),
                **getattr(self, 'kwargs', {}))
            self.request_queue.put_nowait(self.handle_request(request_ins))
        workers = [
            asyncio.ensure_future(self.start_worker()) for _ in range(2)
        ]
        await self.request_queue.join()
        await self.stop(SIGINT)
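
Example #6 leans on the standard asyncio.Queue producer/worker pattern: producers call put_nowait, workers call task_done after each item, and join() unblocks once everything queued has been processed. A self-contained sketch of that pattern, with plain strings standing in for the spider's Request objects:

import asyncio


async def worker(queue):
    while True:
        job = await queue.get()
        print("handled", job)
        queue.task_done()  # join() returns once every queued job is marked done


async def main():
    queue = asyncio.Queue()
    for url in ("http://a.example", "http://b.example"):
        queue.put_nowait(url)
    workers = [asyncio.ensure_future(worker(queue)) for _ in range(2)]
    await queue.join()
    for w in workers:
        w.cancel()  # workers loop forever; cancel them once the queue is drained


asyncio.run(main())
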
Code Example #7
    def callback(*args, **kwargs):
        f_self = self.__router_cache[f_class_name]
        req = Request(kwargs, request)
        res = Response

        # app-level interceptor runs first
        error = self.on_intercept(req, res)
        if error is not None:
            return error

        # then the controller's own interceptor, if any
        if isinstance(f_self, Controller):
            error = f_self.on_intercept(req, res)
            if error is not None:
                return error

        # finally request validation, before the handler itself
        if validate is not None:
            error = validate.validate(req)
            if error is not None:
                return res.bad_request(error)

        return f(f_self, req, res)
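
The callback above is a chain of short-circuiting interceptors: the first stage to return a non-None error stops the request before the handler runs. The same control flow in isolation (the interceptor and handler here are illustrative, not the framework's API):

def run_chain(req, interceptors, handler):
    # the first interceptor to return an error short-circuits the handler
    for intercept in interceptors:
        error = intercept(req)
        if error is not None:
            return error
    return handler(req)


def require_auth(req):
    return None if "token" in req else "unauthorized"


print(run_chain({"token": "abc"}, [require_auth], lambda r: "ok"))  # ok
print(run_chain({}, [require_auth], lambda r: "ok"))  # unauthorized
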
Code Example #8
File: connection.py  Project: zhengchuan-bot/py_zinx
    def start_reader(self, deal_conn):
        print("reader is running...")
        print(self.conn, self.conn_id, self.addr)
        import struct  # used below; imported locally alongside DataPack
        from core.datapack import DataPack

        while True:
            try:
                # read the fixed-length header first, then the payload it announces
                dp = DataPack()
                header_data = self.conn.recv(dp.get_head_len())
                msg = dp.unpack(header_data)
                if msg.get_data_len() > 0:
                    binary_data = self.conn.recv(msg.get_data_len())
                    data = struct.unpack("%ds" % msg.get_data_len(),
                                         binary_data)[0].decode("utf-8")
                    msg.set_data(data)
                    print(data)
                    req = Request(deal_conn, msg)
                    self.deal_router(req)
            except Exception as e:
                print(e)
                self.stop()
                break
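
Example #8 implements length-prefixed framing: read a fixed-size header, learn the payload length, then read the payload. Note that socket.recv(n) may return fewer than n bytes, so robust readers loop until the full frame arrives. A self-contained sketch of the framing itself with struct (the 4-byte big-endian length header is an assumption, not necessarily DataPack's real wire layout):

import struct

HEAD_FMT = ">I"  # assumed: 4-byte big-endian payload length
HEAD_LEN = struct.calcsize(HEAD_FMT)


def pack(payload: bytes) -> bytes:
    # the header announces the payload length; the payload follows it
    return struct.pack(HEAD_FMT, len(payload)) + payload


def unpack(frame: bytes) -> bytes:
    (data_len,) = struct.unpack(HEAD_FMT, frame[:HEAD_LEN])
    return frame[HEAD_LEN:HEAD_LEN + data_len]


frame = pack(b"hello zinx")
print(unpack(frame).decode("utf-8"))  # hello zinx
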
Code Example #9
    def run(self, start_urls, scopes=None):
        start_url = start_urls[0]
        self.start()
        start_time = time.time()
        # build the scan scope from the start URL, plus any comma-separated extras
        scope = Scope(start_url, options=self.options.scope_options)
        if scopes:
            scope.scopes = [x.strip() for x in scopes.split(',')]
        self.db.start(start_url, scope.host)
        c = None
        s = None
        loader = None

        self.logger.debug("Parsing scan options")
        login = LoginAction(logger=self.logger.getEffectiveLevel())
        pre_run = login.pre_parse(self.options)
        if pre_run:
            self.scan_cookies = dict(login.session_obj.cookies)
        scanoptions = []
        if self.options.custom_options:
            scan_vars = self.options.custom_options.split(',')
            for v in scan_vars:
                opt = v.strip()
                scanoptions.append(opt)
                self.logger.debug("Enabled option %s" % opt)
        if self.options.scanner or self.options.allin:
            s = ScriptEngine(options=scanoptions,
                             logger=self.logger.getEffectiveLevel(),
                             database=self.db)

        if self.options.use_adv_scripts or self.options.allin:
            loader = modules.CustomModuleLoader(
                options=scanoptions,
                logger=self.logger.getEffectiveLevel(),
                database=self.db,
                scope=scope)

            loader.sslverify = self.options.sslverify
            loader.headers = login.headers
            loader.cookies = self.scan_cookies

        todo = []

        c = Crawler(base_url=start_url, logger=self.logger.getEffectiveLevel())
        for login_header in login.headers:
            c.headers[login_header] = login.headers[login_header]
        if self.options.use_crawler or self.options.allin:
            if pre_run:
                c.login = True
                # set cookies from Login module
                cookies = dict(login.session_obj.cookies)
                if cookies:
                    self.logger.debug(
                        "Setting crawler cookies from login module: %s" %
                        str(cookies))
                    c.cookie.append(cookies)
            c.thread_count = self.thread_count
            c.max_urls = int(self.options.maxurls)
            c.scope = scope
            if self.options.user_agent:
                c.headers = {'User-Agent': self.options.user_agent}
            if len(start_urls) != 1:
                for extra_url in start_urls[1:]:
                    c.parse_url(extra_url, extra_url)
            # discovery scripts, pre-run scripts and advanced modules
            if self.options.scanner or self.options.allin:
                self.logger.info("Starting filesystem discovery (pre-crawler)")
                new_links = s.run_fs(start_url)

                for newlink in new_links:
                    c.parse_url(newlink[0], newlink[0])
                if self.options.use_adv_scripts or self.options.allin:
                    self.logger.info("Running custom scripts (pre-crawler)")
                    links = loader.base_crawler(start_url)
                    for link in links:
                        self.logger.debug("Adding link %s from post scripts" %
                                          link)
                        c.parse_url(link, link)

            if self.options.wl_file:
                wf = WebFinder(url=start_url,
                               logger=self.logger.getEffectiveLevel(),
                               word_list=self.options.wl_file,
                               append=self.options.wl_ext,
                               ok_status_codes=self.options.wl_codes,
                               invalid_text=self.options.wl_404,
                               threads=self.thread_count)
                for wf_result in wf.output:
                    c.parse_url(wf_result, start_url)

            self.logger.info("Starting Crawler")
            c.run_scraper()
            self.logger.debug("Cookies set during scan: %s" %
                              (str(c.cookie.cookies)))
            self.scan_cookies = c.cookie.cookies

            self.logger.info("Creating unique link/post data list")
            todo = uniquinize(c.scraped_pages)
        else:
            todo = [[start_url, None]]

        if self.options.driver:
            self.logger.info("Running GhostDriver")

            m = Mefjus(logger=self.logger.getEffectiveLevel(),
                       driver_path=self.options.driver_path,
                       use_proxy=self.options.proxy,
                       proxy_port=self.options.proxy_port,
                       use_https=scope.is_https,
                       show_driver=self.options.show_driver
                       or self.options.interactive)
            results = m.run(todo, interactive=self.options.interactive)
            for res in results:
                if not scope.in_scope(res[0]):
                    self.logger.debug("IGNORE %s.. out-of-scope" % res)
                    continue
                if c.get_filetype(res[0]) in c.blocked_filetypes:
                    self.logger.debug("IGNORE %s.. bad file-type" % res)
                    continue
                if res in c.scraped_pages:
                    self.logger.debug("IGNORE %s.. exists" % res)
                    continue
                todo.append(res)
                self.logger.debug("QUEUE %s" % res)
            self.logger.info("Creating unique link/post data list")
            old_num = len(todo)
            todo = uniquinize(todo)
            self.logger.debug(
                "WebDriver discovered %d more url/post data pairs" %
                (len(todo) - old_num))

        scanner_obj = None
        if self.options.scanner or self.options.allin:
            self.logger.info("Starting scan sequence")
            if len(todo) < self.thread_count:
                # no point running more scanner threads than there are pages
                self.thread_count = len(todo)
            scanner_obj = scanner.Scanner(
                logger=self.logger.getEffectiveLevel(),
                script_engine=s,
                thread_count=self.thread_count)
            scanner_obj.copy_engine = self.options.optimize
            for page in todo:
                url, data = page
                req = Request(url,
                              data=data,
                              agent=self.options.user_agent,
                              headers=login.headers,
                              cookies=self.scan_cookies)
                req.run()
                scanner_obj.queue.put(req)
                scanner_obj.logger.debug("Queued %s %s" % (url, data))
            scanner_obj.run()

        post_results = []
        if self.options.use_adv_scripts or self.options.allin:
            self.logger.info("Running post scripts")
            post_results = loader.run_post(todo, cookies=self.scan_cookies)
        cms_results = None
        if self.options.cms_enabled or self.options.allin:
            cms_loader = ext.libcms.cms_scanner_core.CustomModuleLoader(
                log_level=self.logger.getEffectiveLevel())
            cms_results = cms_loader.run_scripts(start_url)
            if cms_results:
                for cms in cms_results:
                    for cms_result in cms_results[cms]:
                        self.db.put(result_type="CMS Script",
                                    script=cms,
                                    severity=0,
                                    text=cms_result)

        webapp_results = None
        if self.options.webapp_enabled or self.options.allin:
            webapp_loader = WebAppModuleLoader(
                log_level=self.logger.getEffectiveLevel())
            webapp_loader.load_modules()
            webapp_results = webapp_loader.run_scripts(
                start_url,
                scope=scope,
                cookies=self.scan_cookies,
                headers=login.headers)
            if webapp_results:
                for webapp in webapp_results:
                    for webapp_result in webapp_results[webapp]:
                        self.db.put(result_type="WebApp Script",
                                    script=webapp,
                                    severity=0,
                                    text=json.dumps(webapp_result))
        meta = {}
        if self.options.msf:
            monster = metamonster.MetaMonster(
                log_level=self.logger.getEffectiveLevel())
            creds = self.options.msf_creds.split(':')
            monster.username = creds[0]
            monster.password = creds[1]
            monster.host = self.options.msf_host
            monster.port = self.options.msf_port
            monster.ssl = self.options.msf_ssl
            monster.endpoint = self.options.msf_uri
            monster.should_start = self.options.msf_autostart

            monster.connect(start_url)
            if monster.client and monster.client.is_working:
                monster.get_exploits()
                monster.detect()
                queries = monster.create_queries()
                monster.run_queries(queries)
                meta = monster.results
                for working in meta['working']:
                    msf_module, msf_output = working
                    self.db.put(result_type="Metasploit",
                                script=msf_module,
                                severity=3,
                                text=json.dumps(msf_output))

        scan_tree = {
            'start': start_time,
            'end': time.time(),
            'scope': scope.host,
            'starturl': start_url,
            'crawled': len(c.scraped_pages) if c else 0,
            'scanned': len(todo) if self.options.scanner else 0,
            'results': scanner_obj.script_engine.results if scanner_obj else [],
            'metasploit': meta,
            'cms': cms_results,
            'webapps': webapp_results,
            'post': post_results if self.options.use_adv_scripts else []
        }

        self.db.end()

        if self.options.outfile:
            with open(self.options.outfile, 'w') as f:
                f.write(json.dumps(scan_tree))
                self.logger.info("Wrote results to %s" % self.options.outfile)
Code Example #10
    def on_intercept(self, req: Request, res: Response):
        # reject any request that arrives without an Authorization header
        if req.header("Authorization") is None:
            return res.unauthor("Token fail")
        return None
Code Example #11
File: user.py  Project: quangpv/core-restful-python
    def post_profile(self, req: Request, res: Response):
        return res.success("{}'s post profile".format(req.authorization()))
Code Example #12
File: user.py  Project: quangpv/core-restful-python
    def get_profile(self, req: Request, res: Response):
        return res.success("{}'s get profile and {}".format(
            req.authorization(), self.app_cache().nameCached))
Code Example #13
File: user.py  Project: quangpv/core-restful-python
    def profile(self, req: Request, res: Response):
        # dispatch to the GET or POST handler based on the request method
        return req.on(get=self.get_profile, post=self.post_profile)
Code Example #14
    def registry(self, req: Request, res: Response):
        return res.success("{}'s profile registered".format(
            req.form("email")))