Example #1
    def __fetch_requests(self, task_entry, shell_groups, global_data):
        p = Pattern(task_entry, self.get_current_shell(task_entry,
                                                       shell_groups),
                    global_data)

        timeout = task_entry.get('timeout', 30)
        urls = p.convertPattern('url')
        s = requests.Session()
        headers = task_entry.get('headers', [])
        task_entry['datas'] = []
        if not urls:
            return task_entry
        for url in urls:
            self.logger.info("fetching " + url)
            data = ""
            if not url:
                # do not fetch null url
                continue
            try:
                response = s.get(url, timeout=timeout, headers=headers)
                if response.status_code != 200:
                    self.logger.error("fetch " + url + " failed with code " +
                                      str(response.status_code))
                data = response.text
            except requests.exceptions.RequestException as exception:
                self.logger.error("fetch " + url + " failed with a network error: " +
                                  str(exception))
            task_entry['datas'].append(data)
        return task_entry
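
The requests-based fetcher above keeps 'datas' aligned with the url list: every non-empty URL gets exactly one entry, an empty string when the request fails. A minimal standalone sketch of the same pattern, assuming plain requests and the standard logging module (the function name fetch_all and its signature are illustrative, not part of the original class):

import logging
import requests

logger = logging.getLogger(__name__)

def fetch_all(urls, timeout=30, headers=None):
    # Fetch each URL with one shared session; failures yield "" so the
    # result list stays the same length as the list of non-empty URLs.
    session = requests.Session()
    datas = []
    for url in urls:
        if not url:
            continue  # skip empty URLs, as the original loop does
        logger.info("fetching %s", url)
        data = ""
        try:
            response = session.get(url, timeout=timeout, headers=headers or {})
            if response.status_code != 200:
                logger.error("fetch %s failed with code %s",
                             url, response.status_code)
            data = response.text
        except requests.exceptions.RequestException as exc:
            logger.error("fetch %s failed: %s", url, exc)
        datas.append(data)
    return datas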
Example #2
    def __fetch_requests(self, task_entry, shell_groups):
        p = Pattern(task_entry, self.get_current_shell(task_entry, shell_groups))

        timeout = task_entry.get('timeout', 30)
        urls = p.convertPattern('url')
        s = requests.Session()
        headers = task_entry.get('headers', [])
        task_entry['datas'] = []
        if not urls:
            return task_entry
        for url in urls:
            self.logger.info("fetching " + url)
            data = ""
            if not url:
                # do not fetch null url
                continue
            try:
                response = s.get(url, timeout=timeout, headers=headers)
                if response.status_code != 200:
                    self.logger.error("fetch " + url + " failed with code " + str(response.status_code))
                data = response.text
            except requests.exceptions.RequestException as exception:
                self.logger.error("fetch " + url + " failed with a network error: " + str(exception))
            task_entry['datas'].append(data)
        return task_entry
Example #3
    def __fetch_webkit(self, task_entry, shell_groups, global_data):
        p = Pattern(task_entry, self.get_current_shell(task_entry,
                                                       shell_groups),
                    global_data)

        task_entry['datas'] = []

        urls = p.convertPattern('url')
        timeout = task_entry.get('timeout', 30)
        delay = task_entry.get('delay', 0)

        for url in urls:
            self.logger.info("fetching " + url)
            data = ""
            if not url:
                # do not fetch null url
                continue
            browser = cwebbrowser.CWebBrowser()
            browser.setHeaders(task_entry.get('headers', []))
            # browser.show()
            try:
                browser.load(url=url, load_timeout=timeout, delay=delay)
            except cwebbrowser.Timeout:
                self.logger.error("fetch " + url + " timed out")
            except Exception as exception:
                self.logger.error("fetch " + url + " error: " + str(exception))

            else:
                html = browser.html()
                if html:
                    html = html.encode('utf-8')
                    data = html
                else:
                    self.logger.error("fetch " + url +
                                      " failed with no response")
            task_entry['datas'].append(data)

            browser.close()
        return task_entry
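
Note that this webkit variant stores html.encode('utf-8') (bytes) in task_entry['datas'], while the requests-based fetchers store response.text (str), so downstream consumers may see mixed types. If a single type is wanted, a small normalization helper is enough; a minimal sketch (the name to_text is illustrative, not part of the original code):

def to_text(data, encoding='utf-8'):
    # Normalize a fetched page to str so 'datas' holds one type throughout.
    if isinstance(data, bytes):
        return data.decode(encoding, errors='replace')
    return data or ""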
Example #4
    def __fetch_requests(self, task_entry, shell_groups):
        p = Pattern(task_entry, self.get_current_shell(task_entry, shell_groups))

        timeout = task_entry.get('timeout', 30)
        urls = p.convertPattern('url')
        s = requests.Session()
        headers = task_entry.get('headers', [])
        task_entry['datas'] = []
        task_entry['urls'] = []
        if not urls:
            return task_entry
        for url in urls:
            self.logger.info("fetching " + url)
            data = ""
            if not url:
                # do not fetch null url
                continue
            # rp_count appears to be a per-URL repeat/retry counter consumed
            # by __get_url_content; it is reset before each fetch.
            self.rp_count = 1
            data = self.__get_url_content(url, timeout, headers)
            
            task_entry['datas'].append(data)
            task_entry['urls'].append(url)
        return task_entry
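
Unlike Example #1, this variant delegates the HTTP call to self.__get_url_content, which is not shown here; the self.rp_count = 1 assignment suggests a per-URL repeat/retry counter. One possible shape for such a helper, purely as a hedged sketch (the retry limit, signature, and fallback value are assumptions, not taken from the original code):

import requests

def get_url_content(url, timeout=30, headers=None, max_retries=3):
    # Try the request up to max_retries times; return "" if every attempt
    # fails, mirroring the empty-string placeholder used by the fetch loops.
    session = requests.Session()
    for attempt in range(1, max_retries + 1):
        try:
            response = session.get(url, timeout=timeout, headers=headers or {})
            if response.status_code == 200:
                return response.text
        except requests.exceptions.RequestException:
            pass  # network error; fall through to the next attempt
    return ""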
Example #5
    def __fetch_webkit(self, task_entry, shell_groups, global_data):
        p = Pattern(task_entry, self.get_current_shell(task_entry, shell_groups), global_data)

        task_entry['datas'] = []

        urls = p.convertPattern('url')
        timeout = task_entry.get('timeout', 30)
        delay = task_entry.get('delay', 0)

        for url in urls:
            self.logger.info("fetching " + url)
            data = ""
            if not url:
                # do not fetch null url
                continue
            browser = cwebbrowser.CWebBrowser()
            browser.setHeaders(task_entry.get('headers', []))
            # browser.show()
            try:
                browser.load(url=url, load_timeout=timeout, delay=delay)
            except cwebbrowser.Timeout:
                self.logger.error("fetch " + url + " timed out")
            except Exception as exception:
                self.logger.error("fetch " + url + " error: " + str(exception))

            else:
                html = browser.html()
                if html:
                    html = html.encode('utf-8')
                    data = html
                else:
                    self.logger.error("fetch " + url + " failed with no response")
            task_entry['datas'].append(data)

            browser.close()
        return task_entry