def get_cves(self):
    """Fetch the security-bug list page and parse it into CVE objects.

    :return: list of valid CVE objects; empty on HTTP failure.
    """
    cves = []
    response = requests.get(
        self.url_list,
        headers=self.headers(),
        params={'act': 'sec_bug'},
        timeout=self.timeout,
    )
    if response.status_code != 200:
        log.warn('获取 [%s] 威胁情报失败: [HTTP Error %i]' % (self.NAME_CH(), response.status_code))
        return cves
    html = response.content.decode(self.charset)
    # Narrow the search to the vulnerability list container first.
    vul_list = re.findall(r'<div class="vulbar">(.*?)</div>', html, re.DOTALL)
    if vul_list:
        entries = re.findall(r"<li><span>(.*?)</span> <a href='/vulndb/(\d+)'>(.*?)</a>", vul_list[0])
        for entry in entries:
            candidate = self.to_cve(entry)
            if candidate.is_vaild():
                cves.append(candidate)
                # log.debug(candidate)
    return cves
def get_cves(self, limit=5):
    """Query the paged vulnerability list API and convert entries to CVE objects.

    :param limit: number of entries requested per page.
    :return: list of valid CVE objects; empty on HTTP failure.
    """
    query = {
        'title': '',
        'cve': '',
        'cnvd': '',
        'cnnvd': '',
        'order': 'update',
        'has_poc': '',
        'has_repair': '',
        'bug_level': '',
        'page': 1,
        'per-page': limit,
    }
    response = requests.get(self.url_list, headers=self.headers(), params=query, timeout=self.timeout)
    if response.status_code != 200:
        log.warn('获取 [%s] 威胁情报失败: [HTTP Error %i]' % (self.NAME_CH(), response.status_code))
        return []
    cves = []
    for item in json.loads(response.text).get('data').get('items'):
        candidate = self.to_cve(item)
        if candidate.is_vaild():
            cves.append(candidate)
            # log.debug(candidate)
    return cves
def get_cves(self, limit=5):
    """Query the paged vulnerability list API and convert entries to CVE objects.

    :param limit: number of entries requested per page.
    :return: list of valid CVE objects; empty on HTTP failure.
    """
    query = {
        "title": "",
        "cve": "",
        "cnvd": "",
        "cnnvd": "",
        "order": "update",
        "has_poc": "",
        "has_repair": "",
        "bug_level": "",
        "page": 1,
        "per-page": limit,
    }
    response = requests.get(self.url_list, headers=self.headers, params=query, timeout=self.timeout)
    if response.status_code != 200:
        log.warn("获取 [%s] 威胁情报失败: [HTTP Error %i]" % (self.name_ch, response.status_code))
        return []
    cves = []
    for item in json.loads(response.text).get("data").get("items"):
        candidate = self.to_cve(item)
        if candidate.is_vaild():
            cves.append(candidate)
            # log.debug(candidate)
    return cves
def get_cves(self, limit=10):
    """POST a paged query to the intel API and parse the JSON response.

    :param limit: page size requested from the API.
    :return: list of valid CVE objects; empty on HTTP failure.
    """
    payload = 'query={ "page": 1, "page_count": %d }' % limit
    # Copy the shared headers, then set the form content type for this POST.
    merged_headers = dict(self.headers)
    merged_headers["Content-Type"] = "application/x-www-form-urlencoded; charset=UTF-8"
    response = requests.post(
        self.url_list,
        headers=merged_headers,
        data=payload,
        timeout=self.timeout,
    )
    cves = []
    if response.status_code != 200:
        log.warn("获取 [%s] 威胁情报失败: [HTTP Error %i]" % (self.name_ch, response.status_code))
        return cves
    for entry in response.json().get("intgs"):
        candidate = self.to_cve(entry)
        if candidate.is_vaild():
            cves.append(candidate)
            # log.debug(candidate)
    return cves
def main(help, github_token, proxy):
    """Entry point: rebuild README.md sections from repository activity data.

    :param help: when truthy, print usage info and exit.
    :param github_token: GitHub API token used to query repositories.
    :param proxy: proxy configuration forwarded to the article builder.
    """
    if help:
        log.info(help_info())
        return
    log.info("正在读取 [README.md] ...")
    with open(README_PATH, 'r', encoding=CHARSET) as file:
        readme = file.read()
    log.info("正在读取所有项目仓库的活动数据 ...")
    repos = []
    repos.extend(_git.query_repos(github_token, 'master'))
    repos.extend(_git.query_repos(github_token, 'main'))  # 兼容主分支为 main 情况
    # Sort newest-push-first by the parsed push timestamp.
    repos.sort(
        reverse=True,
        key=lambda repo: int(
            time.mktime(
                datetime.datetime.strptime(
                    repo.pushtime, "%Y-%m-%d %H:%M:%S").timetuple())))
    if not repos:
        log.warn("获取项目仓库数据失败")
        return
    log.info("获得 [%i] 个项目仓库的数据" % len(repos))
    log.info("正在构造 [时间分配] 数据 ...")
    try:
        data_wt = weektime.build(repos)
        readme = reflash(readme, data_wt, 'weektime')
        log.info(data_wt)
    except Exception:
        # Fix: narrowed from a bare `except:` which also swallowed
        # SystemExit and KeyboardInterrupt.
        log.error("构造数据异常")
    log.info("正在构造 [最近活跃] 数据 ...")
    try:
        data_ac = activity.build(repos)
        readme = reflash(readme, data_ac, 'activity')
        log.info(data_ac)
    except Exception:
        log.error("构造数据异常")
    log.info("正在构造 [最新文章] 数据 ...")
    try:
        data_ar = article.build(github_token, proxy)
        readme = reflash(readme, data_ar, 'article')
        log.info(data_ar)
    except Exception:
        log.error("构造数据异常")
    log.info("正在更新 [README.md] ...")
    with open(README_PATH, 'w', encoding=CHARSET) as file:
        file.write(readme)
    log.info("已更新 [README.md]")
def verify(image_path):
    """
    Verifies if a path pointing to an actual image.

    :param image_path: Image path to check.
    :return: True if it's an image, False otherwise.
    """
    try:
        img = Image.open(image_path)
        try:
            img.verify()
        finally:
            img.close()
    except Exception as e:
        log.warn('Path [{}] does not point to an image: [{}]'.format(
            image_path, e))
        return False
    return True
def connect(self, cursor_factory=None):
    # (Re)establish a PostgreSQL connection, tunnelling over SSH when the
    # configured host is remote, retrying with exponential backoff.
    #
    # NOTE(review): `xrange` implies this is Python 2 code — confirm target
    # interpreter before porting.
    #
    # :param cursor_factory: passed through to pgres.connect (e.g. a
    #     psycopg2 cursor factory); presumably optional — verify with callers.

    # Close any previously opened tunnel before creating a new one.
    if self.sshtunnel is not None:
        self.sshtunnel.close()
    if self.host != "localhost" and self.host != "127.0.0.1" and self.host != "":
        # Remote host: open an SSH tunnel (port 22) forwarding a local port
        # to 127.0.0.1:self.port on the remote machine, then connect locally.
        self.sshtunnel = SSHTunnelForwarder(
            (self.host, 22),
            remote_bind_address=("127.0.0.1", self.port))
        self.sshtunnel.start()
        # conn = pgres.connect(database="nnexp", port=server.local_bind_port, host="localhost")
        connection_info = {
            "database": self.dbname,
            "user": self.username,
            "password": self.password,
            "host": "localhost",
            "port": self.sshtunnel.local_bind_port,
        }
    else:
        # Local (or empty) host: connect directly without a tunnel.
        connection_info = {
            "database": self.dbname,
            "user": self.username,
            "password": self.password,
            "host": self.host,
            "port": self.port,
        }
    # print connection_info
    n_retrials = 10
    # Retry loop: waits 0.02 * 2**i seconds after each OperationalError.
    for i in xrange(n_retrials):
        try:
            self._psql_conn = pgres.connect(cursor_factory=cursor_factory,
                                            **connection_info)
        except OperationalError:
            sleep_time = 0.02 * (2**i)
            warn("Cound not connect to DB ({}), retrying in {} seconds...".
                 format(connection_info, sleep_time))
            time.sleep(sleep_time)
        else:
            break
    else:
        # for/else: reached only if every attempt raised OperationalError.
        raise Exception(
            "Could not connect to the DB after {} trials".format(
                n_retrials))
def upload(correlation_id, output_image_path):
    """
    Uploads an image given its path to an accessible URL.

    Retries up to 3 times with a linear backoff (1s, 2s) between attempts.

    :param correlation_id: Unique identifier for specific request.
    :param output_image_path: Image path.
    :return: Accessible image URL, or None if every attempt failed.
    """
    for attempt in range(3):
        try:
            return storage.upload_image(correlation_id, output_image_path)
        except Exception as e:
            # Fix: log the failure immediately (previously the sleep ran
            # first), and don't sleep after the final attempt.
            log.warn(
                f'Error uploading image [{output_image_path}] to Storage: [{e}]'
            )
            if attempt < 2:
                time.sleep(attempt + 1)
    return None
def main(help, github_token, proxy):
    """Entry point: rebuild README.md sections from repository activity data.

    :param help: when truthy, print usage info and exit.
    :param github_token: GitHub API token used to query repositories.
    :param proxy: proxy configuration forwarded to the article builder.
    """
    if help:
        log.info(help_info())
        return
    log.info("正在读取 [README.md] ...")
    with open(README_PATH, 'r', encoding=CHARSET) as file:
        readme = file.read()
    log.info("正在读取所有项目仓库的活动数据 ...")
    repos = _git.query_repos(github_token)
    if not repos:
        log.warn("获取项目仓库数据失败")
        return
    log.info("获得 [%i] 个项目仓库的数据" % len(repos))
    log.info("正在构造 [时间分配] 数据 ...")
    try:
        data_wt = weektime.build(repos)
        readme = reflash(readme, data_wt, 'weektime')
        log.info(data_wt)
    except Exception:
        # Fix: narrowed from a bare `except:` which also swallowed
        # SystemExit and KeyboardInterrupt.
        log.error("构造数据异常")
    log.info("正在构造 [最近活跃] 数据 ...")
    try:
        data_ac = activity.build(repos)
        readme = reflash(readme, data_ac, 'activity')
        log.info(data_ac)
    except Exception:
        log.error("构造数据异常")
    log.info("正在构造 [最新文章] 数据 ...")
    try:
        data_ar = article.build(github_token, proxy)
        readme = reflash(readme, data_ar, 'article')
        log.info(data_ar)
    except Exception:
        log.error("构造数据异常")
    log.info("正在更新 [README.md] ...")
    with open(README_PATH, 'w', encoding=CHARSET) as file:
        file.write(readme)
    log.info("已更新 [README.md]")
def get_cves(self):
    """Scrape the vulnerability table rows from the page into CVE objects.

    :return: list of valid CVE objects; empty on HTTP failure.
    """
    response = requests.get(self.url, headers=self.headers, timeout=self.timeout)
    if response.status_code != 200:
        log.warn("获取 [%s] 威胁情报失败: [HTTP Error %i]" % (self.name_ch, response.status_code))
        return []
    rows = re.findall(r"<tr>(.*?)</tr>", response.text, re.DOTALL)
    cves = []
    for row in rows:
        candidate = self.to_cve(row)
        if candidate.is_vaild():
            cves.append(candidate)
            # log.debug(candidate)
    return cves
def get_cves(self, limit=6):
    """Scrape CNNVD ids from the list page and convert them to CVE objects.

    :param limit: maximum number of valid CVEs to return. Fix: this
        parameter was previously accepted but silently ignored; it is now
        enforced, matching the other feed sources.
    :return: list of valid CVE objects; empty on HTTP failure.
    """
    response = requests.get(self.url_list, headers=self.headers(), timeout=self.timeout)
    response.encoding = 'utf-8'
    cves = []
    if response.status_code == 200:
        ids = re.findall(r'ldxqById\.tag\?CNNVD=([^"]+)">', response.text)
        for vul_id in ids:
            if len(cves) >= limit:
                break
            cve = self.to_cve(vul_id)
            if cve.is_vaild():
                cves.append(cve)
                # log.debug(cve)
    else:
        log.warn('获取 [%s] 威胁情报失败: [HTTP Error %i]' % (self.NAME_CH(), response.status_code))
    return cves
def get_cves(self, limit=10):
    """POST a paged query to the intel API and parse the JSON response.

    :param limit: page size requested from the API.
    :return: list of valid CVE objects; empty on HTTP failure.
    """
    payload = 'query={ "page": 1, "page_count": %d }' % limit
    response = requests.post(self.url_list,
                             headers=self.to_headers(),
                             data=payload,
                             timeout=self.timeout)
    if response.status_code != 200:
        log.warn('获取 [%s] 威胁情报失败: [HTTP Error %i]' % (self.NAME_CH(), response.status_code))
        return []
    cves = []
    for entry in json.loads(response.text).get('intgs'):
        candidate = self.to_cve(entry)
        if candidate.is_vaild():
            cves.append(candidate)
            # log.debug(candidate)
    return cves
def __init__(self, record, attrs=None, **kwargs):
    """Populate this mapping from a dict or an object's attributes.

    :param record: a dict, or an object exposing the requested attributes
        (e.g. a namedtuple-like record with ``_fields``).
    :param attrs: attribute names to copy; defaults to ``record._fields``.
    :param kwargs: forwarded to the superclass constructor.
    :raises Exception: if a requested attribute is missing from record.
    """
    super(Tuple, self).__init__(**kwargs)
    if attrs is None:
        attrs = record._fields
    for attr in attrs:
        # 'attrs' would collide with this constructor's own parameter name.
        assert attr != "attrs", "Attribute name 'attrs' is not allowed"
        if isinstance(record, dict) and attr in record:
            self[attr] = record[attr]
        elif hasattr(record, attr):
            self[attr] = getattr(record, attr)
        else:
            # Fix: the original emitted a "Skipping attribute ..." warning
            # and then unconditionally raised anyway — the warning was
            # contradictory and misleading, so only the raise remains.
            raise Exception("Attribute '{}' not found in record {}".format(
                attr, record))
def get_cves(self, limit=6):
    """Fetch a page of vulnerability entries and convert them to CVE objects.

    :param limit: number of entries requested from the API.
    :return: list of valid CVE objects; empty on HTTP failure.
    """
    response = requests.get(self.url_list,
                            headers=self.headers,
                            params={"length": limit, "start": 0},
                            timeout=self.timeout)
    if response.status_code != 200:
        log.warn("获取 [%s] 威胁情报失败: [HTTP Error %i]" % (self.name_ch, response.status_code))
        return []
    cves = []
    for entry in json.loads(response.text).get("data"):
        candidate = self.to_cve(entry)
        if candidate.is_vaild():
            cves.append(candidate)
            # log.debug(candidate)
    return cves
def get_cves(self, limit=6):
    """Fetch a page of vulnerability entries and convert them to CVE objects.

    :param limit: number of entries requested from the API.
    :return: list of valid CVE objects; empty on HTTP failure.
    """
    response = requests.get(self.url_list,
                            headers=self.headers(),
                            params={'length': limit, 'start': 0},
                            timeout=self.timeout)
    if response.status_code != 200:
        log.warn('获取 [%s] 威胁情报失败: [HTTP Error %i]' % (self.NAME_CH(), response.status_code))
        return []
    cves = []
    for entry in json.loads(response.text).get('data'):
        candidate = self.to_cve(entry)
        if candidate.is_vaild():
            cves.append(candidate)
            # log.debug(candidate)
    return cves
def get_cves(self):
    """Scrape the vulnerability table rows from the page into CVE objects.

    :return: list of valid CVE objects; empty on HTTP failure.
    """
    response = requests.get(
        self.url,
        headers=self.headers(),
        timeout=self.timeout,
    )
    if response.status_code != 200:
        log.warn('获取 [%s] 威胁情报失败: [HTTP Error %i]' % (self.name_ch, response.status_code))
        return []
    html = response.content.decode(self.charset)
    cves = []
    for row in re.findall(r'<tr>(.*?)</tr>', html, re.DOTALL):
        candidate = self.to_cve(row)
        if candidate.is_vaild():
            cves.append(candidate)
            # log.debug(candidate)
    return cves
def get_cves(self):
    """Scrape the vulnerability list anchors from the page into CVE objects.

    :return: list of valid CVE objects; empty on HTTP failure.
    """
    response = requests.get(self.url_list, headers=self.headers, timeout=self.timeout)
    if response.status_code != 200:
        log.warn(
            f"获取 [{self.name_ch}] 威胁情报失败: [HTTP Error {response.status_code:d}]"
        )
        return []
    pattern = r"<li><span>(.*?)</span> <a href='/vulndb/(\d+)'>(.*?)</a>"
    entries = re.findall(pattern, response.content.decode("utf8"))
    cves = []
    for entry in entries:
        candidate = self.to_cve(entry)
        if candidate.is_vaild():
            cves.append(candidate)
    return cves
def download(correlation_id, image_url, output_path=None):
    """
    Downloads an image given an Internet accessible URL.

    :param correlation_id: Unique identifier for specific request.
    :param image_url: Internet accessible URL.
    :param output_path: Output path where the image should go.
    :return: Output path of the downloaded image, or None on failure.
    """
    try:
        response = requests.get(image_url, timeout=15)
        if response.ok:
            if not output_path:
                output_path = os.path.join(TMP_FOLDER, '{}.png'.format(correlation_id))
            with open(output_path, 'wb') as f:
                f.write(response.content)
        else:
            # Fix: previously a non-OK response still returned the
            # caller-supplied output_path even though nothing was written.
            log.warn('Error downloading [{}]: [HTTP {}]'.format(image_url, response.status_code))
            output_path = None
    except Exception as e:
        log.warn('Error downloading [{}]: [{}]'.format(image_url, e))
        output_path = None
    return output_path
def get_cves(self):
    """Extract titles plus an embedded JSON payload from the page, then
    pair each JSON entry with its title to build CVE objects.

    :return: list of valid CVE objects; empty on HTTP failure.
    """
    response = requests.get(self.url, headers=self.headers, timeout=self.timeout)
    if response.status_code != 200:
        log.warn("获取 [%s] 威胁情报失败: [HTTP Error %i]" % (self.name_ch, response.status_code))
        return []
    html = response.content.decode("utf8")
    titles = self.get_titles(html)
    payload = json.loads(self.to_json(html))
    cves = []
    # Entries in "msg" are position-matched with the scraped titles.
    for idx, entry in enumerate(payload.get("msg")):
        candidate = self.to_cve(entry, titles[idx])
        if candidate.is_vaild():
            cves.append(candidate)
            # log.debug(candidate)
    return cves
def decode(correlation_id, image_base64, output_path=None):
    """
    Decodes an encoded image in base64.

    :param correlation_id: Unique identifier for specific request.
    :param image_base64: Encoded image.
    :param output_path: Output path where the image should go.
    :return: Output path of the decoded image, or None on failure.
    """
    try:
        image_data = base64.b64decode(image_base64)
        if not output_path:
            output_path = os.path.join(TMP_FOLDER, '{}.png'.format(correlation_id))
        with open(output_path, 'wb') as f:
            f.write(image_data)
        # Fix: the context manager must wrap the opened file (so its handle
        # is closed deterministically), not the converted copy as before.
        with Image.open(output_path) as img:
            img.convert('RGBA').save(output_path, format='PNG', quality=95)
    except Exception as e:
        log.warn('Error decoding [{}...] to [{}]: [{}]'.format(
            image_base64[:10], output_path, e))
        output_path = None
    return output_path
def get_cves(self):
    """Extract titles plus an embedded JSON payload from the page, then
    pair each JSON entry with its title to build CVE objects.

    :return: list of valid CVE objects; empty on HTTP failure.
    """
    response = requests.get(self.url, headers=self.headers(), timeout=self.timeout)
    if response.status_code != 200:
        log.warn('获取 [%s] 威胁情报失败: [HTTP Error %i]' % (self.NAME_CH(), response.status_code))
        return []
    html = response.content
    titles = self.get_titles(html)
    payload = json.loads(self.to_json(html))
    cves = []
    # Entries in 'msg' are position-matched with the scraped titles.
    for idx, entry in enumerate(payload.get('msg')):
        candidate = self.to_cve(entry, titles[idx])
        if candidate.is_vaild():
            cves.append(candidate)
            # log.debug(candidate)
    return cves
def get_cves(self, limit=10):
    """Parse the RSS feed and return at most *limit* valid CVE objects.

    :param limit: maximum number of CVEs to return.
    :return: list of valid CVE objects; empty on HTTP failure.
    """
    response = requests.get(self.url, headers=self.headers(), timeout=self.timeout)
    cves = []
    if response.status_code == 200:
        # Drop the first line (XML declaration) before parsing.
        data = ''.join(response.text.split('\n')[1:])
        rss = etree.XML(data)
        for item in rss.xpath("//item"):
            # Fix: stop once the limit is reached instead of converting
            # every remaining item only to discard it.
            if len(cves) >= limit:
                break
            cve = self.to_cve(item)
            if cve.is_vaild():
                cves.append(cve)
                # log.debug(cve)
    else:
        log.warn('获取 [%s] 威胁情报失败: [HTTP Error %i]' % (self.NAME_CH(), response.status_code))
    return cves
def get_cves(self, limit=6):
    """Scrape flaw ids from the list page and convert them to CVE objects.

    :param limit: number of entries requested from the list endpoint.
    :return: list of valid CVE objects; empty on HTTP failure.
    """
    response = requests.get(
        self.url_list,
        headers=self.headers(),
        params={'length': limit, 'start': 0},
        timeout=self.timeout,
    )
    if response.status_code != 200:
        log.warn('获取 [%s] 威胁情报失败: [HTTP Error %i]' % (self.NAME_CH(), response.status_code))
        return []
    cves = []
    for flaw_id in re.findall(r'\thref="/flaw/show/([^"]+)"', response.text):
        candidate = self.to_cve(flaw_id)
        if candidate.is_vaild():
            cves.append(candidate)
            log.debug(candidate)
    return cves