def get_profile(file_path):
    """Load a previously generated Patch Tuesday profile (JSON) from disk.

    :param file_path: path to the profile JSON file
    :return: parsed profile as a dict
    """
    functions_tools.print_debug_message(
        "Reading existing Patch Tuesday profile...")
    # Context manager guarantees the handle is closed even if parsing fails
    with open(file_path, "r") as f:
        profile = json.load(f)
    return profile
def collect_cve_related_data(enabled_data_sources, all_cves, cves_to_exclude,
                             product_data, rewrite_flag):
    """Collect vulnerability data from the supported sources and combine the
    basic vulnerability data in combined_cve_data_all.

    :param enabled_data_sources: iterable of enabled source ids ("ms", "nvd",
        "attackerkb", "vulners")
    :param all_cves: iterable of CVE ids to process
    :param cves_to_exclude: set of CVE ids to skip; extended here with
        RETRACTED / not-found CVEs discovered while querying MS
    :param product_data: product classification data, forwarded to the
        combining step (add_combined_cve_data requires it; callers pass it
        positionally before rewrite_flag)
    :param rewrite_flag: if True, refresh cached data from the sources
    :return: (cve_data_all, cves_to_exclude) tuple
    """
    functions_tools.print_debug_message("Collecting CVE related data...")
    cve_data_all = dict()
    cve_data_all, cves_to_exclude = add_ms_cve_data(
        enabled_data_sources, all_cves, cve_data_all, cves_to_exclude,
        rewrite_flag)
    cve_data_all = add_nvd_cve_data(enabled_data_sources, all_cves,
                                    cve_data_all, rewrite_flag)
    cve_data_all = add_attackerkb_cve_data(enabled_data_sources, all_cves,
                                           cve_data_all, rewrite_flag)
    cve_data_all = add_vulners_cve_data(enabled_data_sources, all_cves,
                                        cve_data_all, rewrite_flag)
    cve_data_all = add_combined_cve_data(all_cves, cve_data_all, product_data)
    return cve_data_all, cves_to_exclude
def print_unclassified_products_templates(cve_scores, cve_related_data):
    """Print ready-to-paste classification templates for products that the
    scoring marked as "Unclassified Product"."""
    pending = {
        cve_related_data['combined_cve_data_all'][cve]['vuln_product']
        for cve, score in cve_scores.items()
        if score['components']['Vulnerable Product is Common']['comment']
        == "Unclassified Product"
    }
    if not pending:
        return
    functions_tools.print_debug_message(
        "Add this to data_classification_products.py")
    for product in sorted(pending):
        # Windows components get a high default prevalence; everything else
        # starts at 0 and must be filled in by hand
        if "Windows" in product:
            print('''    "''' + product + '''": {
        "prevalence": 0.8,
        "description": "Windows component",
        "additional_detection_strings": []
    },''')
        else:
            print('''    "''' + product + '''": {
        "prevalence": 0,
        "description": "",
        "additional_detection_strings": []
    },''')
def make_ms_patch_tuesday_report(year, month, comments_links_path=False,
                                 patch_tuesday_date=False, rewrite_flag=False):
    """Build (or rebuild) a Microsoft Patch Tuesday profile and the
    vulnerability report for it.

    :param year: year as a string, e.g. "2020"
    :param month: long month name, e.g. "October"
    :param comments_links_path: optional path to a "name|title|url" file with
        pre-collected analyst post links
    :param patch_tuesday_date: optional explicit date; the second Tuesday of
        the month is computed when not given
    :param rewrite_flag: if True, recreate the profile even if it exists
    """
    if patch_tuesday_date is False:  # identity check (E712 fix); only the default triggers computation
        patch_tuesday_date = functions_profile_ms_patch_tuesday.get_second_tuesday(
            year=year, long_month_name=month)
    file_name = "ms_patch_tuesday_" + month.lower() + year + ".json"
    comments_links = dict()
    if comments_links_path:
        # Each line has the form: <source name>|<post title>|<post url>
        with open(comments_links_path, "r") as f:
            for line in f.read().split("\n"):
                if not line.strip():
                    continue  # skip blanks (e.g. trailing newline) -> avoids IndexError
                parts = line.split("|")
                comments_links[parts[0]] = {
                    'title': parts[1],
                    'url': parts[2]
                }
    if rewrite_flag or not os.path.isfile("data/profiles/" + file_name):
        functions_tools.print_debug_message(
            "Creating Patch Tuesday profile...")
        functions_profile_ms_patch_tuesday.create_profile(
            month, year, patch_tuesday_date, comments_links, file_name)
    profile_file_path = "data/profiles/" + file_name
    functions_report_vulnerabilities.make_vulnerability_report_for_profile(
        profile_file_path, rewrite_flag)
def add_combined_cve_data(all_cves, cve_data_all, product_data):
    """Combine the per-source CVE data (description, product, vulnerability
    type, severity, CVSS base score) and cache each combined record as JSON
    under data/combined/.

    :param all_cves: iterable of CVE ids to combine
    :param cve_data_all: dict with the per-source data already collected;
        gets a 'combined_cve_data_all' entry added/filled here
    :param product_data: product classification data used for product and
        vulnerability-type detection
    :return: cve_data_all with 'combined_cve_data_all' filled in
    """
    cve_data_all['combined_cve_data_all'] = dict()
    n = 0
    m = len(all_cves)
    functions_tools.print_debug_message("Combining CVE data...")
    for cve_id in all_cves:
        n += 1
        print(str(n) + "/" + str(m) + " " + cve_id)
        file_path_processed = "data/combined/" + cve_id + ".json"
        # NOTE(review): reuse_combined is not defined in this function; it is
        # presumably a module-level flag. If it does not exist, this raises
        # NameError once a cached file is present -- confirm.
        if not os.path.exists(file_path_processed) or not reuse_combined:
            cve_data_all['combined_cve_data_all'][cve_id] = dict()
            cve_data_all = add_combined_cve_data_description_product_vuln_type(
                cve_data_all, product_data, cve_id)
            cve_data_all = add_combined_cve_data_basic_severity(
                cve_data_all, cve_id)
            cve_data_all = add_combined_cve_data_cvss_base_score(
                cve_data_all, cve_id)
            # Context managers close the cache files even on write errors
            with open(file_path_processed, "w") as f:
                f.write(json.dumps(cve_data_all['combined_cve_data_all'][cve_id]))
        else:
            with open(file_path_processed, "r") as f:
                cve_data_all['combined_cve_data_all'][cve_id] = json.loads(
                    f.read())
    return cve_data_all
def get_all_cves(profile, source_id, cves_to_exclude):
    """Return the set of CVE ids listed in the profile's cves_text field,
    upper-cased, skipping anything in cves_to_exclude."""
    raw_lines = profile[source_id]['cves_text'].split("\n")
    all_cves = {
        entry.upper()
        for entry in raw_lines
        # Only lines that start with "CVE" (case-insensitive) count
        if re.findall("^CVE", entry.upper())
        and entry.upper() not in cves_to_exclude
    }
    functions_tools.print_debug_message("All CVEs: " + str(len(all_cves)))
    return all_cves
def get_cves_to_exclude(profile, source_id):
    """Collect the CVE ids from the profile's optional cves_exclude_text
    field (upper-cased); empty set when the field is absent."""
    cves_to_exclude = set()
    if 'cves_exclude_text' in profile[source_id]:
        for raw_line in profile[source_id]['cves_exclude_text'].split("\n"):
            candidate = raw_line.upper()
            if re.findall("^CVE", candidate):
                cves_to_exclude.add(candidate)
    functions_tools.print_debug_message("Exclude CVEs: " +
                                        str(len(cves_to_exclude)))
    return cves_to_exclude
def add_nvd_cve_data(enabled_data_sources, all_cves, cve_data_all, rewrite_flag):
    """Fetch NVD data for each CVE and store it under 'nvd_cve_data_all'.

    Returns cve_data_all unchanged when the "nvd" source is not enabled.
    """
    if "nvd" not in enabled_data_sources:
        return cve_data_all
    functions_tools.print_debug_message("Collecting NVD CVE data...")
    cve_data_all['nvd_cve_data_all'] = {
        cve_id: functions_source_nvd_cve.get_nvd_cve_data(cve_id, rewrite_flag)
        for cve_id in all_cves
    }
    return cve_data_all
def make_ms_patch_tuesday_report(year, month, patch_tuesday_date=False, rewrite_flag=False):
    """Build (or rebuild) a Microsoft Patch Tuesday profile and the
    vulnerability report for it.

    :param year: year as a string, e.g. "2020"
    :param month: long month name, e.g. "October"
    :param patch_tuesday_date: optional explicit date; the second Tuesday of
        the month is computed when not given
    :param rewrite_flag: if True, recreate the profile even if it exists
    """
    if patch_tuesday_date is False:  # identity check (E712 fix); only the default triggers computation
        patch_tuesday_date = functions_profile_ms_patch_tuesday.get_second_tuesday(
            year=year, long_month_name=month)
    file_name = "ms_patch_tuesday_" + month.lower() + year + ".json"
    if rewrite_flag or not os.path.isfile("data/profiles/" + file_name):
        functions_tools.print_debug_message("Creating Patch Tuesday profile...")
        functions_profile_ms_patch_tuesday.create_profile(
            month, year, patch_tuesday_date, file_name)
    profile_file_path = "data/profiles/" + file_name
    functions_report_vulnerabilities.make_vulnerability_report_for_profile(
        profile_file_path, rewrite_flag)
def add_vulners_cve_data(enabled_data_sources, all_cves, cve_data_all, rewrite_flag):
    """Fetch Vulners data for each CVE and store it under 'vulners_cve_data_all'.

    Skipped entirely when "vulners" is not enabled; an empty dict is stored
    when no Vulners API key is configured.
    """
    if "vulners" in enabled_data_sources:
        functions_tools.print_debug_message("Collecting Vulners CVE data...")
        collected = dict()
        # Only query Vulners when an API key is configured
        if credentials.vulners_key != "":
            for cve_id in all_cves:
                collected[cve_id] = functions_source_vulners.get_vulners_data(
                    cve_id, rewrite_flag)
        cve_data_all['vulners_cve_data_all'] = collected
    return cve_data_all
def add_attackerkb_cve_data(enabled_data_sources, all_cves, cve_data_all, rewrite_flag):
    """Fetch AttackerKB data for each CVE and store it under
    'attackerkb_cve_data_all'. No-op when "attackerkb" is not enabled.
    """
    if "attackerkb" not in enabled_data_sources:
        return cve_data_all
    functions_tools.print_debug_message(
        "Collecting AttackerKB CVE data...")
    cve_data_all['attackerkb_cve_data_all'] = {
        cve_id: functions_source_attackerkb_cve.get_attackerkb_cve_data(
            cve_id, rewrite_flag)
        for cve_id in all_cves
    }
    return cve_data_all
def get_products(profile, source_id):
    """Return the (upper-cased) set of product names listed in the profile's
    optional products_text field; an empty set means "report everything".

    :param profile: profile dict
    :param source_id: key of the report source inside the profile
    :return: set of upper-cased product names
    """
    all_products = set()
    if 'products_text' in profile[source_id]:
        for line in profile[source_id]['products_text'].split("\n"):
            # Skip blank lines: an empty product name would substring-match
            # every product downstream and defeat the filter
            if line.strip():
                all_products.add(line.upper())
        functions_tools.print_debug_message("All products to analyze: " +
                                            str(len(all_products)))
    else:
        functions_tools.print_debug_message(
            "No specified products to analyze set in profile, reporting everything"
        )
    return all_products
def make_html_vulnerability_reports_for_all_report_configs(
        profile, source_id, cve_related_data, cve_scores):
    """Generate one HTML vulnerability report per configured Patch Tuesday
    report config."""
    functions_tools.print_debug_message(
        "Making vulnerability reports for each reports config...")
    configs = data_report_configs.patch_tuesday_report_configs
    for config_name, report_config in configs.items():
        functions_tools.print_debug_message("Report config: " + config_name)
        make_html_vulnerability_report_for_report_config(
            cve_related_data=cve_related_data,
            cve_scores=cve_scores,
            report_config=report_config,
            profile_data=profile[source_id])
def add_nvd_cve_data(enabled_data_sources, all_cves, cve_data_all, rewrite_flag):
    """Fetch NVD data for each CVE (with progress output) and store it under
    'nvd_cve_data_all'. No-op when the "nvd" source is not enabled.
    """
    if "nvd" in enabled_data_sources:
        functions_tools.print_debug_message("Collecting NVD CVE data...")
        nvd_cve_data_all = dict()
        total = len(all_cves)
        # Zero-based progress counter, matching add_ms_cve_data's output
        for index, cve_id in enumerate(all_cves):
            print(str(index) + "/" + str(total) + " - " + cve_id)
            nvd_cve_data_all[cve_id] = functions_source_nvd_cve.get_nvd_cve_data(
                cve_id, rewrite_flag)
        cve_data_all['nvd_cve_data_all'] = nvd_cve_data_all
    return cve_data_all
def add_ms_cve_data(enabled_data_sources, all_cves, cve_data_all, cves_to_exclude, rewrite_flag):
    """Fetch Microsoft CVE data for each CVE and store it under
    'ms_cve_data_all'; RETRACTED and not-found CVEs go to cves_to_exclude
    instead. No-op when the "ms" source is not enabled.

    :param enabled_data_sources: iterable of enabled source ids
    :param all_cves: iterable of CVE ids to process
    :param cve_data_all: dict accumulating per-source data
    :param cves_to_exclude: set extended with RETRACTED/not-found CVE ids
    :param rewrite_flag: if True, refresh cached data from the source
    :return: (cve_data_all, cves_to_exclude) tuple
    """
    if "ms" in enabled_data_sources:
        functions_tools.print_debug_message("Collecting MS CVE data...")
        ms_cve_data_all = dict()
        n = 0
        m = len(all_cves)
        for cve_id in all_cves:
            # Zero-based progress counter
            print(str(n) + "/" + str(m) + " - " + cve_id)
            ms_cve_data = functions_source_ms_cve.get_ms_cve_data(
                cve_id, rewrite_flag)
            # NOTE(review): nesting reconstructed from flattened source; as
            # written, CVEs whose 'main' lacks 'cveTitle' are silently dropped
            # (neither kept nor excluded) -- confirm against upstream
            if 'cveTitle' in ms_cve_data['main']:
                if ms_cve_data['main']['cveTitle'] == "RETRACTED":
                    functions_tools.print_debug_message("Adding RETRACTED " +
                                                        cve_id +
                                                        " to cves_to_exclude")
                    cves_to_exclude.add(cve_id)
                elif ms_cve_data['not_found_error']:
                    functions_tools.print_debug_message("Adding NOT FOUND " +
                                                        cve_id +
                                                        " to cves_to_exclude")
                    cves_to_exclude.add(cve_id)
                else:
                    ms_cve_data_all[cve_id] = ms_cve_data
            n += 1
        cve_data_all['ms_cve_data_all'] = ms_cve_data_all
    return cve_data_all, cves_to_exclude
def add_cve_product_and_type_tags(ms_cve_data):
    """Derive vuln_type and vuln_product tags from the MS CVE title and attach
    them to ms_cve_data; aborts the process if either tag is missing after
    assignment."""
    vuln_type, vuln_product = get_vuln_product_and_type_from_title(
        ms_cve_data['main']['cveTitle'])
    ms_cve_data['vuln_type'] = vuln_type
    ms_cve_data['vuln_product'] = vuln_product
    if ms_cve_data['main']['cveTitle'] != "RETRACTED":
        # Defensive sanity checks; both keys were just assigned above, so
        # these appear unreachable in practice
        for required_key in ('vuln_type', 'vuln_product'):
            if required_key not in ms_cve_data:
                functions_tools.print_debug_message(
                    "No " + required_key + " in ms_cve_data for " +
                    ms_cve_data['cveNumber'])
                functions_tools.print_debug_message(
                    json.dumps(ms_cve_data, indent=4))
                exit()
    return ms_cve_data
def make_vulnerability_report_for_profile(profile_file_path, rewrite_flag):
    """Build the full vulnerability report for a profile file.

    Loads the profile, optionally narrows the CVE set to those whose product
    matches the profile's products-to-analyze list, collects per-source CVE
    data, scores the CVEs and renders the HTML reports.

    :param profile_file_path: path to the profile JSON file
    :param rewrite_flag: if True, refresh cached source data
    """
    profile = get_profile(profile_file_path)
    # The profile is keyed by a single source/report id
    source_id = list(profile.keys())[0]
    profile[source_id][
        'product_data'] = data_classification_products.get_product_data()
    cves_to_exclude = get_cves_to_exclude(profile, source_id)
    all_products_to_analyze = get_products(profile, source_id)
    # making list of CVEs not about products in products_to_analyze to exclude it
    if len(all_products_to_analyze) == 0:
        # No product filter configured: only the explicit exclusions apply
        selected_cves_to_exclude = cves_to_exclude
    else:
        # First pass: collect data for ALL CVEs just to learn their products
        all_cves_tmp = get_all_cves(profile, source_id, cves_to_exclude)
        enabled_data_sources = get_eanbled_data_sources(profile, source_id)
        cve_related_data_tmp, cves_to_exclude = functions_combined_vulnerability_data.collect_cve_related_data(
            enabled_data_sources, all_cves_tmp, cves_to_exclude,
            profile[source_id]['product_data'], rewrite_flag)
        selected_cves_to_exclude = cves_to_exclude
        for selected_cve in all_cves_tmp:
            functions_tools.print_debug_message(
                "filtering " + selected_cve + " for one of products_to_analyze")
            b_product_found = False
            # Look for the CVE's product in every per-source data list
            for list_type_str in cve_related_data_tmp:
                functions_tools.print_debug_message(" checking in " + list_type_str)
                if not selected_cve in cve_related_data_tmp[list_type_str]:
                    functions_tools.print_debug_message(" no data for " +
                                                        selected_cve + " in " +
                                                        list_type_str)
                    continue
                if not 'vuln_product' in cve_related_data_tmp[list_type_str][
                        selected_cve]:
                    functions_tools.print_debug_message(
                        " no vuln_product for " + selected_cve + " in " +
                        list_type_str)
                    continue
                product_name = (cve_related_data_tmp[list_type_str]
                                [selected_cve]['vuln_product']).upper()
                # Case-insensitive substring match against the configured list
                for product_name_from_list in all_products_to_analyze:
                    product_name_from_list = product_name_from_list.upper()
                    if product_name_from_list in product_name:
                        b_product_found = True
                        functions_tools.print_debug_message(" found")
            if not b_product_found:
                selected_cves_to_exclude.add(selected_cve)
                functions_tools.print_debug_message(
                    "- final result: no one of products_to_analyze found")
            else:
                functions_tools.print_debug_message(
                    "- final result: some of products_to_analyze found")
    # collecting data without filtered out CVEs
    all_cves = get_all_cves(profile, source_id, selected_cves_to_exclude)
    enabled_data_sources = get_eanbled_data_sources(profile, source_id)
    cve_related_data, selected_cves_to_exclude = functions_combined_vulnerability_data.collect_cve_related_data(
        enabled_data_sources, all_cves, selected_cves_to_exclude,
        profile[source_id]['product_data'], rewrite_flag)
    cve_scores = functions_score.get_cve_scores(all_cves, cve_related_data,
                                                profile[source_id])
    print_unclassified_products_templates(cve_scores, cve_related_data)
    make_html_vulnerability_reports_for_all_report_configs(
        profile, source_id, cve_related_data, cve_scores)
def create_profile(month, year, patch_tuesday_date, comments_links, file_name):
    """Create a Microsoft Patch Tuesday profile (JSON) under data/profiles/.

    Collects the MS CVE ids published on the Patch Tuesday date and the
    analyst posts (Qualys, Tenable, Rapid7, ZDI) commenting on that Patch
    Tuesday; each post's link is taken from comments_links when supplied and
    searched for on the web otherwise.

    :param month: long month name, e.g. "October"
    :param year: year as a string, e.g. "2020"
    :param patch_tuesday_date: date string, e.g. "10/13/2020"
    :param comments_links: dict keyed by source name ("Qualys", "Tenable",
        "Rapid7", "ZDI") with {'title': ..., 'url': ...} values
    :param file_name: profile file name (stored under data/profiles/)
    """
    functions_tools.print_debug_message("Year: " + year)
    functions_tools.print_debug_message("Month: " + month)
    functions_tools.print_debug_message("Date: " + patch_tuesday_date)
    # All MS CVEs published exactly on the Patch Tuesday date
    ms_cves_for_date_range = functions_source_ms_cve.get_ms_cves_for_date_range(
        patch_tuesday_date, patch_tuesday_date)
    functions_tools.print_debug_message("MS CVEs found: " +
                                        str(len(ms_cves_for_date_range)))
    ms_cves_for_date_range = "\n".join(ms_cves_for_date_range)
    query = month + " " + year + " " + "Patch Tuesday"
    comments = dict()
    # --- Qualys ---
    if "Qualys" in comments_links:
        qualys_link = comments_links["Qualys"]
    else:
        qualys_link = functions_source_analytic_sites.get_qualys_link(query)
    if qualys_link:  # search may find nothing; the source is skipped then
        qualys_text = functions_source_analytic_sites.get_qualys_text_from_url(
            qualys_link['url'])
        qualys_text = functions_source_analytic_sites.process_qualys_text(
            qualys_text)
        functions_tools.print_debug_message("Qualys query: " + query)
        functions_tools.print_debug_message("Qualys url found: " +
                                            qualys_link['url'])
        functions_tools.print_debug_message("=== Qualys text ===")
        functions_tools.print_debug_message(qualys_text)
        functions_tools.print_debug_message("=== End of Qualys text ===")
        comments['qualys'] = qualys_text
    # --- Tenable ---
    if "Tenable" in comments_links:
        tenable_link = comments_links["Tenable"]
    else:
        tenable_link = functions_source_analytic_sites.get_tenable_link(query)
    if tenable_link:
        tenable_text = functions_source_analytic_sites.get_tenable_text_from_url(
            tenable_link['url'])
        functions_tools.print_debug_message("Tenable query: " + query)
        functions_tools.print_debug_message("Tenable url found: " +
                                            tenable_link['url'])
        functions_tools.print_debug_message("=== Tenable text ===")
        functions_tools.print_debug_message(tenable_text)
        functions_tools.print_debug_message("=== End of Tenable text ===")
        comments['tenable'] = tenable_text
    # --- Rapid7 ---
    if "Rapid7" in comments_links:
        rapid7_link = comments_links["Rapid7"]
    else:
        rapid7_link = functions_source_analytic_sites.get_rapid7_link(query)
    if rapid7_link:
        rapid7_text = functions_source_analytic_sites.get_rapid7_text_from_url(
            rapid7_link['url'])
        functions_tools.print_debug_message("Rapid7 query: " + query)
        functions_tools.print_debug_message("Rapid7 url found: " +
                                            rapid7_link['url'])
        functions_tools.print_debug_message("=== Rapid7 text ===")
        functions_tools.print_debug_message(rapid7_text)
        functions_tools.print_debug_message("=== End of Rapid7 text ===")
        comments['rapid7'] = rapid7_text
    # --- ZDI --- (the blog changed domains, hence two site-restricted queries)
    if "ZDI" in comments_links:
        zdi_link = comments_links["ZDI"]
    else:
        queries = [
            "site:https://www.zerodayinitiative.com/blog THE " + month + " " +
            year + " SECURITY UPDATE REVIEW",
            "site:https://www.thezdi.com/blog Microsoft Patches for " + month +
            " " + year
        ]
        zdi_link = functions_source_analytic_sites.get_duckduckgo_search_results_multiple_queries(
            queries)
    if zdi_link:
        zdi_text = functions_source_analytic_sites.get_zdi_text_from_url(
            zdi_link['url'])
        functions_tools.print_debug_message("ZDI query: " + query)
        functions_tools.print_debug_message("ZDI url found: " + zdi_link['url'])
        functions_tools.print_debug_message("=== ZDI text ===")
        functions_tools.print_debug_message(zdi_text)
        functions_tools.print_debug_message("=== End of ZDI text ===")
        comments['zdi'] = zdi_text
    report_id = month + " " + year
    report_name = 'Microsoft Patch Tuesday, ' + month + " " + year
    file_name_prefix = "ms_patch_tuesday_" + month.lower() + year
    cves_text = ms_cves_for_date_range
    data_sources = None  # Use all data sources
    file_path = "data/profiles/" + file_name
    products_text = ""  # empty: no product filter, report everything
    functions_profile.save_profile(file_path, report_id, report_name,
                                   file_name_prefix, cves_text, products_text,
                                   data_sources, comments)
def get_cve_scores(all_cves, cve_data_all, profile):
    """Compute the VVS score structure for every CVE.

    :param all_cves: iterable of CVE ids
    :param cve_data_all: combined per-source CVE data
    :param profile: profile data for the current report source
    :return: dict mapping CVE id -> score structure
    """
    functions_tools.print_debug_message("Counting CVE scores...")
    return {
        cve: get_vvs_struct_for_cve(cve, cve_data_all, profile)
        for cve in all_cves
    }
def create_profile(month, year, patch_tuesday_date, file_name):
    """Create a Microsoft Patch Tuesday profile (JSON) under data/profiles/.

    Collects the MS CVE ids published on the Patch Tuesday date plus analyst
    post texts (Qualys, Tenable, Rapid7, ZDI) located via web search.

    :param month: long month name, e.g. "October"
    :param year: year as a string, e.g. "2020"
    :param patch_tuesday_date: date string, e.g. "10/13/2020"
    :param file_name: profile file name (stored under data/profiles/)
    """
    functions_tools.print_debug_message("Year: " + year)
    functions_tools.print_debug_message("Month: " + month)
    functions_tools.print_debug_message("Date: " + patch_tuesday_date)
    # All MS CVEs published exactly on the Patch Tuesday date
    ms_cves_for_date_range = functions_source_ms_cve.get_ms_cves_for_date_range(patch_tuesday_date, patch_tuesday_date)
    functions_tools.print_debug_message("MS CVEs found: " + str(len(ms_cves_for_date_range)))
    ms_cves_for_date_range = "\n".join(ms_cves_for_date_range)
    query = month + " " + year + " " + "Patch Tuesday"
    # NOTE(review): each *_link below is dereferenced with ['url'] without a
    # None check; if a search finds nothing this raises TypeError -- confirm
    # the search helpers always return a link dict
    qualys_link = functions_source_analytic_sites.get_qualys_link(query)
    qualys_text = functions_source_analytic_sites.get_qualys_text_from_url(qualys_link['url'])
    functions_tools.print_debug_message("Qualys query: " + query)
    functions_tools.print_debug_message("Qualys url found: " + qualys_link['url'])
    functions_tools.print_debug_message("=== Qualys text ===")
    functions_tools.print_debug_message(qualys_text)
    functions_tools.print_debug_message("=== End of Qualys text ===")
    tenable_link = functions_source_analytic_sites.get_tenable_link(query)
    tenable_text = functions_source_analytic_sites.get_tenable_text_from_url(tenable_link['url'])
    functions_tools.print_debug_message("Tenable query: " + query)
    functions_tools.print_debug_message("Tenable url found: " + tenable_link['url'])
    functions_tools.print_debug_message("=== Tenable text ===")
    functions_tools.print_debug_message(tenable_text)
    functions_tools.print_debug_message("=== End of Tenable text ===")
    rapid7_link = functions_source_analytic_sites.get_rapid7_link(query)
    rapid7_text = functions_source_analytic_sites.get_rapid7_text_from_url(rapid7_link['url'])
    functions_tools.print_debug_message("Rapid7 query: " + query)
    functions_tools.print_debug_message("Rapid7 url found: " + rapid7_link['url'])
    functions_tools.print_debug_message("=== Rapid7 text ===")
    functions_tools.print_debug_message(rapid7_text)
    functions_tools.print_debug_message("=== End of Rapid7 text ===")
    queries = [
        "site:https://www.thezdi.com/blog Microsoft Patches for " + month + " " + year
    ]
    zdi_link = functions_source_analytic_sites.get_duckduckgo_search_results_multiple_queries(queries)
    zdi_text = functions_source_analytic_sites.get_zdi_text_from_url(zdi_link['url'])
    functions_tools.print_debug_message("ZDI query: " + query)
    functions_tools.print_debug_message("ZDI url found: " + zdi_link['url'])
    functions_tools.print_debug_message("=== ZDI text ===")
    functions_tools.print_debug_message(zdi_text)
    functions_tools.print_debug_message("=== End of ZDI text ===")
    comments = {
        'qualys': qualys_text,
        'tenable': tenable_text,
        'rapid7': rapid7_text,
        'zdi': zdi_text
    }
    report_id = month + " " + year
    report_name = 'Microsoft Patch Tuesday, ' + month + " " + year
    file_name_prefix = "ms_patch_tuesday_" + month.lower() + year
    cves_text = ms_cves_for_date_range
    data_sources = None  # Use all data sources
    functions_profile.save_profile("data/profiles/" + file_name, report_id, report_name, file_name_prefix, cves_text, data_sources, comments)