def excel_to_report(excel_report_name, report_template_name):
    """Render a .docx report from an excel report file.

    Looks for an optional sidecar metadata file named ``<stem>.json`` next
    to the current working directory; when present its contents are passed
    through to create_report as metadata.
    """
    stem = os.path.splitext(os.path.basename(excel_report_name))[0]
    sidecar = f"{stem}.json"
    metadata = {}
    if os.path.exists(sidecar):
        with open(sidecar) as handle:
            metadata = json.load(handle)
    create_report(
        excel_report_name,
        report_template_name,
        f"{stem}.docx",
        metadata=metadata,
        debug=False,
    )
def test_report_change1_2(self):
    """Report over snapshots 100-230: user 3 dropped as follower, user 2
    dropped as friend, and both screen name and icon show an 'E' diff."""
    report = reporting.create_report(user_id=1, start=100, end=230)
    # assertEquals is a deprecated alias of assertEqual (removed in
    # Python 3.12) — use the canonical method name.
    self.assertEqual(report.followers.count(), 1)
    self.assertEqual(report.followers.all()[0].user_id, 3)
    self.assertEqual(report.followers.all()[0].remove, True)
    self.assertEqual(report.friends.count(), 1)
    self.assertEqual(report.friends.all()[0].user_id, 2)
    self.assertEqual(report.friends.all()[0].remove, True)
    self.assertEqual(report.screen_names.diff_type, 'E')
    self.assertEqual(report.screen_names.new.screen_name, 'user200')
    self.assertEqual(report.icons.diff_type, 'E')
    self.assertEqual(report.icons.new.digest, 'digest200')
def test_report_change2_3(self):
    """Report over snapshots 200-330: user 4 added as follower, user 3
    added as friend (remove flag not set), and both screen name and icon
    show a 'C' diff carrying old and new values."""
    report = reporting.create_report(user_id=1, start=200, end=330)
    # assertEquals/assertNotEquals are deprecated aliases (removed in
    # Python 3.12) — use assertEqual/assertNotEqual.
    self.assertEqual(report.followers.count(), 1)
    self.assertEqual(report.followers.all()[0].user_id, 4)
    self.assertNotEqual(report.followers.all()[0].remove, True)
    self.assertEqual(report.friends.count(), 1)
    self.assertEqual(report.friends.all()[0].user_id, 3)
    self.assertNotEqual(report.friends.all()[0].remove, True)
    self.assertEqual(report.screen_names.diff_type, 'C')
    self.assertEqual(report.screen_names.old.screen_name, 'user200')
    self.assertEqual(report.screen_names.new.screen_name, 'user300')
    self.assertEqual(report.icons.diff_type, 'C')
    self.assertEqual(report.icons.old.digest, 'digest200')
    self.assertEqual(report.icons.new.digest, 'digest300')
def test_report_change1_3(self):
    # Full-span diff (snapshots 100 -> 330); detailed assertions are
    # delegated to the shared check1_3 helper.
    report = reporting.create_report(user_id = 1, start = 100, end = 330)
    self.check1_3(report)
# NOTE(review): stray triple-quote below appears to open (or close) a
# commented-out region whose matching quote is outside this view —
# confirm it pairs correctly in the full file.
"""
def test_report_nochange2(self):
    """Report over snapshots 210-230: no follower or friend rows; screen
    name and icon diff_type is 'E'."""
    report = reporting.create_report(user_id=1, start=210, end=230)
    # assertEquals is a deprecated alias of assertEqual (removed in
    # Python 3.12) — use the canonical method name.
    self.assertEqual(report.followers.count(), 0)
    self.assertEqual(report.friends.count(), 0)
    self.assertEqual(report.screen_names.diff_type, 'E')
    self.assertEqual(report.icons.diff_type, 'E')
def map_analytics(excel_template_name, report_template_name, reports_from,
                  ecosystem_template, excel_report, debug):
    """Fetch every third party from the CyberGRX bulk API and build reports.

    For each third party with usable residual-risk scores, builds a
    per-company excel workbook from ``excel_template_name`` and then either
    runs it through ``process_excel_template`` (when ``excel_report`` is
    truthy) or renders a .docx via ``report_template_name``.  Every company
    is also mirrored into one aggregated ecosystem workbook built from
    ``ecosystem_template``.

    Side effects: deletes stale .xlsx/.docx/.json files in the current
    directory, performs an HTTP GET against the CyberGRX API, and writes one
    file set per third party.

    Raises:
        Exception: when a required template is missing, or when the
            CYBERGRX_API_TOKEN environment variable is unset.
    """
    if not os.path.exists(excel_template_name):
        raise Exception(
            f"The --excel-template-name={excel_template_name} does not exist")
    # The docx template is only needed when we are not producing a pure
    # excel report.
    if not excel_report and not os.path.exists(report_template_name):
        raise Exception(
            f"The --report-template-name={report_template_name} does not exist"
        )
    # Clean the working directory of artifacts from earlier runs, keeping
    # the template files themselves.
    for f in [f for f in os.listdir(".") if os.path.isfile(f)]:
        if f in [excel_template_name, report_template_name, ecosystem_template]:
            continue
        if os.path.splitext(f)[1] in [".xlsx", ".docx", ".json"]:
            print(f"Cleaning up old report {f}")
            os.remove(f)
    api = os.environ.get("CYBERGRX_API", "https://api.cybergrx.com").rstrip("/")
    token = os.environ.get("CYBERGRX_API_TOKEN", None)
    if not token:
        raise Exception(
            "The environment variable CYBERGRX_API_TOKEN must be set")
    ecosystem_writer = init_ecosystem_writer(ecosystem_template)
    uri = f"{api}/bulk-v1/third-parties?report_date={quote(reports_from)}"
    print(f"Fetching third parties from {uri} this can take some time.")
    response = requests.get(uri, headers={"Authorization": token.strip()})
    # NOTE(review): no status-code check — a non-200 response will surface
    # here as a JSON decode error; confirm that is acceptable.
    result = json.loads(response.content.decode("utf-8"))

    def write_tp_if_debug(third_party, json_file):
        # Dump the raw third-party payload for post-mortem inspection when
        # debug mode is on; no-op otherwise.
        if debug:
            with open(json_file, "w") as debug_file:
                debug_file.write(json.dumps(third_party, indent=2))

    print(
        f"Retrieved {str(len(result))} third parties from your ecosystem, building an excel."
    )
    for tp in tqdm(result, total=len(result), desc="Third Party"):
        company_name = tp["name"]
        report_date = glom(tp, Coalesce("residual_risk.date", default=""))
        # File stem: company name stripped to [A-Za-z0-9 &], spaces turned
        # into dashes, suffixed with the report date.
        output_filename = f'{re.sub("[^A-Za-z0-9 &]+", "", company_name).replace(" ", "-")}_{report_date}'
        scores = glom(tp, Coalesce("residual_risk.scores", default=[]))
        if not scores:
            # Nothing to report on — skip (optionally dumping the payload).
            if debug:
                print(f"{company_name} did not have any residual_risk scores.")
            write_tp_if_debug(tp, f"{output_filename}.json")
            continue
        tier = glom(tp, Coalesce("residual_risk.tier", default=0))
        if tier not in [1, 2]:
            # Only tier 1 and tier 2 reports are supported.
            print(
                f"{company_name} had a T{tier} report, this tier is not supported."
            )
            write_tp_if_debug(tp, f"{output_filename}.json")
            continue
        # Inject gaps summary into the TP so downstream writers see it as
        # ordinary top-level fields.
        tp.update(glom(tp, Coalesce(GAPS_SUMMARY, default={})))
        if glom(tp, Coalesce("subscription.is_validated", default=False)):
            # A validated subscription whose scores are all still
            # "Not Reviewed" carries no validation signal — skip it.
            all_missing = True
            for score in scores:
                if glom(score, (Coalesce("validation_state", default=None),
                                validation_label)) != "Not Reviewed":
                    all_missing = False
                    break
            if all_missing and len(scores) > 0:
                print(
                    f"{company_name} had a T{tier} report, but validation_states are all Not Reviewed."
                )
                write_tp_if_debug(tp, f"{output_filename}.json")
                continue
        wb, scores_writer, findings_writer, tags_writer, third_party_writer = init_workbook(
            excel_template_name)
        # Mirror every row into both the per-company workbook and the shared
        # ecosystem workbook.
        for tag in glom(tp, Coalesce("tags", default=[])):
            tag_meta = {"tag": tag, "company_name": company_name}
            tags_writer(tag_meta)
            ecosystem_writer.tags_writer(tag_meta)
        for finding in glom(tp, Coalesce("residual_risk.findings", default=[])):
            finding["company_name"] = company_name
            findings_writer(finding)
            ecosystem_writer.findings_writer(finding)
        for score in scores:
            score["company_name"] = company_name
            scores_writer(score)
            ecosystem_writer.scores_writer(score)
        # Write third party metadata
        third_party_writer(tp)
        ecosystem_writer.third_party_writer(tp)
        # Finalize each writer (fix width, ETC)
        findings_writer.finalizer()
        scores_writer.finalizer()
        tags_writer.finalizer()
        third_party_writer.finalizer()
        excel_filename = f"{output_filename}.xlsx"
        finalize_workbook(wb, excel_filename, debug=debug)
        if excel_report:
            process_excel_template(excel_filename, metadata=tp, debug=debug)
        else:
            create_report(excel_filename, report_template_name,
                          f"{output_filename}.docx", metadata=tp, debug=debug)
        ecosystem_writer.process_excel(excel_filename, company_name)
    # Close out the aggregated ecosystem workbook once all companies are done.
    ecosystem_writer.finalizer()