def get_tags():
    """Load cached tags for the current subdomain, fetching them from
    Datadog when the cache is empty and discovery choice 2 is active.

    Returns a dict with the subdomain, the tag mapping, and the length of
    the longest tag-value list (0 when no tags exist).
    """
    subdomain = session["subdomain"]
    create_folder('resources/' + subdomain)
    save_path = getcwd()
    complete_name = path.join(save_path, "resources/%s/tags.json" % subdomain)
    try:
        with open(complete_name) as json_file:
            tags = load(json_file)
    except IOError:
        # First visit for this subdomain: seed an empty cache file.
        tags = {}
        with open(complete_name, 'w+') as outfile:
            dump(tags, outfile)
    if not tags and session['disco_choice'] == '2':
        # Discovery choice 2 pulls tags straight from Datadog and caches them.
        tags = get_all_tags(session['dd_api_token'], session['dd_app_token'])
        with open(complete_name, 'w+') as outfile:
            dump(tags, outfile)
    # default=0 avoids the ValueError the original raised on an empty cache.
    max_tag_len = max((len(data) for data in tags.values()), default=0)
    return {'subdomain': subdomain, 'tags': tags, 'max_tag_len': max_tag_len}
def post_service_config():
    """Persist the per-service discovery windows and integration choice,
    reset the tag/incident caches, and write a fresh template.json before
    moving on to the incident list."""
    escalation_policy = session["escalation_policy"]
    windows = [
        request.form['window_duration_%s' % str(idx + 1)]
        for idx in range(len(session["selected_services_id"]))
    ]
    session["integrations"] = request.form['integration_choice']
    integration_path = INTEGRATION_PATH[session["integrations"]]
    print(integration_path)
    session['selected_windows'] = windows

    subdomain = session["subdomain"]
    create_folder('resources/' + subdomain)
    base_dir = getcwd()

    # Wipe any previously cached tags and incidents for this subdomain.
    tags_file = path.join(base_dir, "resources/%s/tags.json" % subdomain)
    with open(tags_file, 'w+') as outfile:
        dump({}, outfile)
    incidents_file = path.join(base_dir,
                               "resources/%s/incidents.json" % subdomain)
    with open(incidents_file, 'w+') as outfile:
        dump([], outfile)

    session_file = path.join(base_dir, "resources/%s/session.json" % subdomain)
    with open(session_file) as json_file:
        session_data = load(json_file)

    # Fresh provisioning template seeded from the chosen integration.
    template = {
        "escalation_policy": escalation_policy,
        "alert_creation": "create_alerts_and_incidents",
        "alert_grouping": "intelligent",
        "timed_grouping": False,
        "event_rules_path": integration_path,
        "event_rules_path_str": ".".join(integration_path[1:]),
        "integrations": session["integrations"],
        "business_services": True,
        "impact_metrics": True,
        "business_services_relation": "1:1",
        "poi": session_data['biz_owner']
    }
    template_file = path.join(base_dir,
                              "resources/%s/template.json" % subdomain)
    with open(template_file, 'w+') as outfile:
        dump(template, outfile)
    return redirect(url_for('get_incident_list'))
def post_metadata():
    """Store the engagement metadata submitted on the metadata form in the
    session, persist it to session.json, and route to the next discovery
    step.

    Also re-syncs an already-previewed template.json with the (possibly
    changed) escalation policy.
    """
    # The original assigned session["escalation_policy"] twice; once is enough.
    session["escalation_policy"] = request.form["escalation_policy"]
    # Form fields copied verbatim into the session under the same name.
    owner_fields = ("consultant_name", "customer_name", "tech_owner",
                    "biz_owner", "slack_number", "github_id", "support_hours",
                    "run_book")
    for field in owner_fields:
        session[field] = request.form[field]
    # "note" is the one field whose form name differs from its session key.
    session["note"] = request.form["customer_note"]
    session["session_owner"] = {
        "consultant_name": session["consultant_name"],
        "customer_name": session["customer_name"],
        "note": session["note"],
        "tech_owner": session["tech_owner"],
        "biz_owner": session["biz_owner"],
        "slack_number": session["slack_number"],
        "github_id": session["github_id"],
        "support_hours": session["support_hours"],
        "run_book": session["run_book"]
    }
    subdomain = session['subdomain']
    create_folder('resources/' + subdomain)
    save_path = getcwd()
    complete_name = path.join(save_path,
                              "resources/%s/session.json" % subdomain)
    if session['provision_previewed']:
        # Keep an already-previewed template in sync with the new EP choice.
        complete_name_template = path.join(
            save_path, "resources/%s/template.json" % subdomain)
        with open(complete_name_template) as json_file:
            template = load(json_file)
        template['escalation_policy'] = session["escalation_policy"]
        with open(complete_name_template, 'w+') as outfile:
            dump(template, outfile)
    with open(complete_name, 'w+') as outfile:
        dump(session["session_owner"], outfile)
    if session['disco_choice'] == '2':
        return redirect(url_for('tag_list_multiple'))
    return redirect(url_for("get_service_discovery"))
def post_provision_data():
    """Apply the user's overrides from the provision-preview form to the
    cached template.json, then re-render the preview."""
    subdomain = session["subdomain"]
    create_folder('resources/' + subdomain)
    template_file = path.join(getcwd(),
                              "resources/%s/template.json" % subdomain)
    with open(template_file) as json_file:
        template = load(json_file)

    form = request.form
    if form.get("escalation_policy"):
        template['escalation_policy'] = form.get("escalation_policy")
    if form.get("business_service"):
        # Checkbox values arrive as the strings "true"/"false".
        template['business_services'] = form['business_service'] == "true"
    if form.get("impact_metrics"):
        template['impact_metrics'] = form['impact_metrics'] == "true"
    if form.get("eventRules"):
        template['event_rules_path_str'] = form.get("eventRules")
        template['event_rules_path'] = (
            ['path'] + template['event_rules_path_str'].split("."))
    if form.get("alert_creation"):
        template['alert_creation'] = form.get("alert_creation")
    if template['alert_creation'] == 'create_alerts_and_incidents':
        # Grouping options only apply when incidents are created.
        if form.get("alert_grouping"):
            template['alert_grouping'] = form.get("alert_grouping")
            if (template['alert_grouping'] == 'time'
                    and form.get("timed_grouping")):
                template['timed_grouping'] = form.get("timed_grouping")

    with open(template_file, 'w+') as outfile:
        dump(template, outfile)
    return redirect(url_for('get_provision_data'))
def deprovision_data():
    """Tear down everything previously provisioned for this subdomain and
    reset the provisioning caches (api_logs.json and service.json)."""
    global_api = session['global_token']
    user_api = session['user_token']
    today = date.today()
    subdomain = session["subdomain"]
    create_folder('resources/' + subdomain)
    save_path = getcwd()
    complete_name = path.join(save_path,
                              "resources/%s/api_logs.json" % subdomain)
    log_file = path.join(save_path,
                         "resources/%s/logs/log_%s.log" % (subdomain, today))
    complete_name_logs = path.join(save_path,
                                   "resources/%s/service.json" % subdomain)
    complete_name_template = path.join(
        save_path, "resources/%s/template.json" % subdomain)
    with open(complete_name_template) as json_file:
        template = load(json_file)
    with open(complete_name) as json_file:
        api_logs = load(json_file)
    deprovision(global_api, user_api, api_logs, log_file, template)
    # Reset both caches. The original wrote the empty service list back to
    # api_logs.json a second time and never cleared service.json.
    with open(complete_name, 'w+') as outfile:
        dump({}, outfile)
    with open(complete_name_logs, 'w+') as outfile:
        dump([], outfile)
    session['provisioned'] = False
    if session.get("provision_failed"):
        flash("Something went wrong with the provisioning!")
        session["provision_failed"] = False
        return redirect(url_for('get_provision_data'))
    if session['disco_choice'] != '2':
        return redirect(url_for('get_service_discovery'))
    return redirect(url_for('tag_list_multiple'))
def get_metadata():
    """Render the metadata form, pre-filled from a cached session.json when
    one exists for this subdomain."""
    all_ep = ep_iter_all(session['global_token'])
    subdomain = session['subdomain']
    create_folder('resources/' + subdomain)
    session_file = path.join(getcwd(),
                             "resources/%s/session.json" % subdomain)
    try:
        with open(session_file) as json_file:
            current_session = load(json_file)
    except IOError:
        # No cached metadata yet — render an empty form.
        print("No customers found")
        current_session = {}
    return render_template('metadata_view.html',
                           session=current_session,
                           all_ep=all_ep,
                           subdomain=subdomain)
def verify_api_token():
    """Validate the submitted PagerDuty (and optional Datadog) API tokens,
    initialise the per-subdomain resource cache files, and record in the
    session which cached artifacts already exist.

    Redirects to the metadata page on success, back to the token form on
    any validation failure.
    """
    # Get all user inputs from POST
    global_token = request.form['global_token']
    user_token = request.form['user_token']
    if session['disco_choice'] == '2':
        # Discovery choice 2 additionally needs valid Datadog tokens.
        dd_api_token = request.form['dd_api_token']
        dd_app_token = request.form['dd_app_token']
        if not is_dd_token_valid(dd_api_token, dd_app_token):
            # Only the last 4 characters are ever echoed back to the user.
            last_4_dd_api = (len(dd_api_token) - 4) * "*" + dd_api_token[-4:]
            last_4_dd_app = (len(dd_app_token) - 4) * "*" + dd_app_token[-4:]
            flash("Invalid Tokens! API Token: %s or Application Token: %s" %
                  (last_4_dd_api, last_4_dd_app))
        else:
            session['dd_api_token'] = dd_api_token
            session['dd_app_token'] = dd_app_token
    last_4_global = (len(global_token) - 4) * "*" + global_token[-4:]
    last_4_user = (len(user_token) - 4) * "*" + user_token[-4:]
    if not is_token_valid(global_token) and not is_token_valid(user_token):
        flash("Invalid Tokens! Global API Token: '%s' & User API Token: '%s'" %
              (last_4_global, last_4_user))
    elif not is_token_valid(global_token):
        flash("Invalid Tokens! Global Level API Token: '%s'" % last_4_global)
    elif not is_token_valid(user_token):
        flash("Invalid Tokens! User Level API Token: '%s'" % last_4_user)
    else:
        session['message'] = (
            "Global API Token '%s' and User API Token '%s' are valid!" %
            (last_4_global, last_4_user))
        session['global_token'] = global_token
        session['user_token'] = user_token
        subdomain = get_subdomain(global_token)
        subdomain_user = get_subdomain(user_token)
        if subdomain != subdomain_user:
            flash("Tokens don't belong to the same subdomain. Recheck")
            return redirect(url_for('get_api_token'))
        session['subdomain'] = subdomain
        tags_exists = False
        provisioned = False
        incidents_exist = False
        provision_previewed = False
        create_folder('resources/' + subdomain)
        create_folder('resources/' + subdomain + '/logs')
        save_path = getcwd()
        complete_name = path.join(save_path,
                                  "resources/%s/tags.json" % subdomain)
        complete_name_api_log = path.join(
            save_path, "resources/%s/api_logs.json" % subdomain)
        today = date.today()
        log_file = path.join(
            save_path, "resources/%s/logs/log_%s.log" % (subdomain, today))
        # Use a context manager so the log handle is closed even on error
        # (the original opened/closed it manually).
        with open(log_file, 'a+') as lf:
            lf.write("\n[%s] [%s]: Subdomain for global API: %s" %
                     (datetime.now(), 'Info', subdomain))
            lf.write("\n[%s] [%s]: Subdomain for user API: %s" %
                     (datetime.now(), 'Info', subdomain_user))
        complete_name_changes = path.join(
            save_path, "resources/%s/changes.json" % subdomain)
        # For each cache file: load it if present, otherwise seed it with
        # its empty default. Narrowed the original bare `except:` to
        # IOError, consistent with the other cache reads.
        try:
            with open(complete_name_changes) as json_file:
                provision_preview = load(json_file)
            if provision_preview:
                provision_previewed = True
        except IOError:
            provision_preview = {}
            with open(complete_name_changes, 'w+') as outfile:
                dump(provision_preview, outfile)
        try:
            with open(complete_name) as json_file:
                tags = load(json_file)
            if tags:
                tags_exists = True
        except IOError:
            tags = {}
            with open(complete_name, 'w+') as outfile:
                dump(tags, outfile)
        try:
            with open(complete_name_api_log) as json_file:
                api_logs = load(json_file)
            if api_logs:
                provisioned = True
        except IOError:
            api_logs = {}
            with open(complete_name_api_log, 'w+') as outfile:
                dump(api_logs, outfile)
        complete_name_events = path.join(
            save_path, "resources/%s/incidents.json" % subdomain)
        try:
            with open(complete_name_events) as json_file:
                incidents = load(json_file)
            if incidents:
                incidents_exist = True
        except IOError:
            incidents = []
            with open(complete_name_events, 'w+') as outfile:
                dump(incidents, outfile)
        session['tags_exist'] = tags_exists
        session['provisioned'] = provisioned
        session['incidents_exist'] = incidents_exist
        session['provision_previewed'] = provision_previewed
        return redirect(url_for('get_metadata'))
    return redirect(url_for('get_api_token'))
def clear_cache():
    """Reset every cached artifact for the current subdomain to its empty
    default and clear the related session flags.

    Replaces a long run of copy-pasted open/dump pairs with one
    data-driven loop over (filename, empty default) pairs.
    """
    subdomain = session["subdomain"]
    create_folder('resources/' + subdomain)
    save_path = getcwd()

    # Each cache file and the empty value that resets it; opening with
    # 'w+' truncates whatever was cached before.
    defaults = {
        "tags.json": {},
        "api_logs.json": {},
        "changes.json": {},
        "tag_combo.json": {},
        "combos.json": [],
        "types.json": {},
        "separators.json": [],
        "str_abstractions.json": [],
        "abstractions.json": [],
        "service.json": [],
        "incidents.json": [],
        "service_abstraction.json": "",
        # template.json resets to a Datadog default.
        "template.json": {
            "escalation_policy": "",
            "alert_creation": "create_alerts_and_incidents",
            "alert_grouping": "intelligent",
            "timed_grouping": False,
            "event_rules_path": ["path", "details", "tags"],
            "event_rules_path_str": "details.tags",
            "integrations": "datadog",
            "business_services": True,
            "impact_metrics": True,
            "business_services_relation": "1:1",
            "poi": ""
        },
    }
    for filename, empty_value in defaults.items():
        cache_file = path.join(save_path,
                               "resources/%s/%s" % (subdomain, filename))
        with open(cache_file, 'w+') as outfile:
            dump(empty_value, outfile)

    session["tags_exist"] = False
    session['provisioned'] = False
    session['incidents_exist'] = False
    session['provision_previewed'] = False
    if session['disco_choice'] == '2':
        return redirect(url_for('tag_list_multiple'))
    else:
        return redirect(url_for('get_service_discovery'))
def tag_discovery():
    """Apply the selected tag action to the cached tags for the current
    subdomain and persist the result.

    The checkbox fields 'tag_<key>_<n>' select which tag values each action
    operates on. Supported actions: delete, save (move a value under another
    key), extract_tags (split 'k=v'/'k:v' lists), extract_sep (split on
    separators), extract_regex / extract_regex_custom (regex capture groups).
    """
    subdomain = session["subdomain"]
    create_folder('resources/' + subdomain)
    save_path = getcwd()
    complete_name = path.join(save_path, "resources/%s/tags.json" % subdomain)
    try:
        with open(complete_name) as json_file:
            tags = load(json_file)
    except IOError:
        tags = {}
        with open(complete_name, 'w+') as outfile:
            dump(tags, outfile)
    # Mutate a copy so the iteration below still sees the original mapping.
    new_tags = deepcopy(tags)
    action = request.form["tag_action"]
    for key in tags:
        for i in range(len(tags[key])):
            if not request.form.get('tag_%s_%s' % (key, str(i + 1))):
                continue
            if action == "delete":
                new_tags[key].remove(tags[key][i])
                if not new_tags[key]:
                    new_tags.pop(key)
            elif action == "save":
                new_key = request.form["tag_selection"]
                if not new_key:
                    continue
                if new_key == "custom_adapt":
                    new_key = request.form["custom_field"]
                # setdefault avoids the KeyError the original raised when
                # the chosen destination key did not exist yet.
                new_tags.setdefault(new_key, []).append(tags[key][i])
                new_tags[key].remove(tags[key][i])
                if not new_tags[key]:
                    new_tags.pop(key)
            elif action == "extract_tags":
                selected_field = tags[key][i]
                fields = ""
                if ";" in selected_field:
                    fields = selected_field.split(";")
                elif "," in selected_field:
                    fields = selected_field.split(",")
                if not fields:
                    flash("No key-value format found")
                    flash("key=value or key:value")
                    return redirect(url_for("tag_discovery"))
                for field in fields:
                    # Skip fragments with neither separator. The original
                    # tested `or` (flagging every field) and then crashed on
                    # the split below for separator-less fragments.
                    if "=" not in field and ":" not in field:
                        flash("No key-value format found")
                        flash("key=value or key:value")
                        continue
                    print(field)
                    temp = field.replace("=", ":")
                    tag_key = temp.split(":")[0]
                    tag_value = temp.split(":")[1]
                    if tag_key in new_tags:
                        # Append only new values; the original reset the
                        # whole list when the value was already present.
                        if tag_value not in new_tags[tag_key]:
                            new_tags[tag_key].append(tag_value)
                    else:
                        new_tags[tag_key] = [tag_value]
                new_tags[key].remove(tags[key][i])
                if not new_tags[key]:
                    new_tags.pop(key)
            elif action == "extract_sep":
                selected_field = tags[key][i]
                # Normalise every separator to '_' before splitting.
                hostname = selected_field.replace(".", "_").replace(
                    "-", "_").replace(" ", "_")
                if "_" in hostname:
                    fields = hostname.split("_")
                else:
                    flash("No separators found to extract")
                    return redirect(url_for("tag_list"))
                for x in range(len(fields)):
                    print("CURRENT KEY:", key)
                    current_key = key + "_" + str(x + 1)
                    if current_key in new_tags and fields[x] not in new_tags[
                            current_key]:
                        new_tags[current_key].append(fields[x])
                    elif current_key not in new_tags:
                        new_tags[current_key] = [fields[x]]
            elif action == "extract_regex":
                # Built-in pattern for the default hostname layout
                # (raw string: the original relied on '\d' surviving in a
                # non-raw literal).
                regex = [
                    r"(^[a-zA-Z0-9]{3})([a-zA-Z0-9]{3}\d?)([A-Za-z]{2,3})([A-Za-z]\d{1,2}|\d{2})"
                ]
                selected_field = tags[key][i]
                try:
                    for r in regex:
                        regex_result = findall(r, selected_field)
                        fields = list(regex_result[0])
                        if not regex_result:
                            break
                except error as e:
                    msg = "Regex Expression Error: {}".format(str(e))
                    flash(msg)
                    return redirect(url_for("tag_discovery"))
                except IndexError:
                    flash(
                        "Regex Expression Error: Invalid character in the expression"
                    )
                    return redirect(url_for("tag_discovery"))
                for x in range(len(fields)):
                    current_key = key + "_" + str(x + 1)
                    if current_key in new_tags and fields[x] not in new_tags[
                            current_key]:
                        new_tags[current_key].append(fields[x])
                    elif current_key not in new_tags:
                        new_tags[current_key] = [fields[x]]
            elif action == "extract_regex_custom":
                regex = request.form["custom_regex"]
                print("REGEX:", regex)
                selected_field = tags[key][i]
                try:
                    regex_result = findall(regex, selected_field)
                    fields = list(regex_result[0])
                    print(fields)
                except error as e:
                    msg = "Regex Expression Error: {}".format(str(e))
                    flash(msg)
                    print("ERROR:", e)
                    return redirect(url_for("tag_discovery_all"))
                except IndexError as ie:
                    print("ERROR:", ie)
                    flash(
                        "Regex Expression Error: Invalid character in the expression"
                    )
                    return redirect(url_for("tag_discovery_all"))
                # Custom-regex captures are stored upper-cased.
                for x in range(len(fields)):
                    current_key = key + "_" + str(x + 1)
                    if current_key in new_tags and fields[x].upper(
                    ) not in new_tags[current_key]:
                        new_tags[current_key].append(fields[x].upper())
                    elif current_key not in new_tags:
                        new_tags[current_key] = [fields[x].upper()]
    with open(complete_name, 'w+') as outfile:
        dump(new_tags, outfile)
    return redirect(url_for("tag_list_single"))
def get_provision_data():
    """Build the provision preview: for every service derived from the
    selected tag combo, compute the service / event-rule / impact-metric /
    business-service payloads, cache them in changes.json, and render the
    preview page."""
    global_key = session['global_token']
    all_ep = ep_iter_all(global_key)
    default_ep = session["escalation_policy"]
    subdomain = session["subdomain"]
    create_folder('resources/' + subdomain)
    base_dir = getcwd()

    combo_file = path.join(base_dir,
                           "resources/%s/tag_combo.json" % subdomain)
    try:
        with open(combo_file) as json_file:
            tag_combo = load(json_file)
    except IOError:
        tag_combo = {}
        with open(combo_file, 'w+') as outfile:
            dump(tag_combo, outfile)

    abstraction_file = path.join(
        base_dir, "resources/%s/service_abstraction.json" % subdomain)
    with open(abstraction_file) as json_file:
        service_abstraction = load(json_file)
    template_file = path.join(base_dir,
                              "resources/%s/template.json" % subdomain)
    with open(template_file) as json_file:
        template = load(json_file)

    tags = tag_combo[service_abstraction['string']]
    conditions = get_event_rules(tags, service_abstraction,
                                 template["event_rules_path"])
    services = service_get_payload(tags, template["escalation_policy"],
                                   template["alert_creation"],
                                   template["alert_grouping"],
                                   template["timed_grouping"])
    business_services = biz_svc_get_payload(
        tags, customer_name=session["customer_name"], poi=template["poi"])
    impact_metrics = impact_get_payload(tags)

    print("TAGS !@!#@!#:", tags[0])
    # One preview entry per derived service name; the payload lists are
    # index-aligned with tags[0].
    changes = {
        name: {
            "service": services[idx],
            "event_rules": conditions[idx],
            "impact_metrics": impact_metrics[idx],
            "business_service": business_services[idx]
        }
        for idx, name in enumerate(tags[0])
    }
    changes_file = path.join(base_dir,
                             "resources/%s/changes.json" % subdomain)
    with open(changes_file, 'w+') as outfile:
        dump(changes, outfile)
    all_services = list(changes.keys())
    print("ALL SERVICES:", all_services)
    session['provision_previewed'] = True
    return render_template("provision_preview.html",
                           changes=changes,
                           all_services=all_services,
                           subdomain=session['subdomain'],
                           template=template,
                           provisioned=session['provisioned'],
                           tags_exist=session['tags_exist'],
                           incidents_exist=session['incidents_exist'],
                           all_ep=all_ep)
def extract_logs():
    """Provision every object listed in changes.json — service, event rule,
    and (per the template flags) business service and impact metrics —
    recording each created object in api_logs.json, then render the
    provisioning result page.

    NOTE(review): despite the name, this performs the actual provisioning;
    "logs" refers to the api_logs it accumulates.
    """
    api_logs = {}
    global_token = session['global_token']
    user_token = session['user_token']
    customer_name = session['customer_name']
    subdomain = session["subdomain"]
    create_folder('resources/' + subdomain)
    save_path = getcwd()
    complete_name = path.join(save_path,
                              "resources/%s/changes.json" % subdomain)
    today = date.today()
    complete_name_success = path.join(
        save_path, "resources/%s/logs/log_%s.log" % (subdomain, today))
    try:
        with open(complete_name) as json_file:
            reviewed_data = load(json_file)
    except IOError:
        # No preview cached yet — seed an empty changes file.
        reviewed_data = {}
        with open(complete_name, 'w+') as outfile:
            dump(reviewed_data, outfile)
    complete_name_template = path.join(
        save_path, "resources/%s/template.json" % subdomain)
    with open(complete_name_template) as json_file:
        template = load(json_file)
    for data in reviewed_data:
        # NOTE(review): presumably paces successive API calls — confirm.
        sleep(2)
        service_name = reviewed_data[data]['service']['name']
        ep_name = reviewed_data[data]['service']['ep_id']
        alert_creation = reviewed_data[data]['service']['alert_creation']
        alert_grouping = reviewed_data[data]['service']['alert_grouping']
        # Create New Service here and log changes
        new_service = provision_service(service_name, ep_name, alert_creation,
                                        alert_grouping)
        new_service.create(global_token, complete_name_success)
        service_href = '<a href="%s" target="_blank">%s</a>' % (
            new_service.html_url, service_name)
        current_service_id = new_service.id
        api_logs[service_name] = {
            "service": {
                "name": new_service.name,
                "id": new_service.id,
                "url": new_service.html_url,
                "href": service_href,
                "payload": new_service.payload
            }
        }
        print("PRINTING CONDITION:",
              reviewed_data[data]['event_rules']['condition'])
        condition = reviewed_data[data]['event_rules']['condition']
        # Create new event rules and log it here
        new_event_rule = provision_event_rules(new_service, condition,
                                               current_service_id)
        new_event_rule.create(global_token, complete_name_success)
        api_logs[service_name]["event_rules"] = {
            "service_id": new_service.id,
            "condition": condition,
            "payload": new_event_rule.payload,
            "str_rep": cond_to_str(new_event_rule.condition, service_href),
            "id": new_event_rule.payload["id"]
        }
        is_name = reviewed_data[data]["impact_metrics"]["name"]
        if template["business_services"]:
            biz_name = reviewed_data[data]["business_service"]["name"]
            biz_poc = reviewed_data[data]["business_service"][
                "point_of_contact"]
            biz_rel = reviewed_data[data]["business_service"]["relationship"]
            # Create new business service and log it here
            new_biz_service = provision_biz_associations(
                biz_name, biz_poc, biz_rel, customer_name)
            new_biz_service.create(global_token, complete_name_success)
            api_logs[service_name]["business_service"] = {
                "name": new_biz_service.name,
                "supporting_services": None,
                "dependent_services": None,
                "id": new_biz_service.id,
                "test": new_biz_service.payload
            }
            # NOTE(review): service_reference is built but never used below.
            service_reference = {
                "id": new_service.id,
                "type": "service_reference"
            }
            # Associate the new technical service per the chosen relationship.
            if biz_rel == "supporting_services":
                new_biz_service.assign_supporting_services(
                    global_token, new_service.id, complete_name_success)
                api_logs[service_name]["business_service"][
                    "support_services"] = new_biz_service.supporting_services
            elif biz_rel == "dependent_services":
                new_biz_service.assign_dependent_services(
                    global_token, new_service.id, complete_name_success)
                api_logs[service_name]["business_service"][
                    "dependent_services"] = new_biz_service.dependent_services
        # Create new impact metrics and log it here
        if template["impact_metrics"]:
            new_impact_metrics = provision_impact_metrics(
                is_name, customer_name)
            new_impact_metrics.create(user_token, complete_name_success)
            api_logs[service_name]["impact_metrics"] = {
                "name": new_impact_metrics.provisioned_name,
                "payload": new_impact_metrics.payload,
                "id": new_impact_metrics.id
            }
            impact_id = new_impact_metrics.id
            if template["business_services"]:
                # Associate impact metrics and supporting services
                new_biz_service.assign_impact_metrics(user_token, impact_id,
                                                      complete_name_success)
        else:
            print("SKIPPING IMPACT METRICS")
    print("====\n", api_logs)
    all_services = list(api_logs.keys())
    subdomain = session["subdomain"]
    create_folder('resources/' + subdomain)
    save_path = getcwd()
    complete_name = path.join(save_path,
                              "resources/%s/api_logs.json" % subdomain)
    # Persist what was created so deprovisioning can find it later.
    with open(complete_name, 'w+') as outfile:
        dump(api_logs, outfile)
    complete_name_logs = path.join(save_path,
                                   "resources/%s/service.json" % subdomain)
    with open(complete_name_logs, 'w+') as outfile:
        dump(list(api_logs.keys()), outfile)
    session['global_token'] = global_token
    session['user_token'] = user_token
    print("====\n", api_logs)
    all_services = list(api_logs.keys())
    with open(complete_name_logs) as log_file:
        log_services = load(log_file)
    # log_services = session["log_services"]
    session['provisioned'] = True
    return render_template("provision_result.html",
                           api_logs=api_logs,
                           log_services=log_services,
                           subdomain=session['subdomain'],
                           template=template,
                           all_services=all_services,
                           provisioned=session['provisioned'],
                           tags_exist=session['tags_exist'],
                           incidents_exist=session['incidents_exist'])
def post_incident_list():
    """Handle the incident-list form: either delete the selected tag keys,
    or refresh incidents and extend the tag cache from the incident/FTLE
    fields the user selected. Persists tags.json and redirects back to the
    incident list."""
    tag_action = request.form["tag_action"]
    subdomain = session["subdomain"]
    create_folder('resources/' + subdomain)
    save_path = getcwd()
    complete_name_tags = path.join(save_path,
                                   "resources/%s/tags.json" % subdomain)
    try:
        with open(complete_name_tags) as json_file:
            tags = load(json_file)
    except IOError:
        tags = {}
        with open(complete_name_tags, 'w+') as outfile:
            dump(tags, outfile)
    if tag_action == "delete":
        # Snapshot the keys so deletion doesn't mutate during iteration.
        for tag in list(tags.keys()):
            if request.form.get("tag-%s" % tag):
                del tags[tag]
    elif tag_action == "update":
        flattened_keys = session["flattened_keys"]
        ftle_keys = session["ftle_keys"]
        incidents = []
        for i in range(len(session["selected_services_id"])):
            # Pass the whole integrations value; the original indexed it
            # with [i], sending a single character of the choice string
            # (get_incident_list passes it whole).
            incidents = incidents + incident_iter_selected(
                session['global_token'], session['selected_windows'][i],
                [session['selected_services_id'][i]], session["integrations"],
                tags)
        # tagged_keys[0]: selected incident fields; [1]: selected FTLE fields.
        tagged_keys = [[], []]
        for i in range(len(flattened_keys)):
            if request.form.get("incident_field_%s" % str(i)):
                incident_field = request.form.get("incident_field_%s" % str(i))
                tagged_keys[0].append(incident_field)
                if incident_field not in tags:
                    tags[incident_field] = []
        for i in range(len(ftle_keys)):
            if request.form.get("ftle_field_%s" % str(i)):
                ftle_field = request.form.get("ftle_field_%s" % str(i))
                tagged_keys[1].append(ftle_field)
                if ftle_field not in tags:
                    tags[ftle_field] = []
        for incident in incidents:
            incident_temp = incident
            ftle_temp = incident_temp.pop("first_trigger_log_entry")
            flattened_incident = flatten(incident_temp)
            ftle = flatten(ftle_temp)
            # Collect previously unseen values for each selected field.
            for key in tagged_keys[0]:
                if key in flattened_incident and flattened_incident[
                        key] not in tags[key]:
                    tags[key].append(flattened_incident[key])
            for key in tagged_keys[1]:
                if key in ftle and ftle[key] not in tags[key]:
                    tags[key].append(ftle[key])
    with open(complete_name_tags, 'w+') as outfile:
        dump(tags, outfile)
    return redirect(url_for('get_incident_list'))
def get_incident_list():
    """Load cached incidents (fetching them from the API on a cache miss),
    flatten them into incident / first-trigger-log-entry / alert views, and
    render the incident list page used to pick tag fields.

    Side effects: seeds tags.json / incidents.json / combos.json when
    missing and stores the flattened key lists in the session.
    """
    if 'tags_path' not in session:
        session['tags_path'] = {}
    subdomain = session["subdomain"]
    create_folder('resources/' + subdomain)
    save_path = getcwd()
    complete_name_tags = path.join(save_path,
                                   "resources/%s/tags.json" % subdomain)
    try:
        with open(complete_name_tags) as json_file:
            tags = load(json_file)
    except IOError:
        tags = {}
        with open(complete_name_tags, 'w+') as outfile:
            dump(tags, outfile)
    complete_name_events = path.join(save_path,
                                     "resources/%s/incidents.json" % subdomain)
    try:
        with open(complete_name_events) as json_file:
            incidents = load(json_file)
        print("Found incidents. Loading the file")
    except IOError:
        incidents = []
        with open(complete_name_events, 'w+') as outfile:
            dump(incidents, outfile)
        print("Couldn't find incidents. Dumping empty list.")
    if not incidents:
        # Cache miss: fetch incidents per selected service and window.
        for i in range(len(session["selected_services_id"])):
            incidents = incidents + incident_iter_selected(
                session['global_token'], session['selected_windows'][i],
                [session['selected_services_id'][i]], session["integrations"],
                tags)
    subdomain = session["subdomain"]
    create_folder('resources/' + subdomain)
    save_path = getcwd()
    complete_name = path.join(save_path,
                              "resources/%s/combos.json" % subdomain)
    # NOTE(review): combos is loaded/seeded here but not used below.
    try:
        with open(complete_name) as json_file:
            combos = load(json_file)
    except IOError:
        combos = []
        with open(complete_name, 'w+') as outfile:
            dump(combos, outfile)
    keys = []
    flattened_keys = []
    flattened_incidents = []
    ftle = []
    ftle_keys = []
    channels = []
    channels_keys = []
    alerts = []
    alerts_keys = []
    for incident in incidents:
        # Work on a copy: pop() would otherwise mutate the cached incident.
        incident_temp = deepcopy(incident)
        ftle_temp = incident_temp.pop("first_trigger_log_entry")
        # ftle_channel = ftle_temp.pop("channel")
        alerts_temp = incident_temp.pop("all_alerts")
        flattened_incidents.append(flatten(incident_temp))
        # Accumulate the union of keys seen across all incidents.
        flattened_keys = list(
            set(flattened_keys + list(flatten(incident_temp).keys())))
        ftle.append(flatten(ftle_temp))
        ftle_keys = list(set(ftle_keys + list(flatten(ftle_temp).keys())))
        # channels.append(flatten(ftle_channel))
        # channels_keys = list(set(channels_keys + list(flatten(ftle_channel).keys())))
        alerts.append(flatten(alerts_temp))
        alerts_keys = list(set(alerts_keys + list(flatten(alerts_temp).keys())))
    flattened_keys.sort()
    alerts_keys.sort()
    ftle_keys.sort()
    # post_incident_list reads these to map form checkboxes back to keys.
    session["flattened_keys"] = flattened_keys
    session["ftle_keys"] = ftle_keys
    session['alerts_keys'] = alerts_keys
    disco_choice = session["disco_choice"]
    subdomain = session["subdomain"]
    create_folder('resources/' + subdomain)
    save_path = getcwd()
    complete_name = path.join(save_path,
                              "resources/%s/tags.json" % subdomain)
    with open(complete_name, 'w+') as outfile:
        dump(tags, outfile)
    if not incidents:
        flash(
            "No incidents found in the selected window. Please select a larger window!"
        )
        return redirect(url_for('get_service_config'))
    complete_name_events = path.join(save_path,
                                     "resources/%s/incidents.json" % subdomain)
    with open(complete_name_events, 'w+') as outfile:
        dump(incidents, outfile)
    return render_template('incident_list.html',
                           incidents=flattened_incidents,
                           keys=flattened_keys,
                           alerts=alerts,
                           alerts_keys=alerts_keys,
                           ftle=ftle,
                           ftle_keys=ftle_keys,
                           channels=channels,
                           channels_keys=channels_keys,
                           disco_choice=disco_choice,
                           tags=tags,
                           provisioned=session['provisioned'],
                           tags_exist=session['tags_exist'],
                           subdomain=session['subdomain'],
                           incidents_exist=session['incidents_exist'])