Example #1
    def response_stage(self, rs_path):
        """Populate Response Actions"""

        print("[*] Populating Response Actions..")
        if rs_path:
            rs_list = glob.glob(rs_path + '*.yml')
        else:
            rs_dir = ATCconfig.get('response_stages_dir')
            rs_list = glob.glob(rs_dir + '/*.yml')

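        # Render each Response Stage YAML file to markdown and save it into the ATC docs directory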
        for rs_file in rs_list:
            try:
                rs = ResponseStage(rs_file)
                rs.render_template("markdown")
                rs.save_markdown_file(atc_dir=self.atc_dir)
            except Exception as e:
                print(rs_file + " failed\n\n%s\n\n" % e)
                print("Err message: %s" % e)
                print('-' * 60)
                traceback.print_exc(file=sys.stdout)
                print('-' * 60)

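        # Build the Response Stages index page from the main markdown template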
        template = env.get_template('markdown_responsestage_main_template.j2')

        rss, rs_paths = ATCutils.load_yamls_with_paths(
            ATCconfig.get('response_stages_dir'))

        rs_filenames = [
            _rs_path.split('/')[-1].replace('.yml', '')
            for _rs_path in rs_paths
        ]

        rss_dict = {}
        rss_list = []

        for i in range(len(rss)):

            rs_title = rss[i].get('title')
            rs_id = rss[i].get('id')
            rs_description = rss[i].get('description')

            rss_list.append((rs_id, rs_title, rs_description))

        rss_dict.update({'rss_list': sorted(rss_list)})

        content = template.render(rss_dict)

        ATCutils.write_file('docs/responsestages.md', content)
        print("[+] Response Stages populated!")
Example #2
    def render_template(self, template_type):
        """Description
        template_type:
            - "markdown"
        """

        if template_type not in ["markdown"]:
            raise Exception("Bad template_type. Available values:" +
                            " [\"markdown\"]")

        # Point to the templates directory
        env = Environment(loader=FileSystemLoader('scripts/templates'))

        template = env.get_template('markdown_responsestage_template.md.j2')

        self.ra_parsed_file.update(
            {'description': self.ra_parsed_file.get('description').strip()})

        ras, ra_paths = ATCutils.load_yamls_with_paths(
            ATCconfig.get('response_actions_dir'))
        ra_filenames = [
            ra_path.split('/')[-1].replace('.yml', '') for ra_path in ra_paths
        ]

        rs_id = self.ra_parsed_file.get('id')

        stage_list = []

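        # Collect Response Actions whose stage matches this Response Stage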
        for i in range(len(ras)):
            if rs_mapping[rs_id] == ATCutils.normalize_rs_name(
                    ras[i].get('stage')):
                ra_id = ras[i].get('id')
                ra_filename = ra_filenames[i]
                ra_title = ATCutils.normalize_react_title(ras[i].get('title'))
                ra_description = ras[i].get('description').strip()
                stage_list.append(
                    (ra_id, ra_filename, ra_title, ra_description))

        self.ra_parsed_file.update({'stage_list': sorted(stage_list)})

        self.content = template.render(self.ra_parsed_file)
    def __init__(self):

        dn_path = ATCconfig.get('data_needed_dir')
        lp_path = ATCconfig.get('logging_policies_dir')
        en_path = ATCconfig.get('enrichments_directory')
        rp_path = ATCconfig.get('response_playbooks_dir')
        ra_path = ATCconfig.get('response_actions_dir')
        cu_path = ATCconfig.get('customers_directory')

        cu_list = ATCutils.load_yamls(cu_path)
        dn_list = ATCutils.load_yamls(dn_path)
        lp_list = ATCutils.load_yamls(lp_path)
        ra_list = ATCutils.load_yamls(ra_path)
        rp_list = ATCutils.load_yamls(rp_path)
        enrichments_list = ATCutils.load_yamls(en_path)

        pivoting = []
        analytics = []
        result = []

        dr_dirs = ATCconfig.get('detection_rules_directories')

        print("[*] Iterating through Detection Rules")
        # Iterate through alerts and the paths to them

        for dr_path in dr_dirs:
            alerts, path_to_alerts = ATCutils.load_yamls_with_paths(dr_path)

            for alert, path in zip(alerts, path_to_alerts):
                if not isinstance(alert.get('tags'), list):
                    continue

                list_of_customers = []
                for specific_customer in cu_list:
                    if alert['title'] in specific_customer[
                            'detectionrule'] and specific_customer[
                                'customer_name'] not in list_of_customers:
                        list_of_customers.append(
                            specific_customer['customer_name'])

                if not isinstance(list_of_customers,
                                  list) or len(list_of_customers) == 0:
                    list_of_customers = ["None"]

                customer = ';'.join(list_of_customers)

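                # Extract ATT&CK tactic and technique tags from the detection rule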
                threats = [
                    tag for tag in alert['tags'] if tag.startswith('attack')
                ]
                tactics = [
                    f'{ta_mapping[threat][1]}: {ta_mapping[threat][0]}'
                    for threat in threats if threat in ta_mapping.keys()
                ]
                techniques = [
                    threat for threat in threats
                    if threat.startswith('attack.t')
                ]

                enrichments = [
                    er for er in enrichments_list
                    if er['title'] in alert.get('enrichment', [])
                ]
                dn_titles = ATCutils.main_dn_calculatoin_func(path)

                alert_dns = [
                    data for data in dn_list if data['title'] in dn_titles
                ]

                logging_policies = []

                for dn in alert_dns:

                    if 'loggingpolicy' in dn:
                        # If the DN references logging policies we haven't added yet, add them
                        logging_policies.extend([
                            l for l in lp_list
                            if l['title'] in dn['loggingpolicy']
                            and l not in logging_policies
                        ])
                    # If there are no logging policies at all, add a placeholder so the CSV still gets one row
                    if not isinstance(logging_policies,
                                      list) or len(logging_policies) == 0:
                        logging_policies = [{
                            'title': "-",
                            'eventID': [
                                -1,
                            ]
                        }]

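                # Emit one result row for every tactic/technique/logging policy/playbook/response action combination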
                for dn in alert_dns:
                    pivot = [
                        dn['category'], dn['platform'], dn['type'],
                        dn['channel'], dn['provider'], dn['title'], '', ''
                    ]

                    for tactic in tactics:
                        for technique in techniques:
                            technique_name = technique.replace('attack.t', 'T') + ': ' +\
                                ATCutils.get_attack_technique_name_by_id(
                                    technique.replace('attack.', ''))
                            for lp in logging_policies:
                                rps = [
                                    rp for rp in rp_list
                                    if technique in rp['tags']
                                    or tactic in rp['tags']
                                ]
                                if len(rps) < 1:
                                    rps = [{'title': '-'}]
                                for rp in rps:
                                    ras_buf = []
                                    [
                                        ras_buf.extend(l) for l in rp.values()
                                        if isinstance(l, list)
                                    ]
                                    ras = [
                                        ra for ra in ras_buf
                                        if ra.startswith('RA')
                                    ]
                                    if len(ras) < 1:
                                        ras = ['-']
                                    #if len(rp) > 1:
                                    #todo
                                    for ra in ras:
                                        lp['title'] = lp['title'].replace(
                                            '\n', '')
                                        result.append([
                                            customer, tactic, technique_name,
                                            alert['title'], dn['category'],
                                            dn['platform'], dn['type'],
                                            dn['channel'], dn['provider'],
                                            dn['title'], lp['title'], '', '',
                                            rp['title'], ra
                                        ])

                    # pivoting.append(pivot)
                    for field in dn['fields']:
                        analytics.append([field] + pivot)

                for er in enrichments:
                    for dn in [
                            dnn for dnn in dn_list
                            if dnn['title'] in er.get('data_to_enrich', [])
                    ]:
                        pivot = [
                            dn['category'], dn['platform'], dn['type'],
                            dn['channel'], dn['provider'], dn['title'],
                            er['title'], ';'.join(er.get('requirements', []))
                        ]
                        for tactic in tactics:
                            for technique in techniques:
                                technique_name = technique.replace('attack.t', 'T') + ': ' + \
                                    ATCutils.get_attack_technique_name_by_id(
                                        technique.replace('attack.', ''))
                                for lp in logging_policies:
                                    lp['title'] = lp['title'].replace('\n', '')
                                    result.append([
                                        customer, tactic, technique_name,
                                        alert['title'], dn['category'],
                                        dn['platform'], dn['type'],
                                        dn['channel'], dn['provider'],
                                        dn['title'], lp['title'], er['title'],
                                        ';'.join(er.get('requirements',
                                                        [])), '-', '-'
                                    ])

                        # pivoting.append(pivot)
                        for field in er['new_fields']:
                            analytics.append([field] + pivot)

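        # Rebuild the pivoting rows from scratch, covering every Data Needed entry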
        analytics = []

        for dn in dn_list:

            if 'category' in dn:
                dn_category = dn['category']
            else:
                dn_category = "-"
            if 'platform' in dn:
                dn_platform = dn['platform']
            else:
                dn_platform = "-"
            if 'type' in dn:
                dn_type = dn['type']
            else:
                dn_type = "-"
            if 'channel' in dn:
                dn_channel = dn['channel']
            else:
                dn_channel = "-"
            if 'provider' in dn:
                dn_provider = dn['provider']
            else:
                dn_provider = "-"
            if 'title' in dn:
                dn_title = dn['title']
            else:
                dn_title = "-"

            pivot = [
                dn_category, dn_platform, dn_type, dn_channel, dn_provider,
                dn_title, '', ''
            ]
            for field in dn['fields']:
                analytics.append([field] + pivot)

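        # Add pivoting rows for the new fields introduced by enrichments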
        for er in enrichments_list:
            for dn in [
                    dnn for dnn in dn_list
                    if dnn['title'] in er.get('data_to_enrich', [])
            ]:
                pivot = [
                    dn['category'], dn['platform'], dn['type'], dn['channel'],
                    dn['provider'], dn['title'], er['title'],
                    ';'.join(er.get('requirements', []))
                ]
                for field in er['new_fields']:
                    analytics.append([field] + pivot)

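        # Write the analytics table to CSV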
        filename = 'analytics.csv'
        exported_analytics_directory = ATCconfig.get(
            'exported_analytics_directory')

        with open(exported_analytics_directory + '/' + filename,
                  'w',
                  newline='') as csvfile:
            # maybe need some quoting
            alertswriter = csv.writer(csvfile, delimiter=',')
            alertswriter.writerow([
                'customer', 'tactic', 'technique', 'detection_rule',
                'category', 'platform', 'type', 'channel', 'provider',
                'data_needed', 'logging policy', 'enrichment',
                'enrichment requirements', 'response playbook',
                'response action'
            ])
            for row in result:
                alertswriter.writerow(row)
        print(f'[+] Created {filename}')

        filename = 'pivoting.csv'
        exported_analytics_directory = ATCconfig.get(
            'exported_analytics_directory')

        with open(exported_analytics_directory + '/' + filename,
                  'w',
                  newline='') as csvfile:
            # maybe need some quoting
            alertswriter = csv.writer(csvfile, delimiter=',')
            alertswriter.writerow([
                'field', 'category', 'platform', 'type', 'channel', 'provider',
                'data_needed', 'enrichment', 'enrichment requirements'
            ])
            for row in analytics:
                alertswriter.writerow(row)

        print(f'[+] Created {filename}')
    def __init__(self,
                 ra=False,
                 rp=False,
                 rs=False,
                 auto=False,
                 ra_path=False,
                 rp_path=False,
                 rs_path=False,
                 atc_dir=False,
                 init=False):
        """Init"""

        # Check if atc_dir provided
        if atc_dir:
            self.atc_dir = atc_dir
        else:
            self.atc_dir = ATCconfig.get('md_name_of_root_directory') + '/'

        # Main logic
        if auto:
            self.response_action(ra_path)
            self.response_playbook(rp_path)
            self.response_stage(rs_path)

        if ra:
            self.response_action(ra_path)

        if rp:
            self.response_playbook(rp_path)

        if rs:
            self.response_stage(rs_path)

        if ra_path:
            ras, ra_paths = ATCutils.load_yamls_with_paths(ra_path)
        else:
            ras, ra_paths = ATCutils.load_yamls_with_paths(
                ATCconfig.get('response_actions_dir'))

        if rp_path:
            rps, rp_paths = ATCutils.load_yamls_with_paths(rp_path)
        else:
            rps, rp_paths = ATCutils.load_yamls_with_paths(
                ATCconfig.get('response_playbooks_dir'))

        if rs_path:
            rss, rs_paths = ATCutils.load_yamls_with_paths(rs_path)
        else:
            rss, rs_paths = ATCutils.load_yamls_with_paths(
                ATCconfig.get('response_stages_dir'))

        ra_filenames = [
            ra_path.split('/')[-1].replace('.yml', '') for ra_path in ra_paths
        ]
        rp_filenames = [
            rp_path.split('/')[-1].replace('.yml', '') for rp_path in rp_paths
        ]
        rs_filenames = [
            rs_path.split('/')[-1].replace('.yml', '') for rs_path in rs_paths
        ]

        # Point to the templates directory
        env = Environment(loader=FileSystemLoader('scripts/templates'))

        # Get proper template
        template = env.get_template('mkdocs_config_template.md.j2')

        preparation = []
        identification = []
        containment = []
        eradication = []
        recovery = []
        lessons_learned = []
        detect = []
        deny = []
        disrupt = []
        degrade = []
        deceive = []
        destroy = []
        deter = []

        stages = [('preparation', preparation),
                  ('identification', identification),
                  ('containment', containment), ('eradication', eradication),
                  ('recovery', recovery), ('lessons_learned', lessons_learned),
                  ('detect', detect), ('deny', deny), ('disrupt', disrupt),
                  ('degrade', degrade), ('deceive', deceive),
                  ('destroy', destroy), ('deter', deter)]

        playbooks = []

        data_to_render = {}

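        # Bucket Response Actions into stages based on the RA ID prefix (RA1..RA6)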
        for i in range(len(ras)):

            ra_updated_title = ras[i].get('id')\
                + ": "\
                + ATCutils.normalize_react_title(ras[i].get('title'))

            if "RA1" in ras[i]['id']:
                preparation.append((ra_updated_title, ra_filenames[i]))
            elif "RA2" in ras[i]['id']:
                identification.append((ra_updated_title, ra_filenames[i]))
            elif "RA3" in ras[i]['id']:
                containment.append((ra_updated_title, ra_filenames[i]))
            elif "RA4" in ras[i]['id']:
                eradication.append((ra_updated_title, ra_filenames[i]))
            elif "RA5" in ras[i]['id']:
                recovery.append((ra_updated_title, ra_filenames[i]))
            elif "RA6" in ras[i]['id']:
                lessons_learned.append((ra_updated_title, ra_filenames[i]))

        stages = [(stage_name.replace('_', ' ').capitalize(),
                   sorted(stage_list))
                  for stage_name, stage_list in stages]

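        # Collect Response Playbooks, prefixing titles with their IDs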
        for i in range(len(rps)):

            rp_updated_title = rps[i].get('id')\
                + ": "\
                + ATCutils.normalize_react_title(rps[i].get('title'))

            playbooks.append((rp_updated_title, rp_filenames[i]))

        rs_list = []

        for i in range(len(rss)):

            rs_title = rss[i].get('title')
            rs_id = rss[i].get('id')

            rs_list.append((rs_title, rs_id))

        data_to_render.update({'stages': stages})
        data_to_render.update({'playbooks': sorted(playbooks)})
        data_to_render.update({'rs_list': rs_list})

        content = template.render(data_to_render)
        try:
            ATCutils.write_file('mkdocs.yml', content)
            print("[+] Created mkdocs.yml")
        except:
            print("[-] Failed to create mkdocs.yml")
Example #5
    def __init__(self, ra=False, rp=False, auto=False,
                 ra_path=False, rp_path=False,
                 atc_dir=False, init=False):
        """Init"""

        # Check if atc_dir provided
        if atc_dir:
            self.atc_dir = atc_dir
        else:
            self.atc_dir = ATCconfig.get('md_name_of_root_directory') + '/'

        # Main logic
        if auto:
            self.response_action(ra_path)
            self.response_playbook(rp_path)

        if ra:
            self.response_action(ra_path)

        if rp:
            self.response_playbook(rp_path)

        if ra_path:
            ras, ra_paths = ATCutils.load_yamls_with_paths(ra_path)
        else:
            ras, ra_paths = ATCutils.load_yamls_with_paths(ATCconfig.get('response_actions_dir'))

        if rp_path:
            rps, rp_paths = ATCutils.load_yamls_with_paths(rp_path)
        else:
            rps, rp_paths = ATCutils.load_yamls_with_paths(ATCconfig.get('response_playbooks_dir'))

        
        ra_filenames = [ra_path.split('/')[-1].replace('.yml', '') for ra_path in ra_paths]
        rp_filenames = [rp_path.split('/')[-1].replace('.yml', '') for rp_path in rp_paths]

        _preparation = []
        _identification = []
        _containment = []
        _eradication = []
        _recovery = []
        _lessons_learned = []

        stages = [
            ('preparation', _preparation), ('identification', _identification),
            ('containment', _containment), ('eradication', _eradication),
            ('recovery', _recovery), ('lessons_learned', _lessons_learned)
        ]

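        # Map each Response Action to its stage via the RA ID prefix and emit a STIX object for it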
        for i in range(len(ras)):

            normalized_title = ATCutils.normalize_react_title(ras[i].get('title'))

            ra_updated_title = ras[i].get('id')\
                + ":"\
                + normalized_title
            
            if "RA1" in ras[i]['id']:
                stage = 'preparation'
            elif "RA2" in ras[i]['id']:
                stage = 'identification'
            elif "RA3" in ras[i]['id']:
                stage = 'containment'
            elif "RA4" in ras[i]['id']:
                stage = 'eradication'
            elif "RA5" in ras[i]['id']:
                stage = 'recovery'
            elif "RA6" in ras[i]['id']:
                stage = 'lessons-learned'

            kill_chain_phases = [{
                "kill_chain_name": 'atc-react',
                "phase_name": stage
            }]

            external_references = [{
                "source_name": "atc-react",
                "external_id": ras[i].get('id'),
                "url": "https://atc-project.github.io/atc-react/Response_Actions/" + ra_filenames[i]
            }]

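            # Build a custom STIX object for this Response Action and add it to the memory store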
            ra = ReactAction(
                name=normalized_title, 
                description=ras[i].get('description'),
                external_references=external_references,
                kill_chain_phases=kill_chain_phases,
                x_mitre_platforms=['Windows', 'Linux', 'macOS'],
                allow_custom=True
            )

            stix_mem.add(ra)

        stix_mem.add([
            preparation,
            identification,
            containment,
            eradication,
            recovery,
            lessons_learned
        ])

        stix_mem.add(react_matrix)

        try:
            stix_mem.save_to_file("docs/react.json")
            print("[+] Created react.json STIX file")
        except:
            print("[-] Failed to create react.json STIX file")
Example #6
# ########################################################################### #
# ############################## Customer ################################### #
# ########################################################################### #

env = Environment(loader=FileSystemLoader('scripts/templates'))

ATCconfig = ATCutils.load_config("config.yml")

dr_dirs = ATCconfig.get('detection_rules_directories')

all_rules = []
all_names = []
all_titles = []

for dr_path in dr_dirs:
    rules, paths = ATCutils.load_yamls_with_paths(dr_path)
    all_rules = all_rules + rules
    names = [path.split('/')[-1].replace('.yml', '') for path in paths]
    all_names = all_names + names
    titles = [rule.get('title') for rule in rules]
    all_titles = all_titles + titles

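# Build a title -> (rule, filename) lookup for all detection rules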
_ = zip(all_rules, all_names, all_titles)
rules_by_title = {title: (rule, name) for (rule, name, title) in _}


class Customer:
    """Class for Customer entity"""
    def __init__(self, yaml_file, apipath=None, auth=None, space=None):
        """ Init method """
Example #7
    def __init__(self, dn_path=False, lp_path=False, 
                 en_path=False, atc_dir=False, init=False):
        """Init"""

        # Check if atc_dir provided
        if atc_dir:
            self.atc_dir = atc_dir
        else:
            self.atc_dir = ATCconfig.get('md_name_of_root_directory') + '/'

        # Main logic

        if dn_path:
            dns, dn_paths = ATCutils.load_yamls_with_paths(dn_path)
        else:
            dns, dn_paths = ATCutils.load_yamls_with_paths(ATCconfig.get('data_needed_dir'))

        if lp_path:
            lps, lp_paths = ATCutils.load_yamls_with_paths(lp_path)
        else:
            lps, lp_paths = ATCutils.load_yamls_with_paths(ATCconfig.get('logging_policies_dir'))

        if en_path:
            ens, en_paths = ATCutils.load_yamls_with_paths(en_path)
        else:
            ens, en_paths = ATCutils.load_yamls_with_paths(ATCconfig.get('enrichments_dir'))


        dn_filenames = [dn_path.split('/')[-1].replace('.yml', '') for dn_path in dn_paths]
        lp_filenames = [lp_path.split('/')[-1].replace('.yml', '') for lp_path in lp_paths]
        en_filenames = [en_path.split('/')[-1].replace('.yml', '') for en_path in en_paths]

        # Point to the templates directory
        env = Environment(loader=FileSystemLoader('scripts/templates'))

        # Get proper template
        template = env.get_template(
            'mkdocs_config_template.yml.j2'
        )

        data_to_render = {}

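        # Collect (title, filename) pairs for each entity type to render into the mkdocs config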
        data_needed_list = []
        for i in range(len(dns)):

            dn_updated_title = dns[i].get('title')
            
            data_needed_list.append((dn_updated_title, dn_filenames[i]))
        
        logging_policy_list = []
        for i in range(len(lps)):

            lp_updated_title = lps[i].get('title')

            logging_policy_list.append((lp_updated_title, lp_filenames[i]))

        enrichment_list = []
        for i in range(len(ens)):

            en_updated_title = ens[i].get('title')
            
            enrichment_list.append((en_updated_title, en_filenames[i]))


        data_to_render.update({'data_needed_list': sorted(data_needed_list)})
        data_to_render.update({'logging_policy_list': sorted(logging_policy_list)})
        data_to_render.update({'enrichment_list': sorted(enrichment_list)})
        
        content = template.render(data_to_render)
        try:
            ATCutils.write_file('mkdocs.yml', content)
            print("[+] Created mkdocs.yml")
        except:
            print("[-] Failed to create mkdocs.yml")
Example #8
# ############################## Customer ################################### #
# ########################################################################### #

ATCconfig = ATCutils.load_config("config.yml")

env = Environment(loader=FileSystemLoader(
    ATCconfig.get('templates_directory', 'scripts/templates')))

dr_dirs = ATCconfig.get('detection_rules_directories')

all_rules = []
all_names = []
all_titles = []

for dr_path in dr_dirs:
    rules, paths = ATCutils.load_yamls_with_paths(dr_path)
    all_rules = all_rules + rules
    names = [path.split('/')[-1].replace('.yml', '') for path in paths]
    all_names = all_names + names
    titles = [rule.get('title') for rule in rules]
    all_titles = all_titles + titles

_ = zip(all_rules, all_names, all_titles)
rules_by_title = {title: (rule, name) for (rule, name, title) in _}

uc_dirs = ATCconfig.get('usecases_directory')

all_usecases = []
all_ucnames = []
all_uctitles = []
Example #9
    def __init__(self):

        dn_path = ATCconfig.get('data_needed_dir')
        lp_path = ATCconfig.get('logging_policies_dir')
        en_path = ATCconfig.get('enrichments_directory')
        rp_path = ATCconfig.get('response_playbooks_dir')
        ra_path = ATCconfig.get('response_actions_dir')
        cu_path = ATCconfig.get('customers_directory')

        dn_list = ATCutils.load_yamls(dn_path)
        lp_list = ATCutils.load_yamls(lp_path)
        ra_list = ATCutils.load_yamls(ra_path)
        rp_list = ATCutils.load_yamls(rp_path)
        cu_list = ATCutils.load_yamls(cu_path)

        enrichments_list = ATCutils.load_yamls(en_path)
        _index = {}

        dr_dirs = ATCconfig.get('detection_rules_directories')

        # output file for the Elasticsearch index documents (referenced below)
        filename = 'atc_es_index.json'
        exported_analytics_directory = ATCconfig.get(
            'exported_analytics_directory')

        try:
            os.remove(exported_analytics_directory + '/' + filename)
            print("[-] Old atc_es_index.json has been deleted")
        except:
            pass

        # Iterate through alerts and the paths to them
        for dr_path in dr_dirs:
            alerts, path_to_alerts = ATCutils.load_yamls_with_paths(dr_path)
            for alert, path in zip(alerts, path_to_alerts):

                tactics = []
                techniques = []
                list_of_customers = []

                # raw Sigma rule without fields that are present separately
                dr_raw = alert.copy()

                fields_to_remove_from_raw_dr = [
                    'title', 'id', 'status', 'date', 'modified', 'description',
                    'references', 'author', 'tags', 'logsource',
                    'falsepositives', 'level'
                ]

                for field in fields_to_remove_from_raw_dr:
                    dr_raw.pop(field, None)

                dr_raw = str(yaml.dump((dr_raw), default_flow_style=False))

                for customer in cu_list:
                    if 'detectionrule' in customer:
                        if alert['title'] in customer[
                                'detectionrule'] and customer[
                                    'customer_name'] not in list_of_customers:
                            list_of_customers.append(customer['customer_name'])

                if not isinstance(list_of_customers,
                                  list) or len(list_of_customers) == 0:
                    list_of_customers = ["None"]

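                # Resolve ATT&CK tactics and techniques from the detection rule tags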
                if isinstance(alert.get('tags'), list):
                    try:
                        threats = [
                            tag for tag in alert['tags']
                            if tag.startswith('attack')
                        ]
                        tactics = [
                            f'{ta_mapping[threat][1]}: {ta_mapping[threat][0]}'
                            for threat in threats
                            if threat in ta_mapping.keys()
                        ]
                    except:
                        pass

                    try:
                        threats = [
                            tag for tag in alert['tags']
                            if tag.startswith('attack')
                        ]
                        techniques = [
                            threat for threat in threats
                            if threat.startswith('attack.t')
                        ]
                    except:
                        pass

                enrichments = [
                    er for er in enrichments_list
                    if er['title'] in alert.get('enrichment', [{
                        'title': '-'
                    }])
                ]
                if len(enrichments) < 1:
                    enrichments = [{'title': 'not defined'}]
                dn_titles = ATCutils.main_dn_calculatoin_func(path)
                alert_dns = [
                    data for data in dn_list if data['title'] in dn_titles
                ]
                if len(alert_dns) < 1:
                    alert_dns = [{
                        'category': 'not defined',
                        'platform': 'not defined',
                        'provider': 'not defined',
                        'type': 'not defined',
                        'channel': 'not defined',
                        'title': 'not defined',
                        'loggingpolicy': ['not defined']
                    }]
                logging_policies = []
                for dn in alert_dns:
                    # If the DN references logging policies we haven't added yet, add them
                    logging_policies.extend([
                        l for l in lp_list if l['title'] in dn['loggingpolicy']
                        and l not in logging_policies
                    ])
                    # If there are no logging policies at all, add a placeholder entry
                    if not isinstance(logging_policies,
                                      list) or len(logging_policies) == 0:
                        logging_policies = [{
                            'title': "not defined",
                            'eventID': [
                                -1,
                            ]
                        }]

                # we use date of creation to have timelines of progress
                if 'date' in alert:

                    date_created = None
                    try:
                        date_created = datetime.datetime.strptime(
                            alert['date'], '%Y/%m/%d').isoformat()
                    except:
                        pass

                    if not date_created:
                        try:
                            # in case somebody mixed up month and date, like in "Detection of SafetyKatz"
                            date_created = datetime.datetime.strptime(
                                alert['date'], '%Y/%d/%m').isoformat()
                        except:
                            # temporary solution to avoid errors. all DRs must have date of creation
                            print(
                                'date ' + alert['date'] + ' is not in ' +
                                '%Y/%m/%d or %Y/%d/%m format. Using current date and time instead'
                            )
                            date_created = datetime.datetime.now().isoformat()
                else:
                    # temporary solution to avoid errors. all internal DRs must have date of creation
                    #date_created = datetime.datetime.now().isoformat()
                    date_created = '2019-03-01T22:50:37.587060'

                # same approach for the date of modification
                if 'modified' in alert:

                    date_modified = None
                    try:
                        date_modified = datetime.datetime.strptime(
                            alert['modified'], '%Y/%m/%d').isoformat()
                    except:
                        pass

                    if not date_modified:
                        try:
                            # in case somebody mixed up month and date, like in "Detection of SafetyKatz"
                            date_modified = datetime.datetime.strptime(
                                alert['modified'], '%Y/%d/%m').isoformat()
                        except:
                            date_modified = None
                else:
                    # temporary solution to avoid errors. all internal DRs must have date of creation
                    #date_created = datetime.datetime.now().isoformat()
                    date_modified = None

                # we derive the index document ID from the DR title, which is (supposed to be) unique.
                # this way we update existing documents on changes and create new ones for new DRs.
                # update: better to recreate everything from scratch every time; if a DR gets renamed,
                # we would otherwise end up with duplicates in the index. todo
                document_id = hash(alert['title'])

                list_of_tactics = []
                list_of_techniques = []

                if tactics:
                    for tactic in tactics:
                        if tactic not in list_of_tactics:
                            list_of_tactics.append(tactic)
                else:
                    list_of_tactics = ['not defined']

                if techniques:
                    for technique in techniques:
                        technique_name = technique.replace('attack.t', 'T') + ': ' +\
                                ATCutils.get_attack_technique_name_by_id(technique.replace('attack.', ''))
                        if technique not in list_of_techniques:
                            list_of_techniques.append(technique_name)
                else:
                    list_of_techniques = ['not defined']

                dr_title = alert['title']
                dn_titles = []
                dn_categories = []
                dn_platforms = []
                dn_types = []
                dn_channels = []
                dn_providers = []
                lp_titles = []
                en_titles = []
                en_requirements = []

                if 'author' in alert:
                    dr_author = alert['author']
                else:
                    dr_author = 'not defined'

                if 'description' in alert:
                    dr_description = alert['description']
                else:
                    dr_description = 'not defined'

                if 'references' in alert:
                    dr_references = alert['references']
                else:
                    dr_references = 'not defined'

                if 'id' in alert:
                    dr_id = alert['id']
                else:
                    dr_id = 'not defined'

                if 'internal_responsible' in alert:
                    dr_internal_responsible = alert['internal_responsible']
                else:
                    dr_internal_responsible = 'not defined'

                if 'status' in alert:
                    dr_status = alert['status']
                else:
                    dr_status = 'not defined'

                if 'level' in alert:
                    dr_severity = alert['level']
                else:
                    dr_severity = 'not defined'

                if 'confidence' in alert:
                    dr_confidence = alert['confidence']
                else:
                    dr_confidence = 'not defined'

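                # Aggregate Data Needed attributes, avoiding duplicate values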
                for dn in alert_dns:
                    if dn['title'] not in dn_titles:
                        dn_titles.append(dn['title'])
                    if dn['category'] not in dn_categories:
                        dn_categories.append(dn['category'])
                    if dn['platform'] not in dn_platforms:
                        dn_platforms.append(dn['platform'])
                    if dn['type'] not in dn_types:
                        dn_types.append(dn['type'])
                    if dn['channel'] not in dn_channels:
                        dn_channels.append(dn['channel'])
                    if dn['provider'] not in dn_providers:
                        dn_providers.append(dn['provider'])

                for lp in logging_policies:
                    if lp['title'] not in lp_titles:
                        lp_titles.append(lp['title'])

                for er in enrichments:
                    if er['title'] not in en_titles:
                        en_titles.append(er['title'])
                    if 'requirements' in er:
                        en_requirements.append(er['requirements'])
                    else:
                        if "-" not in en_requirements:
                            en_requirements.append("not defined")

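                # Assemble the index document for this detection rule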
                _index.update({
                    "date_created": date_created,
                    "sigma_rule_path": path[25:],
                    "date_modified": date_modified,
                    "description": dr_description,
                    "references": dr_references,
                    "customer": list_of_customers,
                    "tactic": list_of_tactics,
                    "dr_id": dr_id,
                    "technique": list_of_techniques,
                    "raw_detection_rule": dr_raw,
                    "detection_rule_title": dr_title,
                    "detection_rule_author": dr_author,
                    "detection_rule_internal_responsible":
                    dr_internal_responsible,
                    "detection_rule_development_status": dr_status,
                    "detection_rule_severity": dr_severity,
                    "detection_rule_confidence": dr_confidence,
                    "category": dn_categories,
                    "platform": dn_platforms,
                    "type": dn_types,
                    "channel": dn_channels,
                    "provider": dn_providers,
                    "data_needed": dn_titles,
                    "logging_policy": lp_titles,
                    "enrichment": en_titles,
                    "enrichment_requirements": en_requirements
                })

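                # Append the bulk index action line and the document itself to the output file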
                index_line = {"index": {"_id": document_id}}

                with open(exported_analytics_directory + '/' + filename,
                          'a') as fp:
                    json.dump(index_line, fp)
                    fp.write("\n")
                    json.dump(_index, fp)
                    fp.write("\n")

        print(f'[+] Created {filename}')