def get_technique_and_mitigation_relationships() -> List[Relationship]:
    file_system = FileSystemSource(MitreApiInterface.ATTACK_DATA_PATH)
    technique_filter = [
        Filter("type", "=", "relationship"),
        Filter("relationship_type", "=", "mitigates"),
    ]
    all_techniques = file_system.query(technique_filter)
    return all_techniques

def get_all_attack_techniques() -> Dict[str, AttackPattern]:
    file_system = FileSystemSource(MitreApiInterface.ATTACK_DATA_PATH)
    technique_filter = [Filter('type', '=', 'attack-pattern')]
    all_techniques = file_system.query(technique_filter)
    all_techniques = {
        technique['id']: technique for technique in all_techniques
    }
    return all_techniques

def get_all_mitigations() -> Dict[str, CourseOfAction]:
    file_system = FileSystemSource(MitreApiInterface.ATTACK_DATA_PATH)
    mitigation_filter = [Filter('type', '=', 'course-of-action')]
    all_mitigations = file_system.query(mitigation_filter)
    all_mitigations = {
        mitigation['id']: mitigation for mitigation in all_mitigations
    }
    return all_mitigations

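# --- Illustrative sketch (not from the original source) ---
# The helpers above reference a MitreApiInterface.ATTACK_DATA_PATH constant and stix2
# types that are defined elsewhere in the module. A minimal set of imports and a
# hypothetical constant that would make them runnable could look like the following;
# the class name is taken from the code above, but the path value is an assumption
# for demonstration only.
from typing import Dict, List

from stix2 import (AttackPattern, CourseOfAction, FileSystemSource, Filter,
                   Relationship)


class MitreApiInterface:
    # Assumed location of a local MITRE ATT&CK "enterprise-attack" STIX directory,
    # e.g. a checkout of the mitre/cti repository.
    ATTACK_DATA_PATH = "cti/enterprise-attack"
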
def get_technique_and_mitigation_relationships(
        attack_data_path: Path) -> List[Relationship]:
    file_system = FileSystemSource(attack_data_path)
    technique_filter = [
        Filter("type", "=", "relationship"),
        Filter("relationship_type", "=", "mitigates"),
    ]
    all_techniques = file_system.query(technique_filter)
    return all_techniques

def get_all_mitigations(attack_data_path: Path) -> Dict[str, CourseOfAction]:
    file_system = FileSystemSource(attack_data_path)
    mitigation_filter = [Filter("type", "=", "course-of-action")]
    all_mitigations = file_system.query(mitigation_filter)
    all_mitigations = {
        mitigation["id"]: mitigation for mitigation in all_mitigations
    }
    return all_mitigations

def get_all_attack_techniques(
        attack_data_path: Path) -> Dict[str, AttackPattern]:
    file_system = FileSystemSource(attack_data_path)
    technique_filter = [Filter("type", "=", "attack-pattern")]
    all_techniques = file_system.query(technique_filter)
    all_techniques = {
        technique["id"]: technique for technique in all_techniques
    }
    return all_techniques

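# --- Illustrative usage sketch (assumption, not part of the original code) ---
# The parameterised helpers above can be combined into a technique -> mitigations
# mapping. "mitigates" relationships point from a course-of-action (source_ref) to an
# attack-pattern (target_ref), so each relationship joins one entry from each lookup.
# The function name below is hypothetical.
from collections import defaultdict
from pathlib import Path


def map_techniques_to_mitigations(
        attack_data_path: Path) -> Dict[str, List[CourseOfAction]]:
    techniques = get_all_attack_techniques(attack_data_path)
    mitigations = get_all_mitigations(attack_data_path)
    relationships = get_technique_and_mitigation_relationships(attack_data_path)

    technique_to_mitigations = defaultdict(list)
    for relationship in relationships:
        technique = techniques.get(relationship["target_ref"])
        mitigation = mitigations.get(relationship["source_ref"])
        if technique is not None and mitigation is not None:
            technique_to_mitigations[technique["id"]].append(mitigation)
    return dict(technique_to_mitigations)
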
class AtomicCaldera:

    def __init__(self, services, ac_data_svc):
        self.ac_data_svc = ac_data_svc
        self.data_svc = services.get('data_svc')
        self.auth_svc = services.get('auth_svc')
        self.log = Logger('atomiccaldera')
        self.log.debug('Atomic-Caldera Plugin Logging started.')
        self.get_conf()
        self.fs = FileSystemSource(self.ctipath)

    def get_conf(self):
        confPath = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                '../conf/artconf.yml')
        try:
            with open(confPath, 'r') as c:
                conf = yaml.load(c, Loader=yaml.Loader)
            self.ctipath = os.path.expanduser(
                os.path.join(conf['ctipath'], 'enterprise-attack/'))
            self.artpath = os.path.expanduser(conf['artpath'])
            self.log.debug(self.ctipath)
            self.log.debug(self.artpath)
        except:
            pass

    @template('atomiccaldera.html')
    async def landing(self, request):
        await self.auth_svc.check_permissions(request)
        abilities = []
        tactics = []
        variables = []
        try:
            abilities = await self.ac_data_svc.explode_art_abilities()
            for ab in abilities:
                if not ab['tactic'] in tactics:
                    tactics.append(ab['tactic'])
        except Exception as e:
            self.log.error(e)
        try:
            variables = await self.ac_data_svc.explode_art_variables()
        except Exception as e:
            self.log.error(e)
        return {
            'abilities': json.dumps(abilities),
            'tactics': tactics,
            'variables': json.dumps(variables)
        }

    async def getMITREPhase(self, attackID):
        filter = [
            Filter('type', '=', 'attack-pattern'),
            Filter('external_references.external_id', '=', attackID)
        ]
        result = self.fs.query(filter)
        if result:
            return result[0].kill_chain_phases[0].phase_name
        else:
            return 'unknown'

    async def get_atomics(self):
        await self.ac_data_svc.build_db(
            os.path.join(os.path.dirname(os.path.realpath(__file__)),
                         '../conf/ac.sql'))
        artAbilities = []
        artVars = []
        if os.path.exists(self.artpath):
            for root, dirs, files in os.walk(self.artpath):
                for procFile in files:
                    fullFile = os.path.join(root, procFile)
                    if os.path.splitext(fullFile)[-1].lower() == '.yaml':
                        self.log.debug('Processing {}'.format(fullFile))
                        try:
                            artObj = ARTyaml()
                        except:
                            continue
                        with open(fullFile, 'r') as yamlFile:
                            try:
                                artObj.load(yamlFile)
                            except:
                                continue
                        # Loop through the tests
                        if artObj.atomicTests:
                            for atomic in artObj.atomicTests:
                                for platform in atomic['supported_platforms']:
                                    if platform.lower() in ['windows', 'linux', 'macos']:
                                        name = atomic['name']
                                        description = atomic['description']
                                        if 'command' in atomic['executor'].keys():
                                            command = re.sub(
                                                r'x07', r'a',
                                                repr(atomic['executor']['command'])).strip()
                                            command = command.encode('utf-8').decode('unicode_escape')
                                            executor = atomic['executor']['name']
                                            if command[0] == '\'':
                                                command = command.strip('\'')
                                            elif command[0] == '\"':
                                                command = command.strip('\"')
                                        else:
                                            command = ''
                                            executor = ''
                                        try:
                                            if command != '':
                                                checkUnique = {
                                                    'technique': int(artObj.attackTech[1:]),
                                                    'command': b64encode(command.encode('utf-8')).decode('utf-8')
                                                }
                                        except Exception as e:
                                            print(e)
                                        # Check to see if the command has been added to the database
                                        if (command != '' and
                                                not await self.ac_data_svc.check_art_ability(checkUnique)):
                                            uuidBool = True
                                            while uuidBool:
                                                ability_id = str(uuid.uuid4())
                                                if not await self.ac_data_svc.check_art_ability(
                                                        {'ability_id': ability_id}):
                                                    uuidBool = False
                                            try:
                                                # Add the new ability to export
                                                artAbilities.append({
                                                    'ability_id': ability_id,
                                                    'technique': artObj.attackTech[1:],
                                                    'name': name,
                                                    'description': description,
                                                    'tactic': await self.getMITREPhase(artObj.attackTech),
                                                    'attack_name': artObj.displayName,
                                                    'platform': platform,
                                                    'executor': executor,
                                                    'command': b64encode(command.encode('utf-8')).decode('utf-8')
                                                })
                                            except Exception as e:
                                                print(e)
                                            if 'input_arguments' in atomic.keys():
                                                for argument in atomic['input_arguments'].keys():
                                                    try:
                                                        curVar = re.sub(
                                                            r'x07', r'a',
                                                            repr(atomic['input_arguments'][argument]['default'])).strip()
                                                        if curVar[0] == '\'':
                                                            curVar = curVar.strip('\'')
                                                        elif curVar[0] == '\"':
                                                            curVar = curVar.strip('\"')
                                                        curVar = curVar.replace('\\\\', '\\')
                                                        artVars.append({
                                                            'ability_id': ability_id,
                                                            'var_name': argument,
                                                            'value': b64encode(curVar.encode('utf-8')).decode('utf-8')
                                                        })
                                                    except:
                                                        pass
        else:
            self.log.debug('Paths are not valid')
            return {'abilities': [], 'variables': []}
        self.log.debug('Got to the end.')
        return {'abilities': artAbilities, 'variables': artVars}

    async def export_all_to_stockpile(self, data):
        try:
            abilities = await self.ac_data_svc.explode_art_abilities()
        except Exception as e:
            self.log.error(e)
        try:
            variables = await self.ac_data_svc.explode_art_variables()
        except Exception as e:
            self.log.error(e)
        if await self.export_to_stockpile(abilities, variables):
            return 'Abilities successfully exported.'
        else:
            return 'Failed to export abilities.'

    async def export_one_to_stockpile(self, data):
        abilities = []
        variables = []
        ability_id = {'ability_id': data.pop('ability_id')}
        try:
            abilities = await self.ac_data_svc.get_art_ability(ability_id)
        except Exception as e:
            self.log.error(e)
        try:
            variables = await self.ac_data_svc.get_art_variable(ability_id)
        except Exception as e:
            self.log.error(e)
        if await self.export_to_stockpile(abilities, variables):
            return 'Ability successfully exported.'
        else:
            return 'Failed to export ability.'
    async def export_to_stockpile(self, abilities, variables):
        # String representer for PyYAML to format the command string
        yaml.add_representer(cmdStr, cmd_presenter)
        for ability in abilities:
            executor = ability['executor']
            platform = ability['platform']
            payload = ''
            # Fix the command formatting
            command = b64decode(ability['command'])
            command = command.decode('utf-8')
            if command[0] == '\'':
                command = command.strip('\'')
            elif command[0] == '\"':
                command = command.strip('\"')
            # Determine the executor
            # Fill in the variables
            for variable in variables:
                if variable['ability_id'] == ability['ability_id']:
                    value = b64decode(variable['value']).decode('utf-8')
                    if value[0] == '\'':
                        value = value.strip('\'')
                    elif value[0] == '\"':
                        value = value.strip('\"')
                    value = value.replace('\\\\', '\\')
                    command = re.sub(
                        r"\#{{{argName}}}".format(argName=str(variable['var_name'])),
                        value.encode('unicode-escape').decode(), command)
            if (executor.lower() == 'sh' or executor.lower() == 'bash'):
                if platform.lower() == 'linux':
                    platform = 'linux'
                elif platform.lower() == 'macos':
                    platform = 'darwin'
            elif (executor.lower() == 'command_prompt'
                  or executor.lower() == 'powershell'):
                if executor.lower() == 'command_prompt':
                    executor = 'cmd'
                else:
                    executor = 'psh'
            command = command.replace('\\n', '\n')
            # Future additions
            parserName = ''
            parserProperty = ''
            parserScript = ''
            # Build the YAML data
            # newYaml = [{'id': ability['ability_id'], 'name': ability['name'],
            #             'description': ability['description'], 'tactic': ability['tactic'],
            #             'technique': {'attack_id': 'T{}'.format(str(ability['technique'])),
            #                           'name': ability['attack_name']},
            #             'platforms': {platform: {executor.lower(): {
            #                 'command': cmdStr(command), 'payload': payload,
            #                 'parser': {'name': parserName, 'property': parserProperty,
            #                            'script': parserScript}}}}}]
            newYaml = [{
                'id': ability['ability_id'],
                'name': ability['name'],
                'description': ability['description'],
                'tactic': ability['tactic'],
                'technique': {
                    'attack_id': 'T{}'.format(str(ability['technique'])),
                    'name': ability['attack_name']
                },
                'platforms': {
                    platform: {
                        executor.lower(): {
                            'command': cmdStr(command),
                            'payload': payload
                        }
                    }
                }
            }]
            payloadPath = os.path.join(
                os.path.dirname(os.path.realpath(__file__)),
                '../../stockpile/data/payloads/')
            abilityPath = os.path.join(
                os.path.dirname(os.path.realpath(__file__)),
                '../../stockpile/data/abilities/')
            # Check and create payloads folder if it does not exist
            try:
                if not os.path.exists(payloadPath):
                    os.makedirs(payloadPath)
            except Exception as e:
                self.log.error(e)
                return False
            # Write the BAT file if needed
            if payload != '':
                with open(os.path.join(payloadPath, payload), 'w') as payloadFile:
                    payloadFile.write(batCommand)
            # Check and create ability folder if it does not exist
            try:
                if not os.path.exists(os.path.join(abilityPath, ability['tactic'])):
                    os.makedirs(os.path.join(abilityPath, ability['tactic']))
            except Exception as e:
                self.log.error(e)
                return False
            # Write the YAML file to the correct directory
            try:
                with open(
                        os.path.join(abilityPath, ability['tactic'],
                                     '{}.yml'.format(ability['ability_id'])),
                        'w') as newYAMLFile:
                    dump = yaml.dump(newYaml,
                                     default_style=None,
                                     default_flow_style=False,
                                     allow_unicode=True,
                                     encoding=None,
                                     sort_keys=False)
                    newYAMLFile.write(dump)
            except Exception as e:
                self.log.error(e)
                return False
        return True

    async def get_art(self, request):
        self.log.debug('Landed in get_art.')
        try:
            atomics = await self.get_atomics()
        except Exception as e:
            self.log.error(e)
            pass
        return atomics

    async def import_art_abilities(self):
        try:
            atomics = await self.get_atomics()
        except Exception as e:
            self.log.error(e)
            return 'Failed to load abilities.'
        for ability in atomics['abilities']:
            await self.ac_data_svc.create_art_ability(ability)
        for variable in atomics['variables']:
            await self.ac_data_svc.create_art_variable(variable)
        return 'Successfully imported new abilities.'

    async def save_art_ability(self, data):
        key = data.pop('key')
        value = data.pop('value')
        updates = data.pop('data')
        if await self.ac_data_svc.update_art_ability(key, value, updates):
            return 'Updated ability: {}'.format(value)
        else:
            return 'Update failed for ability: {}'.format(value)

    async def save_art_variables(self, data):
        updates = data.pop('data')
        if await self.ac_data_svc.update_art_variables(updates):
            return 'Updated variables successfully.'
        else:
            return 'Updates to variables failed.'

    async def delete_all(self):
        abilities = []
        payloadPath = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                   '../../stockpile/data/payloads/')
        abilityPath = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                   '../../stockpile/data/abilities/')
        try:
            abilities = await self.ac_data_svc.explode_art_abilities()
        except Exception as e:
            self.log.error(e)
        for ability in abilities:
            if os.path.exists(
                    os.path.join(abilityPath, ability['tactic'],
                                 '{}.yml'.format(ability['ability_id']))):
                os.remove(
                    os.path.join(abilityPath, ability['tactic'],
                                 '{}.yml'.format(ability['ability_id'])))
            if os.path.exists(
                    os.path.join(payloadPath,
                                 '{}.bat'.format(ability['ability_id']))):
                os.remove(
                    os.path.join(payloadPath,
                                 '{}.bat'.format(ability['ability_id'])))
        status = await self.ac_data_svc.delete_all()
        await self.ac_data_svc.build_db(
            os.path.join(os.path.dirname(os.path.realpath(__file__)),
                         '../conf/ac.sql'))
        return status

    async def rest_api(self, request):
        self.log.debug('Starting Rest call.')
        await self.auth_svc.check_permissions(request)
        data = dict(await request.json())
        index = data.pop('index')
        self.log.debug('Index: {}'.format(index))
        options = dict(
            PUT=dict(ac_ability=lambda d: self.import_art_abilities(**d)),
            POST=dict(
                ac_ability=lambda d: self.ac_data_svc.explode_art_abilities(**d),
                ac_ability_save=lambda d: self.save_art_ability(data=d),
                ac_variables_save=lambda d: self.save_art_variables(data=d),
                ac_export_all=lambda d: self.export_all_to_stockpile(**d),
                ac_export_one=lambda d: self.export_one_to_stockpile(data=d)),
            DELETE=dict(delete_all=lambda d: self.delete_all(**d)))
        try:
            output = await options[request.method][index](data)
        except Exception as e:
            self.log.debug('Stopped at api call.')
            self.log.error(e)
        return web.json_response(output)

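# --- Illustrative sketch (assumption; the real definitions live elsewhere in the plugin) ---
# export_to_stockpile() above registers a custom PyYAML representer via
# yaml.add_representer(cmdStr, cmd_presenter), but neither name is defined in this
# excerpt. A minimal pair that would satisfy those calls, emitting multi-line commands
# as literal block scalars, might look like this:
import yaml


class cmdStr(str):
    """Marker type for command strings that should be dumped as literal blocks."""


def cmd_presenter(dumper, data):
    # Use the literal block style ('|') only when the command spans multiple lines.
    if '\n' in data:
        return dumper.represent_scalar('tag:yaml.org,2002:str', data, style='|')
    return dumper.represent_scalar('tag:yaml.org,2002:str', data)
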
def main(args, config):
    """Wrapper to run all components of CDAS.

    Calls context, agents, and asset builders to create simulation components.
    Passes the resulting components to the simulation module. Manages output.

    Parameters
    ----------
    args : list
        The arguments passed in from argparse or read from the configuration
        file in the arguments method
    config : dict
        The configuration file opened and loaded from json
    """
    # Set up the output directory
    if os.path.isdir(args.output):
        q = (f"Overwrite the output folder {os.getcwd() + '/' + args.output}? "
             f"(y/n) ")
    else:
        q = (f"Output path {os.getcwd() + '/' + args.output} does not exist."
             "\nCreate this directory? (y/n) ")
    if not args.overwrite_output:
        answer = ""
        while answer not in ['y', 'n']:
            answer = input(q)
    else:
        answer = 'y'
    if answer == 'n':
        sys.exit("CDAS exited without completing")
    else:
        if os.path.isdir(args.output):
            for filename in os.listdir(args.output):
                file_path = os.path.join(args.output, filename)
                try:
                    if os.path.isfile(file_path) or os.path.islink(file_path):
                        os.unlink(file_path)
                    elif os.path.isdir(file_path):
                        shutil.rmtree(file_path)
                except Exception as e:
                    print('Failed to delete %s. %s' % (file_path, e))
        else:
            os.mkdir(args.output)

    # Set up the STIX data stores
    # Check if it's okay to overwrite the contents of the temporary data store
    temp_path = pkg_resources.resource_filename(__name__, config['temp_path'])
    if os.path.isdir(temp_path):
        q = f"Overwrite temporary stix data folder ({temp_path})? (y/n) "
        overwrite = input(q)
        if overwrite == 'n':
            print("Rename the 'temp path' variable in config file and "
                  "restart the simulation.")
            sys.exit()
        elif overwrite == 'y':
            shutil.rmtree(temp_path)
            os.mkdir(temp_path)
        else:
            overwrite = input(q)
    else:
        os.mkdir(temp_path)
    fs_gen = FileSystemStore(temp_path)
    fs_real = FileSystemSource(
        pkg_resources.resource_filename(__name__, "assets/mitre_cti/"))

    # Load or create country data
    countries = []
    if args.randomize_geopol is True:
        print("Creating countries...")
        with open(args.random_geodata, encoding='utf-8') as f:
            context_options = json.load(f)  # seed file
            f.close()
        map_matrix = context.Map(args.num_countries)
        for c in range(0, args.num_countries):
            countries.append(
                context.Country(fs_gen, context_options, map_matrix.map))
        for c in countries:
            # This loop is used mainly to convert references to other countries
            # to the names of those countries instead of their ID numbers,
            # since, during the generation of each country, it only has access
            # to map_matrix with ID numbers of the other countries

            # Convert the neighbors listed by id# to neighbor country names
            neighbors = {}
            for n in c.neighbors:
                n_name = next((x.name for x in countries if x.id == n), None)
                neighbors[n_name] = c.neighbors[n]
            c.neighbors = neighbors
            if len(c.neighbors) == 0:
                c.neighbors = "None (island nation)"
            # If the country is a territory, find its owner
            if c.government_type == "non-self-governing territory":
                gdps = [(int(gdp.gdp[1:].replace(',', '')), gdp.name)
                        for gdp in countries]
                gdps.sort()
                # Territory owners are most likely to be high GDP countries;
                # pick a random one from the top three GDP
                owner_name = np.random.choice([gdp[1] for gdp in gdps][-3:])
                if c.name in [gdp[1] for gdp in gdps][-3:]:
                    # If the territory itself is in the top three GDP, change
                    # its gov type to a republic instead of a territory
                    c.government_type = "federal parliamentary republic"
                else:
                    c.government_type += f" of {str(owner_name)}"
                    # Update ethnic groups to include owner instead of random
                    owner = next(
                        (x.id for x in countries if x.name == owner_name),
                        None)
                    if str(owner) not in c.ethnic_groups:
                        egs = {}
                        for eg in c.ethnic_groups:
                            try:
                                int(eg)
                                if str(owner) not in egs:
                                    egs[str(owner)] = c.ethnic_groups[eg]
                                else:
                                    egs[eg] = c.ethnic_groups[eg]
                            except ValueError:
                                egs[eg] = c.ethnic_groups[eg]
                        c.ethnic_groups = egs
                    # Update forces to include owner name if necessary
                    msf = c.military_and_security_forces
                    c.military_and_security_forces = msf.replace(
                        "[COUNTRY]", owner_name)
                    # Update languages to include owner instead of random
                    if str(owner) not in c.languages:
                        langs = {}
                        for eg in c.languages:
                            try:
                                int(eg)
                                if str(owner) not in langs:
                                    langs[str(owner)] = c.languages[eg]
                                else:
                                    langs[eg] = c.languages[eg]
                            except ValueError:
                                langs[eg] = c.languages[eg]
                        c.languages = langs
            # Apply nationalities to ethnic groups listed by id#
            egs = {}
            for eg in c.ethnic_groups:
                try:
                    egs[next((x.nationality for x in countries
                              if x.id == int(eg)), None)] = c.ethnic_groups[eg]
                except ValueError:
                    egs[eg] = c.ethnic_groups[eg]
            c.ethnic_groups = egs
            # Convert languages listed by id# to country names
            egs = {}
            for eg in c.languages:
                try:
                    eg_name = next(
                        (x.name for x in countries if x.id == int(eg)), None)
                    if eg_name.endswith(('a', 'e', 'i', 'o', 'u')):
                        eg_name += "nese"
                    else:
                        eg_name += 'ish'
                    egs[eg_name] = c.languages[eg]
                except ValueError:
                    egs[eg] = c.languages[eg]
            c.languages = egs
    else:
        # Using country data files instead of random generation
        print("Loading countries...")
        for fn in os.listdir(args.country_data):
            with open(args.country_data + fn, 'r') as f:
                country_data = json.load(f)
                f.close()
            countries.append(context.Country(fs_gen, **country_data))

    # Load or create actor data
    print("Creating threat actors...")
    with open(pkg_resources.resource_filename(__name__,
                                              "assets/stix_vocab.json"),
              encoding='utf-8') as json_file:
        stix_vocab = json.load(json_file)
        json_file.close()
    if config['agents']['randomize_threat_actors'] is True:
        apt_store = fs_gen
        with open(pkg_resources.resource_filename(
                __name__,
                config['agents']['random_variables']['actor_name_1']),
                encoding='utf-8') as f:
            adjectives = [line.rstrip() for line in f]
            f.close()
        with open(pkg_resources.resource_filename(
                __name__,
                config['agents']['random_variables']['actor_name_2']),
                encoding='utf-8') as f:
            nouns = [line.rstrip() for line in f]
            f.close()
        actors = 1
        while actors <= config['agents']['random_variables']['num_agents']:
            agents.create_threatactor(stix_vocab, nouns, adjectives,
                                      countries, apt_store)
            actors += 1
    else:
        # No randomization; use the provided data set
        if config['agents']['non_random_vars']['apt_data'] == "mitre_cti":
            apt_store = fs_real
        else:
            apt_store = FileSystemStore(
                config['agents']['non_random_vars']['apt_data'])

    # Create organizations
    print('Creating organizations...')
    with open(pkg_resources.resource_filename(
            __name__, config['agents']['org_variables']['org_names'])) as f:
        org_names = f.read().splitlines()  # organization name possibilities
        f.close()
    with open(pkg_resources.resource_filename(__name__,
                                              'assets/NIST_assess.json'),
              encoding='utf-8') as json_file:
        assessment = json.load(json_file)
        json_file.close()
    for c in countries:
        orgs = 0
        while orgs < config['agents']['org_variables']["orgs_per_country"]:
            agents.create_organization(stix_vocab, fs_gen, c, org_names,
                                       assessment)
            orgs += 1

    # Run simulation
    print('Running simulation...')
    start = datetime.strptime(config["simulation"]['time_range'][0],
                              '%Y-%m-%d')
    end = datetime.strptime(config["simulation"]['time_range'][1], '%Y-%m-%d')
    td = end - start
    actors = apt_store.query(Filter("type", "=", "intrusion-set"))
    orgs = fs_gen.query([
        Filter("type", "=", "identity"),
        Filter("identity_class", "=", "organization")
    ])
    tools = fs_real.query(Filter('type', '=', 'tool'))
    malwares = fs_real.query(Filter('type', '=', 'malware'))
    for r in range(1, int(config["simulation"]['number_of_rounds']) + 1):
        print(f'\tRound {r}')
        simulator.simulate(
            actors, orgs, tools, malwares, fs_gen, start,
            td.days / (config["simulation"]['number_of_rounds'] * len(actors)))
        start += timedelta(
            days=td.days / config["simulation"]['number_of_rounds'])

    # Create output files
    print('Saving output...')
    # Map
    country_names = {}
    for country in countries:
        country_names[str(country.id)] = country.name
    try:
        map_matrix.plot_map(args.output, **country_names)
    except NameError:
        pass
    for ot in args.output_types:
        print(f'\t{ot}')
        path = args.output + "/" + ot
        if ot == "stix":
            shutil.copytree(temp_path, path)
        else:
            os.mkdir(path)
            os.mkdir(path + '/countries/')
            os.mkdir(path + '/actors/')
            os.mkdir(path + '/reports/')
            os.mkdir(path + '/organizations/')
            for country in countries:
                country.save(path + '/countries/', ot)
            apts = apt_store.query(Filter("type", "=", "intrusion-set"))
            for apt in apts:
                agents.save(apt, path + '/actors/', ot, fs_gen, fs_real)
            events = fs_gen.query(Filter("type", "=", "sighting"))
            for e in events:
                simulator.save(e, apt_store, fs_real, path + '/reports/', ot)
            for org in orgs:
                agents.save_org(org, path + '/organizations/', ot, assessment)
            if ot == "html":
                html_src = pkg_resources.resource_filename(
                    __name__, 'assets/html_templates')
                html_templates = os.listdir(html_src)
                for f in html_templates:
                    shutil.copy(html_src + '/' + f, path)
                f = open(path + '/COUNTRY.html', 'r')
                c_template = f.read()
                f.close()
                for country in countries:
                    f = open(path + '/countries/' + country.name + '.html',
                             'w')
                    f.write(c_template.replace('COUNTRY', country.name))
                    f.close()
                os.remove(path + '/COUNTRY.html')

    shutil.rmtree(temp_path)
    print('Done')

from stix2 import FileSystemSource
from stix2 import Filter

fs = FileSystemSource('./enterprise-attack')

filt = Filter('type', '=', 'attack-pattern')

malwares = fs.query(Filter("type", "=", 'malware'))
[print(m) for m in malwares if m.name == 'Emotet']
# print(malwares[3].name)

# * Query relationships
all_rs = fs.query(Filter("type", "=", 'relationship'))
# print(all_rs[3])
relationships = [
    r for r in all_rs
    if r.source_ref == 'malware--32066e94-3112-48ca-b9eb-ba2b59d2f023'
]
print(relationships)
# print(type(relationships))

# * Query relationships
# all_rs = fs.query(Filter("type", "=", 'relationship'))
# [print(r) for r in all_rs if r.target_ref == 'malware--32066e94-3112-48ca-b9eb-ba2b59d2f023']

# * Query techniques
# techniques = fs.query([filt])
# print(techniques[0].x_mitre_data_sources)
# [print(t) for t in techniques]

# * Query software
# from itertools import chain

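# --- Illustrative follow-up sketch (assumption, not part of the original script) ---
# The relationships collected above are raw Relationship objects. One way to make them
# readable is to resolve the "uses" relationships into the attack-pattern objects they
# point at, so the techniques linked to the Emotet malware object print by name.
techniques_used = []
for r in relationships:
    if r.relationship_type == 'uses' and r.target_ref.startswith('attack-pattern--'):
        techniques_used.append(fs.get(r.target_ref))
for t in techniques_used:
    print(t.name)
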
class abilitymanager:

    def __init__(self, services):
        self.data_svc = services.get('data_svc')
        self.auth_svc = services.get('auth_svc')
        self.log = Logger('abilitymanager')
        self.log.debug('Ability Manager Plugin logging started.')
        self.get_conf()
        self.fs = FileSystemSource(self.ctipath)
        self.stockPath = os.path.join(
            os.path.dirname(os.path.realpath(__file__)),
            '../../stockpile/data/abilities/')

    def get_conf(self):
        confPath = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                '../conf/amconf.yml')
        with open(confPath, 'r') as c:
            conf = yaml.load(c, Loader=yaml.Loader)
        self.ctipath = os.path.expanduser(
            os.path.join(conf['ctipath'], 'enterprise-attack/'))
        self.log.debug('Getting local configuration from: {}'.format(confPath))
        try:
            with open(confPath, 'r') as c:
                conf = yaml.load(c, Loader=yaml.Loader)
            self.ctipath = os.path.expanduser(
                os.path.join(conf['ctipath'], 'enterprise-attack/'))
            if 'payloadPath' in conf.keys():
                self.payloadPath = os.path.expanduser(conf['payloadPath'])
            else:
                self.payloadPath = os.path.join(
                    os.path.dirname(os.path.realpath(__file__)),
                    '../../stockpile/data/payloads/')
            if 'abilityPath' in conf.keys():
                self.abilityPath = os.path.expanduser(conf['abilityPath'])
            else:
                self.abilityPath = os.path.join(
                    os.path.dirname(os.path.realpath(__file__)),
                    '../../stockpile/data/abilities/')
        except Exception as e:
            self.log.debug('Failed to load the ability manager configuration.')
            self.log.error(e)
            pass

    async def get_uuid(self, data):
        try:
            return str(uuid.uuid4())
        except Exception as e:
            self.log.debug('Error getting UUID.')
            self.log.error(e)
            return 'Failure'

    async def getMITRETactics(self):
        tacticList = []
        tactics = {}
        matrix = self.fs.query([
            Filter('type', '=', 'x-mitre-matrix'),
        ])
        for i in range(len(matrix)):
            tactics[matrix[i]['name']] = []
            for tactic_id in matrix[i]['tactic_refs']:
                tactics[matrix[i]['name']].append(
                    self.fs.query([Filter('id', '=', tactic_id)])[0])
        for tactic in tactics['Enterprise ATT&CK']:
            tacticList.append(tactic['name'])
        return tacticList

    async def getMITRETechniques(self, data):
        tactic = data['data'].replace(' ', '-').lower()
        techniques = []
        filter = [
            Filter('type', '=', 'attack-pattern'),
            Filter('kill_chain_phases.phase_name', '=', tactic)
        ]
        results = self.fs.query(filter)
        # This is used in the official documentation. I'm not sure it is needed.
        doubleCheck = [
            t for t in results if {
                'kill_chain_name': 'mitre-attack',
                'phase_name': tactic,
            } in t.kill_chain_phases
        ]
        for entry in doubleCheck:
            techniques.append({
                'tactic': tactic,
                'name': entry['name'],
                'id': entry['external_references'][0]['external_id']
            })
        return techniques

    async def getATTACK(self):
        attackList = []
        tactics = await self.getMITRETactics()
        try:
            for tactic in tactics:
                for technique in await self.getMITRETechniques({'data': tactic}):
                    attackList.append(technique)
            return attackList
        except Exception as e:
            self.log.debug('Failed to parse tactics')
            self.log.error(e)
            return []

    async def explode_stockpile(self):
        self.log.debug('Starting stockpile ability explosion')
        stockAbilities = []
        self.log.debug('Checking stockpile path: {}'.format(self.stockPath))
        if os.path.exists(self.stockPath):
            for root, dirs, files in os.walk(self.stockPath):
                for procFile in files:
                    fullFile = os.path.join(root, procFile)
                    if os.path.splitext(fullFile)[-1].lower() == '.yml':
                        newAbility = {}
                        with open(fullFile, 'r') as yamlFile:
                            try:
                                stockData = yaml.load(yamlFile, Loader=yaml.Loader)
                            except:
                                continue
                        platformData = []
                        rawAbility = stockData[0]
                        rawPlatform = rawAbility['platforms']
                        for keyName in rawPlatform.keys():
                            for test in rawPlatform[keyName]:
                                newTest = {'platform': keyName, 'executor': ''}
                                parserName = ''
                                parserProperty = ''
                                parserScript = ''
                                if 'command' in rawPlatform[keyName][test].keys():
                                    newTest.update({
                                        'command': b64encode(
                                            rawPlatform[keyName][test]['command'].encode('utf-8')).decode('utf-8')
                                    })
                                if 'cleanup' in rawPlatform[keyName][test].keys():
                                    newTest.update({
                                        'cleanup': b64encode(
                                            rawPlatform[keyName][test]['cleanup'].encode('utf-8')).decode('utf-8')
                                    })
                                if 'payload' in rawPlatform[keyName][test].keys():
                                    newTest.update({
                                        'payload': rawPlatform[keyName][test]['payload']
                                    })
                                if 'parser' in rawPlatform[keyName][test].keys():
                                    if rawPlatform[keyName][test]['parser']['name']:
                                        parserName = rawPlatform[keyName][test]['parser']['name']
                                    if rawPlatform[keyName][test]['parser']['property']:
                                        parserProperty = rawPlatform[keyName][test]['parser']['property']
                                    if rawPlatform[keyName][test]['parser']['script']:
                                        parserScript = rawPlatform[keyName][test]['parser']['script']
                                    newTest.update({
                                        'parser': {
                                            'name': parserName,
                                            'property': parserProperty,
                                            'script': b64encode(
                                                parserScript.encode('utf-8')).decode('utf-8')
                                        }
                                    })
                                if len(test.split(',')) > 1:
                                    for subTest in test.split(','):
                                        newTest['executor'] = subTest
                                        platformData.append(newTest.copy())
                                else:
                                    newTest['executor'] = test
                                    platformData.append(newTest)
                        newAbility = {
                            'id': rawAbility['id'],
                            'name': rawAbility['name'],
                            'description': rawAbility['description'],
                            'tactic': rawAbility['tactic'],
                            'technique': rawAbility['technique'],
                            'platforms': platformData,
                            'path': fullFile
                        }
                        stockAbilities.append(newAbility)
        return stockAbilities

    async def delete_ability(self, data):
        pathData = data['data']
        try:
            os.remove(pathData)
            return 'File deleted.'
        except Exception as e:
            self.log.error(e)
            return 'File deletion failed.'
    async def save_ability(self, data):
        abilityData = data.pop('data')
        newYaml = []
        newYamlEntry = {}
        newPlatforms = {}
        osList = []
        # Add the YAML presenter
        yaml.add_representer(cmdStr, cmd_presenter)
        # Get the OS names
        for test in abilityData['platforms']:
            osList.append(test['platform'])
        osList = list(set(osList))
        try:
            for osSys in osList:
                newPlatforms[osSys] = {}
                for test in abilityData['platforms']:
                    if osSys == test['platform']:
                        newTest = {}
                        command = b64decode(test['command'])
                        command = command.decode('utf-8')
                        if command[0] == '\'':
                            command = command.strip('\'')
                        elif command[0] == '\"':
                            command = command.strip('\"')
                        command = command.replace('\\n', '\n')
                        newTest['command'] = cmdStr(command)
                        # Check for payload
                        if 'payload' in test.keys():
                            newTest['payload'] = test['payload']
                        if 'cleanup' in test.keys():
                            cleanup = b64decode(test['cleanup'])
                            cleanup = cleanup.decode('utf-8')
                            if cleanup[0] == '\'':
                                cleanup = cleanup.strip('\'')
                            elif cleanup[0] == '\"':
                                cleanup = cleanup.strip('\"')
                            cleanup = cleanup.replace('\\n', '\n')
                            newTest['cleanup'] = cmdStr(cleanup)
                        if 'parser' in test.keys():
                            newParser = {}
                            newParser['name'] = test['parser']['name']
                            newParser['property'] = test['parser']['property']
                            newParser['script'] = b64decode(
                                test['parser']['script']).decode('utf-8')
                            newTest['parser'] = newParser
                        newPlatforms[osSys][test['executor']] = newTest
                    else:
                        pass
            newYamlEntry['id'] = abilityData['id']
            newYamlEntry['name'] = abilityData['name']
            newYamlEntry['description'] = abilityData['description']
            newYamlEntry['tactic'] = abilityData['tactic']
            newYamlEntry['technique'] = abilityData['technique']
            newYamlEntry['platforms'] = newPlatforms
            newYaml.append(newYamlEntry)
        except Exception as e:
            self.log.error(e)
            return 'Failed to parse ability data.'
        # payloadPath = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../stockpile/data/payloads/')
        # abilityPath = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../stockpile/data/abilities/')
        # You can change the output path for testing or to separate your changes if you like.
        # payloadPath = '/tmp/'
        # abilityPath = '/tmp/'
        # Check and create payloads folder if it does not exist
        try:
            if not os.path.exists(self.payloadPath):
                os.makedirs(self.payloadPath)
        except Exception as e:
            self.log.error(e)
            return False
        # Check and create ability folder if it does not exist
        try:
            if not os.path.exists(
                    os.path.join(self.abilityPath, abilityData['tactic'])):
                os.makedirs(
                    os.path.join(self.abilityPath, abilityData['tactic']))
        except Exception as e:
            self.log.error(e)
            return False
        # Write the YAML file to the correct directory
        try:
            with open(
                    os.path.join(self.abilityPath, abilityData['tactic'],
                                 '{}.yml'.format(abilityData['id'])),
                    'w') as newYAMLFile:
                dump = yaml.dump(newYaml,
                                 default_style=None,
                                 default_flow_style=False,
                                 allow_unicode=True,
                                 encoding=None,
                                 sort_keys=False)
                newYAMLFile.write(dump)
        except Exception as e:
            self.log.error(e)
        # Delete the original file if necessary
        try:
            if (os.path.dirname(abilityData['path']) != os.path.join(
                    self.abilityPath, abilityData['tactic'])) and (
                        os.path.basename(abilityData['path']) ==
                        '{}.yml'.format(abilityData['id'])):
                os.remove(abilityData['path'])
        except Exception as e:
            self.log.error(e)
        return ('Test saved successfully. Click the reload button to reload '
                'the list of available abilities.')
    @template('abilitymanager.html')
    async def landing(self, request):
        try:
            fullTactics = []
            attackList = []
            await self.auth_svc.check_permissions(request)
            abilities = await self.explode_stockpile()
            tactics = set([a['tactic'].lower() for a in abilities])
            fullTactics = await self.getMITRETactics()
            attackList = await self.getATTACK()
            self.log.debug('Landing call completed.')
        except Exception as e:
            self.log.debug('Failed to land.')
            self.log.error(e)
        return {
            'abilities': json.dumps(abilities),
            'tactics': tactics,
            'fulltactics': json.dumps(fullTactics),
            'techniques': json.dumps(attackList)
        }

    async def rest_api(self, request):
        self.log.debug('Starting Rest call.')
        await self.auth_svc.check_permissions(request)
        data = dict(await request.json())
        index = data.pop('index')
        self.log.debug('Index: {}'.format(index))
        options = dict(
            PUT=dict(),
            POST=dict(
                am_ability=lambda d: self.explode_stockpile(**d),
                am_ability_save=lambda d: self.save_ability(data=d),
                am_ability_delete=lambda d: self.delete_ability(data=d),
                am_get_uuid=lambda d: self.get_uuid(data=d),
                am_get_tactics=lambda d: self.getMITRETactics(),
                am_get_techniques=lambda d: self.getMITRETechniques(data=d),
                am_get_attack=lambda d: self.getATTACK()),
            DELETE=dict())
        try:
            output = await options[request.method][index](data)
        except Exception as e:
            self.log.error(e)
        return web.json_response(output)

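# --- Illustrative sketch (assumption, not part of the plugin) ---
# rest_api() above dispatches on (HTTP method, "index" field) through a nested dict of
# lambdas. A stripped-down, self-contained version of that pattern, with a fallback
# handler so the result is always defined even for unknown routes; the route and
# handler names here are hypothetical.
async def dispatch(method: str, index: str, data: dict):
    async def unknown(_):
        return {'error': 'unknown route'}

    async def echo(d: dict):
        # Hypothetical handler used only for this demonstration.
        return d

    options = {
        'POST': {
            'echo': lambda d: echo(d),
        },
        'PUT': {},
        'DELETE': {},
    }
    handler = options.get(method, {}).get(index, unknown)
    return await handler(data)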