class OperationService(OpControl):

    def __init__(self, data_svc, utility_svc, planner):
        super().__init__(data_svc.dao)
        self.data_svc = data_svc
        self.utility_svc = utility_svc
        self.loop = asyncio.get_event_loop()
        self.log = Logger('operation')
        planning_module = import_module(planner)
        self.planner = getattr(planning_module, 'LogicalPlanner')(self.data_svc, self.utility_svc, self.log)

    async def resume(self):
        for op in await self.data_svc.dao.get('core_operation'):
            if not op['finish']:
                self.loop.create_task(self.run(op['id']))

    async def close_operation(self, op_id):
        self.log.debug('Operation complete: %s' % op_id)
        update = dict(finish=datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
        await self.data_svc.dao.update('core_operation', key='id', value=op_id, data=update)

    async def run(self, op_id):
        self.log.debug('Starting operation: %s' % op_id)
        operation = await self.data_svc.explode_operation(dict(id=op_id))
        try:
            for phase in operation[0]['adversary']['phases']:
                self.log.debug('Operation phase %s: started' % phase)
                await self.planner.execute(operation[0], phase)
                self.log.debug('Operation phase %s: completed' % phase)
                await self.data_svc.dao.update('core_operation', key='id', value=op_id, data=dict(phase=phase))
                operation = await self.data_svc.explode_operation(dict(id=op_id))
            if operation[0]['cleanup']:
                await self.cleanup(op_id)
            await self.close_operation(op_id)
        except Exception:
            traceback.print_exc()

    async def cleanup(self, op_id):
        self.log.debug('Running cleanup on: %s' % op_id)
        clean_commands = await self.data_svc.dao.get('core_cleanup', dict(op_id=op_id))
        for c in reversed(clean_commands):
            link = dict(op_id=c['op_id'], host_id=c['agent_id'], ability_id=c['ability_id'],
                        decide=datetime.now(), command=c['command'], score=0, jitter=1)
            await self.data_svc.create_link(link)
        await self.cleanup_operation(op_id)
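
# Illustrative sketch (not part of the original source): the `planner` argument above is a
# dotted module path resolved with importlib, and the resolved module is expected to expose
# a LogicalPlanner class. The module name below is an assumption made for illustration.
from importlib import import_module

def load_planner(module_path='app.planners.sequential', *planner_args):
    # import the module by its dotted path and instantiate its LogicalPlanner
    planning_module = import_module(module_path)
    return getattr(planning_module, 'LogicalPlanner')(*planner_args)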
class CustomShell:

    def __init__(self, services):
        self.log = Logger('terminal')
        self.shell_prompt = 'caldera> '
        self.modes = dict(session=Session(services, self.log),
                          agent=Agent(services, self.log),
                          ability=Ability(services, self.log),
                          adversary=Adversary(services, self.log),
                          operation=Operation(services, self.log))

    async def start_shell(self):
        await asyncio.sleep(1)
        while True:
            try:
                cmd = await ainput(self.shell_prompt)
                self.log.debug(cmd)
                mode = re.search(r'\((.*?)\)', self.shell_prompt)
                if cmd == 'help':
                    await self._print_help()
                elif cmd.startswith('log'):
                    await self._print_logs(int(cmd.split(' ')[1]))
                elif cmd in self.modes.keys():
                    self.shell_prompt = 'caldera (%s)> ' % cmd
                elif mode:
                    await self.modes[mode.group(1)].execute(cmd)
                elif cmd == '':
                    pass
                else:
                    self.log.console('Bad command - are you in the right mode?', 'red')
            except Exception as e:
                self.log.console('Bad command: %s' % e, 'red')

    async def accept_sessions(self, reader, writer):
        address = writer.get_extra_info('peername')
        connection = writer.get_extra_info('socket')
        connection.setblocking(1)
        self.modes['session'].sessions.append(connection)
        self.modes['session'].addresses.append('%s:%s' % (address[0], address[1]))
        self.log.console('New session: %s:%s' % (address[0], address[1]))

    async def _print_help(self):
        print('HELP MENU:')
        print('-> help: show this help menu')
        print('-> logs [n]: view the last n-lines of each log file')
        print('Enter one of the following modes. Once inside, enter "info" to see available commands.')
        for cmd in self.modes:
            print('-> %s' % cmd)

    @staticmethod
    async def _print_logs(n):
        for name in glob.iglob('.logs/*.log', recursive=False):
            with open(name, 'r') as f:
                print('***** %s ***** ' % name)
                lines = f.readlines()
                print(*lines[-n:])
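
# Usage sketch (not part of the original source): one way CustomShell.accept_sessions and
# start_shell could be wired together. accept_sessions already has the (reader, writer)
# signature that asyncio.start_server expects. The host, port, and the contents of
# `services` are assumptions made for illustration only.
import asyncio

async def serve_shell(services, host='0.0.0.0', port=8880):
    shell = CustomShell(services)
    server = await asyncio.start_server(shell.accept_sessions, host, port)
    async with server:
        await shell.start_shell()  # blocks in the interactive prompt loop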
class FileSvc:

    def __init__(self, file_stores):
        self.file_stores = file_stores
        self.log = Logger('file_svc')

    async def render(self, request):
        name = request.headers.get('file')
        group = request.rel_url.query.get('group')
        environment = request.app[aiohttp_jinja2.APP_KEY]
        url_root = '{scheme}://{host}'.format(scheme=request.scheme, host=request.host)
        headers = dict([('CONTENT-DISPOSITION', 'attachment; filename="%s"' % name)])
        rendered = await self._render(name, group, environment, url_root)
        if rendered:
            return web.HTTPOk(body=rendered, headers=headers)
        return web.HTTPNotFound(body=rendered)

    async def download(self, request):
        name = request.headers.get('file')
        file_path, headers = await self._download(name)
        if file_path:
            return web.FileResponse(path=file_path, headers=headers)
        return web.HTTPNotFound(body='File not found')

    async def upload(self, request):
        try:
            reader = await request.multipart()
            field = await reader.next()
            filename = field.filename
            size = 0
            with open(os.path.join('/tmp/', filename), 'wb') as f:
                while True:
                    chunk = await field.read_chunk()
                    if not chunk:
                        break
                    size += len(chunk)
                    f.write(chunk)
            self.log.debug('Uploaded file %s' % filename)
        except Exception as e:
            self.log.debug('Exception uploading file %s' % e)

    @staticmethod
    async def _render(name, group, environment, url_root):
        try:
            t = environment.get_template(name)
            return t.render(url_root=url_root, group=group)
        except Exception:
            return None

    async def _download(self, name):
        for store in self.file_stores:
            for root, dirs, files in os.walk(store):
                if name in files:
                    headers = dict([('CONTENT-DISPOSITION', 'attachment; filename="%s"' % name)])
                    return os.path.join(root, name), headers
        return None, None
class FileSvc:

    def __init__(self, payload_dirs, exfil_dir):
        self.payload_dirs = [p for p in payload_dirs if os.path.isdir(p)]
        self.log = Logger('file_svc')
        self.exfil_dir = exfil_dir
        self.log.debug('Downloaded files will come from %s' % self.payload_dirs)

    async def download(self, request):
        name = request.headers.get('file')
        file_path, headers = await self.find_file(name)
        if file_path:
            return web.FileResponse(path=file_path, headers=headers)
        return web.HTTPNotFound(body='File not found')

    async def find_file(self, name):
        for store in self.payload_dirs:
            for root, dirs, files in os.walk(store):
                if name in files:
                    headers = dict([('CONTENT-DISPOSITION', 'attachment; filename="%s"' % name)])
                    self.log.debug('downloading %s...' % name)
                    return os.path.join(root, name), headers
        return None, None

    async def upload(self, request):
        try:
            reader = await request.multipart()
            exfil_dir = await self._create_unique_exfil_sub_directory()
            while True:
                field = await reader.next()
                if not field:
                    break
                filename = field.filename
                with open(os.path.join(exfil_dir, filename), 'wb') as f:
                    while True:
                        chunk = await field.read_chunk()
                        if not chunk:
                            break
                        f.write(chunk)
                self.log.debug('Uploaded file %s' % filename)
            return web.Response()
        except Exception as e:
            self.log.debug('Exception uploading file %s' % e)

    @staticmethod
    async def write_csv(dictionary, location):
        with open(location, 'w') as csv_file:
            fieldnames = dictionary[0].keys()
            writer = csv.DictWriter(csv_file, fieldnames=fieldnames)
            writer.writeheader()  # emit the header row; the fieldnames were otherwise never written
            for element in dictionary:
                writer.writerow(element)

    """ PRIVATE """

    async def _create_unique_exfil_sub_directory(self):
        dir_name = str(uuid.uuid4())
        path = os.path.join(self.exfil_dir, dir_name)
        os.makedirs(path)
        return path
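
# Usage sketch (not part of the original source): a minimal way this FileSvc could be
# mounted on an aiohttp application. The route paths and directories below are assumptions
# for illustration; the real application wires its own routes elsewhere.
import os
from aiohttp import web

def build_file_app(payload_dirs=('payloads',), exfil_dir='/tmp/exfil'):
    os.makedirs(exfil_dir, exist_ok=True)                       # ensure the exfil root exists
    file_svc = FileSvc(list(payload_dirs), exfil_dir)
    app = web.Application()
    app.router.add_get('/file/download', file_svc.download)    # expects a 'file' header
    app.router.add_post('/file/upload', file_svc.upload)       # multipart form upload
    return app

# web.run_app(build_file_app(), port=8888)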
class OperationService:

    def __init__(self, data_svc, utility_svc, planning_svc, parsing_svc, planner):
        self.data_svc = data_svc
        self.utility_svc = utility_svc
        self.loop = asyncio.get_event_loop()
        self.parsing_svc = parsing_svc
        self.log = Logger('operation_svc')
        planning_module = import_module(planner)
        self.planner = getattr(planning_module, 'LogicalPlanner')(self.data_svc, planning_svc)

    async def resume(self):
        for op in await self.data_svc.dao.get('core_operation'):
            if not op['finish']:
                self.loop.create_task(self.run(op['id']))

    async def close_operation(self, op_id):
        self.log.debug('Operation complete: %s' % op_id)
        update = dict(finish=datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
        await self.data_svc.dao.update('core_operation', key='id', value=op_id, data=update)

    async def run(self, op_id):
        self.log.debug('Starting operation: %s' % op_id)
        operation = await self.data_svc.explode_operation(dict(id=op_id))
        try:
            for phase in operation[0]['adversary']['phases']:
                self.log.debug('Operation %s phase %s: started' % (op_id, phase))
                await self.parsing_svc.parse_facts(operation[0])
                operation = await self.data_svc.explode_operation(dict(id=op_id))
                await self.planner.execute(operation[0], phase)
                self.log.debug('Operation %s phase %s: completed' % (op_id, phase))
                await self.data_svc.dao.update('core_operation', key='id', value=op_id, data=dict(phase=phase))
            await self.close_operation(op_id)
        except Exception:
            traceback.print_exc()
class FileSvc:

    def __init__(self, payload_dirs, exfil_dir):
        self.payload_dirs = payload_dirs
        self.log = Logger('file_svc')
        self.exfil_dir = exfil_dir

    async def download(self, request):
        name = request.headers.get('file')
        file_path, headers = await self._download(name)
        if file_path:
            self.log.debug('downloading %s...' % name)
            return web.FileResponse(path=file_path, headers=headers)
        return web.HTTPNotFound(body='File not found')

    async def upload(self, request):
        try:
            reader = await request.multipart()
            field = await reader.next()
            filename = field.filename
            size = 0
            with open(os.path.join(self.exfil_dir, filename), 'wb') as f:
                while True:
                    chunk = await field.read_chunk()
                    if not chunk:
                        break
                    size += len(chunk)
                    f.write(chunk)
            self.log.debug('Uploaded file %s' % filename)
        except Exception as e:
            self.log.debug('Exception uploading file %s' % e)

    async def _download(self, name):
        for store in self.payload_dirs:
            for root, dirs, files in os.walk(store):
                if name in files:
                    headers = dict([('CONTENT-DISPOSITION', 'attachment; filename="%s"' % name)])
                    return os.path.join(root, name), headers
        return None, None
class AtomicCaldera:

    def __init__(self, services, ac_data_svc):
        self.ac_data_svc = ac_data_svc
        self.data_svc = services.get('data_svc')
        self.auth_svc = services.get('auth_svc')
        self.log = Logger('atomiccaldera')
        self.log.debug('Atomic-Caldera Plugin Logging started.')
        self.get_conf()
        self.fs = FileSystemSource(self.ctipath)

    def get_conf(self):
        confPath = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../conf/artconf.yml')
        try:
            with open(confPath, 'r') as c:
                conf = yaml.load(c, Loader=yaml.Loader)
            self.ctipath = os.path.expanduser(os.path.join(conf['ctipath'], 'enterprise-attack/'))
            self.artpath = os.path.expanduser(conf['artpath'])
            self.log.debug(self.ctipath)
            self.log.debug(self.artpath)
        except:
            pass

    @template('atomiccaldera.html')
    async def landing(self, request):
        await self.auth_svc.check_permissions(request)
        abilities = []
        tactics = []
        variables = []
        try:
            abilities = await self.ac_data_svc.explode_art_abilities()
            for ab in abilities:
                if not ab['tactic'] in tactics:
                    tactics.append(ab['tactic'])
        except Exception as e:
            self.log.error(e)
        try:
            variables = await self.ac_data_svc.explode_art_variables()
        except Exception as e:
            self.log.error(e)
        return {'abilities': json.dumps(abilities),
                'tactics': tactics,
                'variables': json.dumps(variables)}

    async def getMITREPhase(self, attackID):
        filter = [Filter('type', '=', 'attack-pattern'),
                  Filter('external_references.external_id', '=', attackID)]
        result = self.fs.query(filter)
        if result:
            return result[0].kill_chain_phases[0].phase_name
        return 'unknown'

    async def get_atomics(self):
        await self.ac_data_svc.build_db(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../conf/ac.sql'))
        artAbilities = []
        artVars = []
        if os.path.exists(self.artpath):
            for root, dirs, files in os.walk(self.artpath):
                for procFile in files:
                    fullFile = os.path.join(root, procFile)
                    if os.path.splitext(fullFile)[-1].lower() == '.yaml':
                        self.log.debug('Processing {}'.format(fullFile))
                        try:
                            artObj = ARTyaml()
                        except:
                            continue
                        with open(fullFile, 'r') as yamlFile:
                            try:
                                artObj.load(yamlFile)
                            except:
                                continue
                        # Loop through the tests
                        if artObj.atomicTests:
                            for atomic in artObj.atomicTests:
                                for platform in atomic['supported_platforms']:
                                    if platform.lower() in ['windows', 'linux', 'macos']:
                                        name = atomic['name']
                                        description = atomic['description']
                                        if 'command' in atomic['executor'].keys():
                                            command = re.sub(r'x07', r'a', repr(atomic['executor']['command'])).strip()
                                            command = command.encode('utf-8').decode('unicode_escape')
                                            executor = atomic['executor']['name']
                                            if command[0] == '\'':
                                                command = command.strip('\'')
                                            elif command[0] == '\"':
                                                command = command.strip('\"')
                                        else:
                                            command = ''
                                            executor = ''
                                        try:
                                            if command != '':
                                                checkUnique = {'technique': int(artObj.attackTech[1:]),
                                                               'command': b64encode(command.encode('utf-8')).decode('utf-8')}
                                        except Exception as e:
                                            print(e)
                                        # Check to see if the command has been added to the database
                                        if command != '' and not await self.ac_data_svc.check_art_ability(checkUnique):
                                            uuidBool = True
                                            while uuidBool:
                                                ability_id = str(uuid.uuid4())
                                                if not await self.ac_data_svc.check_art_ability({'ability_id': ability_id}):
                                                    uuidBool = False
                                            try:
                                                # Add the new ability to export
                                                artAbilities.append({'ability_id': ability_id,
                                                                     'technique': artObj.attackTech[1:],
                                                                     'name': name,
                                                                     'description': description,
                                                                     'tactic': await self.getMITREPhase(artObj.attackTech),
                                                                     'attack_name': artObj.displayName,
                                                                     'platform': platform,
                                                                     'executor': executor,
                                                                     'command': b64encode(command.encode('utf-8')).decode('utf-8')})
                                            except Exception as e:
                                                print(e)
                                            if 'input_arguments' in atomic.keys():
                                                for argument in atomic['input_arguments'].keys():
                                                    try:
                                                        curVar = re.sub(r'x07', r'a', repr(atomic['input_arguments'][argument]['default'])).strip()
                                                        if curVar[0] == '\'':
                                                            curVar = curVar.strip('\'')
                                                        elif curVar[0] == '\"':
                                                            curVar = curVar.strip('\"')
                                                        curVar = curVar.replace('\\\\', '\\')
                                                        artVars.append({'ability_id': ability_id,
                                                                        'var_name': argument,
                                                                        'value': b64encode(curVar.encode('utf-8')).decode('utf-8')})
                                                    except:
                                                        pass
        else:
            self.log.debug('Paths are not valid')
            return {'abilities': [], 'variables': []}
        self.log.debug('Got to the end.')
        return {'abilities': artAbilities, 'variables': artVars}

    async def export_all_to_stockpile(self, data):
        try:
            abilities = await self.ac_data_svc.explode_art_abilities()
        except Exception as e:
            self.log.error(e)
        try:
            variables = await self.ac_data_svc.explode_art_variables()
        except Exception as e:
            self.log.error(e)
        if await self.export_to_stockpile(abilities, variables):
            return 'Abilities successfully exported.'
        return 'Failed to export abilities.'

    async def export_one_to_stockpile(self, data):
        abilities = []
        variables = []
        ability_id = {'ability_id': data.pop('ability_id')}
        try:
            abilities = await self.ac_data_svc.get_art_ability(ability_id)
        except Exception as e:
            self.log.error(e)
        try:
            variables = await self.ac_data_svc.get_art_variable(ability_id)
        except Exception as e:
            self.log.error(e)
        if await self.export_to_stockpile(abilities, variables):
            return 'Ability successfully exported.'
        return 'Failed to export ability.'
    async def export_to_stockpile(self, abilities, variables):
        # String representer for PyYAML to format the command string
        yaml.add_representer(cmdStr, cmd_presenter)
        for ability in abilities:
            executor = ability['executor']
            platform = ability['platform']
            payload = ''
            # Fix the command formatting
            command = b64decode(ability['command'])
            command = command.decode('utf-8')
            if command[0] == '\'':
                command = command.strip('\'')
            elif command[0] == '\"':
                command = command.strip('\"')
            # Fill in the variables
            for variable in variables:
                if variable['ability_id'] == ability['ability_id']:
                    value = b64decode(variable['value']).decode('utf-8')
                    if value[0] == '\'':
                        value = value.strip('\'')
                    elif value[0] == '\"':
                        value = value.strip('\"')
                    value = value.replace('\\\\', '\\')
                    command = re.sub(r"\#{{{argName}}}".format(argName=str(variable['var_name'])),
                                     value.encode('unicode-escape').decode(),
                                     command)
            # Determine the executor
            if executor.lower() == 'sh' or executor.lower() == 'bash':
                if platform.lower() == 'linux':
                    platform = 'linux'
                elif platform.lower() == 'macos':
                    platform = 'darwin'
            elif executor.lower() == 'command_prompt' or executor.lower() == 'powershell':
                if executor.lower() == 'command_prompt':
                    executor = 'cmd'
                else:
                    executor = 'psh'
            command = command.replace('\\n', '\n')
            # Future additions
            parserName = ''
            parserProperty = ''
            parserScript = ''
            # Build the YAML data
            # newYaml = [{'id': ability['ability_id'],
            #             'name': ability['name'],
            #             'description': ability['description'],
            #             'tactic': ability['tactic'],
            #             'technique': {'attack_id': 'T{}'.format(str(ability['technique'])), 'name': ability['attack_name']},
            #             'platforms': {platform: {executor.lower(): {'command': cmdStr(command),
            #                                                         'payload': payload,
            #                                                         'parser': {'name': parserName,
            #                                                                    'property': parserProperty,
            #                                                                    'script': parserScript}}}}}]
            newYaml = [{'id': ability['ability_id'],
                        'name': ability['name'],
                        'description': ability['description'],
                        'tactic': ability['tactic'],
                        'technique': {'attack_id': 'T{}'.format(str(ability['technique'])),
                                      'name': ability['attack_name']},
                        'platforms': {platform: {executor.lower(): {'command': cmdStr(command),
                                                                    'payload': payload}}}}]
            payloadPath = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../stockpile/data/payloads/')
            abilityPath = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../stockpile/data/abilities/')
            # Check and create payloads folder if it does not exist
            try:
                if not os.path.exists(payloadPath):
                    os.makedirs(payloadPath)
            except Exception as e:
                self.log.error(e)
                return False
            # Write the BAT file if needed (note: payload is always '' here, so this branch is
            # never taken, and batCommand is not defined in this excerpt)
            if payload != '':
                with open(os.path.join(payloadPath, payload), 'w') as payloadFile:
                    payloadFile.write(batCommand)
            # Check and create ability folder if it does not exist
            try:
                if not os.path.exists(os.path.join(abilityPath, ability['tactic'])):
                    os.makedirs(os.path.join(abilityPath, ability['tactic']))
            except Exception as e:
                self.log.error(e)
                return False
            # Write the YAML file to the correct directory
            try:
                with open(os.path.join(abilityPath, ability['tactic'],
                                       '{}.yml'.format(ability['ability_id'])), 'w') as newYAMLFile:
                    dump = yaml.dump(newYaml, default_style=None, default_flow_style=False,
                                     allow_unicode=True, encoding=None, sort_keys=False)
                    newYAMLFile.write(dump)
            except Exception as e:
                self.log.error(e)
                return False
        return True

    async def get_art(self, request):
        self.log.debug('Landed in get_art.')
        try:
            atomics = await self.get_atomics()
        except Exception as e:
            self.log.error(e)
        return atomics

    async def import_art_abilities(self):
        try:
            atomics = await self.get_atomics()
        except Exception as e:
            self.log.error(e)
            return 'Failed to load abilities.'
        for ability in atomics['abilities']:
            await self.ac_data_svc.create_art_ability(ability)
        for variable in atomics['variables']:
            await self.ac_data_svc.create_art_variable(variable)
        return 'Successfully imported new abilities.'

    async def save_art_ability(self, data):
        key = data.pop('key')
        value = data.pop('value')
        updates = data.pop('data')
        if await self.ac_data_svc.update_art_ability(key, value, updates):
            return 'Updated ability: {}'.format(value)
        return 'Update failed for ability: {}'.format(value)

    async def save_art_variables(self, data):
        updates = data.pop('data')
        if await self.ac_data_svc.update_art_variables(updates):
            return 'Updated variables successfully.'
        return 'Updates to variables failed.'

    async def delete_all(self):
        abilities = []
        payloadPath = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../stockpile/data/payloads/')
        abilityPath = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../stockpile/data/abilities/')
        try:
            abilities = await self.ac_data_svc.explode_art_abilities()
        except Exception as e:
            self.log.error(e)
        for ability in abilities:
            if os.path.exists(os.path.join(abilityPath, ability['tactic'], '{}.yml'.format(ability['ability_id']))):
                os.remove(os.path.join(abilityPath, ability['tactic'], '{}.yml'.format(ability['ability_id'])))
            if os.path.exists(os.path.join(payloadPath, '{}.bat'.format(ability['ability_id']))):
                os.remove(os.path.join(payloadPath, '{}.bat'.format(ability['ability_id'])))
        status = await self.ac_data_svc.delete_all()
        await self.ac_data_svc.build_db(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../conf/ac.sql'))
        return status

    async def rest_api(self, request):
        self.log.debug('Starting Rest call.')
        await self.auth_svc.check_permissions(request)
        data = dict(await request.json())
        index = data.pop('index')
        self.log.debug('Index: {}'.format(index))
        options = dict(
            PUT=dict(ac_ability=lambda d: self.import_art_abilities(**d)),
            POST=dict(ac_ability=lambda d: self.ac_data_svc.explode_art_abilities(**d),
                      ac_ability_save=lambda d: self.save_art_ability(data=d),
                      ac_variables_save=lambda d: self.save_art_variables(data=d),
                      ac_export_all=lambda d: self.export_all_to_stockpile(**d),
                      ac_export_one=lambda d: self.export_one_to_stockpile(data=d)),
            DELETE=dict(delete_all=lambda d: self.delete_all(**d)))
        try:
            output = await options[request.method][index](data)
        except Exception as e:
            self.log.debug('Stopped at api call.')
            self.log.error(e)
        return web.json_response(output)
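
# Usage sketch (not part of the original source): rest_api dispatches on the HTTP method
# plus the 'index' key in the JSON body, so a PUT with index 'ac_ability' triggers
# import_art_abilities(). The endpoint path, server address, and the absence of an auth
# cookie below are assumptions made for illustration only.
import asyncio
import aiohttp

async def trigger_import(url='http://127.0.0.1:8888/plugin/atomiccaldera/rest'):
    async with aiohttp.ClientSession() as session:
        async with session.put(url, json=dict(index='ac_ability')) as resp:
            return await resp.json()

# asyncio.get_event_loop().run_until_complete(trigger_import())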
class abilitymanager:

    def __init__(self, services):
        self.data_svc = services.get('data_svc')
        self.auth_svc = services.get('auth_svc')
        self.log = Logger('abilitymanager')
        self.log.debug('Ability Manager Plugin logging started.')
        self.get_conf()
        self.fs = FileSystemSource(self.ctipath)
        self.stockPath = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../stockpile/data/abilities/')

    def get_conf(self):
        confPath = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../conf/amconf.yml')
        with open(confPath, 'r') as c:
            conf = yaml.load(c, Loader=yaml.Loader)
            self.ctipath = os.path.expanduser(os.path.join(conf['ctipath'], 'enterprise-attack/'))
        self.log.debug('Getting local configuration from: {}'.format(confPath))
        try:
            with open(confPath, 'r') as c:
                conf = yaml.load(c, Loader=yaml.Loader)
                self.ctipath = os.path.expanduser(os.path.join(conf['ctipath'], 'enterprise-attack/'))
                if 'payloadPath' in conf.keys():
                    self.payloadPath = os.path.expanduser(conf['payloadPath'])
                else:
                    self.payloadPath = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../stockpile/data/payloads/')
                if 'abilityPath' in conf.keys():
                    self.abilityPath = os.path.expanduser(conf['abilityPath'])
                else:
                    self.abilityPath = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../stockpile/data/abilities/')
        except Exception as e:
            self.log.debug('Failed to read the local configuration.')
            self.log.error(e)

    async def get_uuid(self, data):
        try:
            return str(uuid.uuid4())
        except Exception as e:
            self.log.debug('Error getting UUID.')
            self.log.error(e)
            return 'Failure'

    async def getMITRETactics(self):
        tacticList = []
        tactics = {}
        matrix = self.fs.query([Filter('type', '=', 'x-mitre-matrix')])
        for i in range(len(matrix)):
            tactics[matrix[i]['name']] = []
            for tactic_id in matrix[i]['tactic_refs']:
                tactics[matrix[i]['name']].append(self.fs.query([Filter('id', '=', tactic_id)])[0])
        for tactic in tactics['Enterprise ATT&CK']:
            tacticList.append(tactic['name'])
        return tacticList

    async def getMITRETechniques(self, data):
        tactic = data['data'].replace(' ', '-').lower()
        techniques = []
        filter = [Filter('type', '=', 'attack-pattern'),
                  Filter('kill_chain_phases.phase_name', '=', tactic)]
        results = self.fs.query(filter)
        # This is used in the official documentation. I'm not sure it is needed.
        doubleCheck = [t for t in results
                       if {'kill_chain_name': 'mitre-attack', 'phase_name': tactic} in t.kill_chain_phases]
        for entry in doubleCheck:
            techniques.append({'tactic': tactic,
                               'name': entry['name'],
                               'id': entry['external_references'][0]['external_id']})
        return techniques

    async def getATTACK(self):
        attackList = []
        tactics = await self.getMITRETactics()
        try:
            for tactic in tactics:
                for technique in await self.getMITRETechniques({'data': tactic}):
                    attackList.append(technique)
            return attackList
        except Exception as e:
            self.log.debug('Failed to parse tactics')
            self.log.error(e)
            return []

    async def explode_stockpile(self):
        self.log.debug('Starting stockpile ability explosion')
        stockAbilities = []
        self.log.debug('Checking stockpile path: {}'.format(self.stockPath))
        if os.path.exists(self.stockPath):
            for root, dirs, files in os.walk(self.stockPath):
                for procFile in files:
                    fullFile = os.path.join(root, procFile)
                    if os.path.splitext(fullFile)[-1].lower() == '.yml':
                        newAbility = {}
                        with open(fullFile, 'r') as yamlFile:
                            try:
                                stockData = yaml.load(yamlFile, Loader=yaml.Loader)
                            except:
                                continue
                        platformData = []
                        rawAbility = stockData[0]
                        rawPlatform = rawAbility['platforms']
                        for keyName in rawPlatform.keys():
                            for test in rawPlatform[keyName]:
                                newTest = {'platform': keyName, 'executor': ''}
                                parserName = ''
                                parserProperty = ''
                                parserScript = ''
                                if 'command' in rawPlatform[keyName][test].keys():
                                    newTest.update({'command': b64encode(rawPlatform[keyName][test]['command'].encode('utf-8')).decode('utf-8')})
                                if 'cleanup' in rawPlatform[keyName][test].keys():
                                    newTest.update({'cleanup': b64encode(rawPlatform[keyName][test]['cleanup'].encode('utf-8')).decode('utf-8')})
                                if 'payload' in rawPlatform[keyName][test].keys():
                                    newTest.update({'payload': rawPlatform[keyName][test]['payload']})
                                if 'parser' in rawPlatform[keyName][test].keys():
                                    if rawPlatform[keyName][test]['parser']['name']:
                                        parserName = rawPlatform[keyName][test]['parser']['name']
                                    if rawPlatform[keyName][test]['parser']['property']:
                                        parserProperty = rawPlatform[keyName][test]['parser']['property']
                                    if rawPlatform[keyName][test]['parser']['script']:
                                        parserScript = rawPlatform[keyName][test]['parser']['script']
                                    newTest.update({'parser': {'name': parserName,
                                                               'property': parserProperty,
                                                               'script': b64encode(parserScript.encode('utf-8')).decode('utf-8')}})
                                if len(test.split(',')) > 1:
                                    for subTest in test.split(','):
                                        newTest['executor'] = subTest
                                        platformData.append(newTest.copy())
                                else:
                                    newTest['executor'] = test
                                    platformData.append(newTest)
                        newAbility = {'id': rawAbility['id'],
                                      'name': rawAbility['name'],
                                      'description': rawAbility['description'],
                                      'tactic': rawAbility['tactic'],
                                      'technique': rawAbility['technique'],
                                      'platforms': platformData,
                                      'path': fullFile}
                        stockAbilities.append(newAbility)
        return stockAbilities

    async def delete_ability(self, data):
        pathData = data['data']
        try:
            os.remove(pathData)
            return 'File deleted.'
        except Exception as e:
            self.log.error(e)
            return 'File deletion failed.'
    async def save_ability(self, data):
        abilityData = data.pop('data')
        newYaml = []
        newYamlEntry = {}
        newPlatforms = {}
        osList = []
        # Add the YAML presenter
        yaml.add_representer(cmdStr, cmd_presenter)
        # Get the OS names
        for test in abilityData['platforms']:
            osList.append(test['platform'])
        osList = list(set(osList))
        try:
            for osSys in osList:
                newPlatforms[osSys] = {}
                for test in abilityData['platforms']:
                    if osSys == test['platform']:
                        newTest = {}
                        command = b64decode(test['command'])
                        command = command.decode('utf-8')
                        if command[0] == '\'':
                            command = command.strip('\'')
                        elif command[0] == '\"':
                            command = command.strip('\"')
                        command = command.replace('\\n', '\n')
                        newTest['command'] = cmdStr(command)
                        # Check for payload
                        if 'payload' in test.keys():
                            newTest['payload'] = test['payload']
                        if 'cleanup' in test.keys():
                            cleanup = b64decode(test['cleanup'])
                            cleanup = cleanup.decode('utf-8')
                            if cleanup[0] == '\'':
                                cleanup = cleanup.strip('\'')
                            elif cleanup[0] == '\"':
                                cleanup = cleanup.strip('\"')
                            cleanup = cleanup.replace('\\n', '\n')
                            newTest['cleanup'] = cmdStr(cleanup)
                        if 'parser' in test.keys():
                            newParser = {}
                            newParser['name'] = test['parser']['name']
                            newParser['property'] = test['parser']['property']
                            newParser['script'] = b64decode(test['parser']['script']).decode('utf-8')
                            newTest['parser'] = newParser
                        newPlatforms[osSys][test['executor']] = newTest
            newYamlEntry['id'] = abilityData['id']
            newYamlEntry['name'] = abilityData['name']
            newYamlEntry['description'] = abilityData['description']
            newYamlEntry['tactic'] = abilityData['tactic']
            newYamlEntry['technique'] = abilityData['technique']
            newYamlEntry['platforms'] = newPlatforms
            newYaml.append(newYamlEntry)
        except Exception as e:
            self.log.error(e)
            return 'Failed to parse ability data.'
        # payloadPath = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../stockpile/data/payloads/')
        # abilityPath = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../stockpile/data/abilities/')
        # You can change the output path for testing or to separate your changes if you like:
        # payloadPath = '/tmp/'
        # abilityPath = '/tmp/'
        # Check and create payloads folder if it does not exist
        try:
            if not os.path.exists(self.payloadPath):
                os.makedirs(self.payloadPath)
        except Exception as e:
            self.log.error(e)
            return False
        # Check and create ability folder if it does not exist
        try:
            if not os.path.exists(os.path.join(self.abilityPath, abilityData['tactic'])):
                os.makedirs(os.path.join(self.abilityPath, abilityData['tactic']))
        except Exception as e:
            self.log.error(e)
            return False
        # Write the YAML file to the correct directory
        try:
            with open(os.path.join(self.abilityPath, abilityData['tactic'],
                                   '{}.yml'.format(abilityData['id'])), 'w') as newYAMLFile:
                dump = yaml.dump(newYaml, default_style=None, default_flow_style=False,
                                 allow_unicode=True, encoding=None, sort_keys=False)
                newYAMLFile.write(dump)
        except Exception as e:
            self.log.error(e)
        # Delete the original file if necessary
        try:
            if (os.path.dirname(abilityData['path']) != os.path.join(self.abilityPath, abilityData['tactic'])) and \
                    (os.path.basename(abilityData['path']) == '{}.yml'.format(abilityData['id'])):
                os.remove(abilityData['path'])
        except Exception as e:
            self.log.error(e)
        return 'Test saved successfully. Click the reload button to reload the list of available abilities.'
    @template('abilitymanager.html')
    async def landing(self, request):
        try:
            fullTactics = []
            attackList = []
            await self.auth_svc.check_permissions(request)
            abilities = await self.explode_stockpile()
            tactics = set([a['tactic'].lower() for a in abilities])
            fullTactics = await self.getMITRETactics()
            attackList = await self.getATTACK()
            self.log.debug('Landing call completed.')
        except Exception as e:
            self.log.debug('Failed to land.')
            self.log.error(e)
        return {'abilities': json.dumps(abilities),
                'tactics': tactics,
                'fulltactics': json.dumps(fullTactics),
                'techniques': json.dumps(attackList)}

    async def rest_api(self, request):
        self.log.debug('Starting Rest call.')
        await self.auth_svc.check_permissions(request)
        data = dict(await request.json())
        index = data.pop('index')
        self.log.debug('Index: {}'.format(index))
        options = dict(
            PUT=dict(),
            POST=dict(am_ability=lambda d: self.explode_stockpile(**d),
                      am_ability_save=lambda d: self.save_ability(data=d),
                      am_ability_delete=lambda d: self.delete_ability(data=d),
                      am_get_uuid=lambda d: self.get_uuid(data=d),
                      am_get_tactics=lambda d: self.getMITRETactics(),
                      am_get_techniques=lambda d: self.getMITRETechniques(data=d),
                      am_get_attack=lambda d: self.getATTACK()),
            DELETE=dict())
        try:
            output = await options[request.method][index](data)
        except Exception as e:
            self.log.error(e)
        return web.json_response(output)
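
# Illustrative sketch (not part of the original source): cmdStr and cmd_presenter are defined
# elsewhere in the plugin and are not shown in this excerpt. A typical pair for emitting
# multi-line command strings as YAML literal blocks looks like this; treat it as an
# assumption about their intent, not their actual implementation.
import yaml

class cmdStr(str):
    """Marker type so multi-line command strings get their own YAML style."""

def cmd_presenter(dumper, data):
    # Render the command as a literal block scalar (|) so embedded newlines survive the dump
    return dumper.represent_scalar('tag:yaml.org,2002:str', data, style='|')

# yaml.add_representer(cmdStr, cmd_presenter)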