def __init__(self, data_svc, utility_svc, planning_svc, parsing_svc):
    self.data_svc = data_svc
    self.utility_svc = utility_svc
    self.loop = asyncio.get_event_loop()
    self.parsing_svc = parsing_svc
    self.planning_svc = planning_svc
    self.log = Logger('operation_svc')
class OperationService(OpControl):

    def __init__(self, data_svc, utility_svc, planner):
        super().__init__(data_svc.dao)
        self.data_svc = data_svc
        self.utility_svc = utility_svc
        self.loop = asyncio.get_event_loop()
        self.log = Logger('operation')
        planning_module = import_module(planner)
        self.planner = getattr(planning_module, 'LogicalPlanner')(self.data_svc, self.utility_svc, self.log)

    async def resume(self):
        for op in await self.data_svc.dao.get('core_operation'):
            if not op['finish']:
                self.loop.create_task(self.run(op['id']))

    async def close_operation(self, op_id):
        self.log.debug('Operation complete: %s' % op_id)
        update = dict(finish=datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
        await self.data_svc.dao.update('core_operation', key='id', value=op_id, data=update)

    async def run(self, op_id):
        self.log.debug('Starting operation: %s' % op_id)
        operation = await self.data_svc.explode_operation(dict(id=op_id))
        try:
            for phase in operation[0]['adversary']['phases']:
                self.log.debug('Operation phase %s: started' % phase)
                await self.planner.execute(operation[0], phase)
                self.log.debug('Operation phase %s: completed' % phase)
                await self.data_svc.dao.update('core_operation', key='id', value=op_id, data=dict(phase=phase))
                operation = await self.data_svc.explode_operation(dict(id=op_id))
            if operation[0]['cleanup']:
                await self.cleanup(op_id)
            await self.close_operation(op_id)
        except Exception:
            traceback.print_exc()

    async def cleanup(self, op_id):
        self.log.debug('Running cleanup on: %s' % op_id)
        clean_commands = await self.data_svc.dao.get('core_cleanup', dict(op_id=op_id))
        for c in reversed(clean_commands):
            link = dict(op_id=c['op_id'], host_id=c['agent_id'], ability_id=c['ability_id'],
                        decide=datetime.now(), command=c['command'], score=0, jitter=1)
            await self.data_svc.create_link(link)
        await self.cleanup_operation(op_id)
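The constructor above resolves its planner dynamically from a configured dotted module path, which is why the planner can be swapped without code changes. A minimal sketch of that pattern, assuming a hypothetical module 'plugins.example.planner' that exposes a LogicalPlanner class:

from importlib import import_module

def load_planner(module_path, data_svc, utility_svc, log):
    # import_module resolves the dotted path at runtime, so the planner
    # implementation is chosen by configuration, not by an import statement
    planning_module = import_module(module_path)
    return getattr(planning_module, 'LogicalPlanner')(data_svc, utility_svc, log)

# e.g. planner = load_planner('plugins.example.planner', data_svc, utility_svc, log)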
def __init__(self, services):
    self.log = Logger('terminal')
    self.shell_prompt = 'caldera> '
    self.modes = dict(session=Session(services, self.log), agent=Agent(services, self.log),
                      ability=Ability(services, self.log), adversary=Adversary(services, self.log),
                      operation=Operation(services, self.log))
def __init__(self, services, ac_data_svc):
    self.ac_data_svc = ac_data_svc
    self.data_svc = services.get('data_svc')
    self.auth_svc = services.get('auth_svc')
    self.log = Logger('atomiccaldera')
    self.log.debug('Atomic-Caldera Plugin Logging started.')
    self.get_conf()
    self.fs = FileSystemSource(self.ctipath)
def __init__(self, data_svc, utility_svc, planning_svc, planner):
    self.data_svc = data_svc
    self.utility_svc = utility_svc
    self.loop = asyncio.get_event_loop()
    self.log = Logger('operation_svc')
    planning_module = import_module(planner)
    self.planner = getattr(planning_module, 'LogicalPlanner')(self.data_svc, planning_svc)
class FileSvc:

    def __init__(self, file_stores):
        self.file_stores = file_stores
        self.log = Logger('file_svc')

    async def render(self, request):
        name = request.headers.get('file')
        group = request.rel_url.query.get('group')
        environment = request.app[aiohttp_jinja2.APP_KEY]
        url_root = '{scheme}://{host}'.format(scheme=request.scheme, host=request.host)
        headers = dict([('CONTENT-DISPOSITION', 'attachment; filename="%s"' % name)])
        rendered = await self._render(name, group, environment, url_root)
        if rendered:
            return web.HTTPOk(body=rendered, headers=headers)
        return web.HTTPNotFound(body=rendered)

    async def download(self, request):
        name = request.headers.get('file')
        file_path, headers = await self._download(name)
        if file_path:
            return web.FileResponse(path=file_path, headers=headers)
        return web.HTTPNotFound(body='File not found')

    async def upload(self, request):
        try:
            reader = await request.multipart()
            field = await reader.next()
            filename = field.filename
            size = 0
            with open(os.path.join('/tmp/', filename), 'wb') as f:
                while True:
                    chunk = await field.read_chunk()
                    if not chunk:
                        break
                    size += len(chunk)
                    f.write(chunk)
            self.log.debug('Uploaded file %s' % filename)
        except Exception as e:
            self.log.debug('Exception uploading file %s' % e)

    @staticmethod
    async def _render(name, group, environment, url_root):
        try:
            t = environment.get_template(name)
            return t.render(url_root=url_root, group=group)
        except Exception:
            return None

    async def _download(self, name):
        for store in self.file_stores:
            for root, dirs, files in os.walk(store):
                if name in files:
                    headers = dict([('CONTENT-DISPOSITION', 'attachment; filename="%s"' % name)])
                    return os.path.join(root, name), headers
        return None, None
class Parser(BaseParser):

    def __init__(self, parser_info):
        super().__init__(parser_info)
        self.mappers = parser_info['mappers']
        self.used_facts = parser_info['used_facts']
        self.log = Logger('parsing_svc')

    def parse(self, blob):
        relationships = []
        json_output = self._load_json(blob)
        if json_output is not None:
            for mp in self.mappers:
                if 'json_key' not in dir(mp):
                    self.log.warning('JSON Parser not given a json_key, not parsing')
                    continue
                json_type = mp.json_type if 'json_type' in dir(mp) else None
                for match in self._get_vals_from_json(json_output, mp.json_key, json_type):
                    source = self.set_value(mp.source, match, self.used_facts)
                    target = self.set_value(mp.target, match, self.used_facts)
                    relationships.append(
                        Relationship(source=(mp.source, source),
                                     edge=mp.edge,
                                     target=(mp.target, target)))
        return relationships

    def _load_json(self, blob):
        try:
            return json.loads(blob)
        except Exception:
            self.log.warning('Output not JSON, use a different parser')
            return None

    def _get_vals_from_json(self, json_output, key, json_type):
        """
        Get all values for a specified key recursively from JSON output.

        :param json_output:
        :param key:
        :param json_type: a list of types to filter matches by. Example options are 'str', 'int', 'list', 'dict'
        :return: generator that yields matched values
        """
        if isinstance(json_output, list):
            for item in json_output:
                for res in self._get_vals_from_json(item, key, json_type):
                    yield res
        elif isinstance(json_output, dict):
            for k, v in json_output.items():
                if k == key and (json_type is None or v.__class__.__name__ in json_type):
                    yield json.dumps(v) if v.__class__.__name__ in ['list', 'dict'] else v
                if isinstance(v, (list, dict)):
                    for res in self._get_vals_from_json(v, key, json_type):
                        yield res
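A quick illustration of the recursive key search above; the standalone helper and the sample blob are hypothetical stand-ins for this sketch, not part of the plugin itself:

import json

def get_vals_from_json(json_output, key, json_type=None):
    # standalone copy of the recursive walk in _get_vals_from_json
    if isinstance(json_output, list):
        for item in json_output:
            yield from get_vals_from_json(item, key, json_type)
    elif isinstance(json_output, dict):
        for k, v in json_output.items():
            if k == key and (json_type is None or v.__class__.__name__ in json_type):
                yield json.dumps(v) if isinstance(v, (list, dict)) else v
            if isinstance(v, (list, dict)):
                yield from get_vals_from_json(v, key, json_type)

blob = '{"host": {"ip": "10.0.0.2", "ports": [{"ip": "10.0.0.3"}]}}'
print(list(get_vals_from_json(json.loads(blob), 'ip')))  # ['10.0.0.2', '10.0.0.3']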
def __init__(self, services):
    self.data_svc = services.get('data_svc')
    self.auth_svc = services.get('auth_svc')
    self.log = Logger('abilitymanager')
    self.log.debug('Ability Manager Plugin logging started.')
    self.get_conf()
    self.fs = FileSystemSource(self.ctipath)
    self.stockPath = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                  '../../stockpile/data/abilities/')
class OperationService:

    def __init__(self, data_svc, utility_svc, planning_svc, parsing_svc, planner):
        self.data_svc = data_svc
        self.utility_svc = utility_svc
        self.loop = asyncio.get_event_loop()
        self.parsing_svc = parsing_svc
        self.log = Logger('operation_svc')
        planning_module = import_module(planner)
        self.planner = getattr(planning_module, 'LogicalPlanner')(self.data_svc, planning_svc)

    async def resume(self):
        for op in await self.data_svc.dao.get('core_operation'):
            if not op['finish']:
                self.loop.create_task(self.run(op['id']))

    async def close_operation(self, op_id):
        self.log.debug('Operation complete: %s' % op_id)
        update = dict(finish=datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
        await self.data_svc.dao.update('core_operation', key='id', value=op_id, data=update)

    async def run(self, op_id):
        self.log.debug('Starting operation: %s' % op_id)
        operation = await self.data_svc.explode_operation(dict(id=op_id))
        try:
            for phase in operation[0]['adversary']['phases']:
                self.log.debug('Operation %s phase %s: started' % (op_id, phase))
                await self.parsing_svc.parse_facts(operation[0])
                operation = await self.data_svc.explode_operation(dict(id=op_id))
                await self.planner.execute(operation[0], phase)
                self.log.debug('Operation %s phase %s: completed' % (op_id, phase))
                await self.data_svc.dao.update('core_operation', key='id', value=op_id, data=dict(phase=phase))
            await self.close_operation(op_id)
        except Exception:
            traceback.print_exc()
class FileSvc:

    def __init__(self, payload_dirs, exfil_dir):
        self.payload_dirs = [p for p in payload_dirs if os.path.isdir(p)]
        self.log = Logger('file_svc')
        self.exfil_dir = exfil_dir
        self.log.debug('Downloaded files will come from %s' % self.payload_dirs)

    async def download(self, request):
        name = request.headers.get('file')
        file_path, headers = await self.find_file(name)
        if file_path:
            return web.FileResponse(path=file_path, headers=headers)
        return web.HTTPNotFound(body='File not found')

    async def find_file(self, name):
        for store in self.payload_dirs:
            for root, dirs, files in os.walk(store):
                if name in files:
                    headers = dict([('CONTENT-DISPOSITION', 'attachment; filename="%s"' % name)])
                    self.log.debug('downloading %s...' % name)
                    return os.path.join(root, name), headers
        return None, None

    async def upload(self, request):
        try:
            reader = await request.multipart()
            exfil_dir = await self._create_unique_exfil_sub_directory()
            while True:
                field = await reader.next()
                if not field:
                    break
                filename = field.filename
                with open(os.path.join(exfil_dir, filename), 'wb') as f:
                    while True:
                        chunk = await field.read_chunk()
                        if not chunk:
                            break
                        f.write(chunk)
                self.log.debug('Uploaded file %s' % filename)
            return web.Response()
        except Exception as e:
            self.log.debug('Exception uploading file %s' % e)

    @staticmethod
    async def write_csv(dictionary, location):
        with open(location, 'w') as csv_file:
            fieldnames = dictionary[0].keys()
            writer = csv.DictWriter(csv_file, fieldnames=fieldnames)
            for element in dictionary:
                writer.writerow(element)

    """ PRIVATE """

    async def _create_unique_exfil_sub_directory(self):
        dir_name = str(uuid.uuid4())
        path = os.path.join(self.exfil_dir, dir_name)
        os.makedirs(path)
        return path
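A hedged sketch of wiring this FileSvc variant into an aiohttp application; the route paths and directory names below are illustrative assumptions, not the project's own registration code:

from aiohttp import web

file_svc = FileSvc(payload_dirs=['payloads'], exfil_dir='/tmp/exfil')  # hypothetical dirs
app = web.Application()
app.router.add_get('/file/download', file_svc.download)   # file name passed via 'file' header
app.router.add_post('/file/upload', file_svc.upload)      # multipart body, one field per file
# web.run_app(app, port=8888)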
class CustomShell:

    def __init__(self, services):
        self.log = Logger('terminal')
        self.shell_prompt = 'caldera> '
        self.modes = dict(session=Session(services, self.log), agent=Agent(services, self.log),
                          ability=Ability(services, self.log), adversary=Adversary(services, self.log),
                          operation=Operation(services, self.log))

    async def start_shell(self):
        await asyncio.sleep(1)
        while True:
            try:
                cmd = await ainput(self.shell_prompt)
                self.log.debug(cmd)
                mode = re.search(r'\((.*?)\)', self.shell_prompt)
                if cmd == 'help':
                    await self._print_help()
                elif cmd.startswith('log'):
                    await self._print_logs(int(cmd.split(' ')[1]))
                elif cmd in self.modes.keys():
                    self.shell_prompt = 'caldera (%s)> ' % cmd
                elif mode:
                    await self.modes[mode.group(1)].execute(cmd)
                elif cmd == '':
                    pass
                else:
                    self.log.console('Bad command - are you in the right mode?', 'red')
            except Exception as e:
                self.log.console('Bad command: %s' % e, 'red')

    async def accept_sessions(self, reader, writer):
        address = writer.get_extra_info('peername')
        connection = writer.get_extra_info('socket')
        connection.setblocking(1)
        self.modes['session'].sessions.append(connection)
        self.modes['session'].addresses.append('%s:%s' % (address[0], address[1]))
        self.log.console('New session: %s:%s' % (address[0], address[1]))

    async def _print_help(self):
        print('HELP MENU:')
        print('-> help: show this help menu')
        print('-> logs [n]: view the last n-lines of each log file')
        print('Enter one of the following modes. Once inside, enter "info" to see available commands.')
        for cmd, v in self.modes.items():
            print('-> %s' % cmd)

    @staticmethod
    async def _print_logs(n):
        for name in glob.iglob('.logs/*.log', recursive=False):
            with open(name, 'r') as f:
                print('***** %s ***** ' % name)
                lines = f.readlines()
                print(*lines[-n:])
class Parser(BaseParser):

    def __init__(self, parser_info):
        self.mappers = parser_info['mappers']
        self.used_facts = parser_info['used_facts']
        self.log = Logger('Parser')

    def gd_parser(self, text):
        results = dict()
        for block in text.split('\r\n\r\n'):
            if block:
                hostname = None
                pvi = None
                for line in block.splitlines():
                    hostname = self._parse_hostname(line, hostname)
                    pvi = self._parse_version(line, pvi)
                    if line.startswith('Exception') and '(0x80005000)' in line:
                        # Domain communication error
                        self.log.warning('Get-Domain parser: Domain Issue 0x80005000: Verify that the rat is running '
                                         'under a Domain Account, and that the Domain Controller can be reached.')
                if hostname and pvi:
                    results[hostname] = dict(parsed_version_info=pvi)
        if not results:
            self.log.warning('Get-Domain Parser: Returned data contained no parseable information!')
        return results

    def parse(self, blob):
        relationships = []
        try:
            parse_data = self.gd_parser(blob)
            for match in parse_data:
                for mp in self.mappers:
                    relationships.append(
                        Relationship(source=(mp.source, match),
                                     edge=mp.edge,
                                     target=(mp.target, None)))
        except Exception as error:
            self.log.warning('Get-Domain parser encountered an error - {}. Continuing...'.format(error))
        return relationships

    ''' PRIVATE FUNCTION '''

    @staticmethod
    def _parse_hostname(line, current):
        if line.startswith('dnshostname'):
            field_name, value = [c.strip() for c in line.split(':')]
            return value.lower()
        return current

    @staticmethod
    def _parse_version(line, current):
        if line.startswith('operatingsystemversion'):
            value = line.split(':')[-1].strip()  # Looks like: '10.0 (14393)'
            os_version, build_number = value.split(' ')
            build_number = build_number[1:-1]  # remove parens
            major_version, minor_version = os_version.split('.')
            return dict(os_name='windows', major_version=major_version,
                        minor_version=minor_version, build_number=build_number)
        return current
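For orientation, a minimal trace of what _parse_version extracts from one output line; the sample line is invented for illustration:

line = 'operatingsystemversion : 10.0 (14393)'
value = line.split(':')[-1].strip()           # '10.0 (14393)'
os_version, build_number = value.split(' ')   # '10.0', '(14393)'
build_number = build_number[1:-1]             # strip parens -> '14393'
major_version, minor_version = os_version.split('.')
print(major_version, minor_version, build_number)  # 10 0 14393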
class FileSvc:

    def __init__(self, payload_dirs, exfil_dir):
        self.payload_dirs = payload_dirs
        self.log = Logger('file_svc')
        self.exfil_dir = exfil_dir

    async def download(self, request):
        name = request.headers.get('file')
        file_path, headers = await self._download(name)
        if file_path:
            self.log.debug('downloading %s...' % name)
            return web.FileResponse(path=file_path, headers=headers)
        return web.HTTPNotFound(body='File not found')

    async def upload(self, request):
        try:
            reader = await request.multipart()
            field = await reader.next()
            filename = field.filename
            size = 0
            with open(os.path.join(self.exfil_dir, filename), 'wb') as f:
                while True:
                    chunk = await field.read_chunk()
                    if not chunk:
                        break
                    size += len(chunk)
                    f.write(chunk)
            self.log.debug('Uploaded file %s' % filename)
        except Exception as e:
            self.log.debug('Exception uploading file %s' % e)

    async def _download(self, name):
        for store in self.payload_dirs:
            for root, dirs, files in os.walk(store):
                if name in files:
                    headers = dict([('CONTENT-DISPOSITION', 'attachment; filename="%s"' % name)])
                    return os.path.join(root, name), headers
        return None, None
class AtomicCaldera:

    def __init__(self, services, ac_data_svc):
        self.ac_data_svc = ac_data_svc
        self.data_svc = services.get('data_svc')
        self.auth_svc = services.get('auth_svc')
        self.log = Logger('atomiccaldera')
        self.log.debug('Atomic-Caldera Plugin Logging started.')
        self.get_conf()
        self.fs = FileSystemSource(self.ctipath)

    def get_conf(self):
        confPath = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../conf/artconf.yml')
        try:
            with open(confPath, 'r') as c:
                conf = yaml.load(c, Loader=yaml.Loader)
            self.ctipath = os.path.expanduser(os.path.join(conf['ctipath'], 'enterprise-attack/'))
            self.artpath = os.path.expanduser(conf['artpath'])
            self.log.debug(self.ctipath)
            self.log.debug(self.artpath)
        except Exception:
            pass

    @template('atomiccaldera.html')
    async def landing(self, request):
        await self.auth_svc.check_permissions(request)
        abilities = []
        tactics = []
        variables = []
        try:
            abilities = await self.ac_data_svc.explode_art_abilities()
            for ab in abilities:
                if not ab['tactic'] in tactics:
                    tactics.append(ab['tactic'])
        except Exception as e:
            self.log.error(e)
        try:
            variables = await self.ac_data_svc.explode_art_variables()
        except Exception as e:
            self.log.error(e)
        return {'abilities': json.dumps(abilities), 'tactics': tactics, 'variables': json.dumps(variables)}

    async def getMITREPhase(self, attackID):
        filter = [Filter('type', '=', 'attack-pattern'),
                  Filter('external_references.external_id', '=', attackID)]
        result = self.fs.query(filter)
        if result:
            return result[0].kill_chain_phases[0].phase_name
        else:
            return 'unknown'

    async def get_atomics(self):
        await self.ac_data_svc.build_db(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../conf/ac.sql'))
        artAbilities = []
        artVars = []
        if os.path.exists(self.artpath):
            for root, dirs, files in os.walk(self.artpath):
                for procFile in files:
                    fullFile = os.path.join(root, procFile)
                    if os.path.splitext(fullFile)[-1].lower() == '.yaml':
                        self.log.debug('Processing {}'.format(fullFile))
                        try:
                            artObj = ARTyaml()
                        except Exception:
                            continue
                        with open(fullFile, 'r') as yamlFile:
                            try:
                                artObj.load(yamlFile)
                            except Exception:
                                continue
                        # Loop through the tests
                        if artObj.atomicTests:
                            for atomic in artObj.atomicTests:
                                for platform in atomic['supported_platforms']:
                                    if platform.lower() in ['windows', 'linux', 'macos']:
                                        name = atomic['name']
                                        description = atomic['description']
                                        if 'command' in atomic['executor'].keys():
                                            command = re.sub(r'x07', r'a',
                                                             repr(atomic['executor']['command'])).strip()
                                            command = command.encode('utf-8').decode('unicode_escape')
                                            executor = atomic['executor']['name']
                                            if command[0] == '\'':
                                                command = command.strip('\'')
                                            elif command[0] == '\"':
                                                command = command.strip('\"')
                                        else:
                                            command = ''
                                            executor = ''
                                        try:
                                            if command != '':
                                                checkUnique = {'technique': int(artObj.attackTech[1:]),
                                                               'command': b64encode(
                                                                   command.encode('utf-8')).decode('utf-8')}
                                        except Exception as e:
                                            print(e)
                                        # Check to see if the command has been added to the database
                                        if (command != '' and
                                                not await self.ac_data_svc.check_art_ability(checkUnique)):
                                            uuidBool = True
                                            while uuidBool:
                                                ability_id = str(uuid.uuid4())
                                                if not await self.ac_data_svc.check_art_ability(
                                                        {'ability_id': ability_id}):
                                                    uuidBool = False
                                            try:
                                                # Add the new ability to export
                                                artAbilities.append({
                                                    'ability_id': ability_id,
                                                    'technique': artObj.attackTech[1:],
                                                    'name': name,
                                                    'description': description,
                                                    'tactic': await self.getMITREPhase(artObj.attackTech),
                                                    'attack_name': artObj.displayName,
                                                    'platform': platform,
                                                    'executor': executor,
                                                    'command': b64encode(command.encode('utf-8')).decode('utf-8')})
                                            except Exception as e:
                                                print(e)
                                            if 'input_arguments' in atomic.keys():
                                                for argument in atomic['input_arguments'].keys():
                                                    try:
                                                        curVar = re.sub(
                                                            r'x07', r'a',
                                                            repr(atomic['input_arguments'][argument]['default'])).strip()
                                                        if curVar[0] == '\'':
                                                            curVar = curVar.strip('\'')
                                                        elif curVar[0] == '\"':
                                                            curVar = curVar.strip('\"')
                                                        curVar = curVar.replace('\\\\', '\\')
                                                        artVars.append({
                                                            'ability_id': ability_id,
                                                            'var_name': argument,
                                                            'value': b64encode(curVar.encode('utf-8')).decode('utf-8')})
                                                    except Exception:
                                                        pass
        else:
            self.log.debug('Paths are not valid')
            return {'abilities': [], 'variables': []}
        self.log.debug('Got to the end.')
        return {'abilities': artAbilities, 'variables': artVars}

    async def export_all_to_stockpile(self, data):
        try:
            abilities = await self.ac_data_svc.explode_art_abilities()
        except Exception as e:
            self.log.error(e)
        try:
            variables = await self.ac_data_svc.explode_art_variables()
        except Exception as e:
            self.log.error(e)
        if await self.export_to_stockpile(abilities, variables):
            return 'Abilities successfully exported.'
        else:
            return 'Failed to export abilities.'

    async def export_one_to_stockpile(self, data):
        abilities = []
        variables = []
        ability_id = {'ability_id': data.pop('ability_id')}
        try:
            abilities = await self.ac_data_svc.get_art_ability(ability_id)
        except Exception as e:
            self.log.error(e)
        try:
            variables = await self.ac_data_svc.get_art_variable(ability_id)
        except Exception as e:
            self.log.error(e)
        if await self.export_to_stockpile(abilities, variables):
            return 'Ability successfully exported.'
        else:
            return 'Failed to export ability.'
    async def export_to_stockpile(self, abilities, variables):
        # String representer for PyYAML to format the command string
        yaml.add_representer(cmdStr, cmd_presenter)
        for ability in abilities:
            executor = ability['executor']
            platform = ability['platform']
            payload = ''
            # Fix the command formatting
            command = b64decode(ability['command'])
            command = command.decode('utf-8')
            if command[0] == '\'':
                command = command.strip('\'')
            elif command[0] == '\"':
                command = command.strip('\"')
            # Fill in the variables
            for variable in variables:
                if variable['ability_id'] == ability['ability_id']:
                    value = b64decode(variable['value']).decode('utf-8')
                    if value[0] == '\'':
                        value = value.strip('\'')
                    elif value[0] == '\"':
                        value = value.strip('\"')
                    value = value.replace('\\\\', '\\')
                    command = re.sub(r"\#{{{argName}}}".format(argName=str(variable['var_name'])),
                                     value.encode('unicode-escape').decode(),
                                     command)
            # Determine the executor
            if executor.lower() == 'sh' or executor.lower() == 'bash':
                if platform.lower() == 'linux':
                    platform = 'linux'
                elif platform.lower() == 'macos':
                    platform = 'darwin'
            elif executor.lower() == 'command_prompt' or executor.lower() == 'powershell':
                if executor.lower() == 'command_prompt':
                    executor = 'cmd'
                else:
                    executor = 'psh'
                command = command.replace('\\n', '\n')
            # Future additions
            parserName = ''
            parserProperty = ''
            parserScript = ''
            # Build the YAML data
            # newYaml = [{'id': ability['ability_id'],
            #             'name': ability['name'],
            #             'description': ability['description'],
            #             'tactic': ability['tactic'],
            #             'technique': {'attack_id': 'T{}'.format(str(ability['technique'])),
            #                           'name': ability['attack_name']},
            #             'platforms': {platform: {executor.lower(): {
            #                 'command': cmdStr(command), 'payload': payload,
            #                 'parser': {'name': parserName, 'property': parserProperty,
            #                            'script': parserScript}}}}}]
            newYaml = [{'id': ability['ability_id'],
                        'name': ability['name'],
                        'description': ability['description'],
                        'tactic': ability['tactic'],
                        'technique': {'attack_id': 'T{}'.format(str(ability['technique'])),
                                      'name': ability['attack_name']},
                        'platforms': {platform: {executor.lower(): {'command': cmdStr(command),
                                                                    'payload': payload}}}}]
            payloadPath = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                       '../../stockpile/data/payloads/')
            abilityPath = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                       '../../stockpile/data/abilities/')
            # Check and create payloads folder if it does not exist
            try:
                if not os.path.exists(payloadPath):
                    os.makedirs(payloadPath)
            except Exception as e:
                self.log.error(e)
                return False
            # Write the BAT file if needed
            if payload != '':
                with open(os.path.join(payloadPath, payload), 'w') as payloadFile:
                    # note: batCommand is not defined in this snippet, and payload is
                    # always '' above, so this branch can never execute as written
                    payloadFile.write(batCommand)
            # Check and create ability folder if it does not exist
            try:
                if not os.path.exists(os.path.join(abilityPath, ability['tactic'])):
                    os.makedirs(os.path.join(abilityPath, ability['tactic']))
            except Exception as e:
                self.log.error(e)
                return False
            # Write the YAML file to the correct directory
            try:
                with open(os.path.join(abilityPath, ability['tactic'],
                                       '{}.yml'.format(ability['ability_id'])), 'w') as newYAMLFile:
                    dump = yaml.dump(newYaml, default_style=None, default_flow_style=False,
                                     allow_unicode=True, encoding=None, sort_keys=False)
                    newYAMLFile.write(dump)
            except Exception as e:
                self.log.error(e)
                return False
        return True

    async def get_art(self, request):
        self.log.debug('Landed in get_art.')
        try:
            atomics = await self.get_atomics()
        except Exception as e:
            self.log.error(e)
        return atomics

    async def import_art_abilities(self):
        try:
            atomics = await self.get_atomics()
        except Exception as e:
            self.log.error(e)
            return 'Failed to load abilities.'
        for ability in atomics['abilities']:
            await self.ac_data_svc.create_art_ability(ability)
        for variable in atomics['variables']:
            await self.ac_data_svc.create_art_variable(variable)
        return 'Successfully imported new abilities.'

    async def save_art_ability(self, data):
        key = data.pop('key')
        value = data.pop('value')
        updates = data.pop('data')
        if await self.ac_data_svc.update_art_ability(key, value, updates):
            return 'Updated ability: {}'.format(value)
        else:
            return 'Update failed for ability: {}'.format(value)

    async def save_art_variables(self, data):
        updates = data.pop('data')
        if await self.ac_data_svc.update_art_variables(updates):
            return 'Updated variables successfully.'
        else:
            return 'Updates to variables failed.'

    async def delete_all(self):
        abilities = []
        payloadPath = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                   '../../stockpile/data/payloads/')
        abilityPath = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                   '../../stockpile/data/abilities/')
        try:
            abilities = await self.ac_data_svc.explode_art_abilities()
        except Exception as e:
            self.log.error(e)
        for ability in abilities:
            if os.path.exists(os.path.join(abilityPath, ability['tactic'],
                                           '{}.yml'.format(ability['ability_id']))):
                os.remove(os.path.join(abilityPath, ability['tactic'],
                                       '{}.yml'.format(ability['ability_id'])))
            if os.path.exists(os.path.join(payloadPath, '{}.bat'.format(ability['ability_id']))):
                os.remove(os.path.join(payloadPath, '{}.bat'.format(ability['ability_id'])))
        status = await self.ac_data_svc.delete_all()
        await self.ac_data_svc.build_db(os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                                     '../conf/ac.sql'))
        return status

    async def rest_api(self, request):
        self.log.debug('Starting Rest call.')
        await self.auth_svc.check_permissions(request)
        data = dict(await request.json())
        index = data.pop('index')
        self.log.debug('Index: {}'.format(index))
        options = dict(
            PUT=dict(ac_ability=lambda d: self.import_art_abilities(**d)),
            POST=dict(ac_ability=lambda d: self.ac_data_svc.explode_art_abilities(**d),
                      ac_ability_save=lambda d: self.save_art_ability(data=d),
                      ac_variables_save=lambda d: self.save_art_variables(data=d),
                      ac_export_all=lambda d: self.export_all_to_stockpile(**d),
                      ac_export_one=lambda d: self.export_one_to_stockpile(data=d)),
            DELETE=dict(delete_all=lambda d: self.delete_all(**d)))
        try:
            output = await options[request.method][index](data)
        except Exception as e:
            self.log.debug('Stopped at api call.')
            self.log.error(e)
        return web.json_response(output)
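For reference, a sketch of the file export_to_stockpile writes for a single ability, following the newYaml structure above with sort_keys=False; every value here is invented to show the shape, and cmdStr only controls how the command scalar is styled:

- id: 4a3c1b2d-0000-0000-0000-000000000000
  name: Example test
  description: Example Atomic Red Team test exported to Stockpile
  tactic: discovery
  technique:
    attack_id: T1016
    name: System Network Configuration Discovery
  platforms:
    windows:
      psh:
        command: ipconfig /all
        payload: ''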
class Parser(BaseParser):

    def __init__(self, parser_info):
        self.mappers = parser_info['mappers']
        self.used_facts = parser_info['used_facts']
        self.parse_mode = 'wdigest'
        self.log = Logger('parsing_svc')

    def parse_katz(self, output):
        """
        Parses mimikatz output with the logonpasswords command and returns a list of dicts of the credentials.

        Args:
            output: stdout of "mimikatz.exe privilege::debug sekurlsa::logonpasswords exit"

        Returns:
            A list of MimikatzSection objects
        """
        # split sections using "Authentication Id" as separator
        sections = output.split('Authentication Id')
        creds = []
        for section in sections:
            mk_section = MimikatzBlock()
            package = {}
            package_name = ''
            in_header = True
            pstate = False
            for line in section.splitlines():
                line = line.strip()
                if in_header:
                    in_header = self._parse_header(line, mk_section)
                    if in_header:
                        continue  # avoid excess parsing work
                pstate, package_name = self._process_package(line, package, package_name, mk_section)
                if pstate:
                    pstate = False
                    package = {}
            self._package_extend(package, package_name, mk_section)  # save the current ssp if necessary
            if mk_section.packages:
                # save this section
                creds.append(mk_section)
        return creds

    def parse(self, blob):
        relationships = []
        try:
            parse_data = self.parse_katz(blob)
            for match in parse_data:
                if self.parse_mode in match.packages:
                    for mp in self.mappers:
                        relationships.append(
                            Relationship(source=(mp.source, match.packages[self.parse_mode][0]['Username']),
                                         edge=mp.edge,
                                         target=(mp.target, match.packages[self.parse_mode][0]['Password'])))
        except Exception as error:
            self.log.warning('Mimikatz parser encountered an error - {}. Continuing...'.format(error))
        return relationships

    """ PRIVATE FUNCTION """

    @staticmethod
    def _parse_header(line, mk_section):
        if line.startswith('msv'):
            return False
        session = re.match(r'^\s*Session\s*:\s*([^\r\n]*)', line)
        if session:
            mk_section.session = session.group(1)
        username = re.match(r'^\s*User Name\s*:\s*([^\r\n]*)', line)
        if username:
            mk_section.username = username.group(1)
        domain = re.match(r'^\s*Domain\s*:\s*([^\r\n]*)', line)
        if domain:
            mk_section.domain = domain.group(1)
        logon_server = re.match(r'^\s*Logon Server\s*:\s*([^\r\n]*)', line)
        if logon_server:
            mk_section.logon_server = logon_server.group(1)
        logon_time = re.match(r'^\s*Logon Time\s*:\s*([^\r\n]*)', line)
        if logon_time:
            mk_section.logon_time = logon_time.group(1)
        sid = re.match(r'^\s*SID\s*:\s*([^\r\n]*)', line)
        if sid:
            mk_section.sid = sid.group(1)
        return True

    def _process_package(self, line, package, package_name, mk_section):
        if line.startswith('['):
            # this might indicate the start of a new account
            self._package_extend(package, package_name, mk_section)
            return True, package_name  # reset the package
        elif line.startswith('*'):
            m = re.match(r'\s*\* (.*?)\s*: (.*)', line)
            if m:
                package[m.group(1)] = m.group(2)
        elif line:
            match_group = re.match(r'([a-z]+) :', line)  # parse out the new section name
            if match_group:
                # this is the start of a new ssp; save the current ssp if necessary
                self._package_extend(package, package_name, mk_section)
                return True, match_group.group(1)  # reset the package
        return False, package_name

    @staticmethod
    def _package_extend(package, package_name, mk_section):
        if 'Username' in package and package['Username'] != '(null)' and \
                (('Password' in package and package['Password'] != '(null)') or 'NTLM' in package):
            mk_section.packages[package_name].append(package)
    start_date=train_start_date, end_date=train_end_date, tickers=train_tickers, device=device)

print("Resample ...")
buy_samples, sell_samples, none_samples = \
    DataPreparator.over_sample(raw_buy_samples, raw_sell_samples, raw_none_samples, seed)

os.makedirs(f'data/networks/{run_id}', exist_ok=True)
Logger.log(run_id, f"Id: {run_id}")

if args.train_buyer > 0:
    buyer_features = np.array(buy_samples, dtype=np.float32)
    print("Train buyer samples detector ...")
    buyer_result = gym.train_auto_encoder('buyer', trader.buyer, buyer_optimizer, buyer_features,
                                          buyer_result, max_epochs=500, max_steps=10)
    print("Reload trader with best training result after training ...")
    buyer_optimizer, buyer_result = manager.load('buyer', trader.buyer, buyer_optimizer,
                                                 trader.reset_buyer,
                                                 lambda: manager.create_buyer_optimizer(trader),
                                                 buyer_result)
def __init__(self, payload_dirs, exfil_dir):
    self.payload_dirs = [p for p in payload_dirs if os.path.isdir(p)]
    self.log = Logger('file_svc')
    self.exfil_dir = exfil_dir
def __init__(self, payload_dirs, exfil_dir):
    self.payload_dirs = [p for p in payload_dirs if os.path.isdir(p)]
    self.log = Logger('file_svc')
    self.exfil_dir = exfil_dir
    self.log.debug('Downloaded files will come from %s' % self.payload_dirs)
def __init__(self, parser_info):
    self.mappers = parser_info['mappers']
    self.used_facts = parser_info['used_facts']
    self.parse_mode = 'wdigest'
    self.log = Logger('parsing_svc')
def add_service(self, name: str, svc: 'BaseService') -> Logger:
    self.__class__._services[name] = svc
    return Logger(name)
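A minimal, hypothetical sketch of the class-level service registry this method implies; the BaseService shell, the get_service helper, and the Logger stand-in below are illustrative assumptions so the sketch runs standalone, not the project's real classes:

import logging

class Logger:
    # stand-in for the project's Logger so this sketch is self-contained
    def __init__(self, name):
        self.logger = logging.getLogger(name)

class BaseService:
    _services = {}  # lives on the class, so every subclass shares one registry

    def add_service(self, name, svc):
        self.__class__._services[name] = svc
        return Logger(name)  # caller gets a logger scoped to the service name

    @classmethod
    def get_service(cls, name):
        return cls._services.get(name)

registry = BaseService()
log = registry.add_service('data_svc', object())
assert BaseService.get_service('data_svc') is not None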
def add_service(self, name, svc):
    self.__class__._services[name] = svc
    return Logger(name)
def __init__(self):
    self.log = Logger(name='object')
def __init__(self, parser_info):
    self.mappers = parser_info['mappers']
    self.used_facts = parser_info['used_facts']
    self.log = Logger('Parser')
def build_plugins(plugs):
    modules = []
    for plug in plugs if plugs else []:
        if not os.path.isdir('plugins/%s' % plug) or not os.path.isfile('plugins/%s/hook.py' % plug):
            logging.error('Problem validating the "%s" plugin. Ensure CALDERA was cloned recursively.' % plug)
            exit(0)
        modules.append(import_module('plugins.%s.hook' % plug))
    return modules


if __name__ == '__main__':
    parser = argparse.ArgumentParser('CALDERA application')
    parser.add_argument('-E', '--environment', required=True, default='local',
                        help='Select an env. file to use')
    args = parser.parse_args()
    with open('conf/%s.yml' % args.environment) as c:
        config = yaml.load(c, Loader=yaml.Loader)
    logging.getLogger('aiohttp.access').setLevel(logging.WARNING)
    logging.getLogger('asyncio').setLevel(logging.FATAL)
    logging.getLogger().setLevel(config['debug_level'])
    sys.path.append('')

    plugin_modules = build_plugins(config['plugins'])
    utility_svc = UtilityService()
    data_svc = DataService(CoreDao('core.db'))
    operation_svc = OperationService(data_svc=data_svc, utility_svc=utility_svc, planner=config['planner'])
    auth_svc = AuthService(data_svc=data_svc, ssl_cert=SSL_CERT)
    services = dict(
        data_svc=data_svc,
        auth_svc=auth_svc,
        utility_svc=utility_svc,
        operation_svc=operation_svc,
        logger=Logger('plugin'),
        plugins=plugin_modules
    )
    main(services=services, host=config['host'], port=config['port'],
         terminal_port=config['terminal_port'], users=config['users'])
parser = argparse.ArgumentParser('CALDERA application')
parser.add_argument('-E', '--environment', required=True, default='local',
                    help='Select an env. file to use')
args = parser.parse_args()
with open('conf/%s.yml' % args.environment) as c:
    config = yaml.load(c, Loader=yaml.Loader)
logging.getLogger('aiohttp.access').setLevel(logging.WARNING)
logging.getLogger('asyncio').setLevel(logging.FATAL)
logging.getLogger().setLevel(config['debug_level'])
sys.path.append('')

plugin_modules = build_plugins(config['plugins'])
utility_svc = UtilityService()
data_svc = DataService(CoreDao('core.db'))
operation_svc = OperationService(data_svc=data_svc, utility_svc=utility_svc, planner=config['planner'])
auth_svc = AuthService(data_svc=data_svc, ssl_cert=SSL_CERT)
file_svc = FileSvc(config['stores'])
services = dict(data_svc=data_svc, auth_svc=auth_svc, utility_svc=utility_svc, operation_svc=operation_svc,
                file_svc=file_svc, logger=Logger('plugin'), plugins=plugin_modules)
main(services=services, host=config['host'], port=config['port'], terminal_host=config['terminal_host'],
     terminal_port=config['terminal_port'], users=config['users'])
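Both startup variants read their settings from conf/<environment>.yml. A hedged sketch of the shape implied by the config[...] lookups above; the keys are taken from the code, but every value and the structure of users/stores are illustrative guesses:

# conf/local.yml -- illustrative values only
host: 0.0.0.0
port: 8888
terminal_host: 127.0.0.1
terminal_port: 2000
debug_level: DEBUG
planner: plugins.example.planner   # dotted module path passed to import_module
stores:
  - templates
users:
  admin: admin
plugins:
  - stockpile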
def __init__(self, payload_dirs, exfil_dir):
    self.payload_dirs = payload_dirs
    self.log = Logger('file_svc')
    self.exfil_dir = exfil_dir
def __init__(self, file_stores):
    self.file_stores = file_stores
    self.log = Logger('file_svc')
def __init__(self, parser_info):
    super().__init__(parser_info)
    self.mappers = parser_info['mappers']
    self.used_facts = parser_info['used_facts']
    self.log = Logger('parsing_svc')
def create_logger(name):
    return Logger(name)
class abilitymanager:

    def __init__(self, services):
        self.data_svc = services.get('data_svc')
        self.auth_svc = services.get('auth_svc')
        self.log = Logger('abilitymanager')
        self.log.debug('Ability Manager Plugin logging started.')
        self.get_conf()
        self.fs = FileSystemSource(self.ctipath)
        self.stockPath = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                      '../../stockpile/data/abilities/')

    def get_conf(self):
        confPath = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../conf/amconf.yml')
        self.log.debug('Getting local configuration from: {}'.format(confPath))
        try:
            with open(confPath, 'r') as c:
                conf = yaml.load(c, Loader=yaml.Loader)
            self.ctipath = os.path.expanduser(os.path.join(conf['ctipath'], 'enterprise-attack/'))
            if 'payloadPath' in conf.keys():
                self.payloadPath = os.path.expanduser(conf['payloadPath'])
            else:
                self.payloadPath = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                                '../../stockpile/data/payloads/')
            if 'abilityPath' in conf.keys():
                self.abilityPath = os.path.expanduser(conf['abilityPath'])
            else:
                self.abilityPath = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                                '../../stockpile/data/abilities/')
        except Exception as e:
            self.log.error(e)

    async def get_uuid(self, data):
        try:
            return str(uuid.uuid4())
        except Exception as e:
            self.log.debug('Error getting UUID.')
            self.log.error(e)
            return 'Failure'

    async def getMITRETactics(self):
        tacticList = []
        tactics = {}
        matrix = self.fs.query([Filter('type', '=', 'x-mitre-matrix')])
        for i in range(len(matrix)):
            tactics[matrix[i]['name']] = []
            for tactic_id in matrix[i]['tactic_refs']:
                tactics[matrix[i]['name']].append(self.fs.query([Filter('id', '=', tactic_id)])[0])
        for tactic in tactics['Enterprise ATT&CK']:
            tacticList.append(tactic['name'])
        return tacticList

    async def getMITRETechniques(self, data):
        tactic = data['data'].replace(' ', '-').lower()
        techniques = []
        filter = [Filter('type', '=', 'attack-pattern'),
                  Filter('kill_chain_phases.phase_name', '=', tactic)]
        results = self.fs.query(filter)
        # This double-check is used in the official documentation. I'm not sure it is needed.
        doubleCheck = [t for t in results
                       if {'kill_chain_name': 'mitre-attack', 'phase_name': tactic} in t.kill_chain_phases]
        for entry in doubleCheck:
            techniques.append({'tactic': tactic,
                               'name': entry['name'],
                               'id': entry['external_references'][0]['external_id']})
        return techniques

    async def getATTACK(self):
        attackList = []
        tactics = await self.getMITRETactics()
        try:
            for tactic in tactics:
                for technique in await self.getMITRETechniques({'data': tactic}):
                    attackList.append(technique)
            return attackList
        except Exception as e:
            self.log.debug('Failed to parse tactics')
            self.log.error(e)
            return []

    async def explode_stockpile(self):
        self.log.debug('Starting stockpile ability explosion')
        stockAbilities = []
        self.log.debug('Checking stockpile path: {}'.format(self.stockPath))
        if os.path.exists(self.stockPath):
            for root, dirs, files in os.walk(self.stockPath):
                for procFile in files:
                    fullFile = os.path.join(root, procFile)
                    if os.path.splitext(fullFile)[-1].lower() == '.yml':
                        newAbility = {}
                        with open(fullFile, 'r') as yamlFile:
                            try:
                                stockData = yaml.load(yamlFile, Loader=yaml.Loader)
                            except Exception:
                                continue
                        platformData = []
                        rawAbility = stockData[0]
                        rawPlatform = rawAbility['platforms']
                        for keyName in rawPlatform.keys():
                            for test in rawPlatform[keyName]:
                                newTest = {'platform': keyName, 'executor': ''}
                                parserName = ''
                                parserProperty = ''
                                parserScript = ''
                                if 'command' in rawPlatform[keyName][test].keys():
                                    newTest.update({'command': b64encode(
                                        rawPlatform[keyName][test]['command'].encode('utf-8')).decode('utf-8')})
                                if 'cleanup' in rawPlatform[keyName][test].keys():
                                    newTest.update({'cleanup': b64encode(
                                        rawPlatform[keyName][test]['cleanup'].encode('utf-8')).decode('utf-8')})
                                if 'payload' in rawPlatform[keyName][test].keys():
                                    newTest.update({'payload': rawPlatform[keyName][test]['payload']})
                                if 'parser' in rawPlatform[keyName][test].keys():
                                    if rawPlatform[keyName][test]['parser']['name']:
                                        parserName = rawPlatform[keyName][test]['parser']['name']
                                    if rawPlatform[keyName][test]['parser']['property']:
                                        parserProperty = rawPlatform[keyName][test]['parser']['property']
                                    if rawPlatform[keyName][test]['parser']['script']:
                                        parserScript = rawPlatform[keyName][test]['parser']['script']
                                    newTest.update({'parser': {
                                        'name': parserName,
                                        'property': parserProperty,
                                        'script': b64encode(parserScript.encode('utf-8')).decode('utf-8')}})
                                if len(test.split(',')) > 1:
                                    for subTest in test.split(','):
                                        newTest['executor'] = subTest
                                        platformData.append(newTest.copy())
                                else:
                                    newTest['executor'] = test
                                    platformData.append(newTest)
                        newAbility = {'id': rawAbility['id'],
                                      'name': rawAbility['name'],
                                      'description': rawAbility['description'],
                                      'tactic': rawAbility['tactic'],
                                      'technique': rawAbility['technique'],
                                      'platforms': platformData,
                                      'path': fullFile}
                        stockAbilities.append(newAbility)
        return stockAbilities

    async def delete_ability(self, data):
        pathData = data['data']
        try:
            os.remove(pathData)
            return 'File deleted.'
        except Exception as e:
            self.log.error(e)
            return 'File deletion failed.'
    async def save_ability(self, data):
        abilityData = data.pop('data')
        newYaml = []
        newYamlEntry = {}
        newPlatforms = {}
        osList = []
        # Add the YAML presenter
        yaml.add_representer(cmdStr, cmd_presenter)
        # Get the OS names
        for test in abilityData['platforms']:
            osList.append(test['platform'])
        osList = list(set(osList))
        try:
            for osSys in osList:
                newPlatforms[osSys] = {}
                for test in abilityData['platforms']:
                    if osSys == test['platform']:
                        newTest = {}
                        command = b64decode(test['command'])
                        command = command.decode('utf-8')
                        if command[0] == '\'':
                            command = command.strip('\'')
                        elif command[0] == '\"':
                            command = command.strip('\"')
                        command = command.replace('\\n', '\n')
                        newTest['command'] = cmdStr(command)
                        # Check for payload
                        if 'payload' in test.keys():
                            newTest['payload'] = test['payload']
                        if 'cleanup' in test.keys():
                            cleanup = b64decode(test['cleanup'])
                            cleanup = cleanup.decode('utf-8')
                            if cleanup[0] == '\'':
                                cleanup = cleanup.strip('\'')
                            elif cleanup[0] == '\"':
                                cleanup = cleanup.strip('\"')
                            cleanup = cleanup.replace('\\n', '\n')
                            newTest['cleanup'] = cmdStr(cleanup)
                        if 'parser' in test.keys():
                            newParser = {}
                            newParser['name'] = test['parser']['name']
                            newParser['property'] = test['parser']['property']
                            newParser['script'] = b64decode(test['parser']['script']).decode('utf-8')
                            newTest['parser'] = newParser
                        newPlatforms[osSys][test['executor']] = newTest
                    else:
                        pass
            newYamlEntry['id'] = abilityData['id']
            newYamlEntry['name'] = abilityData['name']
            newYamlEntry['description'] = abilityData['description']
            newYamlEntry['tactic'] = abilityData['tactic']
            newYamlEntry['technique'] = abilityData['technique']
            newYamlEntry['platforms'] = newPlatforms
            newYaml.append(newYamlEntry)
        except Exception as e:
            self.log.error(e)
            return 'Failed to parse ability data.'
        # payloadPath = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../stockpile/data/payloads/')
        # abilityPath = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../stockpile/data/abilities/')
        # You can change the output path for testing or to separate your changes if you like.
        # payloadPath = '/tmp/'
        # abilityPath = '/tmp/'
        # Check and create payloads folder if it does not exist
        try:
            if not os.path.exists(self.payloadPath):
                os.makedirs(self.payloadPath)
        except Exception as e:
            self.log.error(e)
            return False
        # Check and create ability folder if it does not exist
        try:
            if not os.path.exists(os.path.join(self.abilityPath, abilityData['tactic'])):
                os.makedirs(os.path.join(self.abilityPath, abilityData['tactic']))
        except Exception as e:
            self.log.error(e)
            return False
        # Write the YAML file to the correct directory
        try:
            with open(os.path.join(self.abilityPath, abilityData['tactic'],
                                   '{}.yml'.format(abilityData['id'])), 'w') as newYAMLFile:
                dump = yaml.dump(newYaml, default_style=None, default_flow_style=False,
                                 allow_unicode=True, encoding=None, sort_keys=False)
                newYAMLFile.write(dump)
        except Exception as e:
            self.log.error(e)
        # Delete the original file if necessary
        try:
            if (os.path.dirname(abilityData['path']) != os.path.join(self.abilityPath, abilityData['tactic'])) and \
                    (os.path.basename(abilityData['path']) == '{}.yml'.format(abilityData['id'])):
                os.remove(abilityData['path'])
        except Exception as e:
            self.log.error(e)
        return 'Test saved successfully. Click the reload button to reload the list of available abilities.'
    @template('abilitymanager.html')
    async def landing(self, request):
        abilities = []
        tactics = []
        fullTactics = []
        attackList = []
        try:
            await self.auth_svc.check_permissions(request)
            abilities = await self.explode_stockpile()
            tactics = set([a['tactic'].lower() for a in abilities])
            fullTactics = await self.getMITRETactics()
            attackList = await self.getATTACK()
            self.log.debug('Landing call completed.')
        except Exception as e:
            self.log.debug('Failed to land.')
            self.log.error(e)
        return {'abilities': json.dumps(abilities), 'tactics': tactics,
                'fulltactics': json.dumps(fullTactics), 'techniques': json.dumps(attackList)}

    async def rest_api(self, request):
        self.log.debug('Starting Rest call.')
        await self.auth_svc.check_permissions(request)
        data = dict(await request.json())
        index = data.pop('index')
        self.log.debug('Index: {}'.format(index))
        options = dict(
            PUT=dict(),
            POST=dict(am_ability=lambda d: self.explode_stockpile(**d),
                      am_ability_save=lambda d: self.save_ability(data=d),
                      am_ability_delete=lambda d: self.delete_ability(data=d),
                      am_get_uuid=lambda d: self.get_uuid(data=d),
                      am_get_tactics=lambda d: self.getMITRETactics(),
                      am_get_techniques=lambda d: self.getMITRETechniques(data=d),
                      am_get_attack=lambda d: self.getATTACK()),
            DELETE=dict())
        try:
            output = await options[request.method][index](data)
        except Exception as e:
            self.log.error(e)
        return web.json_response(output)