async def get_transforms_options_func():
    t = TransformOperation()
    # collect the parameter type hints for every public transform method so callers can present
    # the available transform options; typing.get_type_hints is synchronous and takes the callable
    # itself, so it is not awaited and is given the bound method rather than its __annotations__
    method_list = {func: get_type_hints(getattr(t, func))
                   for func in dir(t)
                   if callable(getattr(t, func)) and not func.startswith("__")}
    return method_list
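
# Hedged usage sketch, not part of the original module: the helper name below is hypothetical and
# only illustrates the shape of the mapping get_transforms_options_func returns
# ({method name -> {argument name -> type hint}}), serialized with the same `js` json alias the
# rest of this file already uses.
async def _example_dump_transform_options():
    options = await get_transforms_options_func()
    # str() the hint objects so they survive json serialization
    return js.dumps({name: {arg: str(hint) for arg, hint in hints.items()}
                     for name, hints in options.items()})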

async def add_task_to_callback_func(data, cid, user):
    try:
        # first see if the operator and callback exist
        op = await db_objects.get(Operator, username=data['operator'])
        cb = await db_objects.get(Callback, id=cid)
        # now check the task and add it if it's valid and valid for this callback's payload type
        try:
            cmd = await db_objects.get(Command, cmd=data['command'],
                                       payload_type=cb.registered_payload.payload_type)
        except Exception as e:
            return {'status': 'error', 'error': data['command'] + ' is not a registered command',
                    'cmd': data['command'], 'params': data['params']}
        file_meta = ""
        # some tasks require a bit more processing, so we'll handle that here so it's easier for the implant
        if cmd.cmd == "upload":
            upload_config = js.loads(data['params'])
            # we need to get the file into the database before we can signal for the callback to pull it down
            try:
                # see if we actually submitted "file_id /remote/path/here"
                if 'file_id' in upload_config and upload_config['file_id'] > 0:
                    f = await db_objects.get(FileMeta, id=upload_config['file_id'])
                    # we don't want to lose our tracking on this file, so we'll create a new database entry
                    file_meta = await db_objects.create(FileMeta, total_chunks=f.total_chunks,
                                                        chunks_received=f.chunks_received, complete=f.complete,
                                                        path=f.path, operation=f.operation, operator=op)
                    data['file_updates_with_task'].append(file_meta)
                elif 'file' in upload_config:
                    # we just made the file for this instance, so just use it as the file_meta
                    # in this case it's already added to data['file_updates_with_task']
                    file_meta = await db_objects.get(FileMeta, id=upload_config['file'])
                # now normalize the data for the agent since it doesn't care if it was an old or new file_id to upload
                data['params'] = js.dumps({'remote_path': upload_config['remote_path'],
                                           'file_id': file_meta.id})
            except Exception as e:
                print(e)
                return {'status': 'error',
                        'error': 'failed to get file info from the database: ' + str(e),
                        'cmd': data['command'], 'params': data['params']}
        elif cmd.cmd == "download":
            if '"' in data['params']:
                data['params'] = data['params'][1:-1]  # remove "" around the string at this point if they are there
        elif cmd.cmd == "screencapture":
            # we need to specify here the name of the file that we'll be creating
            # since it'll already be saved in a directory structure that indicates the computer name, we'll indicate time
            data['params'] = data['params'] + " " + datetime.datetime.utcnow().strftime('%Y-%m-%d-%H:%M:%S') + ".png"
        # if the task is for something that doesn't actually go down to the client, we'll handle it a little differently
        if cmd.cmd == "tasks":
            # this means we're just listing out the not-completed tasks, so nothing actually goes to the agent
            task = await db_objects.create(Task, callback=cb, operator=op, command=cmd,
                                           params=data['params'], status="processed")
            raw_rsp = await get_all_not_completed_tasks_for_callback_func(cb.id, user)
            if raw_rsp['status'] == 'success':
                rsp = ""
                for t in raw_rsp['tasks']:
                    rsp += "\nOperator: " + t['operator'] + "\nTask " + str(t['id']) + ": " + t['command'] + " " + t['params']
                await db_objects.create(Response, task=task, response=rsp)
            else:
                return {'status': 'error', 'error': 'failed to get tasks',
                        'cmd': data['command'], 'params': data['params']}
        elif cmd.cmd == "clear":
            # this means we're going to be clearing out some tasks depending on our access levels
            task = await db_objects.create(Task, callback=cb, operator=op, command=cmd,
                                           params=data['params'], status="processed")
            raw_rsp = await clear_tasks_for_callback_func({"task": data['params']}, cb.id, user)
            if raw_rsp['status'] == 'success':
                rsp = "Removed the following:"
                for t in raw_rsp['tasks_removed']:
                    rsp += "\nOperator: " + t['operator'] + "\nTask " + str(t['id']) + ": " + t['command'] + " " + t['params']
                await db_objects.create(Response, task=task, response=rsp)
            else:
                return {'status': 'error', 'error': raw_rsp['error'],
                        'cmd': data['command'], 'params': data['params']}
        elif cmd.cmd == "load":
            try:
                # open the files that contain the code we're going to load in,
                # then run the registered "load" transforms over them
                transforms = TransformOperation()
                load_transforms = await get_transforms_func(cb.registered_payload.payload_type.ptype, "load")
                if load_transforms['status'] == "success":
                    # always start with a list of paths for all of the things we want to load
                    transform_output = []
                    # check if somebody submitted {'cmds':'shell,load, etc', 'file_id': 4} instead of list of commands
                    try:
                        replaced_params = data['params'].replace("'", '"')
                        funcs = js.loads(replaced_params)['cmds']
                    except Exception as e:
                        funcs = data['params']
                    data['params'] = funcs
                    for p in data['params'].split(","):
                        transform_output.append("./app/payloads/{}/{}".format(
                            cb.registered_payload.payload_type.ptype, p.strip()))
                    for t in load_transforms['transforms']:
                        try:
                            transform_output = await getattr(transforms, t['name'])(
                                cb.registered_payload, transform_output, t['parameter'])
                        except Exception as e:
                            print(e)
                            return {'status': 'error',
                                    'error': 'failed to apply transform {}, with message: {}'.format(
                                        t['name'], str(e)),
                                    'cmd': data['command'], 'params': data['params']}
                    # now create a corresponding file_meta
                    file_meta = await db_objects.create(FileMeta, total_chunks=1, chunks_received=1,
                                                        complete=True, path=transform_output,
                                                        operation=cb.operation)
                    data['file_updates_with_task'].append(file_meta)
                    params = {"cmds": data['params'], "file_id": file_meta.id}
                    task = await db_objects.create(Task, callback=cb, operator=op, command=cmd, params=params)
                else:
                    return {'status': 'error', 'error': 'failed to get transforms for this payload type',
                            'cmd': data['command'], 'params': data['params']}
            except Exception as e:
                print(e)
                return {'status': 'error', 'error': 'failed to open and encode new function',
                        'cmd': data['command'], 'params': data['params']}
        else:
            task = await db_objects.create(Task, callback=cb, operator=op, command=cmd, params=data['params'])
        for update_file in data['file_updates_with_task']:
            # now we can associate the task with the filemeta object
            update_file.task = task
            await db_objects.update(update_file)
        status = {'status': 'success'}
        task_json = task.to_json()
        task_json['task_status'] = task_json['status']  # we don't want the two status keys to conflict
        task_json.pop('status')
        return {**status, **task_json}
    except Exception as e:
        print("failed to get something in add_task_to_callback_func " + str(e))
        return {'status': 'error', 'error': 'Failed to create task: ' + str(e),
                'cmd': data['command'], 'params': data['params']}
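
# Hedged usage sketch, not part of the original module: shows the minimum `data` dictionary that
# add_task_to_callback_func reads above; the operator name, command, and params values are
# placeholders for illustration only.
async def _example_issue_shell_task(cid, user):
    data = {
        'operator': 'alice',              # must match an existing Operator.username
        'command': 'shell',               # must be registered for the callback's payload type
        'params': 'whoami',
        'file_updates_with_task': []      # upload/load commands append FileMeta rows here
    }
    result = await add_task_to_callback_func(data, cid, user)
    # success: {'status': 'success', 'task_status': ..., plus the rest of task.to_json()}
    # error:   {'status': 'error', 'error': ..., 'cmd': ..., 'params': ...}
    return result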

async def write_payload(uuid, user):
    try:
        payload = await db_objects.get(Payload, uuid=uuid)
    except Exception as e:
        return {'status': 'error', 'error': 'failed to get payload db object to write to disk'}
    try:
        if payload.payload_type.file_extension:
            extension = payload.payload_type.file_extension
        else:
            extension = ""
        base = open('./app/payloads/{}/{}{}'.format(payload.payload_type.ptype,
                                                    payload.payload_type.ptype, extension))
        payload_directory = os.path.dirname(payload.location)
        pathlib.Path(payload_directory).mkdir(parents=True, exist_ok=True)
        custom = open(payload.location, 'w')
        # wrappers won't necessarily have a c2 profile associated with them
        if not payload.payload_type.wrapper:
            base_c2 = open('./app/c2_profiles/{}/{}/{}/{}{}'.format(
                payload.operation.name, payload.c2_profile.name, payload.payload_type.ptype,
                payload.c2_profile.name, extension))
    except Exception as e:
        print(e)
        return {'status': 'error', 'error': 'failed to open all needed files. ' + str(e)}
    for line in base:
        if "C2Profile" in line and not payload.payload_type.wrapper:
            # this means we need to write out the c2 profile and all parameters here
            await write_c2(custom, base_c2, payload)
            # this will eventually be write_ptype_params like above, but not yet
        elif 'XXXX' in line:
            replaced_line = line.replace("XXXX", uuid)
            custom.write(replaced_line)
        elif 'COMMAND DECLARATIONS AND IMPLEMENTATIONS' in line:
            # go through all the commands and write them to the payload
            try:
                commands = await db_objects.execute(
                    PayloadCommand.select().where(PayloadCommand.payload == payload))
                for command in commands:
                    # try to open up the corresponding command file
                    cmd_file = open('./app/payloads/{}/{}'.format(payload.payload_type.ptype,
                                                                  command.command.cmd))
                    custom.write(cmd_file.read())
                    cmd_file.close()
            except Exception as e:
                print(e)
                return {'status': 'error', 'error': 'failed to get and write commands to payload on disk'}
        elif 'WRAPPEDPAYLOADHERE' in line and payload.payload_type.wrapper:
            # first we need to do the proper encoding, then we write it to the appropriate spot
            wrapped_payload = open(payload.wrapped_payload.location, 'rb').read()
            if payload.payload_type.wrapped_encoding_type == "base64":
                wrapped_payload = base64.b64encode(wrapped_payload).decode("UTF-8")
            replaced_line = line.replace("WRAPPEDPAYLOADHERE", str(wrapped_payload))
            custom.write(replaced_line)
        else:
            custom.write(line)
    base.close()
    if not payload.payload_type.wrapper:
        base_c2.close()
    custom.close()
    # now that it's written to disk, we need to potentially do some compilation or extra transforms
    transform = TransformOperation()
    transform_request = await get_transforms_func(payload.payload_type.ptype, "create")
    if transform_request['status'] == "success":
        transform_list = transform_request['transforms']
        # do step 0, prior_output = path of our newly written file
        transform_output = payload.location
        for t in transform_list:
            try:
                transform_output = await getattr(transform, t['name'])(payload, transform_output,
                                                                       t['parameter'])
            except Exception as e:
                print(e)
                return {'status': 'error',
                        'error': 'failed to apply transform {}, with message: {}'.format(t['name'], str(e))}
        if transform_output != payload.location:
            # this means we ended up with a final file in a location other than what we specified
            print(transform_output)
            return {'status': 'success', 'path': transform_output}
    return {'status': 'success', 'path': payload.location}
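
# Hedged usage sketch, not part of the original module: once a Payload row exists for a uuid, a
# caller might write it to disk and use whichever path the "create" transforms finally produced.
# The helper name is hypothetical and exists only for illustration.
async def _example_build_payload_file(uuid, user):
    rsp = await write_payload(uuid, user)
    if rsp['status'] == 'success':
        # 'path' may differ from payload.location if a transform (e.g. compilation) moved the file
        return rsp['path']
    raise Exception(rsp['error'])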