def pipe_close(peer_type):
    """Mark one side of a pipe closed; delete the pipe when both sides are done.

    `peer_type` must be 'client' or 'server'.  Reads the pipe id from the
    JSON 'data' field of the request form.  Returns a JSON status document;
    aborts with 404 if the pipe does not exist.
    """
    if peer_type not in ('client', 'server'):
        raise Exception('Invalid peer type "{}"'.format(peer_type))
    data = json.loads(request.form['data'])
    uuid = data['pipe_id']
    # NOTE(review): this existence check runs outside pipes_lock — presumably
    # acceptable because pipe ids are unique and only deleted here; confirm.
    if uuid not in pipes:
        log.error('Attempt to close nonexistent pipe {}', uuid)
        abort(404)
    with pipes_lock:
        pipe = pipes[uuid]
        try:
            other_peer_type = 'server' if peer_type == 'client' else 'client'
            closed_field = peer_type + '_closed'
            other_closed_field = other_peer_type + '_closed'
            pipe[closed_field] = True
            # A closing client counts as having opened the pipe; otherwise use
            # the recorded flag so a never-opened pipe is reaped as soon as the
            # server closes it.
            client_opened = peer_type == 'client' or pipe['client_opened']
            if not client_opened or pipe[other_closed_field]:
                # Both ends finished (or the client never showed up): tear the
                # pipe down and drop its encryptors.
                del pipes[uuid]
                encryptors.pop(uuid, None)
                if peer_type == 'server':
                    PipeLogger.finish(uuid)
            return json.dumps({'status': 'ok'})
        finally:
            # DictProxy doesn't detect updates to nested dicts.
            if uuid in pipes:  # i.e., it wasn't deleted above
                pipes[uuid] = pipe
def pipe_create():
    """Allocate a new pipe for a known client and return its id as JSON.

    Validates the requesting client against the database, sets up the
    server-side send/receive encryptors, and registers an empty pipe record.
    """
    payload = json.loads(request.form['data'])
    hostname = payload['client_hostname']
    db = get_db()
    if not db.clients.find_one({'hostname': hostname}):
        raise Exception('Attempt to create pipe for invalid client {}'.format(
            hostname))
    key = payload['encryption_key']
    iv = payload['encryption_iv']
    pipe_id = uuid4().hex
    encryptors[pipe_id]['server'] = {
        'send': Encryptor(key, iv),
        'receive': Encryptor(key, iv),
    }
    new_pipe = {
        'client_opened': False,
        'client_closed': False,
        'server_closed': False,
        'client_to_server': b'',
        'server_to_client': b'',
        'created': time.time(),
        'activity': None,
        'client_hostname': hostname,
    }
    pipes[pipe_id] = new_pipe
    log.debug('Created pipe {}', pipe_id)
    return json.dumps({'pipe_id': pipe_id})
def pipe_receive(peer_type):
    """Return data buffered for `peer_type` ('client' or 'server') on a pipe.

    Reads the pipe id from the JSON 'data' field of the request form and
    returns one of three JSON documents: {'data': <base64>} when buffered
    data exists, {'eof': True} when the other side has closed, or
    {'status': 'ok'} when there is nothing to deliver yet.  Aborts with 404
    if the pipe does not exist.
    """
    if peer_type not in ('client', 'server'):
        raise Exception('Invalid peer type "{}"'.format(peer_type))
    data = json.loads(request.form['data'])
    uuid = data['pipe_id']
    if uuid not in pipes:
        log.error('Attempt to receive from nonexistent pipe {}', uuid)
        abort(404)
    with pipes_lock:
        pipe = pipes[uuid]
        # Record activity so idle-pipe reaping (elsewhere) can see traffic.
        pipe['activity'] = time.time()
        try:
            other_peer_type = 'server' if peer_type == 'client' else 'client'
            data_field = other_peer_type + '_to_' + peer_type
            if pipe[data_field]:
                # NOTE(review): 'encrypt' on the 'receive' encryptor —
                # presumably a symmetric stream cipher where encrypt/decrypt
                # are the same operation; confirm against Encryptor.
                encryptor = encryptors[uuid][peer_type]['receive']
                encrypted_data = encryptor.encrypt(pipe[data_field])
                encoded_data = b64encode(encrypted_data).decode('utf8')
                ret = json.dumps({'data': encoded_data})
                if peer_type == 'server':
                    # Log the plaintext before the buffer is cleared below.
                    PipeLogger.log(uuid, 'receive', pipe[data_field])
                pipe[data_field] = b''
                return ret
            closed_field = other_peer_type + '_closed'
            if pipe[closed_field]:
                return json.dumps({'eof': True})
            return json.dumps({'status': 'ok'})
        finally:
            # DictProxy doesn't detect updates to nested dicts.
            pipes[uuid] = pipe
def submit():
    """Accept a client data submission (plugins and/or command output).

    Parses the JSON 'data' form field, upserts the client document keyed by
    hostname, clears any 'suspended' flag, re-encrypts secret fields, and
    logs/audits the resulting changes.  Returns 'ok' on success, 'error' for
    an empty submission, or a 400 Response for unparseable JSON.
    """
    db = get_db()
    which = []  # which sections ('plugins', 'commands') this submission carried
    now = datetime.datetime.utcnow()
    try:
        data = json.loads(request.form['data'])
    except json.decoder.JSONDecodeError as e:
        log.exception('Failed to parse request data as JSON. Content=<<<{}>>>',
                      request.data)
        return Response(str(e), status=400)
    hostname = data['hostname']
    spec = {'hostname': hostname}
    update = {
        'submitted_at': now,
        'hostname': hostname,
    }
    if 'plugins' in data:
        data['plugins']['submitted_at'] = now
        update['plugins'] = data['plugins']
        which.append('plugins')
    if data.get('commands', {}):
        for name, output in data['commands'].items():
            output['submitted_at'] = now
            # Dotted key updates just this command's subdocument.
            update['commands.{}'.format(name)] = output
        which.append('commands')
    if which:
        # Snapshot the old document so changes can be diffed/audited below.
        old = db.clients.find_one(spec)
        update_result = db.clients.update_one(spec, {
            '$set': update,
            '$unset': {
                'suspended': True
            }
        })
        # NOTE(review): modified_count == 0 is treated as "no such client".
        # Since submitted_at is always new this should equal matched_count == 0
        # in practice, but matched_count would state the intent directly and
        # avoid a duplicate insert if an identical update ever matched —
        # confirm.  db.clients.save() is also deprecated in newer pymongo.
        if update_result.modified_count == 0:
            db.clients.save(update)
            log.info('Added new client: {}', hostname)
        log.info('Successful submission of {} by {}', ', '.join(which),
                 hostname)
        if old:
            new = db.clients.find_one(spec)
            strip_dates(old)
            strip_dates(new)
            new, updates = encrypt_document(new)
            if updates:
                db.clients.update_one({'_id': new['_id']}, updates)
                log.info('Encrypted secret data for {} in document {}',
                         hostname, new['_id'])
            changes, audit_trail = dict_changes(old, new)
            for change in changes:
                log.info('Change for {}: {}', hostname, change)
            if audit_trail:
                audit_trail_write({
                    'audited_at': now,
                    'hostname': hostname
                }, audit_trail)
        return ('ok')
    else:
        log.error('Empty submission from {}', hostname)
        return ('error')
def wrapper(*args, **kwargs):
    """Call the wrapped view, then rewrite REMOTE_ADDR to the submitted hostname.

    Best-effort: if the request carries no parseable 'data' form field with a
    'hostname' key, the environ is left untouched.
    """
    try:
        hostname = json.loads(request.form['data'])['hostname']
    # Was a bare `except:`, which would also swallow SystemExit and
    # KeyboardInterrupt; Exception keeps the same best-effort behavior safely.
    except Exception:
        hostname = None
    ret = f(*args, **kwargs)
    if hostname:
        # Make downstream logging attribute the request to the hostname.
        request.environ['REMOTE_ADDR'] = hostname
    return ret
def acknowledge_patch():
    """Record that a host applied a patch: move it from pending to completed."""
    payload = json.loads(request.form['data'])
    patch_id = payload['id']
    hostname = payload['hostname']
    db = get_db()
    db.patches.update_one(
        {'_id': ObjectId(patch_id)},
        {'$push': {'completed_hosts': hostname},
         '$pull': {'pending_hosts': hostname}})
    log.info('{} acknowledged patch {}', hostname, patch_id)
    return 'ok'
def decrypt_iterator(document_iterator, document_keys=None, selectors=None,
                     full_documents=False):
    """Decrypt documents and return decrypted data

    `selectors` defaults to `get_selectors()`

    Each yield is a tuple of a document containing the document keys, a
    document containing the decrypted data in the same structure as the
    original document, and a list of tuples of selectors that were decrypted
    and the decrypted datum for each selector. I.e., the same decrypted data
    is returned in two different forms, to simplify the use of the data by
    the caller.

    if `full_documents` is true, then the document in each yield will be the
    full document returned by the iterator with encrypted fields replaced,
    rather than a document containing just the decrypted fields.

    Assumes that the secret key has already been combined _before_ this
    function is called.

    Note: There is a yield for every document that is returned by
    document_iterator, even if nothing was decrypted. If that's the case,
    then the document keys in the yield will be populated, but the decrypted
    data document and list will be empty.
    """
    if selectors is None:
        selectors = get_selectors()
    # Snapshot the key names once.  The previous code rebound the
    # `document_keys` parameter to the per-document dict inside the loop,
    # which only worked by accident (iterating the dict yields the same keys)
    # and shadowed the argument.
    key_names = tuple(document_keys or ())
    for doc in document_iterator:
        doc_keys = {k: doc[k] for k in key_names}
        output_dict = doc if full_documents else {}
        output_tuples = []
        for s in selectors:
            encrypted_data = get_setting(doc, s.enc_mem, check_defaults=False)
            if not encrypted_data:
                continue
            # Round-trip through temp files because gpg works on files.
            with NamedTemporaryFile('w+b') as unencrypted_file, \
                    NamedTemporaryFile('w+b') as encrypted_file:
                encrypted_file.write(b64decode(encrypted_data['data']))
                encrypted_file.flush()
                gpg_command('--decrypt', '-o', unencrypted_file.name,
                            encrypted_file.name)
                unencrypted_file.seek(0)
                unencrypted_data = json.loads(
                    unencrypted_file.read().decode('utf-8'))
            # Order is important here. We unset before we set because the
            # selector key may be the same for both unencrypted and encrypted
            # fields.
            if full_documents:
                set_setting(output_dict, s.enc_mem, None)
            set_setting(output_dict, s.plain_mem, unencrypted_data)
            output_tuples.append((s, unencrypted_data))
        yield (doc_keys, output_dict if output_tuples else {}, output_tuples)
def run_file(run_path, run_name, results, parse_output=True,
             delete_after_success=False, submit_failures=False):
    """Execute a script and record its output in `results[run_name]`.

    On failure, logs stdout/stderr and records them only when
    `submit_failures` is set.  On success, parses stdout as JSON when
    `parse_output` is set (otherwise stores the raw text), and optionally
    removes the script and its detached signature when
    `delete_after_success` is set.
    """
    log.debug('Running {}', run_path)
    with NamedTemporaryFile('w+') as stderr_file:
        try:
            run_output = subprocess.check_output(
                run_path, stderr=stderr_file.fileno()).decode('utf8')
        except subprocess.CalledProcessError as e:
            log.exception('Failed to execute {}', run_path)
            output = e.output.decode('utf8').strip()
            if output:
                log.info('Output of failed script:\n{}', output)
            stderr_file.seek(0)
            stderr = stderr_file.read().strip()
            if stderr:
                log.info('Stderr of failed script:\n{}', stderr)
            if submit_failures:
                results[run_name] = {
                    'stdout': e.output.decode('utf8'),
                    'stderr': stderr,
                    'returncode': e.returncode,
                }
            else:
                return
        else:
            if parse_output:
                try:
                    results[run_name] = json.loads(run_output)
                # Was a bare `except:`; Exception preserves the best-effort
                # behavior without trapping SystemExit/KeyboardInterrupt.
                except Exception:
                    log.exception('Output of {} failed to parse', run_path)
                    return
            else:
                results[run_name] = {'output': run_output.strip()}
            if delete_after_success:
                try:
                    os.remove(run_path)
                    # NOTE(review): if run_path is ever absolute,
                    # os.path.join discards signatures_dir entirely — confirm
                    # run_path is always relative.
                    sig_file = os.path.join(signatures_dir,
                                            run_path + '.sig')
                    try:
                        os.remove(sig_file)
                    except Exception:
                        log.warn('Failed to remove {}', sig_file)
                except Exception:
                    log.exception('Failed to remove {}', run_path)
                else:
                    log.info('Removed {}', run_path)
        finally:
            log.debug('Finished with {}', run_path)
def wrapper(*args, **kwargs):
    """Track deprecated-port usage around the wrapped view.

    When the app is serving the deprecated port, logs the connection and
    opens a 'deprecated-port' issue for the host; otherwise closes any such
    issue.  Issue bookkeeping is skipped when the hostname can't be parsed
    from the request.
    """
    issue_name = 'deprecated-port'
    try:
        hostname = json.loads(request.form['data'])['hostname']
        ok = True
    # Was a bare `except:`, which would also swallow SystemExit and
    # KeyboardInterrupt; Exception keeps the same fallback behavior safely.
    except Exception:
        log.error('Failed to parse request data')
        ok = False
        hostname = request.remote_addr
    if app.config['deprecated_port']:
        log.warn('Host {} connected to deprecated port', hostname)
        if ok:
            if open_issue(hostname, issue_name):
                log.info('Opened {} issue for {}', issue_name, hostname)
    else:
        if ok:
            doc = close_issue(hostname, issue_name)
            if doc:
                log.info('Closed {} issue for {}', issue_name, hostname)
    return f(*args, **kwargs)
def pipe_open():
    """Open the client side of an existing pipe and install its encryptors.

    Reads the pipe id, key, and IV from the JSON 'data' form field.  Aborts
    with 404 for an unknown pipe; raises if the pipe was already opened.
    Returns a JSON status document.
    """
    data = json.loads(request.form['data'])
    uuid = data['pipe_id']
    with pipes_lock:
        if uuid not in pipes:
            log.error('Attempt to open nonexistent pipe {}', uuid)
            abort(404)
        key = data['encryption_key']
        iv = data['encryption_iv']
        # NOTE(review): the client encryptors are (re)installed before the
        # already-opened check below, so a duplicate open replaces them even
        # though it then raises — confirm that's intended.
        encryptors[uuid]['client'] = {'send': Encryptor(key, iv),
                                      'receive': Encryptor(key, iv)}
        try:
            pipe = pipes[uuid]
            if pipe['client_opened']:
                raise Exception('Attempt to open already opened pipe')
            pipe['client_opened'] = True
        finally:
            # DictProxy doesn't detect updates to nested dicts.
            pipes[uuid] = pipe
    return json.dumps({'status': 'ok'})
def update():
    """Tell a client whether a newer release exists and list pending patches.

    Compares the client's reported release number against the newest
    `*.asc` file in releases_dir.  Responds with status 'current', or
    'out-of-date' plus the release contents; always includes any patches
    pending for the host.  Returns a JSON document.
    """
    db = get_db()
    data = json.loads(request.form['data'])
    hostname = data['hostname']
    old_release = data['old_release']
    # Release files are named <number>.<...>.asc; lexical sort is assumed to
    # order them (NOTE(review): confirm numbers are zero-padded, otherwise
    # '10...' sorts before '9...').
    releases = sorted(r for r in os.listdir(releases_dir)
                      if r.endswith('.asc'))
    response_data = {}
    if len(releases) == 0:
        response_data['status'] = 'current'
    else:
        current_release_file = releases[-1]
        current_release_number = \
            int(current_release_file[0:current_release_file.index('.')])
        if old_release >= current_release_number:
            response_data['status'] = 'current'
            log.debug('{} is current ({})', hostname, current_release_number)
        else:
            log.info('Sending release {} to {} (currently at {})',
                     current_release_number, hostname, old_release)
            response_data['status'] = 'out-of-date'
            response_data['current_release'] = current_release_number
            # Use a context manager so the release file handle is closed
            # promptly (the original open(...).read() leaked it).
            with open(os.path.join(releases_dir,
                                   current_release_file)) as release_file:
                response_data['update'] = release_file.read()
    patches = [{
        'id': str(d['_id']),
        'files': d['files']
    } for d in db.patches.find({'pending_hosts': hostname},
                               projection=['files'])]
    if patches:
        log.info('Sending patches {} ({}) to {}',
                 ', '.join(p['id'] for p in patches),
                 ', '.join(f['path'] for p in patches for f in p['files']),
                 hostname)
        response_data['patches'] = patches
    return json.dumps(response_data)