def prepare_database():
    """Ensure all MongoDB indexes used by the server exist."""
    db = get_db()

    db.client_parameters.create_indexes([
        IndexModel([('hostname', ASCENDING), ('parameter', ASCENDING)]),
        IndexModel([('parameter', ASCENDING)]),
    ])

    db.clients.create_indexes([
        IndexModel([('hostname', ASCENDING)], unique=True),
        IndexModel([('suspended', ASCENDING)]),
        IndexModel([('plugins.os_info.distname', ASCENDING),
                    ('plugins.os_updates.installed_packages', ASCENDING),
                    (arch_security_flag, ASCENDING)]),
        IndexModel([(arch_security_flag, ASCENDING),
                    ('plugins.os_updates.patches', ASCENDING)]),
    ])

    # Issues are queried by various combinations of host / name / open state.
    issue_key_sets = [
        ('hostname', 'name', 'closed_at', 'unsnooze_at'),
        ('closed_at', 'unsnooze_at'),
        ('hostname', 'closed_at', 'unsnooze_at'),
        ('name', 'closed_at', 'unsnooze_at'),
        ('hostname', 'suspended'),
    ]
    db.issues.create_indexes(
        [IndexModel([(key, ASCENDING) for key in keys])
         for keys in issue_key_sets])

    db.patches.create_indexes([
        IndexModel([('pending_hosts', ASCENDING)]),
        IndexModel([('files.path', ASCENDING)]),
    ])
def snooze_handler(args):
    """CLI handler: snooze matching issues until a computed deadline."""
    # Refuse to snooze everything unless the user explicitly asked for it.
    if not (args.host or args.issue_name or args.all):
        sys.exit('If you really want to snooze all issues for all hosts,\n'
                 'you need to specify --all.')

    # Default to one day when neither --days nor --hours was given.
    if not (args.days or args.hours):
        args.days = 1
    if args.days:
        then = now + datetime.timedelta(days=args.days)
    else:
        then = now + datetime.timedelta(hours=args.hours)

    def as_spec(values):
        # None -> no restriction; one value -> exact match; many -> $in.
        if not values:
            return None
        return values[0] if len(values) == 1 else {'$in': values}

    ids = snooze_issue(as_spec(args.host), as_spec(args.issue_name), then)
    if not ids:
        print('No matching issues.')
        return

    with logbook.StreamHandler(sys.stdout, bubble=True):
        for doc in get_db().issues.find({'_id': {'$in': ids}}):
            log.info('Snoozed {} {} until {}', doc['hostname'], doc['name'],
                     then)
def prepare_database():
    """Create the MongoDB indexes this server relies on."""
    db = get_db()

    # Collections whose indexes are all plain ascending compound keys.
    plain_indexes = {
        'client_parameters': [
            [('hostname', ASCENDING), ('parameter', ASCENDING)],
            [('parameter', ASCENDING)],
        ],
        'issues': [
            [('hostname', ASCENDING), ('name', ASCENDING),
             ('closed_at', ASCENDING), ('unsnooze_at', ASCENDING)],
            [('closed_at', ASCENDING), ('unsnooze_at', ASCENDING)],
            [('hostname', ASCENDING), ('closed_at', ASCENDING),
             ('unsnooze_at', ASCENDING)],
            [('name', ASCENDING), ('closed_at', ASCENDING),
             ('unsnooze_at', ASCENDING)],
            [('hostname', ASCENDING), ('suspended', ASCENDING)],
        ],
        'patches': [
            [('pending_hosts', ASCENDING)],
            [('files.path', ASCENDING)],
        ],
    }
    for collection_name, key_lists in plain_indexes.items():
        db[collection_name].create_indexes(
            [IndexModel(keys) for keys in key_lists])

    # 'hostname' carries a uniqueness constraint, so clients is spelled out.
    db.clients.create_indexes([
        IndexModel([('hostname', ASCENDING)], unique=True),
        IndexModel([('suspended', ASCENDING)]),
    ])
def encrypt_handler(args):
    """CLI handler: encrypt secret fields in client and audit-trail documents.

    Requires secret-keeping to be enabled. Every document that still holds
    plaintext secret fields is rewritten with the encrypted form, and each
    rewrite is logged and echoed to stdout.
    """
    if not get_server_setting('secret_keeping:enabled'):
        sys.exit('You cannot encrypt when secret-keeping is disabled.')
    db = get_db()
    selectors = get_selectors()

    # Client documents that contain at least one plaintext secret field.
    spec = {'$or': [{s.plain_mongo: {'$exists': True}} for s in selectors]}
    for doc in db.clients.find(spec):
        doc, update = encrypt_document(doc)
        if update:
            # update_one replaces the deprecated Collection.update
            # (removed in PyMongo 4), matching submit()'s usage.
            db.clients.update_one({'_id': doc['_id']}, update)
            log.info('Encrypted data in client document {} (host {})',
                     doc['_id'], doc['hostname'])
            print('Encrypted client document {} (host {})'.format(
                doc['_id'], doc['hostname']))

    # Audit-trail documents keyed by a plaintext selector.
    spec = {'key': {'$in': [s.plain_mongo for s in selectors]}}
    for doc in db.audit_trail.find(spec):
        doc, update = encrypt_document(doc, selectors=audit_trail_selectors)
        if update:
            # Rewrite the key to its encrypted counterpart as well.
            update['$set']['key'] = next(s.enc_mongo for s in selectors
                                         if s.plain_mongo == doc['key'])
            db.audit_trail.update_one({'_id': doc['_id']}, update)
            log.info('Encrypted data in audit trail document {} (host {})',
                     doc['_id'], doc['hostname'])
            print('Encrypted audit trail document {} (host {})'.format(
                doc['_id'], doc['hostname']))
def pipe_create():
    """HTTP endpoint: allocate a new encrypted pipe for a known client.

    Returns a JSON body containing the new pipe's id.
    """
    payload = json.loads(request.form['data'])
    client_hostname = payload['client_hostname']
    db = get_db()
    if not db.clients.find_one({'hostname': client_hostname}):
        raise Exception('Attempt to create pipe for invalid client {}'.format(
            client_hostname))

    key = payload['encryption_key']
    iv = payload['encryption_iv']
    pipe_id = uuid4().hex
    # Separate encryptor state for each direction of the pipe.
    encryptors[pipe_id]['server'] = {
        'send': Encryptor(key, iv),
        'receive': Encryptor(key, iv),
    }
    pipes[pipe_id] = {
        'client_opened': False,
        'client_closed': False,
        'server_closed': False,
        'client_to_server': b'',
        'server_to_client': b'',
        'created': time.time(),
        'activity': None,
        'client_hostname': client_hostname,
    }
    log.debug('Created pipe {}', pipe_id)
    return json.dumps({'pipe_id': pipe_id})
def submit():
    """HTTP endpoint: accept a plugin/command submission from a client.

    Upserts the client document, diffs it against the previous state,
    encrypts any secret fields, and records changes in the audit trail.
    Returns 'ok' on success, 'error' for an empty submission, or a 400
    response when the payload is not valid JSON.
    """
    db = get_db()
    which = []
    now = datetime.datetime.utcnow()
    try:
        data = json.loads(request.form['data'])
    except json.decoder.JSONDecodeError as e:
        log.exception('Failed to parse request data as JSON. '
                      'Content=<<<{}>>>', request.data)
        return Response(str(e), status=400)
    hostname = data['hostname']
    spec = {'hostname': hostname}
    update = {
        'submitted_at': now,
        'hostname': hostname,
    }
    if 'plugins' in data:
        data['plugins']['submitted_at'] = now
        update['plugins'] = data['plugins']
        which.append('plugins')
    if data.get('commands', {}):
        for name, output in data['commands'].items():
            output['submitted_at'] = now
            update['commands.{}'.format(name)] = output
        which.append('commands')
    if not which:
        log.error('Empty submission from {}', hostname)
        return ('error')

    old = db.clients.find_one(spec)
    # upsert=True replaces the old modified_count check plus the deprecated
    # Collection.save() (removed in PyMongo 4). It also fixes a latent bug:
    # modified_count is 0 when a document matched but nothing changed, which
    # would have tried to insert a duplicate client document.
    update_result = db.clients.update_one(
        spec, {'$set': update, '$unset': {'suspended': True}}, upsert=True)
    if update_result.upserted_id is not None:
        log.info('Added new client: {}', hostname)
    log.info('Successful submission of {} by {}', ', '.join(which), hostname)
    if old:
        new = db.clients.find_one(spec)
        strip_dates(old)
        strip_dates(new)
        new, updates = encrypt_document(new)
        if updates:
            db.clients.update_one({'_id': new['_id']}, updates)
            log.info('Encrypted secret data for {} in document {}',
                     hostname, new['_id'])
        changes, audit_trail = dict_changes(old, new)
        for change in changes:
            log.info('Change for {}: {}', hostname, change)
        if audit_trail:
            audit_trail_write({'audited_at': now, 'hostname': hostname},
                              audit_trail)
    return ('ok')
def flag_impacted_clients(package, dt):
    """Flag Arch clients that have `package` installed and an older flag.

    Logs each affected client, then stamps all of them with the security
    flag timestamp `dt` in a single update.
    """
    spec = {
        'plugins.os_info.distname': 'arch',
        'plugins.os_updates.installed_packages': package,
        '$or': [{arch_security_flag: {'$exists': False}},
                {arch_security_flag: {'$lt': dt}}],
    }
    db = get_db()
    for client in db.clients.find(spec, projection=['hostname']):
        log.info('Flagging client {} for update of Arch package {}',
                 client['hostname'], package)
    db.clients.update_many(spec, {'$set': {arch_security_flag: dt}})
def acknowledge_patch():
    """HTTP endpoint: record that a host has applied a patch."""
    db = get_db()
    payload = json.loads(request.form['data'])
    patch_id = payload['id']
    hostname = payload['hostname']
    # Move the host from the pending list to the completed list in one go.
    db.patches.update_one(
        {'_id': ObjectId(patch_id)},
        {'$push': {'completed_hosts': hostname},
         '$pull': {'pending_hosts': hostname}})
    log.info('{} acknowledged patch {}', hostname, patch_id)
    return 'ok'
def check_pending_patches():
    """Open 'pending-patches' issues for hosts with outstanding patches,
    then close the issue for every host that no longer has any pending."""
    issue_name = 'pending-patches'
    db = get_db()
    problem_hosts = set()
    pending = db.patches.find({'pending_hosts': {'$not': {'$size': 0}}})
    for patch in pending:
        for hostname in patch['pending_hosts']:
            if open_issue(hostname, issue_name):
                log.info('Opened {} issue for {}', issue_name, hostname)
            problem_hosts.add(hostname)
    closed = close_issue({'$not': {'$in': list(problem_hosts)}}, issue_name)
    for doc in closed:
        log.info('Closed {} issue for {}', issue_name, doc['hostname'])
def clear_obsolete_flags():
    """Drop the Arch security flag from clients that have caught up.

    A client whose plugin data was submitted after the flag was set, and
    which reports no outstanding patches, no longer needs the flag.
    """
    db = get_db()
    spec = {arch_security_flag: {'$exists': True},
            'plugins.os_updates.patches': False}
    fields = ['_id', 'hostname', arch_security_flag, 'plugins.submitted_at']
    for client in db.clients.find(spec, fields):
        flagged_at = client[arch_security_flag]
        reported_at = client['plugins']['submitted_at']
        if reported_at <= flagged_at:
            continue
        db.clients.update_one({'_id': client['_id']},
                              {'$unset': {arch_security_flag: True}})
        log.info('Cleared Arch security updates flag from {}',
                 client['hostname'])
def not_reporting_filter(issue):
    """Suppress a not-reporting issue when one of the client's peers is
    reporting recently enough.

    Returns True (filter the issue out) only when the client has peers,
    has itself checked in within the last month, and at least one peer
    has reported since the not-reporting cutoff.
    """
    db = get_db()
    peers = get_client_parameter(issue['hostname'], 'user_clients')
    if not peers:
        return False
    client = db.clients.find_one({'hostname': issue['hostname']})
    if client is None:
        # Client document is gone (e.g. deleted between checks); without a
        # guard the subscript below crashed. Don't suppress the issue.
        return False
    # Completely arbitrary: Even peered clients need to check in at least once
    # per month.
    if client['submitted_at'] < now - datetime.timedelta(days=31):
        return False
    peer_times = [d['submitted_at']
                  for d in db.clients.find({'hostname': {'$in': peers}},
                                           projection=['submitted_at'])]
    cutoff = problem_checks()['not-reporting']['spec']['submitted_at']['$lt']
    return any(t >= cutoff for t in peer_times)
def check_pending_patches():
    """Open 'pending-patches' issues for hosts with outstanding patches.

    Also prunes deleted clients out of patches' pending lists, and closes
    the issue for hosts that no longer have anything pending.
    """
    issue_name = 'pending-patches'
    db = get_db()
    problem_hosts = set()
    for patch in db.patches.find({'pending_hosts': {'$not': {'$size': 0}}}):
        for hostname in patch['pending_hosts']:
            if not client_exists(hostname):
                # update_one replaces the deprecated Collection.update
                # (removed in PyMongo 4), matching acknowledge_patch().
                db.patches.update_one(
                    {'_id': patch['_id']},
                    {'$pull': {'pending_hosts': hostname}})
                log.info('Removed deleted client {} from pending patch {}',
                         hostname, patch['_id'])
                continue
            if open_issue(hostname, issue_name):
                log.info('Opened {} issue for {}', issue_name, hostname)
            problem_hosts.add(hostname)
    for doc in close_issue({'$not': {'$in': list(problem_hosts)}},
                           issue_name):
        log.info('Closed {} issue for {}', issue_name, doc['hostname'])
def download_arch_security():
    """Yield (package, announced_at) for new Arch security announcements.

    Records each announcement in the database; stops at the first one
    already recorded, since everything after it in the feed is older.
    """
    collection = get_db().arch_security_updates
    collection.create_index([('package', ASCENDING),
                             ('announced_at', ASCENDING)], unique=True)
    for package, announced_at, source in rss_feed():
        record = {'package': package,
                  'announced_at': announced_at,
                  'source': source}
        try:
            collection.insert_one(record)
        except DuplicateKeyError:
            # Already seen this announcement, so stop processing the feed.
            return
        log.info('Identified Arch security update for {}, '
                 'announced at {}', package, announced_at)
        yield (package, announced_at)
def select_handler(args):
    """CLI handler: add secret-keeping selectors to the configuration."""
    selectors = get_server_setting(selectors_setting)
    # Normalize legacy settings: a bare string becomes a one-element list,
    # and a missing setting becomes an empty list; persist the normalization.
    if isinstance(selectors, str):
        selectors = [selectors]
        set_server_setting(selectors_setting, selectors)
    if selectors is None:
        selectors = []
        set_server_setting(selectors_setting, selectors)

    duplicates = [s for s in args.selector if s in selectors]
    for selector in duplicates:
        sys.stderr.write('Selector {} is already added.\n'.format(selector))
    if duplicates:
        sys.exit(1)

    if not args.force:
        db = get_db()
        unmatched = [s for s in args.selector
                     if not db.clients.find_one({s: {'$exists': True}},
                                                projection=[])]
        for selector in unmatched:
            sys.stderr.write(
                'Selector {} does not match anything.\n'
                'Specify --force to save anyway.\n'.format(selector))
        if unmatched:
            sys.exit(1)

    for selector in args.selector:
        log.info('Adding secret-keeping selector {}', selector)
    selectors.extend(args.selector)
    save_server_settings()
    set_client_setting(selectors_setting, selectors)
    save_client_settings()
    print(restart_note)
def unsnooze_handler(args):
    """CLI handler: unsnooze matching issues and report what was done."""
    if not (args.host or args.issue_name or args.all):
        sys.exit('If you really want to unsnooze all issues for all hosts,\n'
                 'you need to specify --all.')

    def as_spec(values):
        # None -> match everything; one value -> exact match; many -> $in.
        if not values:
            return None
        return values[0] if len(values) == 1 else {'$in': values}

    ids = unsnooze_issue(as_spec(args.host), as_spec(args.issue_name))
    if not ids:
        print('No matching issues.')
        return

    with logbook.StreamHandler(sys.stdout, bubble=True):
        for doc in get_db().issues.find({'_id': {'$in': ids}}):
            log.info('Unsnoozed {} {} at {}', doc['hostname'], doc['name'],
                     doc['unsnoozed_at'])
def access_handler(args):
    """CLI handler: display encrypted secret data without persisting it.

    Combines the split secret key, decrypts matching client documents (and
    optionally the audit trail) for display only, and always deletes the
    combined key afterward.
    """
    combine_secret_key()
    selectors = get_selectors()
    try:
        db = get_db()
        # Client documents that contain at least one encrypted field.
        spec = {'$or': [{s.enc_mongo: {'$exists': True}} for s in selectors]}
        # Only print the 'Clients:' header when the audit trail will also be
        # shown, so the two sections are distinguishable.
        printed_header = not args.audit_trail
        for keys, dct, tuples in decrypt_iterator(db.clients.find(spec),
                                                  ('_id', 'hostname'),
                                                  full_documents=args.full,
                                                  selectors=selectors):
            if dct:
                if not printed_header:
                    print('Clients:\n')
                    printed_header = True
                pprint.pprint({**keys, **dct})
                log.info('Displayed encrypted data in document {} (host {})',
                         keys['_id'], keys['hostname'])
        if args.audit_trail:
            if printed_header:
                print('')
            spec = {'key': {'$in': [s.enc_mongo for s in selectors]}}
            printed_header = False
            # Sorted by _id descending, i.e. newest entries first.
            for keys, dct, tuples in decrypt_iterator(
                    db.audit_trail.find(spec, sort=(('_id', DESCENDING), )),
                    selectors=audit_trail_selectors, full_documents=True):
                if dct:
                    if not printed_header:
                        print('Audit trail:\n')
                        printed_header = True
                    # Map the encrypted key back to its plaintext selector
                    # for display.
                    dct['key'] = next(s.plain_mongo for s in selectors
                                      if s.enc_mongo == dct['key'])
                    pprint.pprint(dct)
                    log.info(
                        'Displayed encrypted audit trail in document {} '
                        '(host {})', dct['_id'], dct['hostname'])
    finally:
        # Never leave the combined secret key behind, even on error.
        delete_secret_key()
def decrypt_handler(args):
    """CLI handler: permanently decrypt secret data back into the database.

    Combines the split secret key, rewrites client documents with their
    plaintext fields, rewrites matching audit-trail documents, and always
    deletes the combined key afterward.
    """
    combine_secret_key()
    selectors = get_selectors()
    try:
        db = get_db()
        spec = {'$or': [{s.enc_mongo: {'$exists': True}} for s in selectors]}
        for keys, dct, tuples in decrypt_iterator(db.clients.find(spec),
                                                  ('_id', 'hostname'),
                                                  selectors=selectors):
            if dct:
                spec = {'_id': keys['_id']}
                update = {
                    '$set': {s.plain_mongo: u for s, u in tuples},
                    '$unset': {s.enc_mongo: True for s, u in tuples}
                }
                # update_one replaces the deprecated Collection.update
                # (removed in PyMongo 4).
                db.clients.update_one(spec, update)
                log.info('Decrypted data in client document {} (host {})',
                         keys['_id'], keys['hostname'])
                print('Decrypted client document {} (host {})'.format(
                    keys['_id'], keys['hostname']))
        spec = {'key': {'$in': [s.enc_mongo for s in selectors]}}
        for keys, dct, tuples in decrypt_iterator(
                db.audit_trail.find(spec), selectors=audit_trail_selectors,
                full_documents=True):
            if dct:
                dct['key'] = next(s.plain_mongo for s in selectors
                                  if s.enc_mongo == dct['key'])
                spec = {'_id': dct['_id']}
                # The old code passed a whole document to the deprecated
                # Collection.update, i.e. full-replacement semantics;
                # replace_one is the modern equivalent.
                db.audit_trail.replace_one(spec, dct)
                log.info('Decrypted data in audit trail document {} (host {})',
                         dct['_id'], dct['hostname'])
                print('Decrypted audit trail document {} (host {})'.format(
                    dct['_id'], dct['hostname']))
    finally:
        delete_secret_key()
def update():
    """HTTP endpoint: tell a client whether a newer release is available.

    Returns JSON with the client's update status, the newest signed release
    when the client is out of date, and any patches pending for the host.
    """
    db = get_db()
    data = json.loads(request.form['data'])
    hostname = data['hostname']
    old_release = data['old_release']
    # Release files are named '<number>.<...>.asc'. Sort numerically by the
    # leading release number: the previous lexicographic sort would have
    # ordered '10.*' before '9.*'.
    releases = sorted((r for r in os.listdir(releases_dir)
                       if r.endswith('.asc')),
                      key=lambda r: int(r[0:r.index('.')]))
    response_data = {}
    if len(releases) == 0:
        response_data['status'] = 'current'
    else:
        current_release_file = releases[-1]
        current_release_number = \
            int(current_release_file[0:current_release_file.index('.')])
        if old_release >= current_release_number:
            response_data['status'] = 'current'
            log.debug('{} is current ({})', hostname, current_release_number)
        else:
            log.info('Sending release {} to {} (currently at {})',
                     current_release_number, hostname, old_release)
            response_data['status'] = 'out-of-date'
            response_data['current_release'] = current_release_number
            # Context manager so the release file handle is closed promptly
            # instead of leaking until garbage collection.
            with open(os.path.join(releases_dir,
                                   current_release_file)) as release_file:
                response_data['update'] = release_file.read()
    patches = [{
        'id': str(d['_id']),
        'files': d['files']
    } for d in db.patches.find({'pending_hosts': hostname},
                               projection=['files'])]
    if patches:
        log.info('Sending patches {} ({}) to {}',
                 ', '.join(p['id'] for p in patches),
                 ', '.join(f['path'] for p in patches for f in p['files']),
                 hostname)
        response_data['patches'] = patches
    return json.dumps(response_data)
def client_exists(hostname):
    """Return True if a client document exists for `hostname`.

    Only the _id is projected, since mere existence is all that matters.
    """
    db = get_db()
    # bool() instead of the redundant 'True if ... else False' ternary.
    return bool(db.clients.find_one({'hostname': hostname},
                                    projection=['_id']))
def audit_handler(args):
    """CLI handler: run all problem checks, report open issues, and
    optionally alert and email client owners.

    Opens/closes issues based on each check's spec, then walks the open
    issues applying per-check filters, alert frequency, grace period, and
    snooze state to decide what to print, log, mark alerted, and email.
    """
    def d(dt):
        # Short display form for timestamps.
        return dt.strftime('%m/%d %H:%M')
    in_a_terminal = os.isatty(sys.stderr.fileno())
    # Interactive runs default to showing everything without re-alerting;
    # cron-style runs default to updating alert timestamps.
    if args.update_recent is None:
        args.update_recent = not in_a_terminal
    if args.display_recent is None:
        args.display_recent = in_a_terminal
    if args.full:
        args.ignore_grace_period = args.display_recent = \
            args.ignore_snoozed = args.ignore_filters = True
    db = get_db()
    for check_name, check in problem_checks().items():
        if 'spec' not in check:
            continue
        # NOTE: the comprehension variable shadows the d() helper only
        # inside the comprehension itself (Python 3 scoping).
        problems = [d for d in db.clients.find(check['spec'])]
        for problem in problems:
            if open_issue(problem['hostname'], check_name,
                          as_of=problem['plugins']['submitted_at']):
                log.info('Opened {} issue for {} as of {}', check_name,
                         problem['hostname'],
                         problem['plugins']['submitted_at'])
        problem_hosts = [d['hostname'] for d in problems]
        for doc in close_issue({'$not': {'$in': problem_hosts}}, check_name):
            log.info('Closed {} issue for {}', check_name, doc['hostname'])
    check_pending_patches()
    check_ssl_certificates()
    issues = get_open_issues(include_suspended=args.ignore_suspended)
    default_alert_frequency = datetime.timedelta(hours=1)
    # key1 is a hostname (or a global-issue key); value1 maps issue names
    # to issue documents.
    for key1, value1 in issues.items():
        key1_printed = False
        email_list = ''
        for key2, issue in value1.items():
            check = problem_checks().get(issue['name']) or {}
            filter = None if args.ignore_filters else check.get('filter', None)
            alert_frequency = check.get('alert-frequency',
                                        default_alert_frequency)
            # Not exactly on the hour, to avoid race conditions when running
            # hourly.
            if 0 == alert_frequency.total_seconds() % 3600:
                alert_frequency -= datetime.timedelta(minutes=1)
            alert_threshold = now - alert_frequency
            grace_period = check.get('grace-period', datetime.timedelta(0))
            # All four gates must pass for the issue to be reported.
            filter_ok = not (filter and filter(issue))
            alert_ok = (args.display_recent or 'alerted_at' not in issue or
                        issue['alerted_at'] < alert_threshold)
            grace_ok = (args.ignore_grace_period or
                        business_hours(issue['opened_at'], now) > grace_period)
            snooze_ok = (args.ignore_snoozed or 'unsnooze_at' not in issue or
                         issue['unsnooze_at'] < now)
            if issue.get('unsnooze_at', now) > now:
                snoozed = ' [snoozed until {}]'.format(d(issue['unsnooze_at']))
            else:
                snoozed = ''
            client = db.clients.find_one({'hostname': key1},
                                         projection=['submitted_at'])
            if filter_ok and alert_ok and grace_ok and snooze_ok:
                if not key1_printed:
                    if client:
                        last_reported = d(client['submitted_at'])
                        print('{} [last reported at {}]'.format(
                            key1, last_reported))
                    else:
                        # This is a global issue, not a per-client issue
                        print(key1)
                    key1_printed = True
                print(' {} since {}{}'.format(key2, d(issue['opened_at']),
                                              snoozed))
                email_list += '{} since {}\n'.format(key2,
                                                     d(issue['opened_at']))
                if not in_a_terminal:
                    log.warn('{} {} since {}', key1, key2, issue['opened_at'])
                if args.update_recent:
                    # NOTE(review): Collection.update is deprecated (removed
                    # in PyMongo 4); update_one would be the replacement.
                    db.issues.update({'_id': issue['_id']},
                                     {'$set': {
                                         'alerted_at': now
                                     }})
        # NOTE(review): 'client' here is the value left over from the last
        # inner-loop iteration; if value1 is empty it is undefined — looks
        # like empty value1 never occurs, but confirm.
        if client and args.email and email_list:
            email = get_client_parameter(key1, 'user_email')
            if not email:
                log.warn("Can't send email for {} since user_email not set",
                         key1)
                continue
            subject = 'Please address issues on {}'.format(key1)
            body = dedent("""\
                The following issues have been identified on the PenguinDome
                (device management) client "{}", for which you are the listed owner.

                Please rectify these issues at your earliest convenience.\n\n""".format(key1))
            body += email_list
            smtp = smtplib.SMTP()
            smtp.connect()
            msg = dedent("""\
                From: PenguinDome
                To: {to}
                Subject: {subject}\n\n""").format(to=email, subject=subject)
            msg += body
            smtp.sendmail('PenguinDome', [email], msg)
            smtp.quit()