def synchronize_hosts(select_query, event_producer, chunk_size, config, interrupt=lambda: False):
    """Re-emit an "updated" event for every host matched by select_query.

    Walks the query in chunks of chunk_size using keyset pagination on
    Host.id, writes one EventType.updated message per host to the events
    topic, and yields each synchronized host's id. Iteration stops early
    when interrupt() returns a truthy value.

    Raises:
        KafkaTimeoutError: when the producer cannot flush a chunk's
            buffered records within 300 seconds.
    """
    query = select_query.order_by(Host.id)
    host_list = query.limit(chunk_size).all()
    while len(host_list) > 0 and not interrupt():
        for host in host_list:
            serialized_host = serialize_host(host, Timestamps.from_config(config), EGRESS_HOST_FIELDS)
            event = build_event(EventType.updated, serialized_host)
            insights_id = host.canonical_facts.get("insights_id")
            headers = message_headers(EventType.updated, insights_id)
            # in case of a failed update event, event_producer logs the message.
            event_producer.write_event(event, str(host.id), headers, Topic.events)
            synchronize_host_count.inc()
            yield host.id
        try:
            # pace the events production speed as flush completes sending all buffered records.
            event_producer._kafka_producer.flush(300)
        except KafkaTimeoutError as error:
            # chain the original exception so the flush failure's traceback is preserved
            raise KafkaTimeoutError(
                f"KafkaTimeoutError: failure to flush {chunk_size} records within 300 seconds"
            ) from error
        # load next chunk using keyset pagination
        host_list = query.filter(Host.id > host_list[-1].id).limit(chunk_size).all()
def update_system_profile(input_host, identity, staleness_offset, fields):
    """Update an existing host's system profile from input_host's data.

    Looks up the target host by id when available, otherwise by canonical
    facts. Returns (serialized host, host id, insights_id, AddHostResult.updated).

    Raises:
        InventoryException: if no system profile data was supplied, or no
            matching host exists.
    """
    if not input_host.system_profile_facts:
        raise InventoryException(
            title="Invalid request",
            detail="Cannot update System Profile, since no System Profile data was provided.",
        )

    with session_guard(db.session):
        # prefer a direct id lookup; fall back to canonical-facts matching
        if input_host.id:
            existing_host = find_existing_host_by_id(identity, input_host.id)
        else:
            existing_host = find_existing_host(identity, input_host.canonical_facts)

        if not existing_host:
            raise InventoryException(
                title="Invalid request",
                detail="Could not find an existing host with the provided facts.",
            )

        logger.debug("Updating system profile on an existing host")
        logger.debug(f"existing host = {existing_host}")
        existing_host.update_system_profile(input_host.system_profile_facts)
        db.session.commit()
        metrics.update_host_count.inc()
        logger.debug("Updated system profile for host:%s", existing_host)

        serialized = serialize_host(existing_host, staleness_offset, fields)
        return serialized, existing_host.id, existing_host.canonical_facts.get("insights_id"), AddHostResult.updated
def create_new_host(input_host, staleness_offset):
    """Persist input_host as a new inventory record.

    Returns (serialized host, AddHostResults.created).
    """
    logger.debug("Creating a new host")
    input_host.save()
    db.session.commit()
    metrics.create_host_count.inc()
    logger.debug("Created host:%s", input_host)
    serialized = serialize_host(input_host, staleness_offset)
    return serialized, AddHostResults.created
def patch_by_id(host_id_list, body):
    """PATCH handler: apply a partial update to each host in host_id_list.

    Returns 200 on success, a 400 problem-details tuple on schema
    validation failure, and aborts with 404 when no host matches.
    """
    try:
        validated_patch_host_data = PatchHostSchema(strict=True).load(body).data
    except ValidationError as e:
        logger.exception(f"Input validation error while patching host: {host_id_list} - {body}")
        # marshmallow validation failure -> RFC 7807-style error payload
        return (
            {"status": 400, "title": "Bad Request", "detail": str(e.messages), "type": "unknown"},
            400,
        )
    query = _get_host_list_by_id_list(host_id_list)
    hosts_to_update = query.all()
    if not hosts_to_update:
        log_patch_host_failed(logger, host_id_list)
        return flask.abort(status.HTTP_404_NOT_FOUND)
    for host in hosts_to_update:
        host.patch(validated_patch_host_data)
        # only commit and emit a patch event when the patch actually changed the row
        if db.session.is_modified(host):
            db.session.commit()
            serialized_host = serialize_host(host, staleness_timestamps(), EGRESS_HOST_FIELDS)
            _emit_patch_event(serialized_host, host.id, host.canonical_facts.get("insights_id"))
    log_patch_host_success(logger, host_id_list)
    return 200
def patch_by_id(host_id_list, body):
    """PATCH handler: apply a partial update to each matching host for the
    current identity's account, emitting a patch event per host.

    Returns 200 on success, a 400 problem-details tuple on schema
    validation failure, and aborts with 404 when no host matches.
    """
    try:
        validated_patch_host_data = PatchHostSchema(strict=True).load(body).data
    except ValidationError as e:
        logger.exception(f"Input validation error while patching host: {host_id_list} - {body}")
        error_body = {"status": 400, "title": "Bad Request", "detail": str(e.messages), "type": "unknown"}
        return (error_body, 400)

    hosts_to_update = _get_host_list_by_id_list(current_identity.account_number, host_id_list).all()
    if not hosts_to_update:
        logger.debug("Failed to find hosts during patch operation - hosts: %s", host_id_list)
        return flask.abort(status.HTTP_404_NOT_FOUND)

    for host in hosts_to_update:
        host.patch(validated_patch_host_data)
        _emit_patch_event(serialize_host(host, staleness_timestamps(), EGRESS_HOST_FIELDS))

    # one commit for the whole batch, after all patches are applied
    db.session.commit()
    return 200
def build_paginated_host_list_response(total, page, per_page, host_list):
    """Serialize host_list and wrap it in the standard pagination envelope."""
    timestamps = staleness_timestamps()
    serialized_hosts = [serialize_host(h, timestamps) for h in host_list]
    return {
        "total": total,
        "count": len(serialized_hosts),
        "page": page,
        "per_page": per_page,
        "results": serialized_hosts,
    }
def build_paginated_host_list_response(total, page, per_page, host_list, additional_fields=tuple()):
    """Serialize host_list (default fields plus additional_fields) and wrap
    the result in the standard pagination envelope."""
    timestamps = staleness_timestamps()
    fields = DEFAULT_FIELDS + additional_fields
    serialized_hosts = [serialize_host(h, timestamps, fields) for h in host_list]
    return {
        "total": total,
        "count": len(serialized_hosts),
        "page": page,
        "per_page": per_page,
        "results": serialized_hosts,
    }
def update_existing_host(existing_host, input_host, staleness_offset, update_system_profile):
    """Merge input_host into existing_host, commit, and return the result.

    Returns (serialized host, AddHostResults.updated).
    """
    logger.debug("Updating an existing host")
    logger.debug(f"existing host = {existing_host}")
    existing_host.update(input_host, update_system_profile)
    db.session.commit()
    metrics.update_host_count.inc()
    logger.debug("Updated host:%s", existing_host)
    serialized = serialize_host(existing_host, staleness_offset)
    return serialized, AddHostResults.updated
def _update_host(self, added_host_index, new_id, new_modified_on):
    """Rewrite a previously added host's id and modified_on in the DB, then
    refresh the cached HostWrapper for that index."""
    old_id = self.added_hosts[added_host_index].id
    host_record = db.session.query(Host).get(old_id)
    host_record.id = new_id
    host_record.modified_on = new_modified_on
    db.session.add(host_record)
    staleness_offset = Timestamps.from_config(self.app.config["INVENTORY_CONFIG"])
    self.added_hosts[added_host_index] = HostWrapper(serialize_host(host_record, staleness_offset))
def _build_paginated_host_list_response(total, page, per_page, host_list):
    """Serialize host_list and return it as a paginated 200 JSON response."""
    serialized_hosts = [serialize_host(h, staleness_offset()) for h in host_list]
    payload = {
        "total": total,
        "count": len(host_list),
        "page": page,
        "per_page": per_page,
        "results": serialized_hosts,
    }
    return _build_json_response(payload, status=200)
def host_checkin(body):
    """Refresh the modified date of the host matching the given canonical
    facts, emit a patch event, and return the serialized host with 201.

    Aborts with 404 when no host matches.
    """
    canonical_facts = deserialize_canonical_facts(body)
    existing_host = find_existing_host(current_identity.account_number, canonical_facts)
    if not existing_host:
        # abort raises, so no return needed here
        flask.abort(404, "No hosts match the provided canonical facts.")

    existing_host._update_modified_date()
    db.session.commit()
    serialized_host = serialize_host(existing_host, staleness_timestamps(), EGRESS_HOST_FIELDS)
    _emit_patch_event(serialized_host, existing_host.id, existing_host.canonical_facts.get("insights_id"))
    return flask_json_response(serialized_host, 201)
def create_new_host(input_host, staleness_offset, fields):
    """Persist input_host as a new inventory record.

    Returns (serialized host, host id, insights_id, AddHostResult.created).
    """
    logger.debug("Creating a new host")
    input_host.save()
    db.session.commit()
    metrics.create_host_count.inc()
    logger.debug("Created host:%s", input_host)
    serialized = serialize_host(input_host, staleness_offset, fields)
    return serialized, input_host.id, input_host.canonical_facts.get("insights_id"), AddHostResult.created
def update_existing_host(existing_host, input_host, staleness_offset, update_system_profile, fields):
    """Merge input_host into existing_host, commit, and return the result.

    Returns (serialized host, host id, insights_id, AddHostResult.updated).
    """
    logger.debug("Updating an existing host")
    logger.debug(f"existing host = {existing_host}")
    existing_host.update(input_host, update_system_profile)
    db.session.commit()
    metrics.update_host_count.inc()
    logger.debug("Updated host:%s", existing_host)
    serialized = serialize_host(existing_host, staleness_offset, fields)
    return serialized, existing_host.id, existing_host.canonical_facts.get("insights_id"), AddHostResult.updated
# print(query_results)
# Dispatch on whichever lookup argument was supplied, most specific first.
if args.id:
    print("looking up host using id")
    query_results = Host.query.filter(Host.id.in_([args.id])).all()
elif args.hostname:
    print("looking up host using display_name, fqdn")
    name_match = Host.display_name.comparator.contains(args.hostname)
    fqdn_match = Host.canonical_facts["fqdn"].astext.contains(args.hostname)
    query_results = Host.query.filter(name_match | fqdn_match).all()
elif args.insights_id:
    print("looking up host using insights_id")
    query_results = Host.query.filter(
        Host.canonical_facts.comparator.contains({"insights_id": args.insights_id})
    ).all()
elif args.account_number:
    query_results = Host.query.filter(Host.account == args.account_number).all()

staleness_timestamps = Timestamps.from_config(inventory_config())
json_host_list = [serialize_host(host, staleness_timestamps) for host in query_results]

if args.no_pp:
    print(json_host_list)
else:
    pprint.PrettyPrinter(indent=4).pprint(json_host_list)
# query_results = Host.query.filter().all()
# print(query_results)
# Dispatch on whichever lookup argument was supplied, most specific first.
if args.id:
    print("looking up host using id")
    query_results = Host.query.filter(Host.id.in_([args.id])).all()
elif args.hostname:
    print("looking up host using display_name, fqdn")
    name_match = Host.display_name.comparator.contains(args.hostname)
    fqdn_match = Host.canonical_facts["fqdn"].astext.contains(args.hostname)
    query_results = Host.query.filter(name_match | fqdn_match).all()
elif args.insights_id:
    print("looking up host using insights_id")
    query_results = Host.query.filter(
        Host.canonical_facts.comparator.contains({"insights_id": args.insights_id})
    ).all()
elif args.account_number:
    query_results = Host.query.filter(Host.account == args.account_number).all()

json_host_list = [serialize_host(host, staleness_offset()) for host in query_results]

if args.no_pp:
    print(json_host_list)
else:
    pprint.PrettyPrinter(indent=4).pprint(json_host_list)
def serialize_db_host(host, inventory_config):
    """Serialize a DB host row using config-derived staleness offsets and
    wrap the result in a HostWrapper."""
    offsets = Timestamps.from_config(inventory_config)
    return HostWrapper(serialize_host(host, offsets))