def test_create_then_delete_check_metadata(event_datetime_mock, event_producer_mock, db_create_host, api_delete_host):
    host = db_create_host()
    request_id = generate_uuid()
    headers = {"x-rh-insights-request-id": request_id}

    response_status, response_data = api_delete_host(host.id, extra_headers=headers)

    assert_response_status(response_status, expected_status=200)

    assert_delete_event_is_valid(
        event_producer=event_producer_mock,
        host=host,
        timestamp=event_datetime_mock,
        expected_request_id=request_id,
        expected_metadata={"request_id": request_id},
    )
def test_display_name_ignored_for_blacklisted_reporters(reporter, mq_create_or_update_host, db_get_host_by_insights_id):
    """
    Tests the workaround for https://projects.engineering.redhat.com/browse/RHCLOUD-5954
    """
    insights_id = generate_uuid()
    host = minimal_host(display_name="test_host", insights_id=insights_id, reporter="puptoo")
    mq_create_or_update_host(host)

    host = minimal_host(display_name="yupana_test_host", insights_id=insights_id, reporter=reporter)
    mq_create_or_update_host(host)

    record = db_get_host_by_insights_id(insights_id)

    # The display_name sent by the blacklisted reporter is ignored, but the reporter itself is still updated.
    assert record.display_name == "test_host"
    assert record.reporter == reporter
def test_get_hosts_only_insights(mq_create_three_specific_hosts, mq_create_or_update_host, api_get):
    created_hosts_with_insights_id = mq_create_three_specific_hosts

    host_without_insights_id = minimal_host(subscription_manager_id=generate_uuid())
    created_host_without_insights_id = mq_create_or_update_host(host_without_insights_id)

    url = build_hosts_url(query="?registered_with=insights")
    response_status, response_data = api_get(url)

    assert response_status == 200
    assert len(response_data["results"]) == 3

    result_ids = sorted([host["id"] for host in response_data["results"]])
    expected_ids = sorted([host.id for host in created_hosts_with_insights_id])
    non_expected_id = created_host_without_insights_id.id

    assert expected_ids == result_ids
    assert non_expected_id not in expected_ids
def test_add_host_with_sap_system(event_datetime_mock, mq_create_or_update_host):
    expected_insights_id = generate_uuid()
    timestamp_iso = event_datetime_mock.isoformat()

    system_profile = valid_system_profile()
    system_profile["sap_system"] = True

    host = minimal_host(insights_id=expected_insights_id, system_profile=system_profile)

    expected_results = {
        "host": {**host.data()},
        "platform_metadata": {},
        "timestamp": timestamp_iso,
        "type": "created",
    }

    host_keys_to_check = ["display_name", "insights_id", "account", "system_profile"]

    key, event, headers = mq_create_or_update_host(host, return_all_data=True)

    assert_mq_host_data(key, event, expected_results, host_keys_to_check)
def test_add_host_with_tag_dict(mq_create_or_update_host, db_get_host_by_insights_id):
    insights_id = generate_uuid()
    tags = {
        "namespace 1": {"key 1": ["value 1"], "key 2": [], "key 3": None},
        "namespace 2": {"key 1": ["value 2", "", None]},
        "namespace 3": None,
        "namespace 4": {},
        "null": {"key 4": ["value 3"]},
        "": {"key 4": ["value 4"]},
    }
    host = minimal_host(insights_id=insights_id, tags=tags)

    mq_create_or_update_host(host)

    record = db_get_host_by_insights_id(insights_id)

    # Normalization: the "" namespace merges into "null", None values become empty lists,
    # and empty namespaces and empty/None list entries are dropped.
    assert record.tags == {
        "namespace 1": {"key 1": ["value 1"], "key 2": [], "key 3": []},
        "namespace 2": {"key 1": ["value 2"]},
        "null": {"key 4": ["value 3", "value 4"]},
    }
def test_add_host_simple(event_datetime_mock, mq_create_or_update_host):
    """
    Tests adding a host with some simple data
    """
    expected_insights_id = generate_uuid()
    timestamp_iso = event_datetime_mock.isoformat()

    host = minimal_host(insights_id=expected_insights_id)

    expected_results = {
        "host": {**host.data()},
        "platform_metadata": {},
        "timestamp": timestamp_iso,
        "type": "created",
    }

    host_keys_to_check = ["display_name", "insights_id", "account"]

    key, event, headers = mq_create_or_update_host(host, return_all_data=True)

    assert_mq_host_data(key, event, expected_results, host_keys_to_check)
def test_update_existing_host_display_name_changing_fqdn(db_create_host):
    old_fqdn = "host1.domain1.com"
    new_fqdn = "host2.domain2.com"
    insights_id = generate_uuid()

    existing_host = db_create_host(
        extra_data={"canonical_facts": {"fqdn": old_fqdn, "insights_id": insights_id}, "display_name": None}
    )

    # Set the display_name to the old FQDN
    existing_host.display_name = old_fqdn
    db.session.commit()

    assert existing_host.display_name == old_fqdn

    # Update the host
    input_host = Host(
        {"fqdn": new_fqdn, "insights_id": insights_id}, display_name="", reporter="puptoo", stale_timestamp=now()
    )
    existing_host.update(input_host)

    assert existing_host.display_name == new_fqdn
def test_create_then_delete_with_request_id(event_datetime_mock, event_producer_mock, db_create_host, api_delete_host):
    host = db_create_host(extra_data={"system_profile_facts": {"owner_id": SYSTEM_IDENTITY["system"]["cn"]}})

    request_id = generate_uuid()
    headers = {"x-rh-insights-request-id": request_id}

    response_status, response_data = api_delete_host(host.id, extra_headers=headers)

    assert_response_status(response_status, expected_status=200)

    assert_delete_event_is_valid(
        event_producer=event_producer_mock, host=host, timestamp=event_datetime_mock, expected_request_id=request_id
    )
def test_add_tags_to_host_by_dict(mq_create_or_update_host, db_get_host_by_insights_id, subtests):
    insights_id = generate_uuid()

    # Can't use parametrize here due to the data cleanup on each test run
    for message_tags, expected_tags in (
        ({}, {}),
        ({"namespace 1": {"key 1": ["value 1"]}}, {"namespace 1": {"key 1": ["value 1"]}}),
        (
            {"namespace 2": {"key 1": ["value 2"]}},
            {"namespace 1": {"key 1": ["value 1"]}, "namespace 2": {"key 1": ["value 2"]}},
        ),
    ):
        with subtests.test(tags=message_tags):
            host = minimal_host(insights_id=insights_id, tags=message_tags)
            mq_create_or_update_host(host)

            record = db_get_host_by_insights_id(insights_id)

            assert expected_tags == record.tags
def test_query_variables_insights_id(mocker, query_source_xjoin, graphql_query_empty_response, api_get):
    insights_id = generate_uuid()

    url = build_hosts_url(query=f"?insights_id={quote(insights_id)}")
    response_status, response_data = api_get(url)

    assert response_status == 200

    graphql_query_empty_response.assert_called_once_with(
        HOST_QUERY,
        {
            "order_by": mocker.ANY,
            "order_how": mocker.ANY,
            "limit": mocker.ANY,
            "offset": mocker.ANY,
            "filter": ({"insights_id": {"eq": insights_id}}, mocker.ANY),
        },
    )
def test_patch_produces_update_event_with_request_id(
    event_datetime_mock, event_producer_mock, db_create_host, db_get_host, api_patch
):
    patch_doc = {"display_name": "patch_event_test"}
    request_id = generate_uuid()
    headers = {"x-rh-insights-request-id": request_id}

    host = db_host()
    created_host = db_create_host(host=host)

    url = build_hosts_url(host_list_or_id=created_host.id)
    response_status, response_data = api_patch(url, patch_doc, extra_headers=headers)

    assert_response_status(response_status, expected_status=200)

    assert_patch_event_is_valid(
        host=created_host,
        event_producer=event_producer_mock,
        expected_request_id=request_id,
        expected_timestamp=event_datetime_mock,
    )
def test_update_existing_host_fix_display_name_using_id(db_create_host):
    # Create an "existing" host
    insights_id = generate_uuid()
    existing_host = db_create_host(
        SYSTEM_IDENTITY,
        extra_data={
            "canonical_facts": {"insights_id": insights_id},
            "display_name": None,
            "system_profile_facts": {"owner_id": SYSTEM_IDENTITY["system"]["cn"]},
        },
    )

    # Clear the display_name
    existing_host.display_name = None
    db.session.commit()
    assert existing_host.display_name is None

    # Update the host
    input_host = Host({"insights_id": insights_id}, display_name="", reporter="puptoo", stale_timestamp=now())
    existing_host.update(input_host)

    # With no display_name and no FQDN available, the display_name falls back to the host ID
    assert existing_host.display_name == existing_host.id
def test_handle_message_unicode_not_damaged(mocker, flask_app, subtests):
    mocker.patch("app.queue.queue.build_event")
    add_host = mocker.patch("app.queue.queue.add_host", return_value=(mocker.MagicMock(), None, None, None))

    operation_raw = "🧜🏿♂️"
    operation_escaped = json.dumps(operation_raw)[1:-1]

    messages = (
        f'{{"operation": "", "data": {{"display_name": "{operation_raw}{operation_raw}"}}}}',
        f'{{"operation": "", "data": {{"display_name": "{operation_escaped}{operation_escaped}"}}}}',
        f'{{"operation": "", "data": {{"display_name": "{operation_raw}{operation_escaped}"}}}}',
    )

    for message in messages:
        with subtests.test(message=message):
            host_id = generate_uuid()
            add_host.reset_mock()
            add_host.return_value = ({"id": host_id}, host_id, None, AddHostResult.updated)

            handle_message(message, mocker.Mock())

            # Raw and JSON-escaped forms must both decode to the same unicode string
            add_host.assert_called_once_with({"display_name": f"{operation_raw}{operation_raw}"})
def test_add_host_with_tag_list(mq_create_or_update_host, db_get_host_by_insights_id):
    insights_id = generate_uuid()
    tags = [
        {"namespace": "namespace 1", "key": "key 1", "value": "value 1"},
        {"namespace": "namespace 1", "key": "key 2", "value": None},
        {"namespace": "namespace 2", "key": "key 1", "value": None},
        {"namespace": "namespace 2", "key": "key 1", "value": ""},
        {"namespace": "namespace 2", "key": "key 1", "value": "value 2"},
        {"namespace": "", "key": "key 3", "value": "value 3"},
        {"namespace": None, "key": "key 3", "value": "value 4"},
        {"namespace": "null", "key": "key 3", "value": "value 5"},
    ]

    host = minimal_host(insights_id=insights_id, tags=tags)

    mq_create_or_update_host(host)

    record = db_get_host_by_insights_id(insights_id)

    assert record.tags == {
        "namespace 1": {"key 1": ["value 1"], "key 2": []},
        "namespace 2": {"key 1": ["value 2"]},
        "null": {"key 3": ["value 3", "value 4", "value 5"]},
    }
def test_create_host_with_ansible_host(api_create_or_update_host, api_get):
    # Create a host with ansible_host field
    host = minimal_host(ansible_host="ansible_host_" + generate_uuid())

    multi_response_status, multi_response_data = api_create_or_update_host([host])

    assert_response_status(multi_response_status, 207)

    create_host_response = get_host_from_multi_response(multi_response_data)

    assert_host_was_created(create_host_response)

    created_host_id = create_host_response["host"]["id"]

    response_status, response_data = api_get(f"{HOST_URL}/{created_host_id}")

    assert_response_status(response_status, 200)

    host_response = get_host_from_response(response_data)

    assert_host_data(actual_host=host_response, expected_host=host, expected_id=created_host_id)
def test_delete_non_existent_host(api_delete_host):
    host_id = generate_uuid()

    response_status, response_data = api_delete_host(host_id)

    assert_response_status(response_status, expected_status=404)
def test_add_host_with_invalid_tags_2(tags, mq_create_or_update_host):
    insights_id = generate_uuid()
    host = minimal_host(insights_id=insights_id, tags=tags)

    with pytest.raises(ValidationException):
        mq_create_or_update_host(host)
( { "gt": "2019-12-16T10:10:06.754201+00:00" }, # fresh { "gt": "2019-12-02T10:10:06.754201+00:00", "lte": "2019-12-09T10:10:06.754201+00:00" }, # stale warning ), ) @pytest.mark.parametrize( "field,value", ( ("fqdn", generate_uuid()), ("display_name", "some display name"), ("hostname_or_id", "some hostname"), ("insights_id", generate_uuid()), ("tags", "some/tag"), ), ) def test_query_variables_staleness_with_search(field, value, mocker, culling_datetime_mock, query_source_xjoin, graphql_query_empty_response, api_get): url = build_hosts_url(query=f"?{field}={quote(value)}") response_status, response_data = api_get(url) assert response_status == 200
def test_update_fields(patch_doc, event_producer_mock, db_create_host, db_get_host, api_patch):
    host = db_create_host()
    url = build_hosts_url(host_list_or_id=host.id)

    response_status, response_data = api_patch(url, patch_doc)

    assert_response_status(response_status, expected_status=200)

    record = db_get_host(host.id)

    for key in patch_doc:
        assert getattr(record, key) == patch_doc[key]


@pytest.mark.parametrize(
    "canonical_facts",
    [{"insights_id": generate_uuid()}, {"insights_id": generate_uuid(), "fqdn": generate_uuid()}],
)
def test_checkin_canonical_facts(
    event_datetime_mock, event_producer_mock, db_create_host, db_get_host, api_post, canonical_facts
):
    created_host = db_create_host(extra_data={"canonical_facts": canonical_facts})
    post_doc = created_host.canonical_facts
    updated_time = created_host.modified_on

    response_status, response_data = api_post(
        build_host_checkin_url(), post_doc, extra_headers={"x-rh-insights-request-id": "123456"}
    )

    assert_response_status(response_status, expected_status=201)

    record = db_get_host(created_host.id)
def test_query_single_non_existent_host(api_get, subtests):
    url = build_hosts_url(host_list_or_id=generate_uuid())
    api_query_test(api_get, subtests, url, [])
def test_delete_duplicates_multiple_scenarios(
    event_producer, db_create_host, db_create_multiple_hosts, db_get_host, inventory_config, script_function
):
    chunk_size = inventory_config.script_chunk_size

    # Customer scenario
    staleness_timestamps = get_staleness_timestamps()

    rhsm_id = generate_uuid()
    bios_uuid = generate_uuid()
    canonical_facts = {
        "insights_id": generate_uuid(),
        "subscription_manager_id": rhsm_id,
        "bios_uuid": bios_uuid,
        "satellite_id": rhsm_id,
        "fqdn": "rozrhjrad01.base.srvco.net",
        "ip_addresses": ["10.230.230.10", "10.230.230.13"],
        "mac_addresses": ["00:50:56:ac:56:45", "00:50:56:ac:48:61", "00:00:00:00:00:00"],
    }
    host_data = {
        "stale_timestamp": staleness_timestamps["stale_warning"],
        "reporter": "puptoo",
        "canonical_facts": canonical_facts,
    }
    customer_host1 = minimal_db_host(**host_data)
    customer_created_host1 = db_create_host(host=customer_host1).id

    host_data["canonical_facts"]["ip_addresses"] = ["10.230.230.3", "10.230.230.4"]
    customer_host2 = minimal_db_host(**host_data)
    customer_created_host2 = db_create_host(host=customer_host2).id

    host_data["canonical_facts"]["ip_addresses"] = ["10.230.230.1", "10.230.230.4"]
    host_data["stale_timestamp"] = staleness_timestamps["fresh"]
    customer_host3 = minimal_db_host(**host_data)
    customer_created_host3 = db_create_host(host=customer_host3).id

    assert db_get_host(customer_created_host1)
    assert db_get_host(customer_created_host2)
    assert db_get_host(customer_created_host3)

    # Matching elevated ID
    def _gen_canonical_facts():
        return {
            "insights_id": generate_uuid(),
            "subscription_manager_id": generate_uuid(),
            "bios_uuid": generate_uuid(),
            "satellite_id": generate_uuid(),
            "fqdn": generate_random_string(),
        }

    elevated_matching_host_count = 10
    elevated_id = generate_uuid()
    elevated_matching_created_hosts = []

    # Hosts with the same amount of canonical facts
    for _ in range(elevated_matching_host_count):
        canonical_facts = _gen_canonical_facts()
        canonical_facts["insights_id"] = elevated_id
        host = minimal_db_host(canonical_facts=canonical_facts)
        elevated_matching_created_hosts.append(db_create_host(host=host).id)

    # Hosts with less canonical facts
    for _ in range(elevated_matching_host_count):
        canonical_facts = {"insights_id": elevated_id}
        host = minimal_db_host(canonical_facts=canonical_facts)
        elevated_matching_created_hosts.append(db_create_host(host=host).id)

    # Create a lot of hosts to test that the script deletes duplicates in multiple chunks
    db_create_multiple_hosts(how_many=chunk_size)

    # Hosts with more canonical facts
    for _ in range(elevated_matching_host_count):
        canonical_facts = _gen_canonical_facts()
        canonical_facts["insights_id"] = elevated_id
        canonical_facts["ip_addresses"] = [f"10.0.0.{randint(1, 255)}"]
        host = minimal_db_host(canonical_facts=canonical_facts)
        elevated_matching_created_hosts.append(db_create_host(host=host).id)

    for host in elevated_matching_created_hosts:
        assert db_get_host(host)

    # Elevated IDs not matching
    elevated_not_matching_canonical_facts = _gen_canonical_facts()
    elevated_not_matching_host_count = 10
    elevated_not_matching_created_hosts = []

    # Hosts with the same amount of canonical facts
    for _ in range(elevated_not_matching_host_count):
        elevated_not_matching_canonical_facts["insights_id"] = generate_uuid()
        host = minimal_db_host(canonical_facts=elevated_not_matching_canonical_facts)
        elevated_not_matching_created_hosts.append(db_create_host(host=host).id)

    # Hosts with less canonical facts
    for _ in range(elevated_not_matching_host_count):
        facts = {"insights_id": generate_uuid()}
        host = minimal_db_host(canonical_facts=facts)
        elevated_not_matching_created_hosts.append(db_create_host(host=host).id)

    # Hosts with more canonical facts
    for _ in range(elevated_not_matching_host_count):
        elevated_not_matching_canonical_facts["insights_id"] = generate_uuid()
        elevated_not_matching_canonical_facts["ip_addresses"] = ["10.0.0.10"]
        host = minimal_db_host(canonical_facts=elevated_not_matching_canonical_facts)
        elevated_not_matching_created_hosts.append(db_create_host(host=host).id)

    for host in elevated_not_matching_created_hosts:
        assert db_get_host(host)

    # Without elevated IDs - canonical facts matching
    without_elevated_matching_canonical_facts = {
        "bios_uuid": generate_uuid(),
        "satellite_id": generate_uuid(),
        "fqdn": generate_random_string(),
        "ip_addresses": ["10.0.0.1"],
        "mac_addresses": ["aa:bb:cc:dd:ee:ff"],
    }
    without_elevated_matching_host_count = 10
    without_elevated_matching_created_hosts = []

    # Hosts with less canonical facts
    for fact in without_elevated_matching_canonical_facts:
        facts = {fact: without_elevated_matching_canonical_facts[fact]}
        host = minimal_db_host(canonical_facts=facts)
        without_elevated_matching_created_hosts.append(db_create_host(host=host).id)

    # Create a lot of hosts to test that the script deletes duplicates in multiple chunks
    db_create_multiple_hosts(how_many=chunk_size)

    # Hosts with the same amount of canonical facts
    for _ in range(without_elevated_matching_host_count):
        host = minimal_db_host(canonical_facts=without_elevated_matching_canonical_facts)
        without_elevated_matching_created_hosts.append(db_create_host(host=host).id)

    for host in without_elevated_matching_created_hosts:
        assert db_get_host(host)

    # Without elevated IDs - canonical facts not matching
    without_elevated_not_matching_canonical_facts = {
        "bios_uuid": generate_uuid(),
        "satellite_id": generate_uuid(),
        "fqdn": generate_random_string(),
        "ip_addresses": ["0.0.0.0"],
        "mac_addresses": ["aa:bb:cc:dd:ee:ff"],
    }
    without_elevated_not_matching_host_count = 10
    without_elevated_not_matching_created_hosts = []

    # Hosts with the same amount of canonical facts
    for _ in range(without_elevated_not_matching_host_count):
        facts = deepcopy(without_elevated_not_matching_canonical_facts)
        facts["fqdn"] = generate_random_string()
        host = minimal_db_host(canonical_facts=facts)
        without_elevated_not_matching_created_hosts.append(db_create_host(host=host).id)

    # Hosts with less canonical facts
    for _ in range(without_elevated_not_matching_host_count):
        facts = {"fqdn": generate_random_string()}
        host = minimal_db_host(canonical_facts=facts)
        without_elevated_not_matching_created_hosts.append(db_create_host(host=host).id)

    # Hosts with more canonical facts
    for fact in ELEVATED_IDS:
        facts = deepcopy(without_elevated_not_matching_canonical_facts)
        facts["fqdn"] = generate_random_string()
        facts[fact] = generate_uuid()
        if fact == "provider_id":
            facts["provider_type"] = "aws"
        host = minimal_db_host(canonical_facts=facts)
        without_elevated_not_matching_created_hosts.append(db_create_host(host=host).id)

    for host in without_elevated_not_matching_created_hosts:
        assert db_get_host(host)

    if script_function == "run":
        Session = _init_db(inventory_config)
        sessions = [Session() for _ in range(3)]
        with multi_session_guard(sessions):
            deleted_hosts_count = host_delete_duplicates_run(
                inventory_config,
                mock.Mock(),
                *sessions,
                event_producer,
                shutdown_handler=mock.Mock(**{"shut_down.return_value": False}),
            )
        assert deleted_hosts_count == elevated_matching_host_count * 3 + without_elevated_matching_host_count + len(
            without_elevated_matching_canonical_facts
        )
    else:
        host_delete_duplicates_main(mock.Mock())

    assert not db_get_host(customer_created_host1)
    assert not db_get_host(customer_created_host2)
    assert db_get_host(customer_created_host3)

    for i in range(len(elevated_matching_created_hosts) - 1):
        assert not db_get_host(elevated_matching_created_hosts[i])
    assert db_get_host(elevated_matching_created_hosts[-1])

    for host in elevated_not_matching_created_hosts:
        assert db_get_host(host)

    for i in range(len(without_elevated_matching_created_hosts) - 1):
        assert not db_get_host(without_elevated_matching_created_hosts[i])
    assert db_get_host(without_elevated_matching_created_hosts[-1])

    for host in without_elevated_not_matching_created_hosts:
        assert db_get_host(host)
def test_delete_dupe_more_hosts_than_chunk_size(
    event_producer_mock, db_get_host, db_create_multiple_hosts, db_create_host, inventory_config
):
    canonical_facts_1 = {
        "provider_id": generate_uuid(),
        "insights_id": generate_uuid(),
        "subscription_manager_id": generate_uuid(),
    }
    canonical_facts_2 = {
        "provider_id": generate_uuid(),
        "insights_id": generate_uuid(),
        "subscription_manager_id": generate_uuid(),
    }

    chunk_size = inventory_config.script_chunk_size
    num_hosts = chunk_size * 3 + 15

    # create host before big chunk. Hosts are ordered by modified date so creation
    # order is important
    old_host_1 = minimal_db_host(canonical_facts=canonical_facts_1)
    new_host_1 = minimal_db_host(canonical_facts=canonical_facts_1)
    created_old_host_1 = db_create_host(host=old_host_1)
    created_new_host_1 = db_create_host(host=new_host_1)

    # create big chunk of hosts
    db_create_multiple_hosts(how_many=num_hosts)

    # create another host after
    old_host_2 = minimal_db_host(canonical_facts=canonical_facts_2)
    new_host_2 = minimal_db_host(canonical_facts=canonical_facts_2)
    created_old_host_2 = db_create_host(host=old_host_2)
    created_new_host_2 = db_create_host(host=new_host_2)

    assert created_old_host_1.id != created_new_host_1.id
    assert created_old_host_2.id != created_new_host_2.id

    threadctx.request_id = UNKNOWN_REQUEST_ID_VALUE

    Session = _init_db(inventory_config)
    accounts_session = Session()
    hosts_session = Session()
    misc_session = Session()

    with multi_session_guard([accounts_session, hosts_session, misc_session]):
        num_deleted = host_delete_duplicates_run(
            inventory_config,
            mock.Mock(),
            accounts_session,
            hosts_session,
            misc_session,
            event_producer_mock,
            shutdown_handler=mock.Mock(**{"shut_down.return_value": False}),
        )

    assert num_deleted == 2
    assert db_get_host(created_new_host_1.id)
    assert not db_get_host(created_old_host_1.id)
    assert db_get_host(created_new_host_2.id)
    assert not db_get_host(created_old_host_2.id)
def test_delete_duplicates_elevated_ids_matching(
    event_producer, db_create_host, db_get_host, inventory_config, tested_id
):
    def _gen_canonical_facts():
        facts = {
            "provider_id": generate_uuid(),
            "insights_id": generate_uuid(),
            "subscription_manager_id": generate_uuid(),
            "bios_uuid": generate_uuid(),
            "satellite_id": generate_uuid(),
            "fqdn": generate_random_string(),
        }
        if tested_id == "provider_id":
            facts["provider_type"] = "aws"
        if tested_id in ("insights_id", "subscription_manager_id"):
            facts.pop("provider_id", None)
        if tested_id == "subscription_manager_id":
            facts.pop("insights_id", None)
        return facts

    host_count = 10
    elevated_id = generate_uuid()
    created_hosts = []

    # Hosts with the same amount of canonical facts
    for _ in range(host_count):
        canonical_facts = _gen_canonical_facts()
        canonical_facts[tested_id] = elevated_id
        host = minimal_db_host(canonical_facts=canonical_facts)
        created_hosts.append(db_create_host(host=host))

    # Hosts with less canonical facts
    for _ in range(host_count):
        canonical_facts = {tested_id: elevated_id}
        host = minimal_db_host(canonical_facts=canonical_facts)
        created_hosts.append(db_create_host(host=host))

    # Hosts with more canonical facts
    for _ in range(host_count):
        canonical_facts = _gen_canonical_facts()
        canonical_facts[tested_id] = elevated_id
        canonical_facts["ip_addresses"] = [f"10.0.0.{randint(1, 255)}"]
        host = minimal_db_host(canonical_facts=canonical_facts)
        created_hosts.append(db_create_host(host=host))

    for host in created_hosts:
        assert db_get_host(host.id)

    Session = _init_db(inventory_config)
    sessions = [Session() for _ in range(3)]
    with multi_session_guard(sessions):
        deleted_hosts_count = host_delete_duplicates_run(
            inventory_config,
            mock.Mock(),
            *sessions,
            event_producer,
            shutdown_handler=mock.Mock(**{"shut_down.return_value": False}),
        )

    # Only the most recently created duplicate should survive
    assert deleted_hosts_count == host_count * 3 - 1
    for i in range(len(created_hosts) - 1):
        assert not db_get_host(created_hosts[i].id)
    assert db_get_host(created_hosts[-1].id)
def test_delete_duplicates_without_elevated_not_matching(
    event_producer, db_create_host, db_get_host, inventory_config, tested_fact
):
    def _generate_fact(fact_name):
        if fact_name == "fqdn":
            return generate_random_string()
        if fact_name == "ip_addresses":
            return [f"{randint(1, 255)}.{randint(0, 255)}.{randint(0, 255)}.{randint(1, 255)}"]
        if fact_name == "mac_addresses":
            hex_chars = "0123456789abcdef"
            addr = ":".join([f"{choice(hex_chars)}{choice(hex_chars)}" for _ in range(6)])
            return [addr]
        return generate_uuid()

    canonical_facts = {
        "bios_uuid": generate_uuid(),
        "satellite_id": generate_uuid(),
        "fqdn": generate_random_string(),
        "ip_addresses": ["0.0.0.0"],
        "mac_addresses": ["aa:bb:cc:dd:ee:ff"],
    }

    host_count = 10
    created_hosts = []

    # Hosts with the same amount of canonical facts
    for _ in range(host_count):
        facts = deepcopy(canonical_facts)
        facts[tested_fact] = _generate_fact(tested_fact)
        host = minimal_db_host(canonical_facts=facts)
        created_hosts.append(db_create_host(host=host))

    # Hosts with less canonical facts
    for _ in range(host_count):
        facts = {tested_fact: _generate_fact(tested_fact)}
        host = minimal_db_host(canonical_facts=facts)
        created_hosts.append(db_create_host(host=host))

    # Hosts with more canonical facts
    for fact in ELEVATED_IDS:
        facts = deepcopy(canonical_facts)
        facts[tested_fact] = _generate_fact(tested_fact)
        facts[fact] = generate_uuid()
        if fact == "provider_id":
            facts["provider_type"] = "aws"
        host = minimal_db_host(canonical_facts=facts)
        created_hosts.append(db_create_host(host=host))

    for host in created_hosts:
        assert db_get_host(host.id)

    Session = _init_db(inventory_config)
    sessions = [Session() for _ in range(3)]
    with multi_session_guard(sessions):
        deleted_hosts_count = host_delete_duplicates_run(
            inventory_config,
            mock.Mock(),
            *sessions,
            event_producer,
            shutdown_handler=mock.Mock(**{"shut_down.return_value": False}),
        )

    assert deleted_hosts_count == 0
    for host in created_hosts:
        assert db_get_host(host.id)
def test_delete_duplicates_customer_scenario_1(event_producer, db_create_host, db_get_host, inventory_config):
    staleness_timestamps = get_staleness_timestamps()

    rhsm_id = generate_uuid()
    bios_uuid = generate_uuid()
    canonical_facts = {
        "insights_id": generate_uuid(),
        "subscription_manager_id": rhsm_id,
        "bios_uuid": bios_uuid,
        "satellite_id": rhsm_id,
        "fqdn": "rn001018",
        "ip_addresses": ["10.230.230.3"],
        "mac_addresses": ["00:50:56:ab:5a:22", "00:00:00:00:00:00"],
    }
    host_data = {
        "stale_timestamp": staleness_timestamps["stale_warning"],
        "reporter": "puptoo",
        "canonical_facts": canonical_facts,
    }
    host1 = minimal_db_host(**host_data)
    created_host1 = db_create_host(host=host1)

    host_data["canonical_facts"]["ip_addresses"] = ["10.230.230.30"]
    host_data["canonical_facts"].pop("bios_uuid")
    host_data["stale_timestamp"] = staleness_timestamps["stale"]
    host2 = minimal_db_host(**host_data)
    created_host2 = db_create_host(host=host2)

    host_data["canonical_facts"]["ip_addresses"] = ["10.230.230.3"]
    host3 = minimal_db_host(**host_data)
    created_host3 = db_create_host(host=host3)

    host_data["reporter"] = "yupana"
    host_data["canonical_facts"]["ip_addresses"] = ["10.230.230.1"]
    host_data["canonical_facts"]["mac_addresses"] = ["00:50:56:ab:5a:22"]
    host_data["canonical_facts"]["bios_uuid"] = bios_uuid
    host_data["canonical_facts"]["fqdn"] = "rn001018.bcbst.com"
    host_data["stale_timestamp"] = staleness_timestamps["fresh"]
    host4 = minimal_db_host(**host_data)
    created_host4 = db_create_host(host=host4)

    host_data["reporter"] = "puptoo"
    host_data["canonical_facts"]["ip_addresses"] = ["10.230.230.15"]
    host_data["canonical_facts"]["mac_addresses"] = ["00:50:56:ab:5a:22", "00:00:00:00:00:00"]
    host_data["canonical_facts"].pop("bios_uuid")
    host_data["canonical_facts"]["fqdn"] = "rn001018"
    host5 = minimal_db_host(**host_data)
    created_host5 = db_create_host(host=host5)

    assert db_get_host(created_host1.id)
    assert db_get_host(created_host2.id)
    assert db_get_host(created_host3.id)
    assert db_get_host(created_host4.id)
    assert db_get_host(created_host5.id)

    Session = _init_db(inventory_config)
    sessions = [Session() for _ in range(3)]
    with multi_session_guard(sessions):
        deleted_hosts_count = host_delete_duplicates_run(
            inventory_config,
            mock.Mock(),
            *sessions,
            event_producer,
            shutdown_handler=mock.Mock(**{"shut_down.return_value": False}),
        )

    assert deleted_hosts_count == 4
    assert not db_get_host(created_host1.id)
    assert not db_get_host(created_host2.id)
    assert not db_get_host(created_host3.id)
    assert not db_get_host(created_host4.id)
    assert db_get_host(created_host5.id)
    url = build_hosts_url(host_list_or_id=host.id)

    response_status, response_data = api_patch(url, patch_doc)

    assert_response_status(response_status, expected_status=200)

    record = db_get_host(host.id)

    for key in patch_doc:
        assert getattr(record, key) == patch_doc[key]


@pytest.mark.parametrize(
    "canonical_facts",
    [
        {"insights_id": generate_uuid()},
        {"insights_id": generate_uuid(), "rhel_machine_id": generate_uuid()},
        {"insights_id": generate_uuid(), "rhel_machine_id": generate_uuid(), "fqdn": generate_uuid()},
    ],
)
def test_checkin_canonical_facts(
    event_datetime_mock, event_producer_mock, db_create_host, db_get_host, api_post, canonical_facts
):
def test_replace_tags_of_host_by_list(mq_create_or_update_host, db_get_host_by_insights_id, subtests):
    insights_id = generate_uuid()

    # Can't use parametrize here due to the data cleanup on each test run
    for message_tags, expected_tags in (
        ([], {}),
        (
            [
                {"namespace": "namespace 1", "key": "key 1", "value": "value 1"},
                {"namespace": "namespace 2", "key": "key 2", "value": "value 2"},
                {"namespace": "null", "key": "key 3", "value": "value 3"},
            ],
            {
                "namespace 1": {"key 1": ["value 1"]},
                "namespace 2": {"key 2": ["value 2"]},
                "null": {"key 3": ["value 3"]},
            },
        ),
        (
            [{"namespace": "namespace 1", "key": "key 4", "value": "value 4"}],
            {
                "namespace 1": {"key 4": ["value 4"]},
                "namespace 2": {"key 2": ["value 2"]},
                "null": {"key 3": ["value 3"]},
            },
        ),
        (
            [{"key": "key 5", "value": "value 5"}],
            {
                "namespace 1": {"key 4": ["value 4"]},
                "namespace 2": {"key 2": ["value 2"]},
                "null": {"key 5": ["value 5"]},
            },
        ),
        (
            [{"namespace": None, "key": "key 6", "value": "value 6"}],
            {
                "namespace 1": {"key 4": ["value 4"]},
                "namespace 2": {"key 2": ["value 2"]},
                "null": {"key 6": ["value 6"]},
            },
        ),
        (
            [{"namespace": "", "key": "key 7", "value": "value 7"}],
            {
                "namespace 1": {"key 4": ["value 4"]},
                "namespace 2": {"key 2": ["value 2"]},
                "null": {"key 7": ["value 7"]},
            },
        ),
    ):
        with subtests.test(tags=message_tags):
            host = minimal_host(insights_id=insights_id, tags=message_tags)
            mq_create_or_update_host(host)

            record = db_get_host_by_insights_id(insights_id)

            assert expected_tags == record.tags
def test_add_host_with_tags(event_datetime_mock, mq_create_or_update_host):
    """
    Tests adding a host with message containing tags
    """
    expected_insights_id = generate_uuid()
    expected_tags = [
        {"namespace": "NS1", "key": "key3", "value": "val3"},
        {"namespace": "NS3", "key": "key2", "value": "val2"},
        {"namespace": "Sat", "key": "prod", "value": None},
        {"namespace": "Sat", "key": "dev", "value": ""},
        {"namespace": "Sat", "key": "test"},
        {"namespace": None, "key": "key", "value": "val1"},
        {"namespace": "", "key": "key", "value": "val4"},
        {"namespace": "null", "key": "key", "value": "val5"},
        {"namespace": None, "key": "only_key", "value": None},
        {"key": "just_key"},
        {"namespace": " \t\n\r\f\v", "key": " \t\n\r\f\v", "value": " \t\n\r\f\v"},
    ]
    timestamp_iso = event_datetime_mock.isoformat()

    host = minimal_host(insights_id=expected_insights_id, tags=expected_tags)

    expected_results = {
        "host": {**host.data()},
        "platform_metadata": {},
        "timestamp": timestamp_iso,
        "type": "created",
    }

    host_keys_to_check = ["display_name", "insights_id", "account"]

    key, event, headers = mq_create_or_update_host(host, return_all_data=True)

    assert_mq_host_data(key, event, expected_results, host_keys_to_check)

    assert len(event["host"]["tags"]) == len(expected_tags)
def test_add_host_empty_keys_facts(facts, mq_create_or_update_host):
    insights_id = generate_uuid()
    host = minimal_host(insights_id=insights_id, facts=facts)

    with pytest.raises(ValidationException):
        mq_create_or_update_host(host)