def test_download_link_fetching(mocker, regions_list, services_list):
    """Verify the Azure download link is extracted from a mocked HTTP response.

    The raw response body is loaded from a fixture file and returned by the
    patched ``_http_request``, so no network call is made.
    """
    with open('./test_data/response_mock.txt') as mock_file:
        mocked_response = mock_file.read()

    feed_client = Client(regions_list, services_list)
    mocker.patch('CommonServerPython.BaseClient._http_request', return_value=mocked_response)

    expected_link = ('https://download.microsoft.com/download'
                     '/7/1/D/71D86715-5596-4529-9B13-DA13A5DE5B63/'
                     'ServiceTags_Public_20200504.json')
    assert feed_client.get_azure_download_link() == expected_link
def test_filter_duplicate_addresses(list_to_filter, expected_result):
    """
    Given:
        - A list of objects where several objects share the same value
          (the four parametrized cases are permutations/orderings of one another).
    When:
        - Removing duplicate objects from the given list.
    Then:
        - Ensure the resulting list keeps, for each value, the object that
          carries the maximal number of keys.
    """
    feed_client = Client([], [])
    filtered = feed_client.filter_and_aggregate_values(list_to_filter)
    assert filtered == expected_result
def test_extract_metadata(regions_list, services_list, values_group_section, expected_result):
    """Check metadata extraction from a single values-group section of the feed."""
    feed_client = Client(regions_list, services_list)
    metadata = feed_client.extract_metadata_of_indicators_group(values_group_section)
    assert metadata == expected_result
def test_build_ip(regions_list, services_list, ip, region, service, expected_result):
    """Check that an IP indicator object is built with its region and service attached."""
    feed_client = Client(regions_list, services_list)
    indicator = feed_client.build_ip_indicator(ip, region=region, service=service)
    assert indicator == expected_result
def test_extract_indicators(regions_list, services_list, values_group_section, expected_result):
    """Check that indicators are extracted from a values dict of the feed response."""
    feed_client = Client(regions_list, services_list)
    indicators = feed_client.extract_indicators_from_values_dict(values_group_section)
    assert indicators == expected_result
def test_download_link_fetching_no_mock(regions_list, services_list):
    """
    Smoke test: fetch the Azure download link without mocking the HTTP layer.

    Bug fix: this function was originally also named
    ``test_download_link_fetching``, which redefined the mocked test of the
    same name earlier in the module — pytest then collected only this one and
    the mocked test silently never ran (flake8/ruff F811). Renaming it lets
    both tests be collected and executed.
    """
    client = Client(regions_list, services_list)
    # Only assert truthiness: the real link's filename changes over time,
    # so an exact-match assertion would be flaky.
    assert client.get_azure_download_link()