def test_fetch_incidents_handles_incorrect_severity():
    """Tests the fetch-incidents function incorrect severity error.

    Verifies that fetch_incidents raises ValueError when the severity
    filter contains values outside the accepted severity levels.
    """
    from SOCRadarIncidents import Client, fetch_incidents

    company_id = "0"
    api_key = "APIKey"
    socradar_client = Client(
        base_url=SOCRADAR_API_ENDPOINT,
        api_key=api_key,
        socradar_company_id=company_id,
        verify=False,
        proxy=False,
    )
    previous_run = {'last_fetch': 1594512000}  # Jul 12, 2020
    first_fetch = arg_to_datetime(arg='30 days', arg_name='First fetch time')
    bad_severity_levels = ['Incorrect', 'Severity', 'Levels']

    # Invalid severity values must be rejected before any API call is made.
    with pytest.raises(ValueError):
        fetch_incidents(
            client=socradar_client,
            max_results=2,
            last_run=previous_run,
            first_fetch_time=first_fetch,
            resolution_status='all',
            fp_status='all',
            severity=bad_severity_levels,
            incident_main_type=None,
            incident_sub_type=None,
        )
def test_fetch_incidents(requests_mock):
    """Tests the fetch-incidents function.

    Configures the requests_mock instance to serve a canned SOCRadar
    Incidents API response loaded from a local JSON file, then checks
    that the command output matches the expected incidents fixture.
    """
    from SOCRadarIncidents import Client, fetch_incidents

    company_id = "0"
    api_key = "APIKey"
    api_response = util_load_json('test_data/fetch_incidents_response.json')

    # URL the client is expected to request, including all query filters.
    request_suffix = (
        f'company/{company_id}/incidents/v2?key={api_key}'
        f'&severity=Medium%2CHigh'
        f'&limit=2'
        f'&start_date=1594512000'
    )
    requests_mock.get(f'{SOCRADAR_API_ENDPOINT}/{request_suffix}', json=api_response)

    socradar_client = Client(
        base_url=SOCRADAR_API_ENDPOINT,
        api_key=api_key,
        socradar_company_id=company_id,
        verify=False,
        proxy=False,
    )
    previous_run = {'last_fetch': 1594512000}  # Jul 12, 2020
    first_fetch = arg_to_datetime(arg='30 days', arg_name='First fetch time')

    _, fetched_incidents = fetch_incidents(
        client=socradar_client,
        max_results=2,
        last_run=previous_run,
        first_fetch_time=first_fetch,
        resolution_status='all',
        fp_status='all',
        severity=['Medium', 'High'],
        incident_main_type=None,
        incident_sub_type=None,
    )

    expected_incidents = util_load_json('test_data/fetch_incidents_expected_output.json')
    assert fetched_incidents == expected_incidents
    # max_results must be honored as an upper bound.
    assert len(fetched_incidents) <= 2
def test_prepare_args_for_compromised_credentials_when_valid_args_are_provided(self):
    """
    Test case scenario when the arguments provided are valid.
    """
    from Flashpoint import prepare_args_for_fetch_compromised_credentials

    # The query's upper time bound is "now" as an integer epoch timestamp.
    now_ts = int(datetime.datetime.timestamp(arg_to_datetime('now')))
    expected_args = {
        'limit': 15,
        'query': f'+basetypes:(credential-sighting) +header_.indexed_at: [1626609765'
                 f' TO {now_ts}] +is_fresh:true',
        'skip': 0,
        'sort': 'header_.indexed_at:asc',
    }

    actual_args = prepare_args_for_fetch_compromised_credentials(
        max_fetch=15, start_time=START_DATE, is_fresh=True, last_run={}
    )
    assert actual_args == expected_args