def test_tac_api(per_test_flask_app, per_test_postgres, logger, mocked_statsd, tmpdir, request,
                 mocked_config, api_version):
    """Test TAC API call works with the security role created based on abstract role."""
    dsn = per_test_postgres.dsn()
    # NOTE(review): this literal looks like an anonymization artifact — presumably the
    # original set the DB user to a role under test; confirm against upstream history.
    dsn['user'] = '******'
    db_config = DBConfig(ignore_env=True, **dsn)
    # Seed the GSMA TAC table so the API has data to return for tac='01234404'.
    with create_db_connection(db_config) as conn, create_db_connection(
            db_config, autocommit=True) as metadata_conn:
        with get_importer(
                GSMADataImporter,
                conn,
                metadata_conn,
                db_config,
                tmpdir,
                logger,
                mocked_statsd,
                GSMADataParams(
                    filename='sample_gsma_import_list_anonymized.txt')) as imp:
            imp.import_data()
    # The fixture parametrization id tells us which DB role the app is running as.
    current_user = request.node.callspec.params['per_test_flask_app']
    if current_user in ['dirbs_poweruser_login', 'dirbs_api_user']:
        # Roles with API access: expect a successful lookup with GSMA data present.
        rv = per_test_flask_app.get(
            url_for('{0}.tac_api'.format(api_version), tac='01234404'))
        assert rv.status_code == 200
        results = json.loads(rv.data.decode('utf-8'))
        assert results['gsma'] is not None
    else:
        # Any other role must be rejected by the DB role check.
        with pytest.raises(DatabaseRoleCheckException):
            per_test_flask_app.get(
                url_for('{0}.tac_api'.format(api_version), tac='01234404'))
def test_same_import_twice(gsma_tac_db_importer, mocked_config, logger, mocked_statsd, db_conn,
                           metadata_db_conn, tmpdir):
    """Test Depot not known yet.

    Verify that if we import twice the same file, same entries are ignored and not
    added to the historic table.
    """
    def _historic_rows():
        # Row count of the historic table, read via cursor.rowcount on a plain SELECT.
        with db_conn.cursor() as cur:
            cur.execute('SELECT * FROM historic_gsma_data')
            return cur.rowcount

    # First import via the fixture importer.
    expect_success(gsma_tac_db_importer, 3, db_conn, logger)
    rows_after_first = _historic_rows()

    # Re-import the exact same file with a fresh importer instance.
    with get_importer(GSMADataImporter, db_conn, metadata_db_conn, mocked_config.db_config,
                      tmpdir, logger, mocked_statsd,
                      GSMADataParams(filename='gsma_dump_small_july_2016.txt',
                                     extract=False)) as repeat_importer:
        expect_success(repeat_importer, 3, db_conn, logger)
    rows_after_second = _historic_rows()

    # Duplicate rows must not have been appended to the historic table.
    assert rows_after_first == rows_after_second == 3
def test_imei_api_pairing_list(per_test_flask_app, per_test_postgres, logger, mocked_statsd,
                               tmpdir, request, mocked_config, api_version):
    """Test IMEI API call after pairing list import."""
    dsn = per_test_postgres.dsn()
    db_config = DBConfig(ignore_env=True, **dsn)
    with create_db_connection(db_config) as conn, \
            create_db_connection(db_config, autocommit=True) as metadata_conn:
        # Seed the GSMA TAC table first.
        with get_importer(GSMADataImporter, conn, metadata_conn, db_config, tmpdir, logger,
                          mocked_statsd,
                          GSMADataParams(
                              filename='gsma_dump_small_july_2016.txt')) as importer:
            importer.import_data()

        # Then load a small IMEI/IMSI pairing list.
        pairing_csv = ('imei,imsi\n'
                       '811111013136460,111018001111111\n'
                       '359000000000000,111015113222222\n'
                       '357756065985824,111015113333333')
        with get_importer(PairingListImporter, conn, metadata_conn, db_config, tmpdir, logger,
                          mocked_statsd, PairListParams(content=pairing_csv)) as importer:
            importer.import_data()

    # v1 serves pairings via the generic IMEI endpoint; v2 has a dedicated pairings endpoint.
    endpoint = 'imei_api' if api_version == 'v1' else 'imei_get_pairings_api'
    rv = per_test_flask_app.get(
        url_for('{0}.{1}'.format(api_version, endpoint), imei='21260934000003'))
    assert rv.status_code == 200
def test_historic_failure_metrics(mocked_statsd, mocked_config, logger, gsma_tac_db_importer,
                                  tmpdir, db_conn, metadata_db_conn):
    """Test Depot ID TBD.

    Verify that StatsD is sent stats when a threshold validation failure happens
    during import.
    """
    # Baseline import so there is a historic size to compare against.
    gsma_tac_db_importer.import_data()

    thresholds = mocked_config.gsma_threshold_config
    # A much smaller follow-up import should trip the historic import-size check.
    small_params = GSMADataParams(
        filename='gsma_dump_small_july_2016.txt',
        import_size_variation_percent=thresholds.import_size_variation_percent,
        import_size_variation_absolute=thresholds.import_size_variation_absolute,
        extract=False)
    with get_importer(GSMADataImporter, db_conn, metadata_db_conn, mocked_config.db_config,
                      tmpdir, logger, mocked_statsd, small_params) as gsma_small_importer:
        expect_failure(gsma_small_importer, exc_message='Failed import size historic check')

    # Expected call is statsd.gauge(key, 1, delta=True)
    mocked_statsd.gauge.assert_any_call(
        'dirbs.import.gsma_tac.validation_failures.historic_import_size', 1, delta=True)
def test_imei_api_registration_list(per_test_flask_app, per_test_postgres, logger, mocked_statsd,
                                    tmpdir, request, mocked_config, api_version):
    """Test IMEI API call after registration list import."""
    dsn = per_test_postgres.dsn()
    db_config = DBConfig(ignore_env=True, **dsn)
    with create_db_connection(db_config) as conn, \
            create_db_connection(db_config, autocommit=True) as metadata_conn:
        # Seed the GSMA TAC table first.
        with get_importer(GSMADataImporter, conn, metadata_conn, db_config, tmpdir, logger,
                          mocked_statsd,
                          GSMADataParams(
                              filename='gsma_dump_small_july_2016.txt')) as importer:
            importer.import_data()

        # Register a single approved IMEI.
        registration_csv = ('APPROVED_IMEI,make,model,status,model_number,brand_name,'
                            'device_type,radio_interface,device_id\n'
                            '21260934000003,,,,,,,,1')
        with get_importer(RegistrationListImporter, conn, metadata_conn, db_config, tmpdir,
                          logger, mocked_statsd,
                          RegistrationListParams(content=registration_csv)) as importer:
            importer.import_data()

    # v1 uses the generic IMEI endpoint; api version 2.0 exposes an explicit GET endpoint.
    endpoint = 'imei_api' if api_version == 'v1' else 'imei_get_api'
    rv = per_test_flask_app.get(
        url_for('{0}.{1}'.format(api_version, endpoint), imei='21260934000003'))
    assert rv.status_code == 200
def test_classify(per_test_postgres, db_user, tmpdir, logger, mocked_statsd, monkeypatch,
                  mocked_config):
    """Test classify works with the security role created based on abstract role."""
    dsn = per_test_postgres.dsn()
    db_config = DBConfig(ignore_env=True, **dsn)
    operator_csv = ('date,imei,imsi,msisdn\n'
                    '20110101,8888#888622222,123456789012345,123456789012345\n'
                    '20110101,88888888622222,123456789012345,123456789012345\n'
                    '20110101,8888888862222209,123456789012345,123456789012345\n'
                    '20110101,88888862222209**,123456789012345,123456789012345')
    with create_db_connection(db_config) as conn, \
            create_db_connection(db_config, autocommit=True) as metadata_conn:
        # Operator data with deliberately unclean IMEIs (checks disabled so they import).
        with get_importer(OperatorDataImporter, conn, metadata_conn, db_config, tmpdir, logger,
                          mocked_statsd,
                          OperatorDataParams(content=operator_csv,
                                             extract=False,
                                             perform_unclean_checks=False,
                                             perform_region_checks=False,
                                             perform_home_network_check=False,
                                             operator='operator1')) as importer:
            importer.import_data()
        with get_importer(GSMADataImporter, conn, metadata_conn, db_config, tmpdir, logger,
                          mocked_statsd,
                          GSMADataParams(
                              filename='gsma_not_found_anonymized.txt')) as importer:
            importer.import_data()
        with get_importer(RegistrationListImporter, conn, metadata_conn, db_config, tmpdir,
                          logger, mocked_statsd,
                          RegistrationListParams(
                              filename='sample_registration_list.csv')) as importer:
            importer.import_data()

    # Run dirbs-classify using db args from the temp postgres instance
    runner = CliRunner()
    monkeypatch.setattr(mocked_config.db_config, 'user', db_user)
    result = runner.invoke(dirbs_classify_cli, ['--no-safety-check'],
                           obj={'APP_CONFIG': mocked_config})
    # Only roles granted the classify capability may succeed.
    if db_user in ['dirbs_poweruser_login', 'dirbs_classify_user']:
        assert result.exit_code == 0
    else:
        assert result.exit_code != 0
def test_repeat_data_upload(gsma_tac_db_importer, mocked_config, logger, mocked_statsd, db_conn,
                            metadata_db_conn, tmpdir):
    """Test Depot ID 96579/10.

    Verify that valid GSMA Data can be successfully imported into the database when
    repeating the import of the same file.
    """
    # First import through the fixture-provided importer.
    expect_success(gsma_tac_db_importer, 3, db_conn, logger)
    # Repeating the same file with a fresh importer must also succeed.
    repeat_params = GSMADataParams(filename='gsma_dump_small_july_2016.txt')
    with get_importer(GSMADataImporter, db_conn, metadata_db_conn, mocked_config.db_config,
                      tmpdir, logger, mocked_statsd, repeat_params) as repeat_importer:
        expect_success(repeat_importer, 3, db_conn, logger)
def test_validation_check_override(gsma_tac_db_importer, mocked_config, logger, mocked_statsd,
                                   db_conn, metadata_db_conn, tmpdir):
    """Test Depot ID 96586/17.

    Verify that the user can override historical checks when importing GSMA Data files.
    """
    # Large initial import establishes the historic size baseline.
    expect_success(gsma_tac_db_importer, 24727, db_conn, logger)
    # A tiny follow-up would normally fail the historic check; disabling the
    # check lets it import successfully.
    override_params = GSMADataParams(filename='gsma_dump_small_july_2016.txt',
                                     perform_historic_check=False,
                                     extract=False)
    with get_importer(GSMADataImporter, db_conn, metadata_db_conn, mocked_config.db_config,
                      tmpdir, logger, mocked_statsd, override_params) as small_importer:
        expect_success(small_importer, 3, db_conn, logger)
def test_extract(db_conn, metadata_db_conn, mocked_config, tmpdir, logger, mocked_statsd):
    """Test Depot not available because it is not available on the command-line.

    Verify that a zipped txt file can be imported.
    """
    source_name = 'gsma_simple_extraction_anonymized.txt'
    data_dir = path.abspath(path.dirname(__file__) + '/unittest_data/gsma')
    source_file = path.join(data_dir, source_name)
    base_name = path.split(source_name)[1]
    # Swap the trailing 'txt' for 'zip' to name the archive in the temp dir.
    zip_file = path.join(str(tmpdir), base_name[:-3] + 'zip')
    with zipfile.ZipFile(zip_file, mode='w') as archive:
        archive.write(source_file, arcname=base_name)
    # Import with extract=True so the importer unzips the archive itself.
    with get_importer(GSMADataImporter, db_conn, metadata_db_conn, mocked_config.db_config,
                      tmpdir, logger, mocked_statsd,
                      GSMADataParams(filename=zip_file, extract=True)) as importer:
        importer.import_data()
def test_historical_check_passes(gsma_tac_db_importer, mocked_config, logger, mocked_statsd,
                                 db_conn, metadata_db_conn, tmpdir):
    """Test Depot ID 96583/14.

    Verify that data is successfully imported if the historical check passes.
    """
    expect_success(gsma_tac_db_importer, 3, db_conn, logger)
    thresholds = mocked_config.gsma_threshold_config
    # A larger second import should sail through the historic size check.
    large_params = GSMADataParams(
        filename='gsma_dump_large_july_2016.txt',
        import_size_variation_percent=thresholds.import_size_variation_percent,
        import_size_variation_absolute=thresholds.import_size_variation_absolute,
        extract=False)
    with get_importer(GSMADataImporter, db_conn, metadata_db_conn, mocked_config.db_config,
                      tmpdir, logger, mocked_statsd, large_params) as large_importer:
        expect_success(large_importer, 24727, db_conn, logger)
def test_historical_check_fails(gsma_tac_db_importer, mocked_config, logger, mocked_statsd,
                                db_conn, metadata_db_conn, tmpdir):
    """Test Depot ID 96582/13.

    Verify that data is not imported if the historical check fails.
    """
    expect_success(gsma_tac_db_importer, 24727, db_conn, logger)
    thresholds = mocked_config.gsma_threshold_config
    # Try a small import
    small_params = GSMADataParams(
        filename='gsma_dump_small_july_2016.txt',
        import_size_variation_percent=thresholds.import_size_variation_percent,
        import_size_variation_absolute=thresholds.import_size_variation_absolute,
        extract=False)
    with get_importer(GSMADataImporter, db_conn, metadata_db_conn, mocked_config.db_config,
                      tmpdir, logger, mocked_statsd, small_params) as gsma_small_importer:
        expect_failure(gsma_small_importer, exc_message='Failed import size historic check')
def check_in_registration_list_helper(imei_list, expect_to_find_in_reg, api_version, flask_app):
    """Helper function to make a request and check in_registration_list value in the response."""
    for imei in imei_list:
        rv = flask_app.get(url_for('{0}.imei_api'.format(api_version), imei=imei))
        assert rv.status_code == 200
        payload = json.loads(rv.data.decode('utf-8'))
        # Realtime check flag must match the caller's expectation exactly (identity check).
        assert payload['realtime_checks']['in_registration_list'] is expect_to_find_in_reg


@pytest.mark.parametrize('registration_list_importer',
                         [RegistrationListParams(filename='sample_registration_list.csv')],
                         indirect=True)
@pytest.mark.parametrize('gsma_tac_db_importer',
                         [GSMADataParams(filename='sample_gsma_import_list_anonymized.txt',
                                         extract=False)],
                         indirect=True)
def test_check_in_registration_list(flask_app, registration_list_importer, gsma_tac_db_importer,
                                    api_version, monkeypatch, mocked_config):
    """Test Depot not known yet.

    Verify that IMEI API response contains a Real-time check for IMEI in registration list.
    """
    # APPROVED_IMEI
    # 10000000000000
    # 10000000000001
    # 10000000000002 ....
    # Verify that 10000000000000 (14 digits) in reg_list
    # Verify that 1000000000000200 (16 digits) in reg_list
    imei_list = ['10000000000000', '10000000000001', '1000000000000200']
    check_in_registration_list_helper(imei_list, False, api_version, flask_app)
with db_conn, db_conn.cursor() as cursor: cursor.execute('SELECT tac FROM gsma_data ORDER BY tac') result_list = [res.tac for res in cursor] assert result.exit_code == 0 assert result_list == ['01234401', '01234402', '01234403', '01234404', '01234405', '01234406', '01234407'] assert len(result_list) == 7 # Test Part 1) assert 'Rows in table prior to import: 2' in logger_stream_contents(logger) # Test Part 2) - self.staging_row_count assert 'Rows supplied in full input file: 8' in logger_stream_contents(logger) # Test Part 2) - import_table_new_row_count=rows_before + rows_inserted - rows_deleted assert 'Rows in table after import: 7' in logger_stream_contents(logger) @pytest.mark.parametrize('gsma_tac_db_importer', [GSMADataParams(filename='gsma_dump_white_spaces.txt')], indirect=True) def test_preprocess_trim(gsma_tac_db_importer, logger, db_conn): """Test Depot ID not known yet. Verify that a white space at the start or the end of a field can be handled and imported successfully. The expected behaviour is that the white space is stripped out. """ expect_success(gsma_tac_db_importer, 2, db_conn, logger) with db_conn, db_conn.cursor() as cursor: cursor.execute('SELECT tac, manufacturer, bands, model_name FROM gsma_data ORDER BY tac') result = [(x.tac, x.manufacturer, x.bands, x.model_name) for x in cursor.fetchall()] assert result == [('21782434', None, 'a0a0db6e9eccb4a8c3a85452b79db6c793398d6a', '927824c30540c400f59b6c02aeb0a30d5033eb1a'), ('38245933', '326d9e7920b30b698f189a83d2be6f4384496ebc', '6cc923523f 690fe51b51efc747451bfbbe1994d9',
# Expected call is statsd.gauge(key, 1, delta=True) mocked_statsd.gauge.assert_any_call( 'dirbs.import.operator.test_operator.validation_failures.prevalidation', 1, delta=True) @pytest.mark.parametrize('operator_data_importer', [ OperatorDataParams( filename='operator1_imeizerocheck1_20160701_20160731.csv', extract=False) ], indirect=True) @pytest.mark.parametrize( 'gsma_tac_db_importer', [GSMADataParams(filename='gsma_dump_small_imeicheck_2016.txt')], indirect=True) def test_file_level_failure_metrics(mocked_statsd, logger, operator_data_importer, gsma_tac_db_importer): """Test Depot ID TBD. Verify that StatsD is sent stats when a file-level validation failure happens during import. """ gsma_tac_db_importer.import_data() expect_failure(operator_data_importer) # Expected call is statsd.gauge(key, 1, delta=True) mocked_statsd.gauge.assert_any_call( 'dirbs.import.operator.test_operator.validation_failures.leading_zero', 1, delta=True)
obj={'APP_CONFIG': mocked_config}) assert result.exit_code == 0 @pytest.mark.parametrize( 'operator_data_importer, stolen_list_importer, pairing_list_importer, ' 'gsma_tac_db_importer, registration_list_importer', [(OperatorDataParams( filename= 'testData1-operator-operator1-anonymized_20161101_20161130.csv', operator='operator1', perform_unclean_checks=False, extract=False), StolenListParams(filename='testData1-sample_stolen_list-anonymized.csv'), PairListParams(filename='testData1-sample_pairinglist-anonymized.csv'), GSMADataParams( filename='testData1-gsmatac_operator4_operator1_anonymized.txt'), RegistrationListParams(filename='sample_registration_list.csv'))], indirect=True) def test_cli_repartition(postgres, mocked_config, db_conn, operator_data_importer, registration_list_importer, pairing_list_importer, stolen_list_importer, gsma_tac_db_importer, tmpdir, logger, metadata_db_conn, mocked_statsd): """Test that the dirbs-db partition script runs without an error.""" import_data(operator_data_importer, 'operator_data', 17, db_conn, logger) import_data(gsma_tac_db_importer, 'gsma_data', 13, db_conn, logger) import_data(stolen_list_importer, 'stolen_list', 21, db_conn, logger) import_data(registration_list_importer, 'registration_list', 20, db_conn, logger) import_data(pairing_list_importer, 'pairing_list', 7, db_conn, logger)
def test_prune_classification_state(db_conn, metadata_db_conn, tmpdir, logger, mocked_config,
                                    operator_data_importer, stolen_list_importer, monkeypatch,
                                    gsma_tac_db_importer, postgres, mocked_statsd):
    """Test Depot ID not known yet.

    A regulator/partner should be able to run a CLI command to prune classification_state
    table. It will remove any classification state data related to obsolete conditions and
    data with end_date is earlier than the start of the retention window.
    """
    # Step 1:
    # import gsma_dump empty non tac and classify for all the conditions
    # ['gsma_not_found', 'local_stolen', 'duplicate_mk1', 'malformed_imei', 'not_on_registration_list', ..]
    # classification_state_table contains records for cond_name "gsma_not_found". They all have end_date==None
    # Step 2 - TEST RETENTION WINDOW:
    # CLI prune will delete rows where the end_date is earlier than the start of the retention window.
    # retention_months=6
    # curr_date = datetime.date(2017, 7, 13)
    # Import different gsma_db and classify to have different end date for gsma_not_found records in
    # the classification table.
    # Step 3 - TEST CONDITIONS NOT EXISTING:
    # CLI prune for classification_state will look at the current configured conditions and
    # remove any entries corresponding to cond_names that no longer exist in the config.
    # Load a new yaml file without stolen_list condition and run the prune CLI command to test.
    # -- yaml cond config list:
    # ['gsma_not_found', 'malformed_imei', 'not_on_registration_list']
    # -- classification_state condition list:
    # ['gsma_not_found', 'local_stolen', 'malformed_imei', 'not_on_registration_list']

    # Step 1
    operator_data_importer.import_data()
    stolen_list_importer.import_data()
    gsma_tac_db_importer.import_data()
    runner = CliRunner()
    db_conn.commit()
    runner.invoke(dirbs_classify_cli, ['--no-safety-check', '--curr-date', '20170713'],
                  obj={'APP_CONFIG': mocked_config})
    with db_conn.cursor() as cur:
        cur.execute(
            'SELECT imei_norm, cond_name, end_date FROM classification_state ORDER BY cond_name, imei_norm'
        )
        res_list = cur.fetchall()
    # All classified rows are open (end_date None) at this point.
    assert len(res_list) == 32
    assert [(x.imei_norm, x.cond_name, x.end_date) for x in res_list] == \
        [('21111111111111', 'gsma_not_found', None), ('21111111111112', 'gsma_not_found', None),
         ('8888#888622222', 'gsma_not_found', None), ('88888862222209', 'gsma_not_found', None),
         ('88888888622222', 'gsma_not_found', None), ('12432807272315', 'local_stolen', None),
         ('12640904324427', 'local_stolen', None), ('12640904372723', 'local_stolen', None),
         ('12727231272313', 'local_stolen', None), ('12875502464321', 'local_stolen', None),
         ('12875502572723', 'local_stolen', None), ('12875507272312', 'local_stolen', None),
         ('12904502843271', 'local_stolen', None), ('12909602432585', 'local_stolen', None),
         ('12909602872723', 'local_stolen', None), ('12922902206948', 'local_stolen', None),
         ('12922902243260', 'local_stolen', None), ('12922902432742', 'local_stolen', None),
         ('12922902432776', 'local_stolen', None), ('12957272313271', 'local_stolen', None),
         ('17272317272723', 'local_stolen', None), ('56773605727231', 'local_stolen', None),
         ('64220204327947', 'local_stolen', None), ('64220297727231', 'local_stolen', None),
         ('72723147267231', 'local_stolen', None), ('72723147267631', 'local_stolen', None),
         ('8888#888622222', 'malformed_imei', None),
         ('21111111111111', 'not_on_registration_list', None),
         ('21111111111112', 'not_on_registration_list', None),
         ('8888#888622222', 'not_on_registration_list', None),
         ('88888862222209', 'not_on_registration_list', None),
         ('88888888622222', 'not_on_registration_list', None)]

    # Step 2
    # all records have end_date == None. Classify twice to have records with different end_date
    # first classification
    with get_importer(
            GSMADataImporter,
            db_conn,
            metadata_db_conn,
            mocked_config.db_config,
            tmpdir,
            logger,
            mocked_statsd,
            GSMADataParams(
                filename='gsma_not_found_anonymized.txt')) as imp:
        expect_success(imp, 1, db_conn, logger)

    runner.invoke(dirbs_classify_cli, ['--no-safety-check', '--curr-date', '20170713'],
                  obj={'APP_CONFIG': mocked_config})
    # One TAC is now known, so its gsma_not_found record is closed with end_date 2017-07-13.
    with db_conn.cursor() as cur:
        cur.execute("""SELECT imei_norm, cond_name, end_date FROM classification_state ORDER BY cond_name, imei_norm""")
        res_list = cur.fetchall()
    gsma_not_found_list = [(x.imei_norm, x.cond_name, x.end_date)
                           for x in res_list if x.cond_name == 'gsma_not_found']
    assert gsma_not_found_list == [
        ('21111111111111', 'gsma_not_found', None),
        ('21111111111112', 'gsma_not_found', None),
        ('8888#888622222', 'gsma_not_found', None),
        ('88888862222209', 'gsma_not_found', None),
        ('88888888622222', 'gsma_not_found', datetime.date(2017, 7, 13))
    ]

    # second classification
    with get_importer(
            GSMADataImporter,
            db_conn,
            metadata_db_conn,
            mocked_config.db_config,
            tmpdir,
            logger,
            mocked_statsd,
            GSMADataParams(
                filename='prune_classification_state_gsma.txt')) as imp:
        expect_success(imp, 1, db_conn, logger)

    runner.invoke(dirbs_classify_cli, ['--no-safety-check', '--curr-date', '20160101'],
                  obj={'APP_CONFIG': mocked_config})
    # Classifying at an earlier curr_date closes two more records with end_date 2016-01-01,
    # which falls outside the 6-month retention window used in Step 3.
    with db_conn.cursor() as cur:
        cur.execute('SELECT imei_norm, cond_name, end_date '
                    ' FROM classification_state '
                    'ORDER BY cond_name, imei_norm, end_date')
        res_list = cur.fetchall()
    gsma_not_found_list = [(x.imei_norm, x.cond_name, x.end_date)
                           for x in res_list if x.cond_name == 'gsma_not_found']
    assert gsma_not_found_list == [
        ('21111111111111', 'gsma_not_found', datetime.date(2016, 1, 1)),
        ('21111111111112', 'gsma_not_found', datetime.date(2016, 1, 1)),
        ('8888#888622222', 'gsma_not_found', None),
        ('88888862222209', 'gsma_not_found', None),
        ('88888888622222', 'gsma_not_found', datetime.date(2017, 7, 13)),
        ('88888888622222', 'gsma_not_found', None)
    ]

    # Step 3
    # Expect not to be in classification_state table after prune:
    # IMEIs 21111111111111 and 21111111111112 for condition gsma_not found (due to end_date)
    # IMEIs for condition stolen_list (due to condition no longer exist)

    # this commit is to remove locks from the classification_state table so that
    # the table can be dropped inside the prune. The locks were activated by the CLI to classify.
    db_conn.commit()
    # Reconfigure conditions WITHOUT local_stolen so prune treats it as obsolete.
    cond_dict_list = [{
        'label': 'gsma_not_found',
        'reason': 'TAC not found in GSMA TAC database',
        'grace_period_days': 30,
        'blocking': True,
        'dimensions': [{
            'module': 'gsma_not_found'
        }]
    }, {
        'label': 'malformed_imei',
        'reason': 'Invalid characters detected in IMEI',
        'grace_period_days': 0,
        'blocking': False,
        'dimensions': [{
            'module': 'malformed_imei'
        }]
    }, {
        'label': 'not_on_registration_list',
        'reason': 'IMEI not found on local registration list',
        'grace_period_days': 0,
        'blocking': True,
        'max_allowed_matching_ratio': 1.0,
        'dimensions': [{
            'module': 'not_on_registration_list'
        }]
    }]
    monkeypatch.setattr(mocked_config, 'conditions',
                        from_cond_dict_list_to_cond_list(cond_dict_list))
    with db_conn.cursor() as cur:
        result = runner.invoke(
            dirbs_prune_cli, ['--curr-date', '20170913', 'classification_state'],
            obj={'APP_CONFIG': mocked_config})
        assert result.exit_code == 0
        # ITEMS REMOVED
        # [('17272317272723', 'local_stolen', None), ('12909602872723', 'local_stolen', None),
        # ('12875502572723', 'local_stolen', None), ('12875507272312', 'local_stolen', None),
        # ('64220297727231', 'local_stolen', None), ('12909602432585', 'local_stolen', None),
        # ('64220204327947', 'local_stolen', None), ('72723147267631', 'local_stolen', None),
        # ('72723147267231', 'local_stolen', None), ('12922902243260', 'local_stolen', None),
        # ('12875502464321', 'local_stolen', None), ('12922902432776', 'local_stolen', None),
        # ('12957272313271', 'local_stolen', None), ('12640904324427', 'local_stolen', None),
        # ('12904502843271', 'local_stolen', None), ('12922902432742', 'local_stolen', None),
        # ('12432807272315', 'local_stolen', None), ('12922902206948', 'local_stolen', None),
        # ('56773605727231', 'local_stolen', None), ('12727231272313', 'local_stolen', None),
        # ('12640904372723', 'local_stolen', None),
        # ('21111111111111', 'gsma_not_found', datetime.date(2016, 1, 1)),
        # ('21111111111112', 'gsma_not_found', datetime.date(2016, 1, 1))]
        cur.execute('SELECT imei_norm, cond_name, end_date '
                    'FROM classification_state '
                    'ORDER BY cond_name, imei_norm, end_date')
        res_list = cur.fetchall()
        pruned_class_state_table = [(x.imei_norm, x.cond_name, x.end_date) for x in res_list]
        assert pruned_class_state_table == [
            ('8888#888622222', 'gsma_not_found', None),
            ('88888862222209', 'gsma_not_found', None),
            ('88888888622222', 'gsma_not_found', datetime.date(2017, 7, 13)),
            ('88888888622222', 'gsma_not_found', None),
            ('8888#888622222', 'malformed_imei', None),
            ('21111111111111', 'not_on_registration_list', None),
            ('21111111111112', 'not_on_registration_list', None),
            ('8888#888622222', 'not_on_registration_list', None),
            ('88888862222209', 'not_on_registration_list', None),
            ('88888888622222', 'not_on_registration_list', None)
        ]
content='date,imei,imsi,msisdn\n' '20110101,8888#888622222,123456789012345,123456789012345\n' '20110101,88888888622222,123456789012345,123456789012345\n' '20110101,21111111111111,125456789012345,123456789012345\n' '20110101,21111111111112,125456789012345,123456789012345\n' '20110101,88888862222209,123456789012345,123456789012345', extract=False, perform_unclean_checks=False, perform_region_checks=False, perform_home_network_check=False, operator='operator1') ], indirect=True) @pytest.mark.parametrize( 'gsma_tac_db_importer', [GSMADataParams(filename='gsma_dump_emptynontac_july_2016.txt')], indirect=True) @pytest.mark.parametrize( 'stolen_list_importer', [StolenListParams(filename='testData1-sample_stolen_list-anonymized.csv')], indirect=True) def test_prune_classification_state(db_conn, metadata_db_conn, tmpdir, logger, mocked_config, operator_data_importer, stolen_list_importer, monkeypatch, gsma_tac_db_importer, postgres, mocked_statsd): """Test Depot ID not known yet. A regulator/partner should be able to run a CLI command to prune classification_state table. It will remove any classification state data related to obsolete conditions and data with end_date is earlier than the start of the retention window.
assert rv.status_code == 405 assert b'Method Not Allowed' in rv.data def test_method_put_not_allowed(flask_app, api_version): """Test Depot ID not known yet. Verify the version API does not support HTTP PUT and returns HTTP 405 METHOD NOT ALLOWED. """ rv = flask_app.delete(url_for('{0}.version_api'.format(api_version))) assert rv.status_code == 405 assert b'Method Not Allowed' in rv.data @pytest.mark.parametrize('gsma_tac_db_importer', [ GSMADataParams(filename='sample_gsma_import_list_anonymized.txt', extract=False) ], indirect=True) def test_exempted_device_types_raises_exception(flask_app, db_conn, api_version, monkeypatch, mocked_config, gsma_tac_db_importer): """Verify the exempted_device_type_config is validated for API requests.""" gsma_tac_db_importer.import_data() db_conn.commit() # Verify an exception is thrown if an invalid device type is specified in config. monkeypatch.setattr(mocked_config.region_config, 'exempted_device_types', ['Vehicle', 'Car']) with pytest.raises(Exception): rv = flask_app.get(url_for('{0}.version_api'.format(api_version)))