Example #1
def test_imei_api_registration_list(per_test_flask_app, per_test_postgres,
                                    logger, mocked_statsd, tmpdir, request,
                                    mocked_config, api_version):
    """Test IMEI API call after registration list import."""
    dsn = per_test_postgres.dsn()
    db_config = DBConfig(ignore_env=True, **dsn)
    with create_db_connection(db_config) as conn, \
            create_db_connection(db_config, autocommit=True) as metadata_conn:
        with get_importer(
                GSMADataImporter, conn, metadata_conn, db_config, tmpdir,
                logger, mocked_statsd,
                GSMADataParams(
                    filename='gsma_dump_small_july_2016.txt')) as imp:
            imp.import_data()

        with get_importer(
                RegistrationListImporter, conn, metadata_conn, db_config,
                tmpdir, logger, mocked_statsd,
                RegistrationListParams(
                    content=
                    'APPROVED_IMEI,make,model,status,model_number,brand_name,'
                    'device_type,radio_interface,device_id\n'
                    '21260934000003,,,,,,,,1')) as imp:
            imp.import_data()

    if api_version == 'v1':
        rv = per_test_flask_app.get(
            url_for('{0}.imei_api'.format(api_version), imei='21260934000003'))
        assert rv.status_code == 200
    else:  # api version 2.0
        rv = per_test_flask_app.get(
            url_for('{0}.imei_get_api'.format(api_version),
                    imei='21260934000003'))
        assert rv.status_code == 200
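
The branch on api_version above implies the api_version fixture is parametrized over the supported API versions. A minimal conftest sketch, assuming the values are 'v1' and 'v2' (the real DIRBS conftest may define this differently):

# Hypothetical conftest.py sketch -- the parameter values are an assumption.
import pytest


@pytest.fixture(params=['v1', 'v2'])
def api_version(request):
    """Run each API test once per supported API version."""
    return request.param
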
Example #2
def test_imei_api_pairing_list(per_test_flask_app, per_test_postgres, logger,
                               mocked_statsd, tmpdir, request, mocked_config,
                               api_version):
    """Test IMEI API call after pairing list import."""
    dsn = per_test_postgres.dsn()
    db_config = DBConfig(ignore_env=True, **dsn)
    with create_db_connection(db_config) as conn, \
            create_db_connection(db_config, autocommit=True) as metadata_conn:
        with get_importer(
                GSMADataImporter, conn, metadata_conn, db_config, tmpdir,
                logger, mocked_statsd,
                GSMADataParams(
                    filename='gsma_dump_small_july_2016.txt')) as imp:
            imp.import_data()

        with get_importer(
                PairingListImporter, conn, metadata_conn, db_config, tmpdir,
                logger, mocked_statsd,
                PairListParams(content='imei,imsi\n'
                               '811111013136460,111018001111111\n'
                               '359000000000000,111015113222222\n'
                               '357756065985824,111015113333333')) as imp:
            imp.import_data()

    if api_version == 'v1':
        rv = per_test_flask_app.get(
            url_for('{0}.imei_api'.format(api_version), imei='21260934000003'))
        assert rv.status_code == 200
    else:  # api version 2.0
        rv = per_test_flask_app.get(
            url_for('{0}.imei_get_pairings_api'.format(api_version),
                    imei='21260934000003'))
        assert rv.status_code == 200
def test_historical_check_percentage(device_association_list_importer, logger, mocked_statsd, db_conn,
                                     metadata_db_conn, mocked_config, tmpdir):
    """Verify that the list data is not imported if it fails the historical check."""
    expect_success(device_association_list_importer, 5, db_conn, logger)

    # size increased, importer succeeds
    with get_importer(DeviceAssociationListImporter,
                      db_conn,
                      metadata_db_conn,
                      mocked_config.db_config,
                      tmpdir,
                      logger,
                      mocked_statsd,
                      DeviceAssociationListParams(filename='sample_association_list_large.csv',
                                                  import_size_variation_percent=mocked_config
                                                           .associations_threshold_config
                                                           .import_size_variation_percent,
                                                  import_size_variation_absolute=mocked_config
                                                           .associations_threshold_config
                                                           .import_size_variation_absolute
                                                  )) as imp:
        expect_success(imp, 99, db_conn, logger)

    # importing file with drop in size greater than 5%
    with get_importer(DeviceAssociationListImporter,
                      db_conn,
                      metadata_db_conn,
                      mocked_config.db_config,
                      tmpdir,
                      logger,
                      mocked_statsd,
                      DeviceAssociationListParams(filename='sample_association_list_large_drop_5_percent_greater.csv',
                                                  import_size_variation_percent=mocked_config
                                                           .associations_threshold_config
                                                           .import_size_variation_percent,
                                                  import_size_variation_absolute=mocked_config
                                                           .associations_threshold_config
                                                           .import_size_variation_absolute
                                                  )) as imp:
        expect_failure(imp, exc_message='Failed import size historic check')

    # importing file with drop in size less than 5%
    with get_importer(DeviceAssociationListImporter,
                      db_conn,
                      metadata_db_conn,
                      mocked_config.db_config,
                      tmpdir,
                      logger,
                      mocked_statsd,
                      DeviceAssociationListParams(filename='sample_association_list_large_drop_5_percent_less.csv',
                                                  import_size_variation_percent=mocked_config
                                                           .associations_threshold_config
                                                           .import_size_variation_percent,
                                                  import_size_variation_absolute=mocked_config
                                                           .associations_threshold_config
                                                           .import_size_variation_absolute
                                                  )) as imp:
        expect_success(imp, 95, db_conn, logger)
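
The three imports above exercise a percentage-based historic size check: growth is allowed, a drop of more than 5% is rejected, and a drop of less than 5% passes. A simplified sketch of that kind of check, for illustration only (the real importer logic also honours import_size_variation_absolute and lives inside the importer classes):

# Illustrative only -- not the DIRBS implementation.
def passes_historic_percent_check(previous_count, new_count, max_drop_percent=5.0):
    """Return True unless the new import shrinks by more than max_drop_percent."""
    if new_count >= previous_count:
        return True
    drop_percent = (previous_count - new_count) / previous_count * 100.0
    return drop_percent <= max_drop_percent


assert passes_historic_percent_check(99, 95) is True   # ~4% drop, allowed
assert passes_historic_percent_check(99, 90) is False  # ~9% drop, rejected
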
def test_classify(per_test_postgres, db_user, tmpdir, logger, mocked_statsd, monkeypatch, mocked_config):
    """Test classify works with the security role created based on abstract role."""
    dsn = per_test_postgres.dsn()
    db_config = DBConfig(ignore_env=True, **dsn)
    with create_db_connection(db_config) as conn, create_db_connection(db_config, autocommit=True) as metadata_conn:
        with get_importer(OperatorDataImporter,
                          conn,
                          metadata_conn,
                          db_config,
                          tmpdir,
                          logger,
                          mocked_statsd,
                          OperatorDataParams(
                              content='date,imei,imsi,msisdn\n'
                                      '20110101,8888#888622222,123456789012345,123456789012345\n'
                                      '20110101,88888888622222,123456789012345,123456789012345\n'
                                      '20110101,8888888862222209,123456789012345,123456789012345\n'
                                      '20110101,88888862222209**,123456789012345,123456789012345',
                              extract=False,
                              perform_unclean_checks=False,
                              perform_region_checks=False,
                              perform_home_network_check=False,
                              operator='operator1')) as imp:
            imp.import_data()

        with get_importer(GSMADataImporter,
                          conn,
                          metadata_conn,
                          db_config,
                          tmpdir,
                          logger,
                          mocked_statsd,
                          GSMADataParams(filename='gsma_not_found_anonymized.txt')) as imp:
            imp.import_data()

        with get_importer(RegistrationListImporter,
                          conn,
                          metadata_conn,
                          db_config,
                          tmpdir,
                          logger,
                          mocked_statsd,
                          RegistrationListParams(filename='sample_registration_list.csv')) as imp:
            imp.import_data()

    # Run dirbs-classify using db args from the temp postgres instance
    runner = CliRunner()
    monkeypatch.setattr(mocked_config.db_config, 'user', db_user)
    result = runner.invoke(dirbs_classify_cli, ['--no-safety-check'], obj={'APP_CONFIG': mocked_config})

    if db_user in ['dirbs_poweruser_login', 'dirbs_classify_user']:
        assert result.exit_code == 0
    else:
        assert result.exit_code != 0
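
The db_user fixture drives the role-based assertions here and in the report, prune and listgen tests below. A plausible sketch of its parametrization, assuming it simply yields each role name (the privileged names come from the assertions above; 'some_unprivileged_role' is a placeholder):

# Hypothetical sketch of the db_user fixture -- the role list is partly a placeholder.
import pytest


@pytest.fixture(params=['dirbs_poweruser_login', 'dirbs_classify_user', 'some_unprivileged_role'])
def db_user(request):
    """Parametrize tests over database roles so role-based access checks run once per role."""
    return request.param
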
def test_historical_check_percentage(pairing_list_importer, logger, mocked_statsd, db_conn, metadata_db_conn,
                                     mocked_config, tmpdir):
    """Test Depot ID 96662/5.

    Verify that pairing list data is not imported if it fails the historical check.
    """
    expect_success(pairing_list_importer, 5, db_conn, logger)

    # size increased, the importer succeeds.
    with get_importer(PairingListImporter,
                      db_conn,
                      metadata_db_conn,
                      mocked_config.db_config,
                      tmpdir,
                      logger,
                      mocked_statsd,
                      PairListParams(filename='sample_pairing_list_large.csv',
                                     import_size_variation_percent=mocked_config.pairing_threshold_config.
                                     import_size_variation_percent,
                                     import_size_variation_absolute=mocked_config.pairing_threshold_config.
                                     import_size_variation_absolute)) as imp:
        expect_success(imp, 99, db_conn, logger)

    # importing file with drop in size greater than 5%
    with get_importer(PairingListImporter,
                      db_conn,
                      metadata_db_conn,
                      mocked_config.db_config,
                      tmpdir,
                      logger,
                      mocked_statsd,
                      PairListParams(filename=data_file_to_test(90, imei_imsi=True),
                                     import_size_variation_percent=mocked_config.pairing_threshold_config.
                                     import_size_variation_percent,
                                     import_size_variation_absolute=mocked_config.pairing_threshold_config.
                                     import_size_variation_absolute)) as imp:
        expect_failure(imp, exc_message='Failed import size historic check')

    # importing file with drop in size less than 5%
    with get_importer(PairingListImporter,
                      db_conn,
                      metadata_db_conn,
                      mocked_config.db_config,
                      tmpdir,
                      logger,
                      mocked_statsd,
                      PairListParams(filename=data_file_to_test(95, imei_imsi=True),
                                     import_size_variation_percent=mocked_config.pairing_threshold_config.
                                     import_size_variation_percent,
                                     import_size_variation_absolute=mocked_config.pairing_threshold_config.
                                     import_size_variation_absolute)) as imp:
        expect_success(imp, 95, db_conn, logger)
Example #6
def test_historical_check_percentage_succeeds(stolen_list_importer, logger,
                                              mocked_statsd, db_conn,
                                              mocked_config, metadata_db_conn,
                                              tmpdir):
    """Test Depot ID not known yet.

    Verify that local stolen data is successfully imported after importing two files, where the
    second file is 80% of the size of the first and the threshold value is 75.
    """
    expect_success(stolen_list_importer, 100, db_conn, logger)

    with get_importer(
            StolenListImporter, db_conn, metadata_db_conn,
            mocked_config.db_config, tmpdir, logger, mocked_statsd,
            StolenListParams(
                filename=data_file_to_test(
                    80,
                    imei_custom_header='imei,reporting_date,'
                    'status',
                    imei_imsi=False),
                import_size_variation_percent=mocked_config.
                stolen_threshold_config.import_size_variation_percent,
                import_size_variation_absolute=mocked_config.
                stolen_threshold_config.import_size_variation_absolute)
    ) as imp:
        expect_success(imp, 80, db_conn, logger)
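
Several tests above build their input with data_file_to_test(n, ...). A hedged sketch of what such a helper presumably does, generating a CSV with n data rows under the requested header (the real helper in the DIRBS test suite may build rows and place files differently):

import os
import tempfile


# Hypothetical sketch only -- illustrates the shape of data_file_to_test, not its real code.
def data_file_to_test(row_count, imei_custom_header='imei', imei_imsi=False):
    """Write a CSV with row_count data rows matching the requested header and return its filename."""
    header = 'imei,imsi' if imei_imsi else imei_custom_header
    extra_columns = header.count(',')  # pad every row to the header's column count
    path = os.path.join(tempfile.gettempdir(), 'import_data_{0}.csv'.format(row_count))
    with open(path, 'w') as f:
        f.write(header + '\n')
        for i in range(row_count):
            imei = '{0:d}'.format(10000000000000 + i)  # syntactically valid 14-digit IMEI
            values = [imei] + (['11101{0:010d}'.format(i)] if imei_imsi else [''] * extra_columns)
            f.write(','.join(values) + '\n')
    return path
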
Example #7
def test_tac_api(per_test_flask_app, per_test_postgres, logger, mocked_statsd,
                 tmpdir, request, mocked_config, api_version):
    """Test TAC API call works with the security role created based on abstract role."""
    dsn = per_test_postgres.dsn()
    dsn['user'] = '******'
    db_config = DBConfig(ignore_env=True, **dsn)
    with create_db_connection(db_config) as conn, create_db_connection(
            db_config, autocommit=True) as metadata_conn:
        with get_importer(
                GSMADataImporter, conn, metadata_conn, db_config, tmpdir,
                logger, mocked_statsd,
                GSMADataParams(
                    filename='sample_gsma_import_list_anonymized.txt')) as imp:
            imp.import_data()

    current_user = request.node.callspec.params['per_test_flask_app']
    if current_user in ['dirbs_poweruser_login', 'dirbs_api_user']:
        rv = per_test_flask_app.get(
            url_for('{0}.tac_api'.format(api_version), tac='01234404'))
        assert rv.status_code == 200
        results = json.loads(rv.data.decode('utf-8'))
        assert results['gsma'] is not None
    else:
        with pytest.raises(DatabaseRoleCheckException):
            per_test_flask_app.get(
                url_for('{0}.tac_api'.format(api_version), tac='01234404'))
Example #8
def test_same_import_twice(gsma_tac_db_importer, mocked_config, logger, mocked_statsd, db_conn,
                           metadata_db_conn, tmpdir):
    """Test Depot not known yet.

    Verify that if the same file is imported twice, identical entries are ignored and not added to the historic table.
    """
    expect_success(gsma_tac_db_importer, 3, db_conn, logger)
    with db_conn.cursor() as cursor:
        cursor.execute('SELECT * FROM historic_gsma_data')
        first_import = cursor.rowcount

    with get_importer(GSMADataImporter,
                      db_conn,
                      metadata_db_conn,
                      mocked_config.db_config,
                      tmpdir,
                      logger,
                      mocked_statsd,
                      GSMADataParams(filename='gsma_dump_small_july_2016.txt',
                                     extract=False)) as imp:
        expect_success(imp, 3, db_conn, logger)

    with db_conn.cursor() as cursor:
        cursor.execute('SELECT * FROM historic_gsma_data')
        second_import = cursor.rowcount

    assert first_import == second_import == 3
Example #9
def test_unobserved_valid_imeis(flask_app, gsma_tac_db_importer, operator_data_importer, classification_data,
                                db_conn, metadata_db_conn, mocked_config, tmpdir, logger, mocked_statsd, api_version):
    """Test Depot ID 96544/1.

    Verify the IMEI API supports HTTP GET and responds with correct
    HTTP status codes and response body.
    """
    gsma_tac_db_importer.import_data()
    operator_data_importer.import_data()
    with get_importer(OperatorDataImporter,
                      db_conn,
                      metadata_db_conn,
                      mocked_config.db_config,
                      tmpdir,
                      logger,
                      mocked_statsd,
                      OperatorDataParams(
                          filename='testData1-operator-operator4-anonymized_20161101_20161130.csv',
                          extract=False,
                          operator='operator4',
                          perform_unclean_checks=False,
                          perform_region_checks=False,
                          perform_home_network_check=False)) as new_imp:
        new_imp.import_data()

    rv = flask_app.get(url_for('{0}.imei_api'.format(api_version), imei='3884773337002633'))
    assert rv.status_code == 200
    data = json.loads(rv.data.decode('utf-8'))
    assert data['imei_norm'] == '38847733370026'
    for k, v in data['classification_state']['blocking_conditions'].items():
        assert v is False
    for k, v in data['classification_state']['informative_conditions'].items():
        assert v is False
    assert data['realtime_checks']['invalid_imei'] is False
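
The assertion that '3884773337002633' normalises to '38847733370026' relies on IMEI normalisation keeping the first 14 digits of a long, all-numeric IMEI. A simplified sketch of that rule (the full DIRBS normalisation handles further edge cases):

# Simplified sketch of IMEI normalisation -- not the full DIRBS rule set.
def imei_norm(imei):
    """Keep the first 14 digits of a long numeric IMEI; otherwise just uppercase it."""
    imei = imei.strip()
    if len(imei) > 14 and imei[:14].isdigit():
        return imei[:14]
    return imei.upper()


assert imei_norm('3884773337002633') == '38847733370026'
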
def test_historical_check_percentage_succeeds(registration_list_importer,
                                              logger, mocked_statsd, db_conn,
                                              metadata_db_conn, mocked_config,
                                              tmpdir):
    """Test Depot ID not known yet.

    Verify that local import data is successfully imported after importing two files, where the
    second file is 80% of the size of the first and the threshold value is 75.
    """
    expect_success(registration_list_importer, 100, db_conn, logger)

    with get_importer(
            RegistrationListImporter, db_conn, metadata_db_conn,
            mocked_config.db_config, tmpdir, logger, mocked_statsd,
            RegistrationListParams(
                filename=data_file_to_test(80,
                                           imei_custom_header='approved_imei,'
                                           'make,model,'
                                           'status,model_number,'
                                           'brand_name,device_type,'
                                           'radio_interface,'
                                           'device_id'),
                import_size_variation_percent=mocked_config.
                import_threshold_config.import_size_variation_percent,
                import_size_variation_absolute=mocked_config.
                import_threshold_config.import_size_variation_absolute)
    ) as imp:
        expect_success(imp, 80, db_conn, logger)
def test_delta_file_prevalidation(logger, db_conn, metadata_db_conn,
                                  mocked_config, tmpdir, mocked_statsd,
                                  pairing_list_importer):
    """Test Depot not available yet.

    Test pre-validation schemas.
    """
    # the 'update' change_type is allowed only for the stolen list import
    expect_failure(
        pairing_list_importer,
        exc_message=
        'Pre-validation failed: b\'Error:   regex("^(add|remove)$") fails for line: 2, '
        'column: change_type, value: "update"\\nFAIL')
    # change_type must be lower case
    with get_importer(
            PairingListImporter, db_conn, metadata_db_conn,
            mocked_config.db_config, tmpdir, logger, mocked_statsd,
            PairListParams(content='imei,imsi,msisdn,change_type\n'
                           '12345678901234,11111111111111,222222222222222,ADD',
                           delta=True)) as imp:
        expect_failure(
            imp,
            exc_message=
            'Pre-validation failed: b\'Error:   regex("^(add|remove)$") fails for line: '
            '1, column: change_type, value: "ADD"\\nFAIL')
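
Both failures above come from the change_type column, which must match a lowercase add|remove pattern for pairing list deltas. A quick standalone illustration of why 'update' and 'ADD' are rejected:

import re

# Mirrors the regex quoted in the pre-validation error messages above.
CHANGE_TYPE_RE = re.compile('^(add|remove)$')

assert CHANGE_TYPE_RE.match('add') is not None
assert CHANGE_TYPE_RE.match('update') is None  # only add/remove are valid for pairing list deltas
assert CHANGE_TYPE_RE.match('ADD') is None     # values must be lower case
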
Example #12
def test_report(per_test_postgres, tmpdir, db_user, logger, mocked_statsd,
                mocked_config, monkeypatch):
    """Test catalog works with the security role created based on abstract role."""
    dsn = per_test_postgres.dsn()
    db_config = DBConfig(ignore_env=True, **dsn)
    with create_db_connection(db_config) as conn, create_db_connection(
            db_config, autocommit=True) as metadata_conn:
        with get_importer(
                OperatorDataImporter, conn, metadata_conn, db_config, tmpdir,
                logger, mocked_statsd,
                OperatorDataParams(
                    filename=
                    'testData1-operator-operator1-anonymized_20161101_20161130.csv',
                    operator='operator1',
                    perform_unclean_checks=False,
                    extract=False)) as imp:
            imp.import_data()

    runner = CliRunner()
    output_dir = str(tmpdir)
    monkeypatch.setattr(mocked_config.db_config, 'user', db_user)
    result = runner.invoke(dirbs_report_cli, [
        'standard', '--disable-retention-check', '--disable-data-check', '11',
        '2016', output_dir
    ],
                           obj={'APP_CONFIG': mocked_config})

    if db_user in ['dirbs_poweruser_login', 'dirbs_report_user']:
        assert result.exit_code == 0
    else:
        assert result.exit_code != 0
Example #13
def test_prune(per_test_postgres, tmpdir, logger, mocked_statsd, db_user,
               mocked_config, monkeypatch):
    """Test prune works with the poweruser security role."""
    dsn = per_test_postgres.dsn()
    db_config = DBConfig(ignore_env=True, **dsn)
    with create_db_connection(db_config) as conn, create_db_connection(
            db_config, autocommit=True) as metadata_conn:
        with get_importer(
                OperatorDataImporter, conn, metadata_conn, db_config, tmpdir,
                logger, mocked_statsd,
                OperatorDataParams(
                    filename=
                    'testData1-operator-operator4-anonymized_20161101_20161130.csv',
                    operator='1',
                    extract=False,
                    perform_leading_zero_check=False,
                    mcc_mnc_pairs=[{
                        'mcc': '111',
                        'mnc': '04'
                    }],
                    perform_unclean_checks=False,
                    perform_file_daterange_check=False)) as imp:
            imp.import_data()
            conn.commit()

    runner = CliRunner()
    monkeypatch.setattr(mocked_config.db_config, 'user', db_user)
    result = runner.invoke(dirbs_prune_cli, ['triplets'],
                           obj={'APP_CONFIG': mocked_config})
    if db_user in ['dirbs_poweruser_login']:
        assert result.exit_code == 0
    else:
        assert result.exit_code != 0
Example #14
def test_historic_failure_metrics(mocked_statsd, mocked_config, logger,
                                  gsma_tac_db_importer, tmpdir, db_conn,
                                  metadata_db_conn):
    """Test Depot ID TBD.

    Verify that StatsD is sent stats when a threshold validation failure happens during import.
    """
    gsma_tac_db_importer.import_data()

    # Try a small import
    with get_importer(
            GSMADataImporter, db_conn, metadata_db_conn,
            mocked_config.db_config, tmpdir, logger, mocked_statsd,
            GSMADataParams(
                filename='gsma_dump_small_july_2016.txt',
                import_size_variation_percent=mocked_config.
                gsma_threshold_config.import_size_variation_percent,
                import_size_variation_absolute=mocked_config.
                gsma_threshold_config.import_size_variation_absolute,
                extract=False)) as gsma_small_importer:
        expect_failure(gsma_small_importer,
                       exc_message='Failed import size historic check')

    # Expected call is statsd.gauge(key, 1, delta=True)
    mocked_statsd.gauge.assert_any_call(
        'dirbs.import.gsma_tac.validation_failures.historic_import_size',
        1,
        delta=True)
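
The final assertion works because mocked_statsd is presumably a mock object that records its calls. A minimal standalone illustration of the same assert_any_call pattern using unittest.mock:

from unittest.mock import MagicMock

# Stand-in for the mocked_statsd fixture: a MagicMock records every call made on it.
statsd = MagicMock()
statsd.gauge('dirbs.import.gsma_tac.validation_failures.historic_import_size', 1, delta=True)

# Passes only if gauge() was called at least once with exactly these arguments.
statsd.gauge.assert_any_call('dirbs.import.gsma_tac.validation_failures.historic_import_size',
                             1, delta=True)
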
def test_listgen(per_test_postgres, tmpdir, logger, mocked_statsd, db_user, mocked_config, monkeypatch,
                 classification_data):
    """Test that the dirbs-listgen instance runs without an error."""
    dsn = per_test_postgres.dsn()
    db_config = DBConfig(ignore_env=True, **dsn)
    with create_db_connection(db_config) as conn, create_db_connection(db_config, autocommit=True) as metadata_conn:
        with get_importer(OperatorDataImporter,
                          conn,
                          metadata_conn,
                          db_config,
                          tmpdir,
                          logger,
                          mocked_statsd,
                          OperatorDataParams(
                              content='date,imei,imsi,msisdn\n'
                                      '20160203,811111013136460,111018001111111,223338000000\n'
                                      '20160203,359000000000000,111015113222222,223355000000\n'
                                      '20160203,357756065985824,111015113333333,223355111111',
                              cc=['22', '74'],
                              mcc_mnc_pairs=[{'mcc': '111', 'mnc': '01'}],
                              operator='operator1',
                              extract=False)) as imp:
            imp.import_data()

        with get_importer(PairingListImporter,
                          conn,
                          metadata_conn,
                          db_config,
                          tmpdir,
                          logger,
                          mocked_statsd,
                          PairListParams(
                              content='imei,imsi,msisdn\n'
                                      '811111013136460,111018001111111,234555555555550\n'
                                      '359000000000000,111015113222222,234555555555551\n'
                                      '357756065985824,111015113333333,234555555555552')) as imp:
            imp.import_data()

    # Now run listgen as requested user
    runner = CliRunner()
    monkeypatch.setattr(mocked_config.db_config, 'user', db_user)
    output_dir = str(tmpdir)
    result = runner.invoke(dirbs_listgen_cli, [output_dir], obj={'APP_CONFIG': mocked_config})
    if db_user in ['dirbs_poweruser_login', 'dirbs_listgen_user']:
        assert result.exit_code == 0
    else:
        assert result.exit_code != 0
def test_imei_api(per_test_flask_app, per_test_postgres, logger, mocked_statsd, tmpdir, request, mocked_config,
                  api_version):
    """Test IMEI API call works with the security role created based on abstract role."""
    dsn = per_test_postgres.dsn()
    db_config = DBConfig(ignore_env=True, **dsn)
    with create_db_connection(db_config) as conn, \
            create_db_connection(db_config, autocommit=True) as metadata_conn:
        with get_importer(OperatorDataImporter,
                          conn,
                          metadata_conn,
                          db_config,
                          tmpdir,
                          logger,
                          mocked_statsd,
                          OperatorDataParams(
                              filename='testData1-operator-operator1-anonymized_20161101_20161130.csv',
                              operator='operator1',
                              perform_unclean_checks=False,
                              extract=False)) as imp:
            imp.import_data()

    current_user = request.node.callspec.params['per_test_flask_app']

    if api_version == 'v1':
        if current_user in ['dirbs_poweruser_login', 'dirbs_api_user']:
            rv = per_test_flask_app.get(url_for('{0}.imei_api'.format(api_version),
                                                imei='388260336982806', include_seen_with=1))
            assert rv.status_code == 200
            assert json.loads(rv.data.decode('utf-8'))['seen_with'] == \
                                                      [{'imsi': '11101400135251', 'msisdn': '22300825684694'},
                                                       {'imsi': '11101400135252', 'msisdn': '22300825684692'}]
            assert json.loads(rv.data.decode('utf-8'))['realtime_checks']['ever_observed_on_network'] is True

        else:
            with pytest.raises(DatabaseRoleCheckException):
                per_test_flask_app.get(url_for('{0}.imei_api'.format(api_version),
                                               imei='388260336982806', include_seen_with=1))
    else:  # api version 2.0
        if current_user in ['dirbs_poweruser_login', 'dirbs_api_user']:
            rv = per_test_flask_app.get(url_for('{0}.imei_get_subscribers_api'.format(api_version),
                                                imei='388260336982806'))
            assert rv.status_code == 200
            data = json.loads(rv.data.decode('utf-8'))
            assert len(data['subscribers']) != 0
            assert data['subscribers'] == [
                {
                    'imsi': '11101400135251',
                    'last_seen': '2016-11-01',
                    'msisdn': '22300825684694'
                },
                {
                    'imsi': '11101400135252',
                    'last_seen': '2016-11-02',
                    'msisdn': '22300825684692'
                }]
        else:
            with pytest.raises(DatabaseRoleCheckException):
                per_test_flask_app.get(url_for('{0}.imei_get_subscribers_api'.format(api_version),
                                               imei='388260336982806'))
Example #17
def subscribers_list_importer(db_conn, metadata_db_conn, mocked_config, tmpdir,
                              logger, mocked_statsd, request):
    """Subscribers list importer fixture. Parameters for importer come in via request.param."""
    subscribers_list_params = request.param
    with get_importer(SubscribersListImporter, db_conn, metadata_db_conn,
                      mocked_config.db_config, tmpdir, logger, mocked_statsd,
                      subscribers_list_params) as imp:
        yield imp
def golden_list_importer(db_conn, metadata_db_conn, mocked_config, tmpdir,
                         logger, mocked_statsd, request):
    """Golden list importer fixture. Parameters for importer come in via request.param."""
    golden_list_imp_params = request.param
    with get_importer(GoldenListImporter, db_conn, metadata_db_conn,
                      mocked_config.db_config, tmpdir, logger, mocked_statsd,
                      golden_list_imp_params) as imp:
        yield imp
def pairing_list_importer(db_conn, metadata_db_conn, mocked_config, tmpdir,
                          logger, mocked_statsd, request):
    """Pairing list importer fixture. Parameters for importer come in via request.param."""
    pairing_list_imp_params = request.param
    with get_importer(PairingListImporter, db_conn, metadata_db_conn,
                      mocked_config.db_config, tmpdir, logger, mocked_statsd,
                      pairing_list_imp_params) as imp:
        yield imp
def gsma_tac_db_importer(db_conn, metadata_db_conn, mocked_config, tmpdir,
                         logger, mocked_statsd, request):
    """GSMA TAC DB importer fixture. Parameters for importer come in via request.param."""
    gsma_tac_db_params = request.param
    with get_importer(GSMADataImporter, db_conn, metadata_db_conn,
                      mocked_config.db_config, tmpdir, logger, mocked_statsd,
                      gsma_tac_db_params) as imp:
        yield imp
Example #21
def barred_tac_list_importer(db_conn, metadata_db_conn, mocked_config, tmpdir,
                             logger, mocked_statsd, request):
    """Barred tac list importer fixture. Parameters for importer come in via request.param."""
    barred_tac_list_imp_params = request.param
    with get_importer(BarredTacListImporter, db_conn, metadata_db_conn,
                      mocked_config.db_config, tmpdir, logger, mocked_statsd,
                      barred_tac_list_imp_params) as imp:
        yield imp
Example #22
def device_association_list_importer(db_conn, metadata_db_conn, mocked_config,
                                     tmpdir, logger, mocked_statsd, request):
    """Device association list importer fixture. Params for importer come in via request.param."""
    association_list_params = request.param
    with get_importer(DeviceAssociationListImporter, db_conn, metadata_db_conn,
                      mocked_config.db_config, tmpdir, logger, mocked_statsd,
                      association_list_params) as imp:
        yield imp
def operator_data_importer(db_conn, metadata_db_conn, mocked_config, tmpdir,
                           logger, mocked_statsd, request):
    """Operator data importer fixture. Parameters for importer come in via request.param."""
    op_data_params = request.param
    with get_importer(OperatorDataImporter, db_conn, metadata_db_conn,
                      mocked_config.db_config, tmpdir, logger, mocked_statsd,
                      op_data_params) as imp:
        yield imp
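
These fixtures all read their importer parameters from request.param, which implies the tests supply them through pytest's indirect parametrization. A hedged usage sketch (the test name is illustrative; the parameter values are borrowed from the pairing list tests above):

import pytest


@pytest.mark.parametrize('pairing_list_importer',
                         [PairListParams(filename='sample_pairinglist.csv')],
                         indirect=True)
def test_pairing_import_smoke(pairing_list_importer, db_conn, logger):
    """The fixture builds the importer from the params above; the test only has to run it."""
    expect_success(pairing_list_importer, 5, db_conn, logger)
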
def test_repeat_import(pairing_list_importer, logger, mocked_statsd, db_conn,
                       metadata_db_conn, mocked_config, tmpdir):
    """Test Depot not known yet. Test same import doesn't affect db."""
    expect_success(pairing_list_importer, 5, db_conn, logger)

    with get_importer(
            PairingListImporter, db_conn, metadata_db_conn,
            mocked_config.db_config, tmpdir, logger, mocked_statsd,
            PairListParams(filename='sample_pairinglist.csv')) as imp:
        expect_success(imp, 5, db_conn, logger)
def test_historical_check_empty(monitoring_list_importer, logger,
                                mocked_statsd, db_conn, metadata_db_conn,
                                mocked_config, tmpdir):
    """Verify that empty file import fails after importing a non empty file."""
    expect_success(monitoring_list_importer, 20, db_conn, logger)
    with get_importer(
            MonitoringListImporter, db_conn, metadata_db_conn,
            mocked_config.db_config, tmpdir, logger, mocked_statsd,
            MonitoringListParams(
                filename='empty_monitoringlist_historical_check.csv')) as imp:
        expect_failure(imp, exc_message='Failed import size historic check')
def test_repeat_import(barred_list_importer, logger, mocked_statsd, db_conn,
                       metadata_db_conn, mocked_config, tmpdir):
    """Verify that valid barred list data can be successfully imported into the database.

    when repeating the import of the same file.
    """
    expect_success(barred_list_importer, 21, db_conn, logger)
    with get_importer(
            BarredListImporter, db_conn, metadata_db_conn,
            mocked_config.db_config, tmpdir, logger, mocked_statsd,
            BarredListParams(filename='sample_barred_list_v1.csv')) as imp:
        expect_success(imp, 21, db_conn, logger)
Example #27
def test_repeat_import(device_association_list_importer, logger, mocked_statsd,
                       db_conn, metadata_db_conn, mocked_config, tmpdir):
    """Verify that same import doesn't affect db."""
    expect_success(device_association_list_importer, 5, db_conn, logger)

    # importing same file
    with get_importer(
            DeviceAssociationListImporter, db_conn, metadata_db_conn,
            mocked_config.db_config, tmpdir, logger, mocked_statsd,
            DeviceAssociationListParams(
                filename='sample_associationlist.csv')) as imp:
        expect_success(imp, 5, db_conn, logger)
def test_repeat_import(subscribers_list_importer, logger, mocked_statsd,
                       db_conn, metadata_db_conn, mocked_config, tmpdir):
    """Verify that same import doesn't affect db."""
    expect_success(subscribers_list_importer, 5, db_conn, logger)

    # importing same file
    with get_importer(
            SubscribersListImporter, db_conn, metadata_db_conn,
            mocked_config.db_config, tmpdir, logger, mocked_statsd,
            SubscribersListParams(
                filename='sample_subscriberslist.csv')) as imp:
        expect_success(imp, 5, db_conn, logger)
def test_historical_check_empty(subscribers_list_importer, logger,
                                mocked_statsd, db_conn, metadata_db_conn,
                                mocked_config, tmpdir):
    """Verify that subscribers list data is not imported if it fails historical check."""
    expect_success(subscribers_list_importer, 5, db_conn, logger)

    # attempting to import empty subscribers list
    with get_importer(
            SubscribersListImporter, db_conn, metadata_db_conn,
            mocked_config.db_config, tmpdir, logger, mocked_statsd,
            SubscribersListParams(
                filename='sample_subscriberslist_historial_check.csv')) as imp:
        expect_failure(imp, exc_message='Failed import size historic check')
def test_override_historical_check(subscribers_list_importer, logger,
                                   mocked_statsd, db_conn, metadata_db_conn,
                                   mocked_config, tmpdir):
    """Verify that the user can override historical checks when importing subscribers list data."""
    expect_success(subscribers_list_importer, 5, db_conn, logger)

    with get_importer(
            SubscribersListImporter, db_conn, metadata_db_conn,
            mocked_config.db_config, tmpdir, logger, mocked_statsd,
            SubscribersListParams(
                filename='sample_subscriberslist_historial_check.csv',
                perform_historic_check=False)) as imp:
        expect_success(imp, 0, db_conn, logger)
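
Finally, expect_success and expect_failure are the helpers used throughout these examples. Simplified sketches of what they presumably do, assuming the importer exposes the name of its target table (the real DIRBS test helpers may verify more and obtain the table name differently):

import pytest


def expect_success(importer, expected_count, db_conn, logger):
    """Run the import and check that the target table ends up with the expected number of rows."""
    importer.import_data()
    with db_conn.cursor() as cursor:
        # 'import_relation_name' is an assumed attribute standing in for the importer's table name.
        cursor.execute('SELECT COUNT(*) FROM {0}'.format(importer.import_relation_name))
        assert cursor.fetchone()[0] == expected_count


def expect_failure(importer, exc_message=''):
    """Run the import and check that it fails with the expected message fragment."""
    with pytest.raises(Exception) as exc_info:
        importer.import_data()
    assert exc_message in str(exc_info.value)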