def install_mock_data(ctx):
    """
    Overload the 'real' addon model and mapping URL responses so that
    we always return the fixture data defined at the top of this test
    module.
    """
    addon_space = [
        {"id": "addon1.id", "name": "addon1.name", "isWebextension": True},
        {"id": "addon2.id", "name": "addon2.name", "isWebextension": True},
        {"id": "addon3.id", "name": "addon3.name", "isWebextension": True},
        {"id": "addon4.id", "name": "addon4.name", "isWebextension": True},
        {"id": "addon5.id", "name": "addon5.name", "isWebextension": True},
    ]

    fake_addon_matrix = []
    for i, addon in enumerate(addon_space):
        row = {
            "id": positive_hash(addon['id']),
            "features": [0, 0.2, 0.0, 0.1, 0.15],
        }
        row['features'][i] = 1.0
        fake_addon_matrix.append(row)

    fake_mapping = {}
    for addon in addon_space:
        java_hash = positive_hash(addon['id'])
        fake_mapping[str(java_hash)] = addon

    conn = boto3.resource('s3', region_name='us-west-2')
    conn.create_bucket(Bucket=ITEM_MATRIX_CONFIG[0])
    conn.Object(ITEM_MATRIX_CONFIG[0], ITEM_MATRIX_CONFIG[1]).put(
        Body=json.dumps(fake_addon_matrix))

    conn = boto3.resource('s3', region_name='us-west-2')
    conn.create_bucket(Bucket=ADDON_MAPPING_CONFIG[0])
    conn.Object(ADDON_MAPPING_CONFIG[0], ADDON_MAPPING_CONFIG[1]).put(
        Body=json.dumps(fake_mapping))

    ctx['collaborative_addon_mapping'] = LazyJSONLoader(
        ctx, ADDON_MAPPING_CONFIG[0], ADDON_MAPPING_CONFIG[1])
    ctx['collaborative_item_matrix'] = LazyJSONLoader(
        ctx, ITEM_MATRIX_CONFIG[0], ITEM_MATRIX_CONFIG[1])

    return ctx
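# --- Hypothetical usage sketch (not part of the original module) ------------
# Shows how a test could consume the fixture above. It assumes moto's mock_s3
# decorator (mock_aws on moto >= 5), plus default_context and
# CollaborativeRecommender from the TAAR package; the client-data keys used
# here are assumptions, not a guaranteed API.
from moto import mock_s3

@mock_s3
def test_can_recommend_with_fixture_model():
    ctx = install_mock_data(default_context())
    recommender = CollaborativeRecommender(ctx)
    client_data = {"installed_addons": ["addon1.id"], "client_id": "test-client"}
    assert recommender.can_recommend(client_data)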
def install_mocks(ctx):
    ctx = ctx.child()

    class MockProfileFetcher:
        def get(self, client_id):
            return {'client_id': client_id}

    ctx['profile_fetcher'] = MockProfileFetcher()
    ctx['recommender_factory'] = MockRecommenderFactory()

    DATA = {'ensemble_weights': {'collaborative': 1000,
                                 'similarity': 100,
                                 'locale': 10}}
    S3_BUCKET = 'telemetry-parquet'
    ENSEMBLE_WEIGHTS = 'taar/ensemble/ensemble_weight.json'

    conn = boto3.resource('s3', region_name='us-west-2')
    conn.create_bucket(Bucket=S3_BUCKET)
    conn.Object(S3_BUCKET, ENSEMBLE_WEIGHTS).put(Body=json.dumps(DATA))

    ctx['ensemble_weights'] = LazyJSONLoader(ctx, S3_BUCKET, ENSEMBLE_WEIGHTS)

    return ctx
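# --- Hypothetical usage sketch -----------------------------------------------
# Exercises the mocked profile fetcher, recommender factory, and ensemble
# weights installed above. RecommendationManager, default_context, and the
# recommend() signature are assumptions drawn from the TAAR test suite, not
# guaranteed by this module.
from moto import mock_s3

@mock_s3
def test_recommendation_manager_with_mocks():
    ctx = install_mocks(default_context())
    manager = RecommendationManager(ctx.child())
    results = manager.recommend("some-client-id", limit=10)
    assert isinstance(results, list)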
def install_no_data(ctx):
    ctx = ctx.child()

    conn = boto3.resource("s3", region_name="us-west-2")
    conn.create_bucket(Bucket=TAAR_SIMILARITY_BUCKET)
    conn.Object(TAAR_SIMILARITY_BUCKET, TAAR_SIMILARITY_DONOR_KEY).put(Body="")
    conn.Object(TAAR_SIMILARITY_BUCKET, TAAR_SIMILARITY_LRCURVES_KEY).put(Body="")

    ctx["similarity_donors_pool"] = LazyJSONLoader(
        ctx, TAAR_SIMILARITY_BUCKET, TAAR_SIMILARITY_DONOR_KEY)
    ctx["similarity_lr_curves"] = LazyJSONLoader(
        ctx, TAAR_SIMILARITY_BUCKET, TAAR_SIMILARITY_LRCURVES_KEY)

    return ctx
def install_categorical_data(ctx):
    ctx = ctx.child()

    conn = boto3.resource('s3', region_name='us-west-2')
    conn.create_bucket(Bucket=S3_BUCKET)
    conn.Object(S3_BUCKET, DONOR_LIST_KEY).put(
        Body=json.dumps(CATEGORICAL_FEATURE_FIXTURE_DATA))
    conn.Object(S3_BUCKET, LR_CURVES_SIMILARITY_TO_PROBABILITY).put(
        Body=json.dumps(generate_fake_lr_curves(1000)))

    ctx['similarity_donors_pool'] = LazyJSONLoader(
        ctx, S3_BUCKET, DONOR_LIST_KEY)
    ctx['similarity_lr_curves'] = LazyJSONLoader(
        ctx, S3_BUCKET, LR_CURVES_SIMILARITY_TO_PROBABILITY)

    return ctx
def install_no_data(ctx):
    ctx = ctx.child()

    conn = boto3.resource('s3', region_name='us-west-2')
    conn.create_bucket(Bucket=S3_BUCKET)
    conn.Object(S3_BUCKET, DONOR_LIST_KEY).put(Body="")
    conn.Object(S3_BUCKET, LR_CURVES_SIMILARITY_TO_PROBABILITY).put(Body="")

    ctx['similarity_donors_pool'] = LazyJSONLoader(
        ctx, S3_BUCKET, DONOR_LIST_KEY)
    ctx['similarity_lr_curves'] = LazyJSONLoader(
        ctx, S3_BUCKET, LR_CURVES_SIMILARITY_TO_PROBABILITY)

    return ctx
def install_none_mock_data(ctx):
    """
    Overload the 'real' addon model and mapping URL responses so that
    we always get 404 errors.
    """
    conn = boto3.resource('s3', region_name='us-west-2')
    conn.create_bucket(Bucket=ITEM_MATRIX_CONFIG[0])
    conn.Object(ITEM_MATRIX_CONFIG[0], ITEM_MATRIX_CONFIG[1]).put(Body="")

    ctx['collaborative_item_matrix'] = LazyJSONLoader(
        ctx, ITEM_MATRIX_CONFIG[0], ITEM_MATRIX_CONFIG[1])

    # Don't reuse connections with moto; badness happens.
    conn = boto3.resource('s3', region_name='us-west-2')
    conn.create_bucket(Bucket=ADDON_MAPPING_CONFIG[0])
    conn.Object(ADDON_MAPPING_CONFIG[0], ADDON_MAPPING_CONFIG[1]).put(Body="")

    ctx['collaborative_addon_mapping'] = LazyJSONLoader(
        ctx, ADDON_MAPPING_CONFIG[0], ADDON_MAPPING_CONFIG[1])

    return ctx
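# --- Hypothetical usage sketch -----------------------------------------------
# With empty S3 bodies the LazyJSONLoader yields no model data, so a
# recommender built on this context should refuse to recommend.
# CollaborativeRecommender and default_context are assumed imports from the
# TAAR package, as in the sketch above.
from moto import mock_s3

@mock_s3
def test_cannot_recommend_without_model_data():
    ctx = install_none_mock_data(default_context())
    recommender = CollaborativeRecommender(ctx)
    assert not recommender.can_recommend({"installed_addons": ["addon1.id"]})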
def install_mock_data(ctx):
    ctx = ctx.child()

    conn = boto3.resource('s3', region_name='us-west-2')
    conn.create_bucket(Bucket=ADDON_LIST_BUCKET)
    conn.Object(ADDON_LIST_BUCKET, ADDON_LIST_KEY).put(
        Body=json.dumps(FAKE_LOCALE_DATA))

    ctx['locale_mock_data'] = LazyJSONLoader(
        ctx, ADDON_LIST_BUCKET, ADDON_LIST_KEY)

    return ctx
def install_no_curated_data(ctx):
    ctx = ctx.child()

    conn = boto3.resource('s3', region_name='us-west-2')
    conn.create_bucket(Bucket=S3_BUCKET)
    conn.Object(S3_BUCKET, CURATED_WHITELIST).put(Body="")

    ctx['curated_whitelist_data'] = LazyJSONLoader(
        ctx, S3_BUCKET, CURATED_WHITELIST)

    return ctx
def install_categorical_data(ctx):
    ctx = ctx.child()

    conn = boto3.resource("s3", region_name="us-west-2")
    try:
        conn.create_bucket(Bucket=TAAR_SIMILARITY_BUCKET)
    except Exception:
        # The bucket may already exist when fixtures are layered; ignore.
        pass

    conn.Object(TAAR_SIMILARITY_BUCKET, TAAR_SIMILARITY_DONOR_KEY).put(
        Body=json.dumps(CATEGORICAL_FEATURE_FIXTURE_DATA))
    conn.Object(TAAR_SIMILARITY_BUCKET, TAAR_SIMILARITY_LRCURVES_KEY).put(
        Body=json.dumps(generate_fake_lr_curves(1000)))

    ctx["similarity_donors_pool"] = LazyJSONLoader(
        ctx, TAAR_SIMILARITY_BUCKET, TAAR_SIMILARITY_DONOR_KEY)
    ctx["similarity_lr_curves"] = LazyJSONLoader(
        ctx, TAAR_SIMILARITY_BUCKET, TAAR_SIMILARITY_LRCURVES_KEY)

    return ctx
def install_mock_ensemble_data(ctx):
    DATA = {'ensemble_weights': EXPECTED}

    S3_BUCKET = 'telemetry-parquet'
    ENSEMBLE_WEIGHTS = 'taar/ensemble/ensemble_weight.json'

    conn = boto3.resource('s3', region_name='us-west-2')
    conn.create_bucket(Bucket=S3_BUCKET)
    conn.Object(S3_BUCKET, ENSEMBLE_WEIGHTS).put(Body=json.dumps(DATA))

    ctx['ensemble_weights'] = LazyJSONLoader(ctx, S3_BUCKET, ENSEMBLE_WEIGHTS)

    return ctx
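# --- Hypothetical round-trip check for the ensemble-weight fixture -----------
# Assumes LazyJSONLoader.get() returns a (data, refresh_flag) tuple as in the
# TAAR loader; if the loader's return shape differs, adapt the unpacking.
from moto import mock_s3

@mock_s3
def test_ensemble_weights_round_trip():
    ctx = install_mock_ensemble_data(default_context())
    weights, _ = ctx['ensemble_weights'].get()
    assert weights['ensemble_weights'] == EXPECTED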
def install_mock_curated_data(ctx):
    mock_data = []
    for i in range(20):
        mock_data.append(str(i) * 16)

    ctx = ctx.child()

    conn = boto3.resource('s3', region_name='us-west-2')
    conn.create_bucket(Bucket=S3_BUCKET)
    conn.Object(S3_BUCKET, CURATED_WHITELIST).put(Body=json.dumps(mock_data))

    ctx['curated_whitelist_data'] = LazyJSONLoader(
        ctx, S3_BUCKET, CURATED_WHITELIST)

    return ctx
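# --- Hypothetical sketch verifying the curated whitelist fixture -------------
# The 20 synthetic GUID-like strings written above should come back from the
# mocked S3 bucket unchanged. The (data, flag) return shape of
# LazyJSONLoader.get() is an assumption, as in the previous sketch.
from moto import mock_s3

@mock_s3
def test_curated_whitelist_fixture_round_trip():
    ctx = install_mock_curated_data(default_context())
    whitelist, _ = ctx['curated_whitelist_data'].get()
    assert len(whitelist) == 20
    assert whitelist[0] == "0" * 16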
def install_categorical_data(ctx):
    ctx = ctx.child()

    conn = boto3.resource("s3", region_name="us-west-2")
    try:
        conn.create_bucket(Bucket=TAAR_SIMILARITY_BUCKET)
    except Exception:
        pass

    conn.Object(TAAR_SIMILARITY_BUCKET, TAAR_SIMILARITY_DONOR_KEY).put(
        Body=json.dumps({"test": "donor_key"}))

    ctx["similarity_donors_pool"] = LazyJSONLoader(
        ctx, TAAR_SIMILARITY_BUCKET, TAAR_SIMILARITY_DONOR_KEY)

    return ctx