def test_bigquery_write(mocker, uploader):
    """The GA MP uploader should insert each row with an added timestamp column."""
    mock_bq_client = mocker.MagicMock()
    mocker.patch.object(uploader, "_get_bq_client")
    uploader._get_bq_client.return_value = mock_bq_client
    mock_table = mocker.MagicMock()
    mock_bq_client.get_table.return_value = mock_table

    now = datetime.datetime.now().timestamp()

    account_config = AccountConfig("account_id", False, "ga_account_id", "", "")
    source = Source("orig1", SourceType.BIG_QUERY, ["dt1", "buyers"])
    destination = Destination(
        "dest1", DestinationType.GA_MEASUREMENT_PROTOCOL,
        ["web_property", "view", "c", "list", "d", "buyers_custom_dim"])
    execution = Execution(account_config, source, destination)

    uploader._do_process(
        Batch(execution, [{"uuid": "uuid-1"}, {"uuid": "uuid-2"}]), now)

    # Each uploaded row must carry the processing timestamp.
    mock_bq_client.insert_rows.assert_called_once_with(
        mock_table,
        [{"uuid": "uuid-1", "timestamp": now},
         {"uuid": "uuid-2", "timestamp": now}],
        (SchemaField("uuid", "string"), SchemaField("timestamp", "timestamp")))
def test_conversion_upload_match_id(mocker, uploader):
    """The CM uploader should build a batchinsert body from matchId rows."""
    mocker.patch.object(uploader, '_get_dcm_service')
    floodlight_activity_id = 'floodlight_activity_id'
    floodlight_configuration_id = 'floodlight_configuration_id'

    destination = Destination(
        'dest1', DestinationType.CM_OFFLINE_CONVERSION,
        (floodlight_activity_id, floodlight_configuration_id))
    source = Source('orig1', SourceType.BIG_QUERY, ('dt1', 'buyers'))
    execution = Execution(_account_config, source, destination)

    # Freeze time.time() so ordinal/timestampMicros are deterministic.
    current_time = time.time()
    mocker.patch.object(time, 'time')
    time.time.return_value = current_time

    uploader._do_process(Batch(execution, [{'matchId': 'abc'}]), current_time)

    micros = math.floor(current_time * 10e5)
    expected_body = {
        'conversions': [{
            'matchId': 'abc',
            'floodlightActivityId': floodlight_activity_id,
            'floodlightConfigurationId': floodlight_configuration_id,
            'ordinal': micros,
            'timestampMicros': micros
        }],
    }
    uploader._get_dcm_service().conversions().batchinsert.assert_any_call(
        profileId='dcm_profile_id', body=expected_body)
def test_data_source_not_found(mocker, eraser, caplog):
    """The eraser should log an error when the data import name is not found."""
    analytics = mocker.MagicMock()
    mocker.patch.object(eraser, '_get_analytics_service')
    eraser._get_analytics_service.return_value = analytics
    mocker.patch.object(eraser, '_is_table_empty')
    eraser._is_table_empty.return_value = False

    # The API only returns a data source whose name does not match.
    analytics.management().customDataSources().list().execute.return_value = {
        'items': [{'id': 1, 'name': 'wrong_name'}]
    }

    execution = Execution(
        AccountConfig('', False, '', '', ''),
        Source('orig1', SourceType.BIG_QUERY, ['dt1', 'buyers']),
        Destination('dest1', DestinationType.GA_DATA_IMPORT,
                    ['web_property', 'data_import_name']))

    # Act
    try:
        next(eraser.process(Batch(execution, [])))
    except StopIteration:
        pass

    assert 'data_import_name - data import not found, please configure it in Google Analytics' in caplog.text
def test_error_on_api_call(mocker, uploader, caplog):
    """Failures reported by batchinsert should be logged with their error codes."""
    caplog.set_level(logging.INFO,
                     'megalista.CampaignManagerConversionsUploader')
    mocker.patch.object(uploader, '_get_dcm_service')
    dcm_service = mocker.MagicMock()
    uploader._get_dcm_service.return_value = dcm_service
    dcm_service.conversions().batchinsert().execute.return_value = {
        'hasFailures': True,
        'status': [{
            'errors': [{'code': '123', 'message': 'error_returned'}]
        }]
    }

    destination = Destination('dest1', DestinationType.CM_OFFLINE_CONVERSION,
                              ['a', 'b'])
    source = Source('orig1', SourceType.BIG_QUERY, ('dt1', 'buyers'))
    execution = Execution(_account_config, source, destination)

    uploader._do_process(Batch(execution, [{'gclid': '123'}]), time.time())

    assert 'Error(s) inserting conversions:' in caplog.text
    assert '[123]: error_returned' in caplog.text
def test_fail_missing_destination_metadata(uploader, mocker):
    """Processing must not reach the SSD service when metadata is incomplete."""
    mocker.patch.object(uploader, '_get_ssd_service')
    destination = Destination('dest1', DestinationType.ADS_SSD_UPLOAD, ['1'])
    source = Source('orig1', SourceType.BIG_QUERY, ('dt1', 'buyers'))
    execution = Execution(_account_config, source, destination)

    uploader.process(Batch(execution, []))

    uploader._get_ssd_service.assert_not_called()
def _read_destination(sheets_config, sheet_id):
    """Read the configured destinations from the spreadsheet.

    Args:
      sheets_config: sheets accessor providing get_range(sheet_id, range_name).
      sheet_id: id of the spreadsheet to read from.

    Returns:
      Dict mapping destination_name -> Destination built from each row of the
      'DestinationsRange' named range (col 0: name, col 1: type, rest: metadata).
    """
    # Renamed from 'range' to avoid shadowing the builtin.
    destinations_range = sheets_config.get_range(sheet_id, 'DestinationsRange')
    destinations = {}
    if 'values' in destinations_range:
        for row in destinations_range['values']:
            destination = Destination(row[0], DestinationType[row[1]], row[2:])
            destinations[destination.destination_name] = destination
    else:
        # Logger.warn is deprecated; warning() is the supported spelling.
        logging.getLogger("megalista.SpreadsheetExecutionSource").warning(
            "No destinations found!")
    return destinations
def test_elements_uploading(mocker, uploader):
    """Rows should be serialized to CSV (None/'' as empty cells) and uploaded once."""
    service = mocker.MagicMock()
    mocker.patch.object(uploader, '_get_analytics_service')
    uploader._get_analytics_service.return_value = service
    service.management().customDataSources().list().execute.return_value = {
        'items': [{'id': 1, 'name': 'data_import_name'}]
    }

    execution = Execution(
        _account_config,
        Source('orig1', SourceType.BIG_QUERY, ['dt1', 'buyers']),
        Destination('dest1', DestinationType.GA_DATA_IMPORT,
                    ['web_property', 'data_import_name']))

    # Add mock to side effect of uploadData()
    my_mock = mocker.MagicMock()
    service.management().uploads().uploadData.side_effect = my_mock

    # Act
    uploader.process(
        Batch(execution, [{
            'user_id': '12',
            'cd1': 'value1a',
            'cd2': 'value2a'
        }, {
            'user_id': '34',
            'cd1': 'value1b',
            'cd2': 'value2b'
        }, {
            'user_id': '56',
            'cd1': None,
            'cd2': ''
        }]))

    # Called once
    my_mock.assert_called_once()

    # Intercept args called
    _, kwargs = my_mock.call_args

    # Check the uploaded media body; None and '' must serialize to empty CSV
    # cells.  (Removed a leftover debug print of media_bytes.)
    media_bytes = kwargs['media_body'].getbytes(0, -1)
    assert media_bytes == b'ga:user_id,ga:cd1,ga:cd2\n' \
                          b'12,value1a,value2a\n' \
                          b'34,value1b,value2b\n' \
                          b'56,,'
def test_exception_no_event_nor_user_property(uploader, caplog):
    """GA4 MP must reject a call that sends neither events nor user properties."""
    with requests_mock.Mocker() as m:
        m.post(requests_mock.ANY, status_code=204)
        destination = Destination(
            'dest1', DestinationType.GA_4_MEASUREMENT_PROTOCOL,
            ['api_secret', 'False', 'False', '', 'some_id', ''])
        source = Source('orig1', SourceType.BIG_QUERY, [])
        execution = Execution(_account_config, source, destination)
        with pytest.raises(
                ValueError,
                match='GA4 MP should be called either for sending events'):
            next(uploader.process(Batch(execution, [])))
def test_exception_app_and_web(uploader, caplog):
    """GA4 MP must reject destinations configured with both app and web ids."""
    with requests_mock.Mocker() as m:
        m.post(requests_mock.ANY, status_code=204)
        destination = Destination(
            'dest1', DestinationType.GA_4_MEASUREMENT_PROTOCOL,
            ['api_secret', 'False', 'True', '', 'some_app_id', 'some_web_id'])
        source = Source('orig1', SourceType.BIG_QUERY, [])
        execution = Execution(_account_config, source, destination)
        with pytest.raises(
                ValueError,
                match='GA4 MP should be called either with a firebase_app_id'):
            next(uploader.process(Batch(execution, [{'name': 'event_name'}])))
def test_files_deleted(mocker, eraser):
    """The eraser should delete the existing uploads of the matching data import."""
    service = mocker.MagicMock()
    mocker.patch.object(eraser, '_get_analytics_service')
    eraser._get_analytics_service.return_value = service
    mocker.patch.object(eraser, '_is_table_empty')
    eraser._is_table_empty.return_value = False
    # Two data sources exist; only 'data_import_name' matches the destination.
    service.management().customDataSources().list().execute.return_value = {
        'items': [{
            'id': 1,
            'name': 'data_import_name'
        }, {
            'id': 2,
            'name': 'data_import_name2'
        }]
    }
    execution = Execution(
        AccountConfig('', False, '', '', ''),
        Source('orig1', SourceType.BIG_QUERY, ['dt1', 'buyers']),
        Destination('dest1', DestinationType.GA_DATA_IMPORT,
                    ['web_property', 'data_import_name']))
    # Add mock to side effect of list uploads: two uploads exist ('ab', 'cd').
    service.management().uploads().list().execute.return_value = {
        'items': [{
            'id': 'ab'
        }, {
            'id': 'cd'
        }]
    }
    # Add mock to side effect of deleteUploadData
    delete_call_mock = mocker.MagicMock()
    service.management().uploads(
    ).deleteUploadData.side_effect = delete_call_mock
    # Act
    next(eraser.process(Batch(execution, [])))
    # deleteUploadData must be called exactly once
    delete_call_mock.assert_called_once()
    # Intercept args called
    _, kwargs = delete_call_mock.call_args
    # Check the request body passed to deleteUploadData
    ids = kwargs['body']
    # NOTE(review): 'ids' is captured but never asserted — the test looks
    # truncated; presumably it should verify the body references upload ids
    # 'ab' and 'cd'. Confirm against the eraser implementation.
def _read_destination(entries):
    """Build Destination objects from Firestore documents.

    Args:
      entries: iterable of Firestore documents, each with 'id', 'type' and the
        type-specific metadata fields.

    Returns:
      Dict mapping destination_name -> Destination.

    Raises:
      Exception: when an entry's type is unknown or a required metadata field
        is missing from the document.
    """

    def create_metadata_list(entry):
        """Map the entry's upload type to its ordered metadata field values."""
        metadata_list = {
            'ADS_OFFLINE_CONVERSION': ['gads_conversion_name'],
            'ADS_SSD_UPLOAD': ['gads_conversion_name',
                               'gads_external_upload_id'],
            'ADS_CUSTOMER_MATCH_CONTACT_INFO_UPLOAD': [
                'gads_audience_name', 'gads_operation', 'gads_hash'
            ],
            'ADS_CUSTOMER_MATCH_MOBILE_DEVICE_ID_UPLOAD': [
                'gads_audience_name', 'gads_operation'
            ],
            'ADS_CUSTOMER_MATCH_USER_ID_UPLOAD': [
                'gads_audience_name', 'gads_operation'
            ],
            'GA_MEASUREMENT_PROTOCOL': [
                'google_analytics_property_id',
                'google_analytics_non_interaction'
            ],
            'CM_OFFLINE_CONVERSION': [
                'campaign_manager_floodlight_activity_id',
                'campaign_manager_floodlight_configuration_id'
            ],
            'APPSFLYER_S2S_EVENTS': ['appsflyer_app_id'],
        }
        entry_type = entry['type']
        metadata = metadata_list.get(entry_type, None)
        if not metadata:
            raise Exception(f'Upload type not implemented: {entry_type}')
        entry_metadata = []
        for m in metadata:
            if m in entry:
                entry_metadata.append(entry[m])
            else:
                raise Exception(
                    f'Missing field in Firestore document for {entry_type}: {m}'
                )
        return entry_metadata

    destinations = {}
    if entries:
        for entry in entries:
            destination = Destination(entry['id'] + '_destination',
                                      DestinationType[entry['type']],
                                      create_metadata_list(entry))
            destinations[destination.destination_name] = destination
    else:
        # Logger.warn is deprecated; warning() is the supported spelling.
        logging.getLogger("megalista.FirestoreExecutionSource").warning(
            "No destinations found!")
    return destinations
def test_list_creation_not_mcc(mocker, uploader):
    """Non-MCC accounts should link the remarketing list to the Ads account id."""
    ads_account_id = 'xxx-yyy-zzzz'
    ga_account_id = 'acc'

    analytics = mocker.MagicMock()
    mocker.patch.object(uploader, '_get_analytics_service')
    uploader._get_analytics_service.return_value = analytics
    analytics.management().remarketingAudience().insert().execute.return_value = {
        'id': 1
    }

    execution = Execution(
        AccountConfig(ads_account_id, False, ga_account_id, '', ''),
        Source('orig1', SourceType.BIG_QUERY, ['dt1', 'buyers']),
        Destination(
            'dest1', DestinationType.GA_USER_LIST_UPLOAD,
            ['web_property', 'view', 'c', 'list', 'd', 'buyers_custom_dim']))

    uploader.process(Batch(execution, []))

    analytics.management().remarketingAudience().insert.assert_any_call(
        accountId=ga_account_id,
        webPropertyId='web_property',
        body={
            'name': 'list',
            'linkedViews': ['view'],
            'linkedAdAccounts': [{
                'type': 'ADWORDS_LINKS',
                'linkedAccountId': ads_account_id
            }],
            'audienceType': 'SIMPLE',
            'audienceDefinition': {
                'includeConditions': {
                    'kind': 'analytics#includeConditions',
                    'isSmartList': False,
                    'segment':
                        'users::condition::%s==buyer' % 'buyers_custom_dim',
                    'membershipDurationDays': 365
                }
            }
        })
def test_avoid_list_creation_when_name_blank(mocker, uploader):
    """No remarketing list is created when the list name metadata is blank."""
    ads_account_id = 'xxx-yyy-zzzz'
    ga_account_id = 'acc'
    analytics = mocker.MagicMock()
    mocker.patch.object(uploader, '_get_analytics_service')
    uploader._get_analytics_service.return_value = analytics

    execution = Execution(
        AccountConfig(ads_account_id, True, ga_account_id, '', ''),
        Source('orig1', SourceType.BIG_QUERY, ['dt1', 'buyers']),
        Destination('dest1', DestinationType.GA_USER_LIST_UPLOAD,
                    ['web_property', 'view', 'c', '', 'd',
                     'buyers_custom_dim']))

    uploader.process(Batch(execution, []))

    analytics.management().remarketingAudience().insert.assert_not_called()
def test_succesful_web_user_property_call_with_user_id(uploader, caplog):
    """A web-stream user-property call should forward the row's user_id."""
    with requests_mock.Mocker() as m:
        m.post(requests_mock.ANY, status_code=204)
        destination = Destination(
            'dest1', DestinationType.GA_4_MEASUREMENT_PROTOCOL,
            ['api_secret', 'False', 'True', '', '', 'some_id'])
        source = Source('orig1', SourceType.BIG_QUERY, [])
        execution = Execution(_account_config, source, destination)

        row = {'user_ltv': '42', 'user_id': 'Id42', 'client_id': 'someId'}
        next(uploader.process(Batch(execution, [row])))

        assert m.call_count == 1
        assert m.last_request.json()['user_id'] == 'Id42'
def test_elements_uploading_custom_field(mocker, uploader):
    """Custom dimension mapping should drive the CSV header and row values."""
    service = mocker.MagicMock()
    mocker.patch.object(uploader, '_get_analytics_service')
    uploader._get_analytics_service.return_value = service
    service.management().customDataSources().list().execute.return_value = {
        'items': [{'id': 1, 'name': 'data_import_name'}]
    }

    execution = Execution(
        AccountConfig('', False, '', '', ''),
        Source('orig1', SourceType.BIG_QUERY, ['dt1', 'buyers']),
        Destination('dest1', DestinationType.GA_USER_LIST_UPLOAD, [
            'web_property', 'b', 'data_import_name', 'd', 'user_id_custom_dim',
            'buyer_custom_dim', 'my_field'
        ]))

    # Capture the uploadData() call.
    upload_mock = mocker.MagicMock()
    service.management().uploads().uploadData.side_effect = upload_mock

    # Act
    uploader.process(
        Batch(execution, [{'user_id': '12', 'my_field': '11'},
                          {'user_id': '34', 'my_field': '22'}]))

    upload_mock.assert_called_once()
    _, kwargs = upload_mock.call_args

    # The media body must map user ids and the custom field onto the
    # configured custom dimensions.
    media_bytes = kwargs['media_body'].getbytes(0, -1)
    assert media_bytes == b'user_id_custom_dim,buyer_custom_dim\n12,11\n34,22'
def test_conversion_upload(mocker, uploader):
    """Offline click conversions should be mutated with converted timestamps."""
    mocker.patch.object(uploader, '_get_oc_service')
    conversion_name = 'user_list'
    destination = Destination('dest1', DestinationType.ADS_OFFLINE_CONVERSION,
                              ['user_list'])
    source = Source('orig1', SourceType.BIG_QUERY, ['dt1', 'buyers'])
    execution = Execution(_account_config, source, destination)

    time1 = '2020-04-09T14:13:55.0005'
    time1_result = '20200409 141355 America/Sao_Paulo'
    time2 = '2020-04-09T13:13:55.0005'
    time2_result = '20200409 131355 America/Sao_Paulo'

    uploader.process(
        Batch(execution, [{'time': time1, 'amount': '123', 'gclid': '456'},
                          {'time': time2, 'amount': '234', 'gclid': '567'}]))

    uploader._get_oc_service.return_value.mutate.assert_any_call([{
        'operator': 'ADD',
        'operand': {
            'conversionName': conversion_name,
            'conversionTime': time1_result,
            'conversionValue': '123',
            'googleClickId': '456'
        }
    }, {
        'operator': 'ADD',
        'operand': {
            'conversionName': conversion_name,
            'conversionTime': time2_result,
            'conversionValue': '234',
            'googleClickId': '567'
        }
    }])
def test_succesful_app_event_call(uploader, caplog):
    """An app-stream event call should post once and keep event param values."""
    with requests_mock.Mocker() as m:
        m.post(requests_mock.ANY, status_code=204)
        destination = Destination(
            'dest1', DestinationType.GA_4_MEASUREMENT_PROTOCOL,
            ['api_secret', 'True', 'False', '', 'some_id', ''])
        source = Source('orig1', SourceType.BIG_QUERY, [])
        execution = Execution(_account_config, source, destination)

        row = {
            'app_instance_id': '123',
            'name': 'event_name',
            'value': '42',
            'important_event': 'False'
        }
        next(uploader.process(Batch(execution, [row])))

        assert m.call_count == 1
        assert m.last_request.json()['events'][0]['params']['value'] == '42'
def test_list_already_exists(mocker, uploader):
    """insert must not be called when a list with the same name already exists."""
    analytics = mocker.MagicMock()
    analytics.management().remarketingAudience().list().execute = mocker.Mock(
        return_value={'items': [{'id': 1, 'name': 'list'}]})
    mocker.patch.object(uploader, '_get_analytics_service')
    uploader._get_analytics_service.return_value = analytics

    execution = Execution(
        AccountConfig('', False, '', '', ''),
        Source('orig1', SourceType.BIG_QUERY, ['dt1', 'buyers']),
        Destination('dest1', DestinationType.GA_USER_LIST_UPLOAD,
                    ['a', 'b', 'c', 'list', 'd', 'e']))

    uploader.process(Batch(execution, []))

    uploader._get_analytics_service().management().remarketingAudience(
    ).insert.assert_not_called()
def test_exception_app_event_without_app_instance_id(uploader, caplog):
    """App-stream events missing app_instance_id must raise a ValueError."""
    with requests_mock.Mocker() as m:
        m.post(requests_mock.ANY, status_code=204)
        destination = Destination(
            'dest1', DestinationType.GA_4_MEASUREMENT_PROTOCOL,
            ['api_secret', 'True', 'False', '', 'some_id', ''])
        source = Source('orig1', SourceType.BIG_QUERY, [])
        execution = Execution(_account_config, source, destination)

        row = {
            'client_id': '123',
            'name': 'event_name',
            'value': '42',
            'important_event': 'False'
        }
        with pytest.raises(
                ValueError,
                match=
                'GA4 MP needs an app_instance_id parameter when used for an App Stream.'
        ):
            next(uploader.process(Batch(execution, [row])))
def test_bigquery_write_failure(mocker, uploader, caplog):
    """Errors returned by insert_rows must be surfaced in the logs."""
    mock_bq_client = mocker.MagicMock()
    mocker.patch.object(uploader, "_get_bq_client")
    uploader._get_bq_client.return_value = mock_bq_client

    error_message = "This is an error message"
    mock_bq_client.insert_rows.return_value = [{"errors": error_message}]

    account_config = AccountConfig("account_id", False, "ga_account_id", "", "")
    source = Source("orig1", SourceType.BIG_QUERY, ["dt1", "buyers"])
    destination = Destination(
        "dest1", DestinationType.GA_MEASUREMENT_PROTOCOL,
        ["web_property", "view", "c", "list", "d", "buyers_custom_dim"])
    execution = Execution(account_config, source, destination)

    uploader.process(Batch(execution, [{"uuid": "uuid-1"}]))

    assert error_message in caplog.text
def test_conversion_upload(mocker, uploader):
    """SSD transactions should be mutated as a first-party store sales upload."""
    mocker.patch.object(uploader, '_get_ssd_service')
    conversion_name = 'ssd_conversion'
    external_upload_id = '123'
    destination = Destination('dest1', DestinationType.ADS_SSD_UPLOAD,
                              [conversion_name, external_upload_id])
    source = Source('orig1', SourceType.BIG_QUERY, ('dt1', 'buyers'))
    execution = Execution(_account_config, source, destination)

    time1 = '2020-04-09T14:13:55.0005'
    time1_result = '20200409 141355 America/Sao_Paulo'
    time2 = '2020-04-09T13:13:55.0005'
    time2_result = '20200409 131355 America/Sao_Paulo'

    uploader.process(
        Batch(execution, [{
            'hashedEmail': '*****@*****.**',
            'time': time1,
            'amount': '123'
        }, {
            'hashedEmail': '*****@*****.**',
            'time': time2,
            'amount': '234'
        }]))

    def transaction(when, amount):
        # Expected StoreSalesTransaction shape for one uploaded row.
        return {
            'StoreSalesTransaction': {
                'userIdentifiers': [{
                    'userIdentifierType': 'HASHED_EMAIL',
                    'value': '*****@*****.**'
                }],
                'transactionTime': when,
                'transactionAmount': {
                    'currencyCode': 'BRL',
                    'money': {'microAmount': amount}
                },
                'conversionName': conversion_name
            }
        }

    uploader._get_ssd_service.return_value.mutate.assert_any_call([{
        'operand': {
            'externalUploadId': external_upload_id,
            'offlineDataList': [transaction(time1_result, '123'),
                                transaction(time2_result, '234')],
            'uploadType': 'STORE_SALES_UPLOAD_FIRST_PARTY',
            'uploadMetadata': {
                'StoreSalesUploadCommonMetadata': {
                    'xsi_type': 'FirstPartyUploadMetadata',
                    'loyaltyRate': 1.0,
                    'transactionUploadRate': 1.0,
                }
            }
        },
        'operator': 'ADD'
    }])