def test_bigquery_write(mocker, uploader):
    """The MP uploader stamps each row with the timestamp and inserts into BQ."""
    mock_bq_client = mocker.MagicMock()
    mocker.patch.object(uploader, "_get_bq_client")
    uploader._get_bq_client.return_value = mock_bq_client

    mock_table = mocker.MagicMock()
    mock_bq_client.get_table.return_value = mock_table
    now = datetime.datetime.now().timestamp()

    account_config = AccountConfig("account_id", False, "ga_account_id", "", "")
    destination = Destination(
        "dest1", DestinationType.GA_MEASUREMENT_PROTOCOL,
        ["web_property", "view", "c", "list", "d", "buyers_custom_dim"])
    source = Source("orig1", SourceType.BIG_QUERY, ["dt1", "buyers"])
    execution = Execution(account_config, source, destination)

    # Act
    uploader._do_process(
        Batch(execution, [{"uuid": "uuid-1"}, {"uuid": "uuid-2"}]), now)

    # Every row must gain a "timestamp" column matching the passed-in instant.
    mock_bq_client.insert_rows.assert_called_once_with(
        mock_table,
        [{"uuid": "uuid-1", "timestamp": now},
         {"uuid": "uuid-2", "timestamp": now}],
        (SchemaField("uuid", "string"), SchemaField("timestamp", "timestamp")))
def test_data_source_not_found(mocker, eraser, caplog):
    """A missing GA custom data source is reported in the logs, not erased."""
    analytics = mocker.MagicMock()
    mocker.patch.object(eraser, '_get_analytics_service')
    eraser._get_analytics_service.return_value = analytics
    mocker.patch.object(eraser, '_is_table_empty')
    eraser._is_table_empty.return_value = False

    # Only a data source with a non-matching name exists on the property.
    analytics.management().customDataSources().list().execute.return_value = {
        'items': [{'id': 1, 'name': 'wrong_name'}]
    }

    execution = Execution(
        AccountConfig('', False, '', '', ''),
        Source('orig1', SourceType.BIG_QUERY, ['dt1', 'buyers']),
        Destination('dest1', DestinationType.GA_DATA_IMPORT,
                    ['web_property', 'data_import_name']))

    # Act: drain the (possibly empty) generator.
    try:
        next(eraser.process(Batch(execution, [])))
    except StopIteration:
        pass

    assert 'data_import_name - data import not found, please configure it in Google Analytics' in caplog.text
def read(self, range_tracker):
    """Yield one Execution per active entry of the Firestore config collection.

    Reads account-level settings from the 'account_config' document, then
    pairs each active entry's source with its destination.

    Args:
      range_tracker: Beam range tracker (unused; the source is read whole).

    Raises:
      Exception: if the 'account_config' document is missing or empty.
    """
    # Hoist the logger: the original re-resolved it on every log call.
    logger = logging.getLogger("megalista.FirestoreExecutionSource")

    def document_to_dict(doc):
        # Flatten a Firestore document into a plain dict, keeping its id.
        if not doc.exists:
            return None
        doc_dict = doc.to_dict()
        doc_dict['id'] = doc.id
        return doc_dict

    # Fetch the collection name once instead of calling .get() three times.
    firestore_collection = self._setup_firestore_collection.get()
    logger.info(f"Loading Firestore collection {firestore_collection}...")
    db = firestore.Client()

    # Only entries explicitly marked active are considered for execution.
    entries = db.collection(firestore_collection).where(
        'active', '==', 'yes').stream()
    entries = [document_to_dict(doc) for doc in entries]

    account_data = document_to_dict(
        db.collection(firestore_collection).document('account_config').get())
    if not account_data:
        raise Exception('Firestore collection is absent')

    google_ads_id = account_data.get('google_ads_id', 'empty')
    mcc_trix = account_data.get('mcc_trix', 'FALSE')
    # 'mcc_trix' holds a spreadsheet-style boolean string ("TRUE"/"FALSE").
    mcc = False if mcc_trix is None else bool(
        distutils.util.strtobool(mcc_trix))
    app_id = account_data.get('app_id', 'empty')
    google_analytics_account_id = account_data.get(
        'google_analytics_account_id', 'empty')
    campaign_manager_account_id = account_data.get(
        'campaign_manager_account_id', 'empty')

    account_config = AccountConfig(google_ads_id, mcc,
                                   google_analytics_account_id,
                                   campaign_manager_account_id, app_id)
    logger.info(f"Loaded: {account_config}")

    sources = self._read_sources(entries)
    destinations = self._read_destination(entries)
    if entries:
        for entry in entries:
            if entry['active'].upper() == 'YES':
                logger.info(
                    f"Executing step Source:{sources[entry['id'] + '_source'].source_name} -> Destination:{destinations[entry['id'] + '_destination'].destination_name}"
                )
                yield Execution(account_config,
                                sources[entry['id'] + '_source'],
                                destinations[entry['id'] + '_destination'])
    else:
        # Logger.warn is a deprecated alias; warning() is the supported name.
        logger.warning("No schedules found!")
def test_files_deleted(mocker, eraser):
    """Previous uploads of the configured GA data import are deleted once."""
    analytics = mocker.MagicMock()
    mocker.patch.object(eraser, '_get_analytics_service')
    eraser._get_analytics_service.return_value = analytics
    mocker.patch.object(eraser, '_is_table_empty')
    eraser._is_table_empty.return_value = False

    analytics.management().customDataSources().list().execute.return_value = {
        'items': [{'id': 1, 'name': 'data_import_name'},
                  {'id': 2, 'name': 'data_import_name2'}]
    }

    execution = Execution(
        AccountConfig('', False, '', '', ''),
        Source('orig1', SourceType.BIG_QUERY, ['dt1', 'buyers']),
        Destination('dest1', DestinationType.GA_DATA_IMPORT,
                    ['web_property', 'data_import_name']))

    # Existing uploads that should be wiped by the eraser.
    analytics.management().uploads().list().execute.return_value = {
        'items': [{'id': 'ab'}, {'id': 'cd'}]
    }

    # Spy on deleteUploadData to capture how it is invoked.
    delete_call_mock = mocker.MagicMock()
    analytics.management().uploads(
    ).deleteUploadData.side_effect = delete_call_mock

    # Act
    next(eraser.process(Batch(execution, [])))

    delete_call_mock.assert_called_once()
    _, kwargs = delete_call_mock.call_args
    ids = kwargs['body']
    # TODO(review): 'ids' is captured but never asserted — the original test
    # stops short of verifying which upload ids were sent in the request body.
def test_list_creation_not_mcc(mocker, uploader):
    """A SIMPLE remarketing audience is created when not using an MCC account."""
    ads_account_id = 'xxx-yyy-zzzz'
    ga_account_id = 'acc'
    analytics = mocker.MagicMock()
    mocker.patch.object(uploader, '_get_analytics_service')
    uploader._get_analytics_service.return_value = analytics
    analytics.management().remarketingAudience().insert().execute.return_value = {
        'id': 1
    }

    execution = Execution(
        AccountConfig(ads_account_id, False, ga_account_id, '', ''),
        Source('orig1', SourceType.BIG_QUERY, ['dt1', 'buyers']),
        Destination(
            'dest1', DestinationType.GA_USER_LIST_UPLOAD,
            ['web_property', 'view', 'c', 'list', 'd', 'buyers_custom_dim']))

    # Act
    uploader.process(Batch(execution, []))

    # The audience must link the view and the Ads account, and segment on the
    # configured custom dimension.
    expected_body = {
        'name': 'list',
        'linkedViews': ['view'],
        'linkedAdAccounts': [{
            'type': 'ADWORDS_LINKS',
            'linkedAccountId': ads_account_id
        }],
        'audienceType': 'SIMPLE',
        'audienceDefinition': {
            'includeConditions': {
                'kind': 'analytics#includeConditions',
                'isSmartList': False,
                'segment': 'users::condition::%s==buyer' % 'buyers_custom_dim',
                'membershipDurationDays': 365
            }
        }
    }
    analytics.management().remarketingAudience().insert.assert_any_call(
        accountId=ga_account_id,
        webPropertyId='web_property',
        body=expected_body)
def test_avoid_list_creation_when_name_blank(mocker, uploader):
    """No remarketing audience is created when the list name is blank."""
    ads_account_id = 'xxx-yyy-zzzz'
    ga_account_id = 'acc'
    analytics = mocker.MagicMock()
    mocker.patch.object(uploader, '_get_analytics_service')
    uploader._get_analytics_service.return_value = analytics

    # The fourth destination metadata slot (the list name) is empty.
    execution = Execution(
        AccountConfig(ads_account_id, True, ga_account_id, '', ''),
        Source('orig1', SourceType.BIG_QUERY, ['dt1', 'buyers']),
        Destination('dest1', DestinationType.GA_USER_LIST_UPLOAD,
                    ['web_property', 'view', 'c', '', 'd', 'buyers_custom_dim']))

    # Act
    uploader.process(Batch(execution, []))

    analytics.management().remarketingAudience().insert.assert_not_called()
def test_elements_uploading_custom_field(mocker, uploader):
    """The uploaded CSV carries the custom-dimension header and row values."""
    analytics = mocker.MagicMock()
    mocker.patch.object(uploader, '_get_analytics_service')
    uploader._get_analytics_service.return_value = analytics
    analytics.management().customDataSources().list().execute.return_value = {
        'items': [{'id': 1, 'name': 'data_import_name'}]
    }

    execution = Execution(
        AccountConfig('', False, '', '', ''),
        Source('orig1', SourceType.BIG_QUERY, ['dt1', 'buyers']),
        Destination('dest1', DestinationType.GA_USER_LIST_UPLOAD, [
            'web_property', 'b', 'data_import_name', 'd', 'user_id_custom_dim',
            'buyer_custom_dim', 'my_field'
        ]))

    # Spy on uploadData() to capture the generated media body.
    upload_mock = mocker.MagicMock()
    analytics.management().uploads().uploadData.side_effect = upload_mock

    # Act
    uploader.process(
        Batch(execution, [{'user_id': '12', 'my_field': '11'},
                          {'user_id': '34', 'my_field': '22'}]))

    upload_mock.assert_called_once()
    _, kwargs = upload_mock.call_args

    # The media body must serialize both rows under the custom-dim headers.
    media_bytes = kwargs['media_body'].getbytes(0, -1)
    assert media_bytes == b'user_id_custom_dim,buyer_custom_dim\n12,11\n34,22'
def test_list_already_exists(mocker, uploader):
    """An audience with the same name already existing suppresses creation."""
    analytics = mocker.MagicMock()
    analytics.management().remarketingAudience().list().execute = mocker.Mock(
        return_value={'items': [{'id': 1, 'name': 'list'}]})
    mocker.patch.object(uploader, '_get_analytics_service')
    uploader._get_analytics_service.return_value = analytics

    execution = Execution(
        AccountConfig('', False, '', '', ''),
        Source('orig1', SourceType.BIG_QUERY, ['dt1', 'buyers']),
        Destination('dest1', DestinationType.GA_USER_LIST_UPLOAD,
                    ['a', 'b', 'c', 'list', 'd', 'e']))

    # Act
    uploader.process(Batch(execution, []))

    uploader._get_analytics_service().management().remarketingAudience(
    ).insert.assert_not_called()
def test_bigquery_write_failure(mocker, uploader, caplog):
    """Errors returned by BigQuery's insert_rows show up in the log output."""
    bq_client = mocker.MagicMock()
    mocker.patch.object(uploader, "_get_bq_client")
    uploader._get_bq_client.return_value = bq_client

    error_message = "This is an error message"
    # insert_rows reports row-level failures via its return value.
    bq_client.insert_rows.return_value = [{"errors": error_message}]

    execution = Execution(
        AccountConfig("account_id", False, "ga_account_id", "", ""),
        Source("orig1", SourceType.BIG_QUERY, ["dt1", "buyers"]),
        Destination(
            "dest1", DestinationType.GA_MEASUREMENT_PROTOCOL,
            ["web_property", "view", "c", "list", "d", "buyers_custom_dim"]))

    # Act
    uploader.process(Batch(execution, [{"uuid": "uuid-1"}]))

    assert error_message in caplog.text
def read(self, range_tracker):
    """Yield one Execution per scheduled row of the configuration spreadsheet.

    Loads account-level settings, declared sources/destinations and the
    'SchedulesRange' rows; each row whose first cell is 'YES' produces an
    Execution pairing its source (column 1) with its destination (column 2).

    Args:
      range_tracker: Beam range tracker (unused; the source is read whole).
    """
    # Hoist the logger: the original re-resolved it on every log call.
    logger = logging.getLogger("megalista.SpreadsheetExecutionSource")
    sheet_id = self._setup_sheet_id.get()
    logger.info(f"Loading configuration sheet {sheet_id}...")

    google_ads_id = self._sheets_config.get_value(sheet_id,
                                                  "GoogleAdsAccountId")
    mcc_trix = self._sheets_config.get_value(sheet_id, "GoogleAdsMCC")
    # 'GoogleAdsMCC' holds a spreadsheet-style boolean string ("TRUE"/"FALSE").
    mcc = False if mcc_trix is None else bool(
        distutils.util.strtobool(mcc_trix))
    app_id = self._sheets_config.get_value(sheet_id, "AppId")
    google_analytics_account_id = self._sheets_config.get_value(
        sheet_id, "GoogleAnalyticsAccountId")
    campaign_manager_account_id = self._sheets_config.get_value(
        sheet_id, "CampaignManagerAccountId")

    account_config = AccountConfig(google_ads_id, mcc,
                                   google_analytics_account_id,
                                   campaign_manager_account_id, app_id)
    logger.info(f"Loaded: {account_config}")

    sources = self._read_sources(self._sheets_config, sheet_id)
    destinations = self._read_destination(self._sheets_config, sheet_id)

    schedules_range = self._sheets_config.get_range(sheet_id, 'SchedulesRange')
    if 'values' in schedules_range:
        for schedule in schedules_range['values']:
            if schedule[0] == 'YES':
                logger.info(
                    f"Executing step Source:{sources[schedule[1]].source_name} -> Destination:{destinations[schedule[2]].destination_name}"
                )
                yield Execution(account_config, sources[schedule[1]],
                                destinations[schedule[2]])
    else:
        # Logger.warn is a deprecated alias; warning() is the supported name.
        logger.warning("No schedules found!")
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from apache_beam.options.value_provider import StaticValueProvider

from uploaders.google_ads.conversions.google_ads_offline_conversions_uploader import GoogleAdsOfflineUploaderDoFn
from models.execution import AccountConfig
from models.execution import Destination
from models.execution import DestinationType
from models.execution import Execution
from models.execution import Source
from models.execution import SourceType
from models.execution import Batch
from models.oauth_credentials import OAuthCredentials

# Shared account configuration used by the tests in this module.
_account_config = AccountConfig('account_id', False, 'ga_account_id', '', '')


@pytest.fixture
def uploader(mocker):
    """Build a GoogleAdsOfflineUploaderDoFn with mocked Google Ads clients."""
    mocker.patch('googleads.oauth2.GoogleRefreshTokenClient')
    mocker.patch('googleads.adwords.AdWordsClient')
    oauth = OAuthCredentials(
        StaticValueProvider(str, 'id'),
        StaticValueProvider(str, 'secret'),
        StaticValueProvider(str, 'access'),
        StaticValueProvider(str, 'refresh'))
    return GoogleAdsOfflineUploaderDoFn(oauth,
                                        StaticValueProvider(str, 'devtoken'))
# limitations under the License. import pytest from apache_beam.options.value_provider import StaticValueProvider from models.oauth_credentials import OAuthCredentials from uploaders.google_analytics.google_analytics_data_import_uploader import GoogleAnalyticsDataImportUploaderDoFn from models.execution import AccountConfig from models.execution import Destination from models.execution import DestinationType from models.execution import Execution from models.execution import Source from models.execution import SourceType from models.execution import Batch _account_config = AccountConfig('1234567890', False, '1234567890', '', '') @pytest.fixture def uploader(mocker): mocker.patch('googleads.oauth2.GoogleRefreshTokenClient') mocker.patch('googleads.adwords.AdWordsClient') client_id = StaticValueProvider(str, 'id') secret = StaticValueProvider(str, 'secret') access = StaticValueProvider(str, 'access') refresh = StaticValueProvider(str, 'refresh') credentials = OAuthCredentials(client_id, secret, access, refresh) return GoogleAnalyticsDataImportUploaderDoFn(credentials) def test_get_service(uploader):
import pytest
from apache_beam.options.value_provider import StaticValueProvider

from uploaders.campaign_manager.campaign_manager_conversion_uploader import CampaignManagerConversionUploaderDoFn
from models.execution import AccountConfig
from models.execution import Destination
from models.execution import DestinationType
from models.execution import Execution
from models.execution import Source
from models.execution import SourceType
from models.execution import Batch
from models.oauth_credentials import OAuthCredentials

# Account configuration pointing only at the Campaign Manager profile.
_account_config = AccountConfig(mcc=False,
                                campaign_manager_account_id='dcm_profile_id',
                                google_ads_account_id='',
                                google_analytics_account_id='',
                                app_id='')


@pytest.fixture
def uploader(mocker):
    """Build a CampaignManagerConversionUploaderDoFn with static credentials."""
    oauth = OAuthCredentials(
        StaticValueProvider(str, 'id'),
        StaticValueProvider(str, 'secret'),
        StaticValueProvider(str, 'access'),
        StaticValueProvider(str, 'refresh'))
    return CampaignManagerConversionUploaderDoFn(oauth)