def process(self, element, executions: Iterable[Execution]):
    """Splits a keyed group of elements into fixed-size Batch objects.

    Args:
      element: a (key, iterable) pair where the key is the string hash of
        an Execution and the iterable holds the elements to batch.
      executions: candidate executions to match against the group key.

    Yields:
      Batch objects of at most self._batch_size elements; the final
      (possibly smaller, possibly empty) batch is always emitted.
    """
    # Find the execution whose hash matches the group key.
    execution = next(
        (execution for execution in executions
         if str(hash(execution)) == element[0]))
    batch: List[Any] = []
    # Fixed: the original loop variable shadowed the `element` parameter,
    # a latent-bug idiom; `item` preserves the exact behavior.
    for i, item in enumerate(element[1]):
        if i != 0 and i % self._batch_size == 0:
            yield Batch(execution, batch)
            batch = []
        batch.append(item)
    yield Batch(execution, batch)
def process(self, batch: Batch, **kwargs):
    """Uploads a batch of events to AppsFlyer S2S, throttled to 500 req/s.

    Args:
      batch: the Batch whose elements are sent as S2S events.

    Yields:
      A Batch containing only the successfully uploaded elements.
    """
    success_elements: List[Any] = []
    start_datetime = datetime.now()
    execution = batch.execution
    self.app_id = execution.destination.destination_metadata[0]

    # Send all requests asynchronously on a private event loop.
    loop = asyncio.new_event_loop()
    try:
        future = asyncio.ensure_future(
            self._async_request_runner(batch.elements, success_elements),
            loop=loop)
        loop.run_until_complete(future)
    finally:
        # Fixed: the original never closed the loop, leaking one event
        # loop (and its resources) per processed batch.
        loop.close()

    # Wait to avoid API throttling (rate limitation = 500 per sec).
    delta_sec = (datetime.now() - start_datetime).total_seconds()
    min_duration_sec = len(batch.elements) / 500
    if delta_sec < min_duration_sec:
        time.sleep(min_duration_sec - delta_sec)

    logging.getLogger("megalista.AppsFlyerS2SUploader").info(
        f"Successfully uploaded {len(success_elements)}/{len(batch.elements)} events."
    )
    yield Batch(execution, success_elements)
def hash_users(self, batch: Batch):
    """Returns a new Batch whose elements have had their PII fields hashed.

    Which fields get hashed is decided from the destination metadata via
    _get_should_hash_fields.
    """
    metadata = batch.execution.destination.destination_metadata
    should_hash_fields = self._get_should_hash_fields(metadata)
    self.logger.debug('Should hash fields is %s' % should_hash_fields)
    hashed_elements = []
    for element in batch.elements:
        hashed_elements.append(
            self._hash_user(element, FieldHasher(should_hash_fields)))
    return Batch(batch.execution, hashed_elements)
def test_bigquery_write(mocker, uploader):
    """_do_process should append the timestamp to each row and insert them via the BQ client."""
    mock_client = mocker.MagicMock()
    mocker.patch.object(uploader, "_get_bq_client")
    uploader._get_bq_client.return_value = mock_client
    mock_table = mocker.MagicMock()
    mock_client.get_table.return_value = mock_table
    timestamp = datetime.datetime.now().timestamp()

    execution = Execution(
        AccountConfig("account_id", False, "ga_account_id", "", ""),
        Source("orig1", SourceType.BIG_QUERY, ["dt1", "buyers"]),
        Destination(
            "dest1", DestinationType.GA_MEASUREMENT_PROTOCOL,
            ["web_property", "view", "c", "list", "d", "buyers_custom_dim"]))

    uploader._do_process(
        Batch(execution, [{"uuid": "uuid-1"}, {"uuid": "uuid-2"}]), timestamp)

    mock_client.insert_rows.assert_called_once_with(
        mock_table,
        [{"uuid": "uuid-1", "timestamp": timestamp},
         {"uuid": "uuid-2", "timestamp": timestamp}],
        (SchemaField("uuid", "string"),
         SchemaField("timestamp", "timestamp")))
def test_data_source_not_found(mocker, eraser, caplog):
    """The eraser should log an error when the named data import does not exist."""
    analytics = mocker.MagicMock()
    mocker.patch.object(eraser, '_get_analytics_service')
    eraser._get_analytics_service.return_value = analytics
    mocker.patch.object(eraser, '_is_table_empty')
    eraser._is_table_empty.return_value = False

    # Only a data source with a non-matching name is available.
    analytics.management().customDataSources().list().execute.return_value = {
        'items': [{'id': 1, 'name': 'wrong_name'}]
    }

    execution = Execution(
        AccountConfig('', False, '', '', ''),
        Source('orig1', SourceType.BIG_QUERY, ['dt1', 'buyers']),
        Destination('dest1', DestinationType.GA_DATA_IMPORT,
                    ['web_property', 'data_import_name']))

    # Act: drain the generator; it may legitimately yield nothing.
    try:
        next(eraser.process(Batch(execution, [])))
    except StopIteration:
        pass

    assert 'data_import_name - data import not found, please configure it in Google Analytics' in caplog.text
def test_conversion_upload_match_id(mocker, uploader):
    """Match-id conversions should be enriched with floodlight ids and timestamps."""
    mocker.patch.object(uploader, '_get_dcm_service')
    activity_id = 'floodlight_activity_id'
    configuration_id = 'floodlight_configuration_id'
    execution = Execution(
        _account_config,
        Source('orig1', SourceType.BIG_QUERY, ('dt1', 'buyers')),
        Destination('dest1', DestinationType.CM_OFFLINE_CONVERSION,
                    (activity_id, configuration_id)))

    # Freeze time.time() so ordinal/timestampMicros are predictable.
    frozen_now = time.time()
    mocker.patch.object(time, 'time')
    time.time.return_value = frozen_now

    uploader._do_process(Batch(execution, [{'matchId': 'abc'}]), frozen_now)

    micros = math.floor(frozen_now * 10e5)
    expected_body = {
        'conversions': [{
            'matchId': 'abc',
            'floodlightActivityId': activity_id,
            'floodlightConfigurationId': configuration_id,
            'ordinal': micros,
            'timestampMicros': micros
        }],
    }
    uploader._get_dcm_service().conversions().batchinsert.assert_any_call(
        profileId='dcm_profile_id', body=expected_body)
def test_error_on_api_call(mocker, uploader, caplog):
    """API failures reported by Campaign Manager should be surfaced in the logs."""
    caplog.set_level(logging.INFO,
                     'megalista.CampaignManagerConversionsUploader')
    mocker.patch.object(uploader, '_get_dcm_service')
    dcm_service = mocker.MagicMock()
    uploader._get_dcm_service.return_value = dcm_service

    # Simulate a batchinsert response carrying one error entry.
    dcm_service.conversions().batchinsert().execute.return_value = {
        'hasFailures': True,
        'status': [{
            'errors': [{'code': '123', 'message': 'error_returned'}]
        }]
    }

    execution = Execution(
        _account_config,
        Source('orig1', SourceType.BIG_QUERY, ('dt1', 'buyers')),
        Destination('dest1', DestinationType.CM_OFFLINE_CONVERSION,
                    ['a', 'b']))
    uploader._do_process(Batch(execution, [{'gclid': '123'}]), time.time())

    assert 'Error(s) inserting conversions:' in caplog.text
    assert '[123]: error_returned' in caplog.text
def test_fail_missing_destination_metadata(uploader, mocker):
    """Uploads must be skipped when destination metadata is incomplete."""
    mocker.patch.object(uploader, '_get_ssd_service')
    execution = Execution(
        _account_config,
        Source('orig1', SourceType.BIG_QUERY, ('dt1', 'buyers')),
        Destination('dest1', DestinationType.ADS_SSD_UPLOAD, ['1']))

    uploader.process(Batch(execution, []))

    uploader._get_ssd_service.assert_not_called()
def test_not_active(mocker, caplog):
    """With required parameters unset the uploader must skip and log a warning."""
    oauth = OAuthCredentials(
        StaticValueProvider(str, 'id'),
        StaticValueProvider(str, 'secret'),
        StaticValueProvider(str, 'access'),
        StaticValueProvider(str, 'refresh'))
    dofn = GoogleAdsOfflineUploaderDoFn(oauth, None)
    mocker.patch.object(dofn, '_get_oc_service')

    dofn.process(Batch(None, []))

    dofn._get_oc_service.assert_not_called()
    assert 'Skipping upload, parameters not configured.' in caplog.text
def test_exception_no_event_nor_user_property(uploader, caplog):
    """process must raise when neither events nor user properties are requested."""
    with requests_mock.Mocker() as m:
        m.post(requests_mock.ANY, status_code=204)
        execution = Execution(
            _account_config,
            Source('orig1', SourceType.BIG_QUERY, []),
            Destination('dest1', DestinationType.GA_4_MEASUREMENT_PROTOCOL,
                        ['api_secret', 'False', 'False', '', 'some_id', '']))
        with pytest.raises(
                ValueError,
                match='GA4 MP should be called either for sending events'):
            next(uploader.process(Batch(execution, [])))
def test_elements_uploading(mocker, uploader):
    """The uploaded CSV payload should contain the header plus one line per
    user, with None/empty custom-dimension values rendered as empty strings."""
    service = mocker.MagicMock()
    mocker.patch.object(uploader, '_get_analytics_service')
    uploader._get_analytics_service.return_value = service
    service.management().customDataSources().list().execute.return_value = {
        'items': [{'id': 1, 'name': 'data_import_name'}]
    }

    execution = Execution(
        _account_config,
        Source('orig1', SourceType.BIG_QUERY, ['dt1', 'buyers']),
        Destination('dest1', DestinationType.GA_DATA_IMPORT,
                    ['web_property', 'data_import_name']))

    # Capture the uploadData() call so its media body can be inspected.
    upload_mock = mocker.MagicMock()
    service.management().uploads().uploadData.side_effect = upload_mock

    # Act
    uploader.process(
        Batch(execution, [{
            'user_id': '12',
            'cd1': 'value1a',
            'cd2': 'value2a'
        }, {
            'user_id': '34',
            'cd1': 'value1b',
            'cd2': 'value2b'
        }, {
            'user_id': '56',
            'cd1': None,
            'cd2': ''
        }]))

    upload_mock.assert_called_once()
    _, kwargs = upload_mock.call_args
    media_bytes = kwargs['media_body'].getbytes(0, -1)
    # Fixed: removed a stray debug print(media_bytes) left in the test.
    assert media_bytes == b'ga:user_id,ga:cd1,ga:cd2\n' \
                          b'12,value1a,value2a\n' \
                          b'34,value1b,value2b\n' \
                          b'56,,'
def test_pii_hashing(mocker):
    """By default the mapper should SHA-256 hash every PII field."""
    users = [{
        "email": "*****@*****.**",
        "mailing_address_first_name": "John",
        "mailing_address_last_name": "Doe",
        "mailing_address_zip": "12345",
        "mailing_address_country": "US"
    }, {
        "email": "*****@*****.**",
        "mailing_address_first_name": "Jane",
        "mailing_address_last_name": "Doe",
        "mailing_address_zip": "12345",
        "mailing_address_country": "US"
    }]

    # Execution mock: only two metadata entries, so hashing stays enabled.
    execution = mocker.MagicMock()
    execution.destination.destination_metadata = ['Audience', 'ADD']

    hashed = AdsUserListPIIHashingMapper().hash_users(
        Batch(execution, [users[0], users[1]])).elements

    assert len(hashed) == 2
    assert hashed[0] == {
        'hashedEmail':
            'd709f370e52b57b4eb75f04e2b3422c4d41a05148cad8f81776d94a048fb70af',
        'addressInfo': {
            'countryCode': 'US',
            'hashedFirstName':
                '96d9632f363564cc3032521409cf22a852f2032eec099ed5967c0d000cec607a',
            'hashedLastName':
                '799ef92a11af918e3fb741df42934f3b568ed2d93ac1df74f1b8d41a27932a6f',
            'zipCode': '12345'
        }
    }
    assert hashed[1] == {
        'hashedEmail':
            '7c815580ad3844bcb627c74d24eaf700e1a711d9c23e9beb62ab8d28e8cb7954',
        'addressInfo': {
            'countryCode': 'US',
            'hashedFirstName':
                '81f8f6dde88365f3928796ec7aa53f72820b06db8664f5fe76a7eb13e24546a2',
            'hashedLastName':
                '799ef92a11af918e3fb741df42934f3b568ed2d93ac1df74f1b8d41a27932a6f',
            'zipCode': '12345'
        }
    }
def test_files_deleted(mocker, eraser):
    """Eraser should delete uploads only for the matching data import.

    NOTE(review): the captured request body is bound to `ids` but never
    asserted on — the test as written only checks deleteUploadData was
    called once. TODO confirm the expected body shape and add an assertion.
    """
    service = mocker.MagicMock()
    mocker.patch.object(eraser, '_get_analytics_service')
    eraser._get_analytics_service.return_value = service
    mocker.patch.object(eraser, '_is_table_empty')
    eraser._is_table_empty.return_value = False
    # Two data sources exist; only 'data_import_name' matches the destination.
    service.management().customDataSources().list().execute.return_value = {
        'items': [{
            'id': 1,
            'name': 'data_import_name'
        }, {
            'id': 2,
            'name': 'data_import_name2'
        }]
    }
    execution = Execution(
        AccountConfig('', False, '', '', ''),
        Source('orig1', SourceType.BIG_QUERY, ['dt1', 'buyers']),
        Destination('dest1', DestinationType.GA_DATA_IMPORT,
                    ['web_property', 'data_import_name']))
    # Add mock to side effect of list uploads
    service.management().uploads().list().execute.return_value = {
        'items': [{
            'id': 'ab'
        }, {
            'id': 'cd'
        }]
    }
    # Add mock to side effect of deleteUploadData
    delete_call_mock = mocker.MagicMock()
    service.management().uploads(
    ).deleteUploadData.side_effect = delete_call_mock
    # Act
    next(eraser.process(Batch(execution, [])))
    # Called once
    delete_call_mock.assert_called_once()
    # Intercept args called
    _, kwargs = delete_call_mock.call_args
    # Check if really sent values from custom field
    ids = kwargs['body']
def test_exception_app_and_web(uploader, caplog):
    """Supplying both a firebase app id and a web measurement id must fail."""
    with requests_mock.Mocker() as m:
        m.post(requests_mock.ANY, status_code=204)
        execution = Execution(
            _account_config,
            Source('orig1', SourceType.BIG_QUERY, []),
            Destination(
                'dest1', DestinationType.GA_4_MEASUREMENT_PROTOCOL,
                ['api_secret', 'False', 'True', '', 'some_app_id',
                 'some_web_id']))
        events = [{'name': 'event_name'}]
        with pytest.raises(
                ValueError,
                match='GA4 MP should be called either with a firebase_app_id'):
            next(uploader.process(Batch(execution, events)))
def test_avoid_pii_hashing(mocker):
    """With hashing explicitly disabled ('False'), PII fields pass through untouched."""
    users = [{
        "email": "*****@*****.**",
        "mailing_address_first_name": "John",
        "mailing_address_last_name": "Doe",
        "mailing_address_zip": "12345",
        "mailing_address_country": "US"
    }, {
        "email": "*****@*****.**",
        "mailing_address_first_name": "Jane",
        "mailing_address_last_name": "Doe",
        "mailing_address_zip": "12345",
        "mailing_address_country": "US"
    }]

    # Execution mock: third metadata entry 'False' turns hashing off.
    execution = mocker.MagicMock()
    execution.destination.destination_metadata = ['Audience', 'ADD', 'False']

    hashed = AdsUserListPIIHashingMapper().hash_users(
        Batch(execution, [users[0], users[1]])).elements

    assert len(hashed) == 2
    assert hashed[0] == {
        'hashedEmail': '*****@*****.**',
        'addressInfo': {
            'countryCode': 'US',
            'hashedFirstName': 'John',
            'hashedLastName': 'Doe',
            'zipCode': '12345'
        }
    }
    assert hashed[1] == {
        'hashedEmail': '*****@*****.**',
        'addressInfo': {
            'countryCode': 'US',
            'hashedFirstName': 'Jane',
            'hashedLastName': 'Doe',
            'zipCode': '12345'
        }
    }
def test_list_creation_not_mcc(mocker, uploader):
    """A remarketing audience should be created linked to the non-MCC ads account."""
    ads_id = 'xxx-yyy-zzzz'
    ga_id = 'acc'
    service = mocker.MagicMock()
    mocker.patch.object(uploader, '_get_analytics_service')
    uploader._get_analytics_service.return_value = service
    service.management().remarketingAudience().insert().execute.return_value = {
        'id': 1
    }

    execution = Execution(
        AccountConfig(ads_id, False, ga_id, '', ''),
        Source('orig1', SourceType.BIG_QUERY, ['dt1', 'buyers']),
        Destination(
            'dest1', DestinationType.GA_USER_LIST_UPLOAD,
            ['web_property', 'view', 'c', 'list', 'd', 'buyers_custom_dim']))
    uploader.process(Batch(execution, []))

    expected_audience = {
        'name': 'list',
        'linkedViews': ['view'],
        'linkedAdAccounts': [{
            'type': 'ADWORDS_LINKS',
            'linkedAccountId': ads_id
        }],
        'audienceType': 'SIMPLE',
        'audienceDefinition': {
            'includeConditions': {
                'kind': 'analytics#includeConditions',
                'isSmartList': False,
                'segment': 'users::condition::%s==buyer' % 'buyers_custom_dim',
                'membershipDurationDays': 365
            }
        }
    }
    service.management().remarketingAudience().insert.assert_any_call(
        accountId=ga_id, webPropertyId='web_property', body=expected_audience)
def test_succesful_web_user_property_call_with_user_id(uploader, caplog):
    """A web user-property call should forward the user_id to GA4 MP."""
    with requests_mock.Mocker() as m:
        m.post(requests_mock.ANY, status_code=204)
        execution = Execution(
            _account_config,
            Source('orig1', SourceType.BIG_QUERY, []),
            Destination('dest1', DestinationType.GA_4_MEASUREMENT_PROTOCOL,
                        ['api_secret', 'False', 'True', '', '', 'some_id']))
        element = {
            'user_ltv': '42',
            'user_id': 'Id42',
            'client_id': 'someId'
        }
        next(uploader.process(Batch(execution, [element])))

        assert m.call_count == 1
        assert m.last_request.json()['user_id'] == 'Id42'
def test_avoid_list_creation_when_name_blank(mocker, uploader):
    """No remarketing audience may be created when the list name is empty."""
    service = mocker.MagicMock()
    mocker.patch.object(uploader, '_get_analytics_service')
    uploader._get_analytics_service.return_value = service

    execution = Execution(
        AccountConfig('xxx-yyy-zzzz', True, 'acc', '', ''),
        Source('orig1', SourceType.BIG_QUERY, ['dt1', 'buyers']),
        Destination(
            'dest1', DestinationType.GA_USER_LIST_UPLOAD,
            ['web_property', 'view', 'c', '', 'd', 'buyers_custom_dim']))
    uploader.process(Batch(execution, []))

    service.management().remarketingAudience().insert.assert_not_called()
def test_elements_uploading_custom_field(mocker, uploader):
    """With a custom field configured, only the two custom dimensions are uploaded."""
    service = mocker.MagicMock()
    mocker.patch.object(uploader, '_get_analytics_service')
    uploader._get_analytics_service.return_value = service
    service.management().customDataSources().list().execute.return_value = {
        'items': [{'id': 1, 'name': 'data_import_name'}]
    }

    execution = Execution(
        AccountConfig('', False, '', '', ''),
        Source('orig1', SourceType.BIG_QUERY, ['dt1', 'buyers']),
        Destination('dest1', DestinationType.GA_USER_LIST_UPLOAD, [
            'web_property', 'b', 'data_import_name', 'd',
            'user_id_custom_dim', 'buyer_custom_dim', 'my_field'
        ]))

    # Capture the uploadData() call so its media body can be inspected.
    upload_mock = mocker.MagicMock()
    service.management().uploads().uploadData.side_effect = upload_mock

    uploader.process(
        Batch(execution, [{'user_id': '12', 'my_field': '11'},
                          {'user_id': '34', 'my_field': '22'}]))

    upload_mock.assert_called_once()
    _, kwargs = upload_mock.call_args
    media_bytes = kwargs['media_body'].getbytes(0, -1)
    assert media_bytes == b'user_id_custom_dim,buyer_custom_dim\n12,11\n34,22'
def test_conversion_upload(mocker, uploader):
    """Click conversions should be mapped into ADD mutate operations."""
    mocker.patch.object(uploader, '_get_oc_service')
    conversion_name = 'user_list'
    execution = Execution(
        _account_config,
        Source('orig1', SourceType.BIG_QUERY, ['dt1', 'buyers']),
        Destination('dest1', DestinationType.ADS_OFFLINE_CONVERSION,
                    ['user_list']))

    time1 = '2020-04-09T14:13:55.0005'
    time1_result = '20200409 141355 America/Sao_Paulo'
    time2 = '2020-04-09T13:13:55.0005'
    time2_result = '20200409 131355 America/Sao_Paulo'

    uploader.process(
        Batch(execution, [
            {'time': time1, 'amount': '123', 'gclid': '456'},
            {'time': time2, 'amount': '234', 'gclid': '567'},
        ]))

    uploader._get_oc_service.return_value.mutate.assert_any_call([{
        'operator': 'ADD',
        'operand': {
            'conversionName': conversion_name,
            'conversionTime': time1_result,
            'conversionValue': '123',
            'googleClickId': '456'
        }
    }, {
        'operator': 'ADD',
        'operand': {
            'conversionName': conversion_name,
            'conversionTime': time2_result,
            'conversionValue': '234',
            'googleClickId': '567'
        }
    }])
def test_succesful_app_event_call(uploader, caplog):
    """An app event should be posted once with its params forwarded verbatim."""
    with requests_mock.Mocker() as m:
        m.post(requests_mock.ANY, status_code=204)
        execution = Execution(
            _account_config,
            Source('orig1', SourceType.BIG_QUERY, []),
            Destination('dest1', DestinationType.GA_4_MEASUREMENT_PROTOCOL,
                        ['api_secret', 'True', 'False', '', 'some_id', '']))
        element = {
            'app_instance_id': '123',
            'name': 'event_name',
            'value': '42',
            'important_event': 'False'
        }
        next(uploader.process(Batch(execution, [element])))

        assert m.call_count == 1
        assert m.last_request.json()['events'][0]['params']['value'] == '42'
def test_bigquery_write_failure(mocker, uploader, caplog):
    """Errors returned by insert_rows must end up in the logs."""
    failing_client = mocker.MagicMock()
    mocker.patch.object(uploader, "_get_bq_client")
    uploader._get_bq_client.return_value = failing_client
    error_message = "This is an error message"
    failing_client.insert_rows.return_value = [{"errors": error_message}]

    execution = Execution(
        AccountConfig("account_id", False, "ga_account_id", "", ""),
        Source("orig1", SourceType.BIG_QUERY, ["dt1", "buyers"]),
        Destination(
            "dest1", DestinationType.GA_MEASUREMENT_PROTOCOL,
            ["web_property", "view", "c", "list", "d", "buyers_custom_dim"]))

    uploader.process(Batch(execution, [{"uuid": "uuid-1"}]))

    assert error_message in caplog.text
def test_list_already_exists(mocker, uploader):
    """If a remarketing list with the same name exists, no insert happens."""
    service = mocker.MagicMock()
    service.management().remarketingAudience().list().execute = mocker.Mock(
        return_value={'items': [{'id': 1, 'name': 'list'}]})
    mocker.patch.object(uploader, '_get_analytics_service')
    uploader._get_analytics_service.return_value = service

    execution = Execution(
        AccountConfig('', False, '', '', ''),
        Source('orig1', SourceType.BIG_QUERY, ['dt1', 'buyers']),
        Destination('dest1', DestinationType.GA_USER_LIST_UPLOAD,
                    ['a', 'b', 'c', 'list', 'd', 'e']))
    uploader.process(Batch(execution, []))

    uploader._get_analytics_service().management().remarketingAudience(
    ).insert.assert_not_called()
def test_exception_app_event_without_app_instance_id(uploader, caplog):
    """An app-stream event lacking app_instance_id must raise a ValueError."""
    with requests_mock.Mocker() as m:
        m.post(requests_mock.ANY, status_code=204)
        execution = Execution(
            _account_config,
            Source('orig1', SourceType.BIG_QUERY, []),
            Destination('dest1', DestinationType.GA_4_MEASUREMENT_PROTOCOL,
                        ['api_secret', 'True', 'False', '', 'some_id', '']))
        element = {
            'client_id': '123',
            'name': 'event_name',
            'value': '42',
            'important_event': 'False'
        }
        with pytest.raises(
                ValueError,
                match=
                'GA4 MP needs an app_instance_id parameter when used for an App Stream.'
        ):
            next(uploader.process(Batch(execution, [element])))
def process(self, batch: Batch, **kwargs):
    """Sends each element of the batch to the GA4 Measurement Protocol API.

    Destination metadata layout (by index, as read below): 0 api_secret,
    1 is_event, 2 is_user_property, 3 non_personalized_ads,
    4 firebase_app_id (optional), 5 measurement_id (optional). Exactly one
    of firebase_app_id/measurement_id and exactly one of
    is_event/is_user_property must be set, otherwise ValueError is raised.

    Yields:
      A Batch containing only the elements the API accepted (HTTP 204).
    """
    execution = batch.execution
    api_secret = execution.destination.destination_metadata[0]
    is_event = self._str2bool(
        execution.destination.destination_metadata[1])
    is_user_property = self._str2bool(
        execution.destination.destination_metadata[2])
    non_personalized_ads = self._str2bool(
        execution.destination.destination_metadata[3])
    firebase_app_id = None
    if len(execution.destination.destination_metadata) >= 5:
        firebase_app_id = execution.destination.destination_metadata[4]
    measurement_id = None
    if len(execution.destination.destination_metadata) >= 6:
        measurement_id = execution.destination.destination_metadata[5]
    if not self._exactly_one_of(firebase_app_id, measurement_id):
        raise ValueError(
            'GA4 MP should be called either with a firebase_app_id (for apps) or a measurement_id (for web)'
        )
    if not self._exactly_one_of(is_event, is_user_property):
        raise ValueError(
            'GA4 MP should be called either for sending events or a user properties'
        )
    # NOTE(review): `payload` is created once and mutated per row, so keys
    # set for an earlier row (e.g. 'user_id') can leak into later rows that
    # do not set them — confirm whether this is intended.
    payload: Dict[str, Any] = {'nonPersonalizedAds': non_personalized_ads}
    accepted_elements = []
    for row in batch.elements:
        app_instance_id = row.get('app_instance_id')
        client_id = row.get('client_id')
        user_id = row.get('user_id')
        if not self._exactly_one_of(app_instance_id, client_id):
            raise ValueError(
                'GA4 MP should be called either with an app_instance_id (for apps) or a client_id (for web)'
            )
        if is_event:
            # Every remaining column of the row becomes an event parameter.
            params = {
                k: v
                for k, v in row.items() if k not in
                ('name', 'app_instance_id', 'client_id', 'uuid', 'user_id')
            }
            payload['events'] = [{'name': row['name'], 'params': params}]
        if is_user_property:
            payload['userProperties'] = {
                k: {
                    'value': v
                }
                for k, v in row.items()
                if k not in ('app_instance_id', 'client_id', 'uuid', 'user_id')
            }
            # NOTE(review): here 'events' is a dict, while the is_event
            # branch uses a list of dicts — verify the API accepts this shape.
            payload['events'] = {
                'name': 'user_property_addition_event',
                'params': {}
            }
        url_container = [f'{self.API_URL}?api_secret={api_secret}']
        if firebase_app_id:
            url_container.append(f'&firebase_app_id={firebase_app_id}')
            if not app_instance_id:
                raise ValueError(
                    'GA4 MP needs an app_instance_id parameter when used for an App Stream.'
                )
            payload['app_instance_id'] = app_instance_id
        if measurement_id:
            url_container.append(f'&measurement_id={measurement_id}')
            if not client_id:
                raise ValueError(
                    'GA4 MP needs a client_id parameter when used for a Web Stream.'
                )
            payload['client_id'] = client_id
        if user_id:
            payload['user_id'] = user_id
        url = ''.join(url_container)
        response = requests.post(url, data=json.dumps(payload))
        if response.status_code != 204:
            # Any non-204 status is logged and the row is dropped from the
            # yielded batch.
            logging.getLogger(
                'megalista.GoogleAnalytics4MeasurementProtocolUploader'
            ).error(
                f'Error calling GA4 MP {response.status_code}: {response.raw}'
            )
        else:
            accepted_elements.append(row)
    logging.getLogger(
        'megalista.GoogleAnalytics4MeasurementProtocolUploader').info(
            f'Successfully uploaded {len(accepted_elements)}/{len(batch.elements)} events.'
        )
    yield Batch(execution, accepted_elements)
def map_batch(self, batch: Batch):
    """Returns a new Batch with mapped conversions, keeping the same execution."""
    mapped_elements = self._map_conversions(batch.elements)
    return Batch(batch.execution, mapped_elements)
def test_conversion_upload(mocker, uploader):
    """SSD transactions should be wrapped into one STORE_SALES_UPLOAD_FIRST_PARTY mutate."""
    mocker.patch.object(uploader, '_get_ssd_service')
    conversion_name = 'ssd_conversion'
    external_upload_id = '123'
    execution = Execution(
        _account_config,
        Source('orig1', SourceType.BIG_QUERY, ('dt1', 'buyers')),
        Destination('dest1', DestinationType.ADS_SSD_UPLOAD,
                    [conversion_name, external_upload_id]))

    time1 = '2020-04-09T14:13:55.0005'
    time1_result = '20200409 141355 America/Sao_Paulo'
    time2 = '2020-04-09T13:13:55.0005'
    time2_result = '20200409 131355 America/Sao_Paulo'

    uploader.process(
        Batch(execution, [{
            'hashedEmail': '*****@*****.**',
            'time': time1,
            'amount': '123'
        }, {
            'hashedEmail': '*****@*****.**',
            'time': time2,
            'amount': '234'
        }]))

    def expected_transaction(time_result, amount):
        # Builds the expected StoreSalesTransaction payload for one row.
        return {
            'StoreSalesTransaction': {
                'userIdentifiers': [{
                    'userIdentifierType': 'HASHED_EMAIL',
                    'value': '*****@*****.**'
                }],
                'transactionTime': time_result,
                'transactionAmount': {
                    'currencyCode': 'BRL',
                    'money': {
                        'microAmount': amount
                    }
                },
                'conversionName': conversion_name
            }
        }

    uploader._get_ssd_service.return_value.mutate.assert_any_call([{
        'operand': {
            'externalUploadId': external_upload_id,
            'offlineDataList': [
                expected_transaction(time1_result, '123'),
                expected_transaction(time2_result, '234')
            ],
            'uploadType': 'STORE_SALES_UPLOAD_FIRST_PARTY',
            'uploadMetadata': {
                'StoreSalesUploadCommonMetadata': {
                    'xsi_type': 'FirstPartyUploadMetadata',
                    'loyaltyRate': 1.0,
                    'transactionUploadRate': 1.0,
                }
            }
        },
        'operator': 'ADD'
    }])