def sync_data(self):
    table = self.__class__.TABLE
    endpoints = {
        'sent': FuelSDK.ET_SentEvent,
        'click': FuelSDK.ET_ClickEvent,
        'open': FuelSDK.ET_OpenEvent,
        'bounce': FuelSDK.ET_BounceEvent,
        'unsub': FuelSDK.ET_UnsubEvent
    }

    for event_name, selector in endpoints.items():
        search_filter = None

        start = get_last_record_value_for_table(self.state, event_name)

        if start is None:
            start = self.config.get('start_date')

        if start is None:
            raise RuntimeError('start_date not defined!')

        pagination_unit = self.config.get(
            'pagination__{}_interval_unit'.format(event_name), 'minutes')
        pagination_quantity = self.config.get(
            'pagination__{}_interval_quantity'.format(event_name), 10)

        unit = {pagination_unit: int(pagination_quantity)}

        end = increment_date(start, unit)

        while before_now(start):
            LOGGER.info("Fetching {} from {} to {}"
                        .format(event_name, start, end))

            search_filter = get_date_page('EventDate', start, unit)

            stream = request(event_name,
                             selector,
                             self.auth_stub,
                             search_filter)

            for event in stream:
                event = self.filter_keys_and_parse(event)

                self.state = incorporate(self.state,
                                         event_name,
                                         'EventDate',
                                         event.get('EventDate'))

                singer.write_records(table, [event])

            self.state = incorporate(self.state,
                                     event_name,
                                     'EventDate',
                                     start)

            save_state(self.state)

            start = end
            end = increment_date(start, unit)
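# The windowed loop above leans on three small date helpers. Their real
# implementations live elsewhere in this tap; the sketch below is a minimal,
# hypothetical reconstruction based only on how they are called here
# (ISO-8601 timestamp strings in and out, `unit` as timedelta-style kwargs
# such as {'minutes': 10}).

from datetime import datetime, timedelta, timezone


def increment_date(date_str, unit):
    # Advance a timestamp string by one pagination window.
    date = datetime.strptime(date_str, '%Y-%m-%dT%H:%M:%SZ')
    return (date + timedelta(**unit)).strftime('%Y-%m-%dT%H:%M:%SZ')


def before_now(date_str):
    # True while the window start is still in the past.
    date = datetime.strptime(date_str, '%Y-%m-%dT%H:%M:%SZ')
    return date.replace(tzinfo=timezone.utc) < datetime.now(timezone.utc)


def get_date_page(field, start, unit):
    # Build a SOAP-style complex filter for one [start, start + unit) window.
    return {
        'LogicalOperator': 'AND',
        'LeftOperand': {
            'Property': field,
            'SimpleOperator': 'greaterThanOrEqual',
            'Value': start,
        },
        'RightOperand': {
            'Property': field,
            'SimpleOperator': 'lessThan',
            'Value': increment_date(start, unit),
        },
    }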
def sync_data(self):
    table = self.__class__.TABLE
    selector = FuelSDK.ET_Send

    search_filter = None

    # pass config so the start date is returned if no bookmark is found
    retrieve_all_since = get_last_record_value_for_table(
        self.state, table, self.config)

    if retrieve_all_since is not None:
        search_filter = {
            'Property': 'ModifiedDate',
            'SimpleOperator': 'greaterThan',
            'Value': retrieve_all_since
        }

    stream = request('Send',
                     selector,
                     self.auth_stub,
                     search_filter,
                     batch_size=self.batch_size)

    catalog_copy = copy.deepcopy(self.catalog)

    for send in stream:
        send = self.filter_keys_and_parse(send)

        self.state = incorporate(self.state,
                                 table,
                                 'ModifiedDate',
                                 send.get('ModifiedDate'))

        self.write_records_with_transform(send, catalog_copy, table)

    save_state(self.state)
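# Bookmark handling throughout these methods goes through `incorporate`,
# `get_last_record_value_for_table`, and `save_state`. A minimal sketch of
# what they appear to do, assuming a singer-style state dict shaped like
# {'bookmarks': {<table>: {'field': ..., 'last_record': ...}}}; the tap's
# real helpers may track more than this.

import singer


def incorporate(state, table, field, value):
    # Keep the maximum replication-key value seen so far for `table`.
    if value is None:
        return state
    state = dict(state or {})
    bookmarks = state.setdefault('bookmarks', {})
    current = bookmarks.get(table, {}).get('last_record')
    # ISO-8601 timestamps compare correctly as strings.
    if current is None or value > current:
        bookmarks[table] = {'field': field, 'last_record': value}
    return state


def get_last_record_value_for_table(state, table, config=None):
    # Fall back to the configured start_date when no bookmark exists yet.
    bookmark = (state or {}).get('bookmarks', {}) \
                            .get(table, {}).get('last_record')
    if bookmark is None and config is not None:
        return config.get('start_date')
    return bookmark


def save_state(state):
    # Emit a singer STATE message so the runner can persist it.
    singer.write_state(state)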
def sync_data(self):
    table = self.__class__.TABLE
    selector = FuelSDK.ET_Email

    search_filter = None
    retrieve_all_since = get_last_record_value_for_table(self.state, table)

    if retrieve_all_since is not None:
        search_filter = {
            'Property': 'ModifiedDate',
            'SimpleOperator': 'greaterThan',
            'Value': retrieve_all_since
        }

    stream = request('Email', selector, self.auth_stub, search_filter)

    for email in stream:
        email = self.filter_keys_and_parse(email)

        self.state = incorporate(self.state,
                                 table,
                                 'ModifiedDate',
                                 email.get('ModifiedDate'))

        singer.write_records(table, [email])

    save_state(self.state)
def sync_data(self):
    table = self.__class__.TABLE

    subscriber_dao = SubscriberDataAccessObject(self.config, self.state,
                                                self.auth_stub,
                                                self.subscriber_catalog)

    # pass config so the start date is returned if no bookmark is found
    start = get_last_record_value_for_table(self.state, table, self.config)

    pagination_unit = self.config.get(
        'pagination__list_subscriber_interval_unit', 'days')
    pagination_quantity = self.config.get(
        'pagination__list_subscriber_interval_quantity', 1)

    unit = {pagination_unit: int(pagination_quantity)}

    end = increment_date(start, unit)

    all_subscribers_list = self._get_all_subscribers_list()

    while before_now(start):
        stream = request('ListSubscriber',
                         FuelSDK.ET_List_Subscriber,
                         self.auth_stub,
                         _get_list_subscriber_filter(all_subscribers_list,
                                                     start, unit),
                         batch_size=self.batch_size)

        batch_size = 100

        if self.replicate_subscriber:
            subscriber_dao.write_schema()

        catalog_copy = copy.deepcopy(self.catalog)

        for list_subscribers_batch in partition_all(stream, batch_size):
            for list_subscriber in list_subscribers_batch:
                list_subscriber = self.filter_keys_and_parse(list_subscriber)

                if list_subscriber.get('ModifiedDate'):
                    self.state = incorporate(
                        self.state,
                        table,
                        'ModifiedDate',
                        list_subscriber.get('ModifiedDate'))

                self.write_records_with_transform(list_subscriber,
                                                  catalog_copy, table)

            if self.replicate_subscriber:
                # make the list of subscriber keys
                subscriber_keys = list(map(_get_subscriber_key,
                                           list_subscribers_batch))

                # pass the list of 'subscriber_keys' to fetch subscriber details
                subscriber_dao.pull_subscribers_batch(subscriber_keys)

        save_state(self.state)

        start = end
        end = increment_date(start, unit)
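# `partition_all` above chunks the record stream into lists of at most
# `batch_size` items so subscriber keys can be fetched in batches. It is
# similar in spirit to toolz.partition_all (which takes its arguments in the
# other order); a generator equivalent matching the (iterable, size) call
# used here, as a sketch:

from itertools import islice


def partition_all(iterable, n):
    # Yield successive lists of up to n items; the last may be shorter.
    iterator = iter(iterable)
    while True:
        chunk = list(islice(iterator, n))
        if not chunk:
            return
        yield chunk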
def sync_data(self):
    table = self.__class__.TABLE
    selector = FuelSDK.ET_List

    search_filter = None
    retrieve_all_since = get_last_record_value_for_table(
        self.state, table, self.config.get('start_date'))

    if retrieve_all_since is not None:
        search_filter = {
            'Property': 'ModifiedDate',
            'SimpleOperator': 'greaterThan',
            'Value': retrieve_all_since
        }

    stream = request('List', selector, self.auth_stub, search_filter)

    for _list in stream:
        _list = self.filter_keys_and_parse(_list)

        self.state = incorporate(self.state,
                                 table,
                                 'ModifiedDate',
                                 _list.get('ModifiedDate'))

        singer.write_records(table, [_list])

    save_state(self.state)
def sync_data(self):
    table = self.__class__.TABLE
    selector = FuelSDK.ET_ContentArea

    search_filter = None
    retrieve_all_since = get_last_record_value_for_table(self.state, table)

    if retrieve_all_since is not None:
        search_filter = {
            'Property': 'ModifiedDate',
            'SimpleOperator': 'greaterThan',
            'Value': retrieve_all_since
        }

    stream = request('ContentAreaDataAccessObject',
                     selector,
                     self.auth_stub,
                     search_filter)

    for content_area in stream:
        content_area = self.filter_keys_and_parse(content_area)

        self.state = incorporate(self.state,
                                 table,
                                 'ModifiedDate',
                                 content_area.get('ModifiedDate'))

        singer.write_records(table, [content_area])

    save_state(self.state)
def sync_data(self):
    table = self.__class__.TABLE

    subscriber_dao = SubscriberDataAccessObject(self.config, self.state,
                                                self.auth_stub,
                                                self.subscriber_catalog)

    start = get_last_record_value_for_table(self.state, table,
                                            self.config.get('start_date'))

    if start is None:
        start = self.config.get('start_date')

    pagination_unit = self.config.get(
        'pagination__list_subscriber_interval_unit', 'days')
    pagination_quantity = self.config.get(
        'pagination__list_subscriber_interval_quantity', 1)

    unit = {pagination_unit: int(pagination_quantity)}

    end = increment_date(start, unit)

    all_subscribers_list = self._get_all_subscribers_list()

    while before_now(start):
        stream = request(
            'ListSubscriber',
            FuelSDK.ET_List_Subscriber,
            self.auth_stub,
            _get_list_subscriber_filter(all_subscribers_list, start, unit))

        batch_size = 100

        if self.replicate_subscriber:
            subscriber_dao.write_schema()

        for list_subscribers_batch in partition_all(stream, batch_size):
            for list_subscriber in list_subscribers_batch:
                list_subscriber = self.filter_keys_and_parse(list_subscriber)

                if list_subscriber.get('ModifiedDate'):
                    self.state = incorporate(
                        self.state,
                        table,
                        'ModifiedDate',
                        list_subscriber.get('ModifiedDate'))

                list_subscriber = self.remove_sensitive_data(list_subscriber)
                singer.write_records(table, [list_subscriber])

            if self.replicate_subscriber:
                subscriber_keys = list(map(_get_subscriber_key,
                                           list_subscribers_batch))
                subscriber_dao.pull_subscribers_batch(subscriber_keys)

        save_state(self.state)

        start = end
        end = increment_date(start, unit)
def sync_data(self):
    table = self.__class__.TABLE

    search_filter = None

    start = get_last_record_value_for_table(self.state, self.event_name,
                                            self.config.get('start_date'))

    if start is None:
        start = self.config.get('start_date')

    if start is None:
        raise RuntimeError('start_date not defined!')

    pagination_unit = self.config.get(
        'pagination__{}_interval_unit'.format(self.event_name), 'minutes')
    pagination_quantity = self.config.get(
        'pagination__{}_interval_quantity'.format(self.event_name), 10)

    unit = {pagination_unit: int(pagination_quantity)}

    end = increment_date(start, unit)

    while before_now(start):
        LOGGER.info("Fetching {} from {} to {}".format(
            self.event_name, start, end))

        search_filter = get_date_page('EventDate', start, unit)

        stream = request(self.event_name,
                         self.selector,
                         self.auth_stub,
                         search_filter)

        for event in stream:
            event = self.filter_keys_and_parse(event)

            self.state = incorporate(self.state,
                                     self.event_name,
                                     'EventDate',
                                     event.get('EventDate'))

            if event.get('SubscriberKey') is None:
                LOGGER.info(
                    "SubscriberKey is NULL so ignoring {} record "
                    "with SendID: {} and EventDate: {}".format(
                        self.event_name,
                        event.get('SendID'),
                        event.get('EventDate')))
                continue

            event = self.remove_sensitive_data(event)
            singer.write_records(table, [event])

        self.state = incorporate(self.state,
                                 self.event_name,
                                 'EventDate',
                                 start)

        save_state(self.state)

        start = end
        end = increment_date(start, unit)
def sync_data(self):
    table = self.__class__.TABLE
    selector = FuelSDK.ET_Send

    list_sends_dao = ListSendDataAccessObject(self.config, self.state,
                                              self.auth_stub,
                                              self.listsend_catalog)
    link_sends_dao = LinkSendDataAccessObject(self.config, self.state,
                                              self.auth_stub,
                                              self.linksend_catalog)

    search_filter = None
    retrieve_all_since = get_last_record_value_for_table(
        self.state, table, self.config.get('start_date'))

    if self.REPLICATION_METHOD == "FULL_TABLE":
        retrieve_all_since = datetime.datetime.strptime(
            self.config.get('start_date'), "%Y-%m-%dT%H:%M:%SZ")

    if retrieve_all_since is not None:
        search_filter = {
            'Property': 'CreatedDate',
            'SimpleOperator': 'greaterThan',
            'Value': retrieve_all_since
        }

    stream = request('Send', selector, self.auth_stub, search_filter)

    if self.replicate_listsend:
        list_sends_dao.write_schema()
    if self.replicate_linksend:
        link_sends_dao.write_schema()

    for send in stream:
        send = self.filter_keys_and_parse(send)

        if self.replicate_listsend:
            list_sends_dao.sync_data_by_sendID(send.get('ID'))
        if self.replicate_linksend:
            link_sends_dao.sync_data_by_sendID(send.get('ID'))

        # Write the record on a FULL_TABLE sync, or on an INCREMENTAL sync
        # when the record was created after the bookmark. The parentheses
        # make the original `and`/`or` precedence explicit.
        if ((retrieve_all_since.strftime('%Y-%m-%d')
                < send.get('CreatedDate')[:10]
                and self.REPLICATION_METHOD == 'INCREMENTAL')
                or self.REPLICATION_METHOD == 'FULL_TABLE'):
            self.state = incorporate(self.state,
                                     table,
                                     'CreatedDate',
                                     send.get('CreatedDate'))
            singer.write_records(table, [send])

    save_state(self.state)
def _replicate(self, customer_key, keys,
               parent_category_id, table,
               partial=False, start=None,
               end=None, unit=None, replication_key=None):
    if partial:
        LOGGER.info("Fetching {} from {} to {}"
                    .format(table, start, end))

    cursor = FuelSDK.ET_DataExtension_Row()
    cursor.auth_stub = self.auth_stub
    cursor.CustomerKey = customer_key
    cursor.props = keys

    if partial:
        cursor.search_filter = get_date_page(replication_key,
                                             start,
                                             unit)

    batch_size = int(self.config.get('batch_size', 2500))
    result = request_from_cursor('DataExtensionObject', cursor,
                                 batch_size=batch_size)

    for row in result:
        row = self.filter_keys_and_parse(row)
        row['CategoryID'] = parent_category_id

        self.state = incorporate(self.state,
                                 table,
                                 replication_key,
                                 row.get(replication_key))

        singer.write_records(table, [row])

    if partial:
        self.state = incorporate(self.state,
                                 table,
                                 replication_key,
                                 start)

    save_state(self.state)
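# `request_from_cursor` wraps a configured FuelSDK cursor and pages through
# results lazily. A hypothetical sketch of the pattern, assuming FuelSDK's
# get()/getMoreResults() pairing and its response attributes (status,
# message, results, more_results); the tap's real helper may also handle
# batch sizing and retries.

def request_from_cursor(name, cursor, batch_size=2500):
    response = cursor.get()

    if not response.status:
        raise RuntimeError("Request failed with '{}'"
                           .format(response.message))

    while True:
        for item in response.results:
            yield item

        if response.more_results:
            LOGGER.info("Getting more results from '{}' endpoint"
                        .format(name))
            response = cursor.getMoreResults()
        else:
            break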
def do_sync(args): LOGGER.info("Starting sync.") config = load_config(args.config) state = load_state(args.state) catalog = load_catalog(args.properties) auth_stub = get_auth_stub(config) stream_accessors = [] subscriber_selected = False subscriber_catalog = None list_subscriber_selected = False for stream_catalog in catalog.get('streams'): stream_accessor = None if not _is_selected(stream_catalog.get('schema', {})): LOGGER.info("'{}' is not marked selected, skipping.".format( stream_catalog.get('stream'))) continue if SubscriberDataAccessObject.matches_catalog(stream_catalog): subscriber_selected = True subscriber_catalog = stream_catalog LOGGER.info("'subscriber' selected, will replicate via " "'list_subscriber'") continue if ListSubscriberDataAccessObject.matches_catalog(stream_catalog): list_subscriber_selected = True for available_stream_accessor in AVAILABLE_STREAM_ACCESSORS: if available_stream_accessor.matches_catalog(stream_catalog): stream_accessors.append( available_stream_accessor(config, state, auth_stub, stream_catalog)) break if subscriber_selected and not list_subscriber_selected: LOGGER.fatal('Cannot replicate `subscriber` without ' '`list_subscriber`. Please select `list_subscriber` ' 'and try again.') exit(1) for stream_accessor in stream_accessors: if isinstance(stream_accessor, ListSubscriberDataAccessObject) and \ subscriber_selected: stream_accessor.replicate_subscriber = True stream_accessor.subscriber_catalog = subscriber_catalog try: stream_accessor.state = state stream_accessor.sync() state = stream_accessor.state except Exception: LOGGER.error('Failed to sync endpoint, moving on!') save_state(state)
def sync_data(self):
    table = self.__class__.TABLE
    endpoints = {
        'sent': FuelSDK.ET_SentEvent,
        'click': FuelSDK.ET_ClickEvent,
        'open': FuelSDK.ET_OpenEvent,
        'bounce': FuelSDK.ET_BounceEvent,
        'unsub': FuelSDK.ET_UnsubEvent
    }

    for event_name, selector in endpoints.items():
        search_filter = None

        # pass config so the start date is returned if no bookmark is found
        start = get_last_record_value_for_table(self.state, event_name,
                                                self.config)

        if start is None:
            raise RuntimeError('start_date not defined!')

        pagination_unit = self.config.get(
            'pagination__{}_interval_unit'.format(event_name), 'minutes')
        pagination_quantity = self.config.get(
            'pagination__{}_interval_quantity'.format(event_name), 10)

        unit = {pagination_unit: int(pagination_quantity)}

        end = increment_date(start, unit)

        while before_now(start):
            LOGGER.info("Fetching {} from {} to {}"
                        .format(event_name, start, end))

            search_filter = get_date_page('EventDate', start, unit)

            stream = request(event_name,
                             selector,
                             self.auth_stub,
                             search_filter,
                             batch_size=self.batch_size)

            catalog_copy = copy.deepcopy(self.catalog)

            for event in stream:
                event = self.filter_keys_and_parse(event)

                self.state = incorporate(self.state,
                                         event_name,
                                         'EventDate',
                                         event.get('EventDate'))

                if event.get('SubscriberKey') is None:
                    LOGGER.info(
                        "SubscriberKey is NULL so ignoring {} record "
                        "with SendID: {} and EventDate: {}".format(
                            event_name,
                            event.get('SendID'),
                            event.get('EventDate')))
                    continue

                self.write_records_with_transform(event, catalog_copy, table)

            self.state = incorporate(self.state,
                                     event_name,
                                     'EventDate',
                                     start)

            save_state(self.state)

            start = end
            end = increment_date(start, unit)
def sync_data(self):
    tap_stream_id = self.catalog.get('tap_stream_id')
    table = self.catalog.get('stream')
    (_, customer_key) = tap_stream_id.split('.', 1)

    keys = self.get_catalog_keys()
    keys.remove('CategoryID')

    replication_key = None

    start = get_last_record_value_for_table(self.state, table)

    if start is None:
        start = self.config.get('start_date')

    for key in ['ModifiedDate', 'JoinDate']:
        if key in keys:
            replication_key = key

    pagination_unit = self.config.get(
        'pagination__data_extension_interval_unit', 'days')
    pagination_quantity = self.config.get(
        'pagination__data_extension_interval_quantity', 7)

    unit = {pagination_unit: int(pagination_quantity)}

    end = increment_date(start, unit)

    parent_result = None
    parent_extension = None
    parent_result = request(
        'DataExtension',
        FuelSDK.ET_DataExtension,
        self.auth_stub,
        search_filter={
            'Property': 'CustomerKey',
            'SimpleOperator': 'equals',
            'Value': customer_key,
        },
        props=['CustomerKey', 'CategoryID'])

    parent_extension = next(parent_result)
    parent_category_id = parent_extension.CategoryID

    # With no replication key, do a single full pull and return; otherwise
    # page through date windows until the present.
    while before_now(start) or replication_key is None:
        self._replicate(customer_key,
                        keys,
                        parent_category_id,
                        table,
                        partial=(replication_key is not None),
                        start=start,
                        end=end,
                        unit=unit,
                        replication_key=replication_key)

        if replication_key is None:
            return

        self.state = incorporate(self.state,
                                 table,
                                 replication_key,
                                 start)

        save_state(self.state)

        start = end
        end = increment_date(start, unit)
def do_sync(args): LOGGER.info("Starting sync.") config = args.config state = args.state catalog = args.properties success = True auth_stub = get_auth_stub(config) stream_accessors = [] subscriber_selected = False subscriber_catalog = None list_subscriber_selected = False for stream_catalog in catalog.get('streams'): stream_accessor = None if not _is_selected(stream_catalog): LOGGER.info("'{}' is not marked selected, skipping." .format(stream_catalog.get('stream'))) continue # for 'subscriber' stream if it is selected, add values for 'subscriber_catalog' and # 'subscriber_selected', and it will replicated via 'list_subscribers' stream # The 'subscribers' stream is the child stream of 'list_subscribers' # When we sync 'list_subscribers', it makes the list of subscriber's # 'SubscriberKey' that were returned as part of 'list_subscribers' records # and pass that list to 'subscribers' stream and thus 'subscribers' stream # will only sync records of subscribers that are present in the list. # Hence, for different start dates the 'SubscriberKey' list will differ and # thus 'subscribers' records will also be different for different start dates. if SubscriberDataAccessObject.matches_catalog(stream_catalog): subscriber_selected = True subscriber_catalog = stream_catalog LOGGER.info("'subscriber' selected, will replicate via " "'list_subscriber'") continue if ListSubscriberDataAccessObject.matches_catalog(stream_catalog): list_subscriber_selected = True for available_stream_accessor in AVAILABLE_STREAM_ACCESSORS: if available_stream_accessor.matches_catalog(stream_catalog): stream_accessors.append(available_stream_accessor( config, state, auth_stub, stream_catalog)) break # do not replicate 'subscriber' stream without selecting 'list_subscriber' stream if subscriber_selected and not list_subscriber_selected: LOGGER.fatal('Cannot replicate `subscriber` without ' '`list_subscriber`. Please select `list_subscriber` ' 'and try again.') sys.exit(1) for stream_accessor in stream_accessors: if isinstance(stream_accessor, ListSubscriberDataAccessObject) and \ subscriber_selected: stream_accessor.replicate_subscriber = True stream_accessor.subscriber_catalog = subscriber_catalog try: stream_accessor.state = state stream_accessor.sync() state = stream_accessor.state except Exception as e: LOGGER.exception(e) LOGGER.error('Failed to sync endpoint, moving on!') success = False save_state(state) return success