def execute_feed(feed, dao, print_field, msg='Processing'):
  """Executes a specific feed.

  Iterates the feed rows, processing each one through the given DAO. A failure
  on one row is logged and flags the run as not clean, but does not stop the
  remaining rows from being processed.

  Args:
    feed: Feed object representing the Bulkdozer feed to process.
    dao: The data access object to be used to interact with the CM API and
      update, must match the entity being updated in CM, in the sense that the
      required fields to fetch, create, and update the entity in CM must be
      included in the feed.
    print_field: Field that identifies the item, used to print status messages
      to the Log tab of the Bulkdozer feed.
    msg: Prefix message to use when writing to the Log tab of the Bulkdozer
      feed, for instance we display Processing Campaign for campaign, and
      Uploading Asset for assets.
  """
  global clean_run
  try:
    dao.pre_fetch(feed.feed)

    for feed_item in feed.feed:
      try:
        value = str(feed_item[print_field])
        # Fixed: Python 2 `print` statements are syntax errors under Python 3,
        # and interpolating value.encode('utf-8') into a str would render the
        # message as "b'...'" in Python 3 — value is already text.
        print('%s %s' % (msg, value))
        logger.log('%s %s' % (msg, value))
        dao.process(feed_item)
      except Exception as error:
        # Mark the run dirty but keep processing the remaining rows.
        clean_run = False
        stack = traceback.format_exc()
        print(stack)
        logger.log(str(error))
  finally:
    # Always write results back to the sheet, even after a row-level failure.
    feed.update()
def _retry(self, job, retries=6, wait=2):
  """Handles required logic to ensure robust interactions with the CM API.

  Analyzes errors to determine if retries are appropriate, performs retries,
  and exponential backoff.

  Args:
    job: The API function to execute.
    retries: Optional, defaults to 6. The number of retries before failing.
    wait: Optional, defaults to 2. The number of seconds to wait between
      retries. This number is doubled at each retry (a.k.a. exponential
      backoff).
  """
  try:
    data = job.execute()
    return data
  # Fixed: `except http.HttpError, e:` is Python 2-only syntax and a syntax
  # error under Python 3.
  except http.HttpError as e:
    stack = traceback.format_exc()
    print(stack)

    msg = str(e)
    match = re.search(r'"(.*)"', msg)

    # Rate-limit (403/429) and transient server (500/503) errors are worth
    # retrying with exponential backoff.
    if e.resp.status in [403, 429, 500, 503]:
      if retries > 0:
        time.sleep(wait)
        return self._retry(job, retries - 1, wait * 2)

    # Retries exhausted or non-retryable error: surface the quoted API
    # message if one was found, otherwise log and re-raise the original.
    if match:
      raise Exception('ERROR: %s' % match.group(0))
    else:
      logger.log(msg)

    raise
def traffic():
  """Main function of Bulkdozer, performs the Bulkdozer job.

  NOTE(review): this file contains a second, later definition of traffic()
  which shadows this one at import time — confirm which version is intended
  to remain.

  Raises:
    Exception: If any errors happened during the run (clean_run is False).
  """
  global clean_run
  if project.verbose:
    # Fixed: Python 2 `print` statements are syntax errors under Python 3.
    print('traffic')
  try:
    setup()

    # Only run when the sheet's execution mode asks for it; ONCE flips the
    # mode to OFF so the next invocation is a no-op.
    if config.mode in ['ALWAYS', 'ONCE']:
      try:
        logger.clear()
        logger.log('Bulkdozer traffic job starting')
        logger.log('Execution config is %s' % config.mode)
        logger.flush()

        if config.mode == 'ONCE':
          config.mode = 'OFF'
          config.update()

        init_daos()

        # Process entity types in dependency order.
        assets()
        landing_pages()
        campaigns()
        event_tags()
        placement_groups()
        placements()
        creatives()
        ads()
        dynamic_targeting_keys()

        # Only discard the id map when everything succeeded.
        if clean_run:
          store.clear()
      finally:
        logger.log('Bulkdozer traffic job ended')
        logger.flush()
        store.save_id_map()
  except Exception as error:
    stack = traceback.format_exc()
    print(stack)
    logger.log(str(error))
    logger.flush()

  if clean_run:
    print('Done: Clean run.')
  else:
    raise Exception("Done: Errors happened.")
def traffic():
  """Main function of Bulkdozer, performs the Bulkdozer job.

  Runs setup, then each entity handler in dependency order. Errors are
  caught, logged to the sheet, and surfaced as a final exception so the
  caller knows the run was not clean.

  Raises:
    Exception: If errors happened with any of the processed assets.
  """
  global clean_run

  if project.verbose:
    print('traffic')

  try:
    setup()

    logger.clear()
    logger.log('Bulkdozer traffic job starting')
    logger.flush()

    init_daos()

    # Dispatch each entity handler in dependency order.
    for handler in (assets, landing_pages, campaigns, event_tags,
                    placement_groups, placements, creatives, ads,
                    dynamic_targeting_keys):
      handler()

    # Intentionally disabled — left for reference:
    #if clean_run:
    #  store.clear()
  except Exception as error:
    print(traceback.format_exc())
    logger.log(str(error))
  finally:
    logger.log('Bulkdozer traffic job ended')
    logger.flush()
    # Intentionally disabled — left for reference:
    #store.save_id_map()

  if clean_run:
    print('Done: Clean run.')
  else:
    raise Exception(
        "Done: Errors happened with some of the assets, check your sheet log."
    )