def after_refresh_complete(tiid, failure_message=None):
    # Post-refresh hook for a single product: persist its refreshed state,
    # then (after a randomized delay) check whether the whole profile has
    # finished refreshing and, if so, run the profile-level finishing steps
    # (dedupe, tweet parsing, status transitions).
    #
    # tiid: product id used as the Product primary key.
    # failure_message: optional error string forwarded to set_refresh_status.
    # Returns None in every path.

    # logger.info(u"here in after_refresh_complete with {tiid}".format(
    #     tiid=tiid))
    product = Product.query.get(tiid)
    if not product:
        # Product row is gone (deleted mid-refresh?) — nothing to finalize.
        logger.warning(u"Empty product in after_refresh_complete for tiid {tiid}".format(
            tiid=tiid))
        return None

    product.embed_markup = product.get_embed_markup()
    product.set_refresh_status(myredis, failure_message)  # need commit after this
    db.session.merge(product)
    commit(db)

    # Randomized 0-10s sleep before the "is the whole profile done?" check.
    # NOTE(review): presumably jitter so concurrent workers finishing at the
    # same time don't all see "done" and run the finishing steps — confirm.
    sleep_seconds = random.random() * 10
    logger.info(u"Sleeping {sleep_seconds}s in after_refresh_complete for {tiid} before checking done refreshes".format(
        sleep_seconds=sleep_seconds, tiid=tiid))
    time.sleep(sleep_seconds)

    profile = Profile.query.get(product.profile_id)
    if not profile:
        print "\n\n-------> no profile after done all refreshes?!?", product.profile_id, "\n\n\n---------------\n\n\n"
        return None

    refresh_status = profile.get_refresh_status()
    # Only the worker that observes the done + "progress bar" combination
    # runs the profile-level finishing sequence below.
    if refresh_status.is_done_refreshing and refresh_status.refresh_state == "progress bar":
        print "\n\n-------> done all refreshes", product.profile_id, "\n\n\n---------------\n\n\n"
        logger.info(u"just_finished_profile_refresh for {tiid}, now deduping etc".format(
            tiid=tiid))
        # State machine: progress bar -> CRUNCHING -> ALL_DONE.
        save_profile_refresh_status(profile, RefreshStatus.states["CRUNCHING"])
        logger.info(u"deduplicating for {url_slug}".format(
            url_slug=profile.url_slug))
        deleted_tiids = profile.remove_duplicates()
        logger.info(u"parse_and_save_tweets for {url_slug}".format(
            url_slug=profile.url_slug))
        profile.parse_and_save_tweets()
        save_profile_refresh_status(profile, RefreshStatus.states["ALL_DONE"])
def get_new_products(self, provider_name, product_seeds, analytics_credentials=None, add_even_if_removed=False):
    """Import products for this profile from one provider.

    Flips the profile into the PROGRESS_BAR refresh state, then asks
    import_and_create_products to create products from product_seeds,
    excluding tiids the profile already has.

    provider_name: name of the import provider.
    product_seeds: provider-specific seed values to import from.
    analytics_credentials: optional dict of credentials forwarded to the
        importer; defaults to an empty dict.
    add_even_if_removed: when True, exclude only the currently-live tiids,
        so previously removed products may be re-imported.

    Returns the list of newly created products, or [] when the import
    raised ImportError or ProviderError.
    """
    # Fix: mutable default argument ({}) was shared across calls and handed
    # to an external function; use a None sentinel instead.
    if analytics_credentials is None:
        analytics_credentials = {}

    save_profile_refresh_status(self, RefreshStatus.states["PROGRESS_BAR"])

    if add_even_if_removed:
        tiids_to_exclude = self.tiids
    else:
        tiids_to_exclude = self.tiids_including_removed  # don't re-import dup or removed products

    try:
        new_products = import_and_create_products(
            self.id,
            provider_name,
            product_seeds,
            analytics_credentials,
            tiids_to_exclude)
    except (ImportError, ProviderError):
        # Best-effort import: a failed provider yields no new products.
        new_products = []

    return new_products
def done_all_refreshes(profile_id): print "\n\n-------> done all refreshes", profile_id, "\n\n\n---------------\n\n\n" profile = Profile.query.get(profile_id) save_profile_refresh_status(profile, RefreshStatus.states["CRUNCHING"]) logger.info(u"deduplicating for {url_slug}".format( url_slug=profile.url_slug)) deleted_tiids = profile.remove_duplicates() logger.info(u"parse_and_save_tweets for {url_slug}".format( url_slug=profile.url_slug)) profile.parse_and_save_tweets() save_profile_refresh_status(profile, RefreshStatus.states["ALL_DONE"]) return
def refresh_products_from_tiids(profile_id, tiids, analytics_credentials=None, source="webapp"):
    """Kick off a metrics refresh for the given product tiids.

    Marks the owning profile as PROGRESS_BAR, stamps each product's
    last-refresh-start time, and enqueues the update.

    profile_id: profile owning the products; when falsy it is derived from
        the first product in tiids.
    tiids: list of product ids to refresh.
    analytics_credentials: accepted for interface compatibility; not used
        by this function.
    source: "scheduled" refreshes run at low priority, anything else high.

    Returns the original tiids list, or None when tiids is empty.
    """
    if not tiids:
        # Fix: this guard used to sit below the profile lookup, so an empty
        # tiids list with a falsy profile_id raised IndexError on tiids[0],
        # and with a real profile_id it left the profile stuck in
        # PROGRESS_BAR with no update ever enqueued.
        return None

    # assume the profile is the same one as the first product
    if not profile_id:
        temp_profile = Product.query.get(tiids[0])
        profile_id = temp_profile.profile_id

    from totalimpactwebapp.profile import Profile
    profile = Profile.query.get(profile_id)
    save_profile_refresh_status(profile, RefreshStatus.states["PROGRESS_BAR"])

    priority = "high"
    if source == "scheduled":
        priority = "low"

    products = Product.query.filter(Product.tiid.in_(tiids)).all()

    tiids_to_update = []
    for product in products:
        # Fix: bind tiid before the try block so the except handler can't hit
        # an unbound (or stale, from a prior iteration) name.
        tiid = getattr(product, "tiid", None)
        try:
            product.set_last_refresh_start()
            db.session.merge(product)
            tiids_to_update += [tiid]
        except AttributeError:
            logger.debug(u"couldn't find tiid {tiid} so not refreshing its metrics".format(
                tiid=tiid))

    db.session.commit()

    start_product_update(profile_id, tiids_to_update, priority)

    return tiids