def execute(self):
    """ Executes all added actions in bulk """
    obj_funs = map(self._get_obj_fun, self.actions)
    udb = get_userdata_database()
    bulk_save_retry(obj_funs, udb)

    # prepare for another run
    self.actions = []
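
# For reference: every caller in this file hands (obj, fun) pairs to
# bulk_save_retry(), whose definition lives elsewhere. The sketch below is an
# ASSUMPTION about its contract, inferred from the call sites, not the actual
# implementation: fun(obj) returns the updated document (or None to skip the
# save), the documents are written in one bulk request, and documents that hit
# an update conflict are reloaded and retried. The couchdbkit calls
# (save_docs, open_doc, BulkSaveError) assume a couchdbkit database object.
from couchdbkit.exceptions import BulkSaveError

def bulk_save_retry_sketch(obj_funs, db):
    obj_funs = list(obj_funs)

    while obj_funs:
        # apply each update function; None means "no change, skip the save"
        docs = []
        funs_by_id = {}
        for obj, fun in obj_funs:
            doc = fun(obj)
            if doc is not None:
                docs.append(doc)
                funs_by_id[doc['_id']] = fun

        if not docs:
            return

        try:
            db.save_docs(docs)
            return

        except BulkSaveError as ex:
            # reload each conflicting document and re-apply its function
            obj_funs = [(db.open_doc(err['id']), funs_by_id[err['id']])
                        for err in ex.errors]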
def update_episodes(user, actions, now, ua_string):
    update_urls = []

    grouped_actions = defaultdict(list)

    # group all actions by their episode
    for action in actions:

        podcast_url = action['podcast']
        podcast_url = sanitize_append(podcast_url, update_urls)
        if podcast_url == '':
            continue

        episode_url = action['episode']
        episode_url = sanitize_append(episode_url, update_urls)
        if episode_url == '':
            continue

        act = parse_episode_action(action, user, update_urls, now, ua_string)
        grouped_actions[(podcast_url, episode_url)].append(act)

    auto_flattr_episodes = []

    # Prepare the updates for each episode state
    obj_funs = []

    for (p_url, e_url), action_list in grouped_actions.iteritems():
        episode_state = episode_state_for_ref_urls(user, p_url, e_url)

        # check only this episode's actions -- checking the whole ``actions``
        # list would auto-flattr every episode in the request as soon as any
        # one of them was played
        if any(a['action'] == 'play' for a in action_list):
            auto_flattr_episodes.append(episode_state.episode)

        fun = partial(update_episode_actions, action_list=action_list)
        obj_funs.append((episode_state, fun))

    udb = get_userdata_database()
    bulk_save_retry(obj_funs, udb)

    if user.get_wksetting(FLATTR_AUTO):
        for episode_id in auto_flattr_episodes:
            auto_flattr_episode.delay(user, episode_id)

    return update_urls
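
# Example call to update_episodes(), assuming the episode-action payload
# format the grouping loop above expects (dicts with 'podcast', 'episode'
# and 'action' keys). The URLs and action names are placeholders; the call
# itself is commented out because the user object comes from the request
# context. update_urls presumably collects the URLs that sanitize_append()
# rewrote, so the client can update its own subscription data.
from datetime import datetime

example_actions = [
    {'podcast': 'http://example.com/feed.xml',
     'episode': 'http://example.com/episode-1.mp3',
     'action': 'play'},
    {'podcast': 'http://example.com/feed.xml',
     'episode': 'http://example.com/episode-2.mp3',
     'action': 'download'},
]

# update_urls = update_episodes(user, example_actions,
#                               datetime.utcnow(), 'my-client/1.0')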
def _update_episodes(self, podcast, parsed_episodes):

    pid = podcast.get_id()

    # materialize first: we need len() below, and islice() would
    # consume a generator
    parsed_episodes = list(parsed_episodes)

    # list of (obj, fun) where fun is the function to update obj
    changes = []

    episodes_to_update = list(islice(parsed_episodes, 0, MAX_EPISODES_UPDATE))
    logger.info('Parsed %d episodes, updating %d', len(parsed_episodes),
                len(episodes_to_update))

    for n, parsed in enumerate(episodes_to_update, 1):

        url = get_episode_url(parsed)
        if not url:
            logger.info('Skipping episode %d for missing URL', n)
            continue

        logger.info('Updating episode %d / %d', n, len(episodes_to_update))
        episode = episode_for_podcast_id_url(pid, url, create=True)

        update_episode = get_episode_update_function(parsed, episode, podcast)
        changes.append((episode, update_episode))

    # determine which episodes have been found
    updated_episodes = [e for (e, f) in changes]
    logger.info('Updating %d episodes with new data', len(updated_episodes))

    # and mark the remaining ones outdated
    current_episodes = set(episodes_for_podcast_current(podcast, limit=500))
    outdated_episodes = current_episodes - set(updated_episodes)
    logger.info('Marking %d episodes as outdated', len(outdated_episodes))
    changes.extend((e, mark_outdated) for e in outdated_episodes)

    logger.info('Saving %d changes', len(changes))
    bulk_save_retry(changes, self.db)

    return updated_episodes
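
# mark_outdated() is defined elsewhere; the (episode, fun) pairs above follow
# the same convention as bulk_save_retry, i.e. each fun returns the modified
# document or None when there is nothing to save. A minimal sketch under that
# assumption (the real implementation may touch more fields):
def mark_outdated_sketch(episode):
    # skip the bulk save entirely if the flag is already set
    if getattr(episode, 'outdated', False):
        return None
    episode.outdated = True
    return episode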
def handle(self, *args, **options):

    skip = options.get('skip')
    total = episode_states_count()
    actions = Counter()
    actions['merged'] = 0

    for n in count(skip):
        first = get_nth_episode_state(n)
        if first is None:
            break

        states = get_duplicate_episode_states(first.user, first.episode)

        l1 = len(states)

        # we don't want to delete this one
        states.remove(first)
        assert len(states) == l1 - 1

        if states:
            updater = get_updater(states)

            obj_funs = [(first, updater)] + \
                       [(state, do_delete) for state in states]

            udb = get_userdata_database()
            bulk_save_retry(obj_funs, udb)

            # every remaining entry in `states` is a duplicate that was
            # merged into `first` and deleted; counting len(states) - 1
            # would under-report by one per group
            merged = len(states)
            actions['merged'] += merged
            total -= merged

        status_str = ', '.join('%s: %d' % x for x in actions.items())
        progress(n + 1, total, status_str)
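
# get_updater() is defined elsewhere. Given the (obj, fun) convention used
# with bulk_save_retry above, it presumably returns a closure that folds the
# duplicate states into the surviving one. A hedged sketch of that idea --
# the attribute names (actions, add_actions) are assumptions, not the actual
# model API:
def get_updater_sketch(duplicate_states):
    def update(first):
        # copy every duplicate's actions onto the state we keep;
        # add_actions() is assumed to deduplicate and re-sort
        for dup in duplicate_states:
            first.add_actions(dup.actions)
        return first
    return update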