def on_task_filter(self, task, config):
    config = self.prepare_config(config)
    if not config or not config['target']:
        return

    identified_by = '{{ id }}' if config['identified_by'] == 'auto' else config['identified_by']

    grouped_entries = group_entries(task.accepted + task.undecided, identified_by)
    if not grouped_entries:
        return

    with Session() as session:
        # Prefetch all tracked identifiers in a single query
        existing_ids = (
            session.query(EntryUpgrade)
            .filter(EntryUpgrade.id.in_(grouped_entries.keys()))
            .all()
        )
        existing_ids = {e.id: e for e in existing_ids}

        for identifier, entries in grouped_entries.items():
            if not entries:
                continue

            existing = existing_ids.get(identifier)
            if not existing:
                # No tracked quality for this identifier yet, nothing to upgrade against
                continue

            log.debug(
                'Looking for upgrades for identifier %s (within %s entries)',
                identifier,
                len(entries),
            )

            # Check if the allowed upgrade timeframe has passed
            if config['timeframe']:
                expires = existing.first_seen + parse_timedelta(config['timeframe'])
                if expires <= datetime.now():
                    # Timeframe reached, stop upgrading this identifier
                    log.debug('Skipping upgrade with identifier %s as timeframe reached', identifier)
                    continue

            # Filter out lower quality and propers
            action_on_lower = (
                entry_actions[config['on_lower']] if config['on_lower'] != 'do_nothing' else None
            )
            upgradeable = self.filter_entries(entries, existing, config['target'], action_on_lower)

            # Skip if we have no entries after filtering
            if not upgradeable:
                continue

            # Sort entities in order of quality and best proper
            upgradeable.sort(key=lambda e: (e['quality'], e.get('proper_count', 0)), reverse=True)

            # First entry will be the best quality
            best = upgradeable.pop(0)
            best.accept('upgraded quality')
            log.debug('Found %s as upgraded quality for identifier %s', best['title'], identifier)

            # Process the rest
            for entry in upgradeable:
                log.debug('Skipping %s as lower quality than best %s', entry['title'], best['title'])
                if action_on_lower:
                    action_on_lower(entry, 'lower quality than best match')
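# The filter step above calls a `filter_entries` helper that is not shown in this section.
# Below is a minimal sketch of what such a helper could look like: the signature matches the
# call site, but the body is an assumption rather than the plugin's actual implementation.
# It assumes entry['quality'] is a comparable Quality object and reuses the same `qualities`
# utilities referenced elsewhere in this section.
def filter_entries(self, entries, existing, target, action_on_lower):
    """Keep only entries that improve on the tracked `existing` quality."""
    target_requirement = qualities.Requirements(target) if target else None
    upgradeable = []
    for entry in entries:
        # Anything not strictly better than what was already downloaded counts as "lower"
        if entry['quality'] < existing.quality or (
            entry['quality'] == existing.quality
            and entry.get('proper_count', 0) <= existing.proper_count
        ):
            if action_on_lower:
                action_on_lower(entry, 'not an upgrade over tracked quality')
            continue
        # Ignore entries outside the configured target requirement, if one is set
        if target_requirement and not target_requirement.allows(entry['quality']):
            continue
        upgradeable.append(entry)
    return upgradeable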
def on_task_learn(self, task, config):
    config = self.prepare_config(config)
    if not config or not config['tracking']:
        return

    identified_by = '{{ id }}' if config['identified_by'] == 'auto' else config['identified_by']

    grouped_entries = group_entries(task.accepted, identified_by)
    if not grouped_entries:
        return

    with Session() as session:
        # Prefetch all tracked identifiers in a single query
        existing_ids = (
            session.query(EntryUpgrade)
            .filter(EntryUpgrade.id.in_(grouped_entries.keys()))
            .all()
        )
        existing_ids = {e.id: e for e in existing_ids}

        for identifier, entries in grouped_entries.items():
            if not entries:
                continue

            # Sort entities in order of quality
            entries.sort(key=lambda e: e['quality'], reverse=True)
            # First entry will be the best quality
            best_entry = entries[0]

            existing = existing_ids.get(identifier)
            if not existing:
                existing = EntryUpgrade()
                existing.id = identifier
                session.add(existing)
            elif existing.quality > best_entry['quality']:
                continue

            existing.quality = best_entry['quality']
            existing.title = best_entry['title']
            existing.proper_count = best_entry.get('proper_count', 0)
            existing.updated = datetime.now()

            log.debug(
                'Tracking upgrade on identifier `%s` current quality `%s`',
                identifier,
                best_entry['quality'],
            )
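# Both hooks above (and the timeframe hooks below) bucket entries with a `group_entries`
# helper that is defined outside this section. A rough sketch of such a grouping helper,
# assuming FlexGet's Entry.render() template API, is shown here for illustration; it may
# differ from the real utility.
from collections import defaultdict


def group_entries(entries, identified_by):
    """Group entries into {identifier: [entries]} by rendering the identifier template."""
    grouped_entries = defaultdict(list)
    for entry in entries:
        try:
            identifier = entry.render(identified_by)
        except Exception:
            # Entries whose identifier template cannot be rendered are skipped
            continue
        if not identifier:
            continue
        grouped_entries[identifier.lower()].append(entry)
    return grouped_entries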
def on_task_learn(self, task, config):
    if not config:
        return

    identified_by = (
        '{{ media_id }}' if config['identified_by'] == 'auto' else config['identified_by']
    )

    grouped_entries = group_entries(task.accepted, identified_by)
    if not grouped_entries:
        return

    with Session() as session:
        # Prefetch all tracked identifiers in a single query
        existing_ids = (
            session.query(EntryTimeFrame)
            .filter(EntryTimeFrame.id.in_(grouped_entries.keys()))
            .all()
        )
        existing_ids = {e.id: e for e in existing_ids}

        for identifier, entries in grouped_entries.items():
            if not entries:
                continue

            id_timeframe = existing_ids.get(identifier)
            if not id_timeframe:
                continue

            # Sort entities in order of quality
            entries.sort(key=lambda e: e['quality'], reverse=True)
            # First entry will be the best quality
            best_entry = entries[0]

            id_timeframe.quality = best_entry['quality']
            id_timeframe.title = best_entry['title']
            id_timeframe.proper_count = best_entry.get('proper_count', 0)
            id_timeframe.status = 'accepted'
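# The EntryTimeFrame rows read and written above come from a declarative SQLAlchemy model
# defined elsewhere in the plugin. A simplified sketch covering only the columns this
# section touches (id, title, quality, proper_count, status, first_seen) follows; the
# base class, table name and column types are assumptions, not the plugin's actual schema.
from datetime import datetime

from sqlalchemy import Column, DateTime, Integer, String, Unicode
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class EntryTimeFrame(Base):
    __tablename__ = 'timeframe'

    id = Column(Unicode, primary_key=True, index=True)
    title = Column(Unicode)
    quality = Column(String)             # the real model may map this string to a Quality object
    proper_count = Column(Integer, default=0)
    status = Column(Unicode)             # 'waiting' until accepted, then 'accepted'
    first_seen = Column(DateTime, default=datetime.now)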
def on_task_filter(self, task, config):
    if not config:
        return

    identified_by = '{{ id }}' if config['identified_by'] == 'auto' else config['identified_by']

    grouped_entries = group_entries(task.accepted + task.undecided, identified_by)
    if not grouped_entries:
        return

    action_on_waiting = (
        entry_actions[config['on_waiting']] if config['on_waiting'] != 'do_nothing' else None
    )
    action_on_reached = (
        entry_actions[config['on_reached']] if config['on_reached'] != 'do_nothing' else None
    )

    with Session() as session:
        # Prefetch all tracked identifiers in a single query
        existing_ids = (
            session.query(EntryTimeFrame)
            .filter(EntryTimeFrame.id.in_(grouped_entries.keys()))
            .all()
        )
        existing_ids = {e.id: e for e in existing_ids}

        for identifier, entries in grouped_entries.items():
            if not entries:
                continue

            id_timeframe = existing_ids.get(identifier)
            if not id_timeframe:
                id_timeframe = EntryTimeFrame()
                id_timeframe.id = identifier
                id_timeframe.status = 'waiting'
                id_timeframe.first_seen = datetime.now()
                session.add(id_timeframe)

            if id_timeframe.status == 'accepted':
                log.debug('Previously accepted %s with %s, skipping', identifier, id_timeframe.title)
                continue

            # Sort entities in order of quality and best proper
            entries.sort(key=lambda e: (e['quality'], e.get('proper_count', 0)), reverse=True)
            best_entry = entries[0]

            log.debug('Current best for identifier %s is %s', identifier, best_entry['title'])

            id_timeframe.title = best_entry['title']
            id_timeframe.quality = best_entry['quality']
            id_timeframe.proper_count = best_entry.get('proper_count', 0)

            # Check if we hit the target quality or better
            target_requirement = qualities.Requirements(config['target'])
            target_quality = qualities.Quality(config['target'])
            if (
                target_requirement.allows(best_entry['quality'])
                or best_entry['quality'] >= target_quality
            ):
                log.debug(
                    'timeframe reached target quality %s or higher for %s',
                    target_quality,
                    identifier,
                )
                if action_on_reached:
                    action_on_reached(best_entry, 'timeframe reached target quality or higher')
                continue

            # Check if the wait time has passed
            expires = id_timeframe.first_seen + parse_timedelta(config['wait'])
            if expires <= datetime.now():
                log.debug('timeframe expired, releasing quality restriction for %s', identifier)
                if action_on_reached:
                    action_on_reached(best_entry, 'timeframe wait expired')
                continue

            # Verbose waiting, add to backlog
            if action_on_waiting:
                for entry in entries:
                    action_on_waiting(entry, 'timeframe waiting')

            diff = expires - datetime.now()
            hours, remainder = divmod(diff.seconds, 3600)
            hours += diff.days * 24
            minutes, _ = divmod(remainder, 60)

            log.info(
                '`%s`: timeframe waiting for %02dh:%02dmin. Currently best is `%s`.',
                identifier,
                hours,
                minutes,
                best_entry['title'],
            )

            # add best entry to backlog (backlog is able to handle duplicate adds)
            if self.backlog:
                self.backlog.instance.add_backlog(task, best_entry, session=session)
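# The on_waiting / on_reached / on_lower options are resolved through an `entry_actions`
# mapping that is defined outside this section. A plausible definition, assuming FlexGet's
# Entry.accept / Entry.reject / Entry.fail methods, is sketched here for reference:
from flexget.entry import Entry

entry_actions = {
    'accept': Entry.accept,
    'reject': Entry.reject,
    'fail': Entry.fail,
}
# Each value is an unbound method, so action_on_waiting(entry, 'timeframe waiting') behaves
# like entry.reject('timeframe waiting') when on_waiting is set to 'reject'.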