def append(self, result):
    """Validate, score and store a single search result.

    Only results confirmed by the 'searcher.correct_release' event (and not
    already seen) are scored, reported via self.found() and appended.
    """
    filled = self.fillResult(result)

    verdict = fire_event('searcher.correct_release', filled, self.media, self.quality,
                         imdb_results=self.kwargs.get('imdb_results', False), single=True)

    if verdict and filled['id'] not in self.result_ids:
        weight = float(verdict)

        # Plugin-calculated score, then scaled by how confident the match is
        filled['score'] += fire_event('score.calculate', filled, self.media, single=True)
        unscaled = filled['score']
        filled['score'] = int(unscaled * weight)

        log.info2('Found correct release with weight %.02f, old_score(%d) now scaled to score(%d)',
                  (weight, unscaled, filled['score']))

        self.found(filled)
        self.result_ids.append(result['id'])

        super(ResultList, self).append(filled)
def addMovies(self):
    """Collect movies from all automation providers and add new ones to the library.

    Each processed identifier is remembered via an Env prop so it is not
    re-added on later runs; every newly added movie gets a search queued.
    """
    wanted = fire_event('automation.get_movies', merge=True)
    added_ids = []

    for imdb_id in wanted:
        if self.shuttingDown():
            break

        prop_name = 'automation.added.%s' % imdb_id
        if not Env.prop(prop_name, default=False):
            new_movie = fire_event('movie.add', params={'identifier': imdb_id}, force_readd=False,
                                   search_after=False, update_after=True, single=True)
            if new_movie:
                added_ids.append(new_movie['_id'])

            # Marked as processed regardless of whether the add succeeded
            Env.prop(prop_name, True)

    # Trigger a search for every movie that was just added
    for new_id in added_ids:
        if self.shuttingDown():
            break

        media = fire_event('media.get', new_id, single=True)
        if media:
            fire_event('movie.searcher.single', media)

    return True
def forMedia(self, media_id):
    """Return all releases for a media item, best score first, honoring the
    user's preferred download method (nzb/torrent) when it isn't 'both'.
    """
    db = get_db()

    releases = []
    for rel in db.get_many('release', media_id):
        try:
            releases.append(db.get('id', rel.get('_id')))
        except RecordDeleted:
            pass
        except (ValueError, EOFError):
            fire_event('database.delete_corrupted', rel.get('_id'),
                       traceback_error=traceback.format_exc(0))

    releases.sort(key=lambda doc: doc.get('info', {}).get('score', 0), reverse=True)

    # Sort based on preferred search method (stable sort keeps score order within protocol)
    preference = self.conf('preferred_method', section='searcher')
    if preference != 'both':
        releases.sort(key=lambda doc: doc.get('info', {}).get('protocol', '')[:3],
                      reverse=(preference == 'torrent'))

    return releases or []
def containsOtherQuality(self, nzb, movie_year = None, preferred_quality = None):
    """Check whether a release matches qualities other than the preferred one.

    :param nzb: release dict with at least 'name' and optionally 'size' (MB).
    :param movie_year: release year of the wanted movie, used for the
        size-based fallback on older movies.
    :param preferred_quality: quality dict with 'identifier' and 'allow' keys.
    :return: False when only the preferred quality matched, otherwise a dict
        of the other detected quality identifiers.
    """
    if not preferred_quality:
        preferred_quality = {}

    found = {}

    # Try guessing via quality tags
    guess = fire_event('quality.guess', files=[nzb.get('name')], size=nzb.get('size', None), single=True)
    if guess:
        found[guess['identifier']] = True

    # Hack for older movies that don't contain quality tag
    name = nzb['name']
    size = nzb.get('size', 0)
    year_name = fire_event('scanner.name_year', name, single=True)
    if len(found) == 0 and movie_year < datetime.datetime.now().year - 3 and not year_name.get('year', None):
        if size > 20000:  # Assume bd50
            log.info('Quality was missing in name, assuming it\'s a BR-Disk based on the size: %s', size)
            found['bd50'] = True
        elif size > 3000:  # Assume dvdr
            log.info('Quality was missing in name, assuming it\'s a DVD-R based on the size: %s', size)
            found['dvdr'] = True
        else:  # Assume dvdrip
            log.info('Quality was missing in name, assuming it\'s a DVD-Rip based on the size: %s', size)
            found['dvdrip'] = True

    # Allow other qualities
    # FIX: was preferred_quality.get('allow'), which raises "NoneType is not
    # iterable" when the 'allow' key is missing; default to an empty list.
    for allowed in preferred_quality.get('allow') or []:
        if found.get(allowed):
            del found[allowed]

    # FIX: use .get('identifier') so an empty preferred_quality dict (the
    # default) no longer raises KeyError.
    if found.get(preferred_quality.get('identifier')) and len(found) == 1:
        return False

    return found
def setCrons():
    # Register the per-type "search all" job on the user-configured cron
    # schedule (day/hour/minute come from the settings).
    # NOTE(review): references `self` and `_type` without taking them as
    # parameters — this only works as a closure defined where both are in
    # scope (e.g. inside a per-media-type setup loop). The enclosing
    # definition is outside this view; confirm before moving this function.
    fire_event('schedule.cron', '%s.searcher.all' % _type, self.searchAll,
               day=self.conf('cron_day'),
               hour=self.conf('cron_hour'),
               minute=self.conf('cron_minute'))
def searchAllView(self):
    """Kick off an 'all' search for every registered media type and collect
    the per-type results keyed by media type.
    """
    return {media_type: fire_event('%s.searcher.all_view' % media_type)
            for media_type in fire_event('media.types')}
def search(self, q = '', types = None, **kwargs):
    """Free-text media search, optionally restricted to specific media types.

    When the query itself is an IMDB id, look up info directly instead of
    searching. Returns a merged dict: {'success': True, <type>: [results]}.
    """
    # Make sure types is the correct instance
    if isinstance(types, str):
        types = [types]
    elif isinstance(types, (list, tuple, set)):
        types = list(types)

    imdb_identifier = get_imdb(q)

    if types:
        # Search (or look up) each requested type individually
        result = {}
        for media_type in types:
            if imdb_identifier:
                result[media_type] = fire_event('%s.info' % media_type, identifier=imdb_identifier)
            else:
                result[media_type] = fire_event('%s.search' % media_type, q=q)
    elif imdb_identifier:
        info = fire_event('movie.info', identifier=imdb_identifier, merge=True)
        result = {info['type']: [info]}
    else:
        result = fire_event('info.search', q=q, merge=True)

    return merge_dictionaries({
        'success': True,
    }, result)
def withStatus(self, status, types=None, with_doc=True):
    """Yield media matching any of the given statuses.

    Yields full documents when with_doc is True (filtered by `types`),
    otherwise the raw media_status index entries.
    """
    db = get_db()

    if types and not isinstance(types, (list, tuple)):
        types = [types]
    statuses = list(status if isinstance(status, (list, tuple)) else [status])

    for current_status in statuses:
        for entry in db.get_many('media_status', current_status):
            if not with_doc:
                yield entry
                continue

            try:
                doc = db.get('id', entry['_id'])
                if types and doc.get('type') not in types:
                    continue
                yield doc
            except (RecordDeleted, RecordNotFound):
                log.debug('Record not found, skipping: %s', entry['_id'])
            except (ValueError, EOFError):
                # Unreadable record: schedule it for removal
                fire_event('database.delete_corrupted', entry.get('_id'),
                           traceback_error=traceback.format_exc(0))
def tryNextRelease(self, media_id, manual=False, force_download=False):
    """Ignore all snatched/done releases of a media item and retry its search.

    Returns True when a new search was started, False otherwise.
    """
    try:
        for release in fire_event('release.for_media', media_id, single=True):
            if release.get('status') in ['snatched', 'done']:
                fire_event('release.update_status', release.get('_id'), status='ignored')

        media = fire_event('media.get', media_id, single=True)
        if not media:
            return False

        log.info('Trying next release for: %s', get_title(media))
        self.single(media, manual=manual, force_download=force_download)
        return True
    except:
        log.error('Failed searching for next release: %s', traceback.format_exc())
        return False
def restatus(self, media_id, tag_recent=True, allowed_restatus=None):
    """Recompute a media item's status ('active'/'done') from its profile and releases.

    :param media_id: internal db id of the media document.
    :param tag_recent: also tag the media as 'recent' when the status is persisted.
    :param allowed_restatus: optional whitelist — only persist the change when
        the new status is contained in it.
    :return: the resulting status string, or None when the whole operation fails.
    """
    try:
        db = get_db()

        m = db.get('id', media_id)
        previous_status = m['status']

        log.debug('Changing status for %s', get_title(m))

        # No profile attached means there is nothing left to snatch
        if not m['profile_id']:
            m['status'] = 'done'
        else:
            m['status'] = 'active'

            try:
                profile = db.get('id', m['profile_id'])
                media_releases = fire_event('release.for_media', m['_id'], single=True)
                done_releases = [
                    release for release in media_releases
                    if release.get('status') == 'done'
                ]

                if done_releases:

                    # Check if we are finished with the media
                    for release in done_releases:
                        if fire_event('quality.isfinish', {
                            'identifier': release['quality'],
                            'is_3d': release.get('is_3d', False)
                        }, profile, timedelta(seconds=time.time() - release['last_edit']).days, single=True):
                            m['status'] = 'done'
                            break

                elif previous_status == 'done':
                    # No finished releases, but it was done before — keep it done
                    m['status'] = 'done'

            except RecordNotFound:
                # Profile lookup failed: fall back to whatever it was
                log.debug('Failed restatus, keeping previous: %s', traceback.format_exc())
                m['status'] = previous_status

        # Only update when status has changed
        if previous_status != m['status'] and (
                not allowed_restatus or m['status'] in allowed_restatus):
            db.update(m)

            # Tag media as recent
            if tag_recent:
                self.tag(media_id, 'recent', update_edited=True)

        return m['status']
    except:
        log.error('Failed restatus: %s', traceback.format_exc())
def notifyFront():
    # Push a '<type>.update' notification for the media to the frontend.
    # NOTE(review): closure — relies on `media_id` from the enclosing scope,
    # which is outside this view; confirm where it is defined before moving.
    try:
        media = fire_event('media.get', media_id, single=True)
        if media:
            event_name = '%s.update' % media.get('type')
            fire_event('notify.frontend', type=event_name, data=media)
    except:
        log.error('Failed creating onComplete: %s', traceback.format_exc())
def availableChars(self, types=None, status=None, release_status=None):
    """Return the distinct starting characters of media titles matching the filters.

    :param types: media type(s) to include (string or list).
    :param status: media status filter (string or list).
    :param release_status: release status filter (string or list).
    :return: list of first characters (at most 27: a-z plus the '#' bucket).
    """
    db = get_db()

    # Make a list from string
    if status and not isinstance(status, (list, tuple)):
        status = [status]
    if release_status and not isinstance(release_status, (list, tuple)):
        release_status = [release_status]
    if types and not isinstance(types, (list, tuple)):
        types = [types]

    # query media ids
    if types:
        all_media_ids = set()
        for media_type in types:
            all_media_ids = all_media_ids.union(
                set(x['_id'] for x in db.get_many('media_by_type', media_type)))
    else:
        all_media_ids = set(x['_id'] for x in db.all('media'))

    media_ids = all_media_ids
    filter_by = {}

    # Filter on movie status
    if status and len(status) > 0:
        filter_by['media_status'] = set(
            ms.get('_id')
            for ms in fire_event('media.with_status', status, with_doc=False, single=True))

    # Filter on release status
    # FIX: the loop variable used to shadow the `release_status` parameter,
    # clobbering it mid-function — harmless today but error-prone; renamed.
    if release_status and len(release_status) > 0:
        filter_by['release_status'] = set(
            rs.get('media_id')
            for rs in fire_event('release.with_status', release_status, with_doc=False, single=True))

    # Filter by combining ids
    for filter_key in filter_by:
        media_ids = [mid for mid in media_ids if mid in filter_by[filter_key]]

    chars = set()
    for entry in db.all('media_startswith'):
        if entry['_id'] in media_ids:
            chars.add(entry['key'])

        # 26 letters + the '#' bucket: nothing more can be found
        if len(chars) == 27:
            break

    return list(chars)
def suggestView(self, limit=6, **kwargs):
    """API view: return up to `limit` suggested movies for the dashboard.

    Optional comma-separated `movies`, `ignored` and `seen` id lists come from
    the request; otherwise they fall back to the active/done library and the
    'suggest_ignore'/'suggest_seen' props. Suggestions are cached.
    """
    if self.is_disabled():
        return {'success': True, 'movies': []}

    movies = split_string(kwargs.get('movies', ''))
    ignored = split_string(kwargs.get('ignored', ''))
    seen = split_string(kwargs.get('seen', ''))

    cached_suggestion = self.getCache('suggestion_cached')
    if cached_suggestion:
        suggestions = cached_suggestion
    else:
        # No explicit seed list: use every active/done movie in the library
        if not movies or len(movies) == 0:
            active_movies = fire_event('media.with_status', ['active', 'done'],
                                       types='movie', single=True)
            movies = [get_identifier(x) for x in active_movies]

        if not ignored or len(ignored) == 0:
            ignored = split_string(Env.prop('suggest_ignore', default=''))

        if not seen or len(seen) == 0:
            # NOTE(review): previously-seen movies are appended to `movies`
            # (extra seed titles) rather than used as a separate filter —
            # confirm this is intentional; `seen` itself is never read again.
            movies.extend(split_string(Env.prop('suggest_seen', default='')))

        suggestions = fire_event('movie.suggest', movies=movies, ignore=ignored, single=True)
        self.setCache('suggestion_cached', suggestions, timeout=6048000)  # Cache for 10 weeks

    medias = []
    for suggestion in suggestions[:int(limit)]:

        # Cache poster
        posters = suggestion.get('images', {}).get('poster', [])
        poster = [x for x in posters if 'tmdb' in x]
        posters = poster if len(poster) > 0 else posters  # prefer tmdb-hosted posters
        cached_poster = fire_event(
            'file.download', url=posters[0],
            single=True) if len(posters) > 0 else False
        files = {'image_poster': [cached_poster]} if cached_poster else {}

        medias.append({
            'status': 'suggested',
            'title': get_title(suggestion),
            'type': 'movie',
            'info': suggestion,
            'files': files,
            'identifiers': {
                'imdb': suggestion.get('imdb')
            }
        })

    return {'success': True, 'movies': medias}
def automationView(self, force_update=False, **kwargs):
    """API view: build the chart lists for the automation/"charts" page.

    Fetches all provider charts, removes entries the user ignored or already
    has in the library, and pre-caches one poster per remaining entry.

    :param force_update: accepted but not used in this body — presumably
        consumed by the chart providers via kwargs; confirm against the
        'automation.get_chart_list' handlers.
    """
    db = get_db()

    charts = fire_event('automation.get_chart_list', merge=True)
    ignored = split_string(Env.prop('charts_ignore', default=''))

    # Create a list the movie/list.js can use
    for chart in charts:
        medias = []
        for media in chart.get('list', []):

            identifier = media.get('imdb')
            if identifier in ignored:
                continue

            # Skip movies already in the library; any lookup failure is
            # deliberately treated as "not in library"
            try:
                try:
                    in_library = db.get('media', 'imdb-%s' % identifier)
                    if in_library:
                        continue
                except RecordNotFound:
                    pass
            except:
                pass

            # Cache poster
            posters = media.get('images', {}).get('poster', [])
            poster = [x for x in posters if 'tmdb' in x]
            posters = poster if len(poster) > 0 else posters  # prefer tmdb-hosted posters
            cached_poster = fire_event(
                'file.download', url=posters[0],
                single=True) if len(posters) > 0 else False
            files = {
                'image_poster': [cached_poster]
            } if cached_poster else {}

            medias.append({
                'status': 'chart',
                'title': get_title(media),
                'type': 'movie',
                'info': media,
                'files': files,
                'identifiers': {
                    'imdb': identifier
                }
            })

        chart['list'] = medias

    return {
        'success': True,
        'count': len(charts),
        'charts': charts,
        'ignored': ignored,
    }
def bookmark(self, host = None, **kwargs):
    """Render the userscript bookmarklet template, filling in the collected
    include/exclude URL patterns and the API host.
    """
    template_params = {
        'includes': fire_event('userscript.get_includes', merge=True),
        'excludes': fire_event('userscript.get_excludes', merge=True),
        'host': host,
    }
    return self.renderTemplate(__file__, 'bookmark.js_tmpl', **template_params)
def set_crons(self):
    """(Re)register the daily updater check; runs one immediate check when enabled."""
    fire_event('schedule.remove', 'updater.check', single=True)

    if not self.is_enabled():
        return

    fire_event('schedule.interval', 'updater.check', self.auto_update, hours=24)
    self.auto_update()  # Check after enabling
def searchAll(self, manual=False):
    """Search for all active movies, in random order, tracking progress in
    self.in_progress and notifying the frontend on start / duplicate start.
    """
    if self.in_progress:
        log.info('Search already in progress')
        fire_event('notify.frontend',
                   type='movie.searcher.already_started',
                   data=True,
                   message='Full search already in progress')
        return

    self.in_progress = True
    fire_event('notify.frontend',
               type='movie.searcher.started',
               data=True,
               message='Full search started')

    medias = [
        x['_id'] for x in fire_event('media.with_status', 'active', types='movie',
                                     with_doc=False, single=True)
    ]
    # Shuffle so a failing media doesn't always block the same tail of the list
    random.shuffle(medias)

    total = len(medias)
    self.in_progress = {
        'total': total,
        'to_go': total,
    }

    try:
        search_protocols = fire_event('searcher.protocols', single=True)

        for media_id in medias:

            media = fire_event('media.get', media_id, single=True)
            # NOTE(review): when the media lookup fails, 'to_go' is not
            # decremented for it — progress can end above zero; confirm.
            if not media:
                continue

            try:
                self.single(media, search_protocols, manual=manual)
            except IndexError:
                log.error(
                    'Forcing library update for %s, if you see this often, please report: %s',
                    (get_identifier(media), traceback.format_exc()))
                fire_event('movie.update', media_id)
            except:
                log.error('Search failed for %s: %s',
                          (get_identifier(media), traceback.format_exc()))

            self.in_progress['to_go'] -= 1

            # Break if CP wants to shut down
            if self.shuttingDown():
                break

    except SearchSetupError:
        pass

    self.in_progress = False
def __init__(self):
    """Wire up the library manager: scheduled scans, events and API views."""
    # Periodic full library scan every 2 hours
    fire_event('scheduler.interval',
               identifier='manage.update_library',
               handle=self.updateLibrary,
               hours=2)

    add_event('manage.update', self.updateLibrary)
    add_event('manage.diskspace', self.getDiskSpace)

    # Add files after renaming
    def after_rename(message=None, group=None):
        # Feed freshly renamed files straight into the library scanner
        if not group:
            group = {}
        return self.scanFilesToLibrary(
            folder=group['destination_dir'],
            files=group['renamed_files'],
            release_download=group['release_download'])
    add_event('renamer.after', after_rename, priority=110)

    addApiView(
        'manage.update',
        self.updateLibraryView,
        docs={
            'desc': 'Update the library by scanning for new movies',
            'params': {
                'full': {
                    'desc': 'Do a full update or just recently changed/added movies.'
                },
            }
        })

    addApiView('manage.progress',
               self.getProgress,
               docs={
                   'desc': 'Get the progress of current manage update',
                   'return': {
                       'type': 'object',
                       'example': """{
    'progress': False || object, total & to_go,
}"""
                   },
               })

    # Scan on startup unless running in dev mode or disabled by config
    if not Env.get('dev') and self.conf('startup_scan'):
        add_event('app.load', self.updateLibraryQuick)

    add_event('app.load', self.setCrons)

    # Enable / disable interval
    add_event('setting.save.manage.library_refresh_interval.after', self.setCrons)
def namePositionScore(nzb_name, movie_name):
    """Score how well a release name is structured: movie name first, year
    right after it, and little junk between the year and the quality tag.

    :param nzb_name: raw release name.
    :param movie_name: expected movie title.
    :return: integer score (can be negative).

    FIX: regex patterns are now raw strings (r'\\W+'); the old plain strings
    rely on '\\W' not being a recognized escape, which raises
    DeprecationWarning/SyntaxWarning on modern Python. Behavior unchanged.
    """
    score = 0

    nzb_words = re.split(r'\W+', simplify_string(nzb_name))
    qualities = fire_event('quality.all', single=True)

    # Prefer the quoted part of the name when one exists
    try:
        nzb_name = re.search(r'([\'"])[^\1]*\1', nzb_name).group(0)
    except:
        pass

    name_year = fire_event('scanner.name_year', nzb_name, single=True)

    # Give points for movies beginning with the correct name
    split_by = simplify_string(movie_name)
    name_split = []
    if len(split_by) > 0:
        name_split = simplify_string(nzb_name).split(split_by)
        if name_split[0].strip() == '':
            score += 10

    # If year is second in line, give more points
    if len(name_split) > 1 and name_year:
        after_name = name_split[1].strip()
        if try_int(after_name[:4]) == name_year.get('year', None):
            score += 10
            after_name = after_name[4:]

        # Give -point to crap between year and quality
        found_quality = None
        for quality in qualities:
            # Main in words
            if quality['identifier'] in nzb_words:
                found_quality = quality['identifier']

            # Alt in words
            for alt in quality['alternative']:
                if alt in nzb_words:
                    found_quality = alt
                    break

        if not found_quality:
            return score - 20

        # name_scores entries are 'name:points'; only the names count as allowed filler
        allowed = []
        for value in name_scores:
            name, sc = value.split(':')
            allowed.append(name)

        inbetween = re.split(r'\W+', after_name.split(found_quality)[0].strip())
        score -= (10 * len(set(inbetween) - set(allowed)))

    return score
def __init__(self):
    """Register the release plugin's API views, events and cleanup schedule."""
    addApiView('release.manual_download',
               self.manualDownload,
               docs={
                   'desc': 'Send a release manually to the downloaders',
                   'params': {
                       'id': {
                           'type': 'id',
                           'desc': 'ID of the release object in release-table'
                       }
                   }
               })
    addApiView('release.delete',
               self.deleteView,
               docs={
                   'desc': 'Delete releases',
                   'params': {
                       'id': {
                           'type': 'id',
                           'desc': 'ID of the release object in release-table'
                       }
                   }
               })
    addApiView('release.ignore',
               self.ignore,
               docs={
                   'desc': 'Toggle ignore, for bad or wrong releases',
                   'params': {
                       'id': {
                           'type': 'id',
                           'desc': 'ID of the release object in release-table'
                       }
                   }
               })

    # Event handlers other plugins fire to manipulate releases
    add_event('release.add', self.add)
    add_event('release.download', self.download)
    add_event('release.try_download_result', self.tryDownloadResult)
    add_event('release.create_from_search', self.createFromSearch)
    add_event('release.delete', self.delete)
    add_event('release.clean', self.clean)
    add_event('release.update_status', self.updateStatus)
    add_event('release.with_status', self.withStatus)
    add_event('release.for_media', self.forMedia)

    # Clean releases that didn't have activity in the last week
    add_event('app.load', self.cleanDone, priority=1000)
    fire_event('schedule.interval', 'movie.clean_releases', self.cleanDone, hours=12)
def setCrons(self):
    """Reschedule the periodic library refresh from the configured interval.

    An interval of 0 (or an unparsable value) leaves the job unscheduled.
    """
    fire_event('schedule.remove', 'manage.update_library')

    interval_hours = try_int(self.conf('library_refresh_interval'))
    if interval_hours > 0:
        fire_event('schedule.interval', 'manage.update_library', self.updateLibrary,
                   hours=interval_hours, single=True)

    return True
def search(self, name, year=None):
    """Look up a movie by name/year and return its full info, or None when
    no search result was found.
    """
    matches = fire_event('movie.search', q='%s %s' % (name, year), limit=1, merge=True)
    if not matches:
        return None

    return fire_event('movie.info', identifier=matches[0].get('imdb'),
                      extended=False, merge=True)
def afterUpdate():
    # Notify the frontend about a movie added during a manage-folder scan.
    # NOTE(review): closure — uses `self`, `folder` and `identifier` from the
    # enclosing scope (outside this view); confirm where they are bound.
    if not self.in_progress or self.shuttingDown():
        return

    total = self.in_progress[folder]['total']
    movie_dict = fire_event('media.get', identifier, single=True)

    if movie_dict:
        # Suppress the per-movie message for large scans (> 5 movies)
        fire_event(
            'notify.frontend',
            type='movie.added',
            data=movie_dict,
            message=None if total > 5 else 'Added "%s" to manage.' % get_title(movie_dict))
def do_update(self):
    """Download the source archive for the configured repo/branch, extract it
    and replace the current installation.

    :return: True on success; on any failure logs the traceback, sets
        self.update_failed and returns False.
    """
    try:
        download_data = fire_event('cp.source_url',
                                   repo=self.repo_user,
                                   repo_name=self.repo_name,
                                   branch=self.branch,
                                   single=True)

        # Archive lands in the cache dir, named after the target version hash
        destination = os.path.join(
            Env.get('cache_dir'),
            self.update_version.get('hash')) + '.' + download_data.get('type')
        extracted_path = os.path.join(Env.get('cache_dir'), 'temp_updater')
        destination = fire_event('file.download',
                                 url=download_data.get('url'),
                                 dest=destination,
                                 single=True)

        # Cleanup leftover from last time
        if os.path.isdir(extracted_path):
            self.remove_directory(extracted_path)
        self.makeDir(extracted_path)

        # Extract
        if download_data.get('type') == 'zip':
            zip_file = zipfile.ZipFile(destination)
            zip_file.extractall(extracted_path)
            zip_file.close()
        else:
            tar = tarfile.open(destination)
            tar.extractall(path=extracted_path)
            tar.close()

        os.remove(destination)

        # The archive contains a single top-level directory; swap it in
        if self.replace_with(
                os.path.join(extracted_path, os.listdir(extracted_path)[0])):
            self.remove_directory(extracted_path)

            # Write update version to file
            self.createFile(self.version_file, json.dumps(self.update_version))

            return True
    except:
        log.error('Failed updating: %s', traceback.format_exc())

    self.update_failed = True

    return False
def get_property(self, identifier):
    """Fetch a stored property value by identifier.

    Returns None when the property is missing; a corrupted record is
    scheduled for deletion via 'database.delete_corrupted'.
    """
    from couchpotato import get_db
    db = get_db()

    value = None
    try:
        record = db.get('property', identifier, with_doc=True)
        value = record['doc']['value']
    except ValueError:
        # Unreadable document: fetch the bare index entry and flag it
        record = db.get('property', identifier)
        fire_event('database.delete_corrupted', record.get('_id'))
    except:
        self.log.debug('Property "%s" doesn\'t exist: %s',
                       (identifier, traceback.format_exc(0)))

    return value
def search(self, name, year=None, imdb_only=False):
    """Search for a movie, caching the resolved IMDB id per name/year.

    :param imdb_only: when True return only the IMDB id (served from cache
        when available); otherwise return the full first search result.
    """
    # Build an ASCII-safe cache key from the movie name
    try:
        cache_name = name.decode('utf-8').encode('ascii', 'ignore')
    except UnicodeEncodeError:
        cache_name = unicodedata.normalize('NFKD', name).encode('ascii', 'ignore')

    prop_name = 'automation.cached.%s.%s' % (cache_name, year)
    cached_imdb = Env.prop(prop_name, default=False)
    if cached_imdb and imdb_only:
        return cached_imdb

    results = fire_event('movie.search',
                         q='%s %s' % (name, year if year else ''),
                         limit=1, merge=True)
    if not results:
        return None

    first = results[0]
    if imdb_only and first.get('imdb'):
        Env.prop(prop_name, first.get('imdb'))

    return first.get('imdb') if imdb_only else first
def search(self, q, limit=12):
    """Search OMDb for a movie matching the query; returns a list with at
    most one parsed result (or an empty list).
    """
    if self.is_disabled():
        return []

    # Split the query into name/year; fall back to the raw query as name
    name_year = fire_event('scanner.name_year', q, single=True)
    if not name_year or (name_year and not name_year.get('name')):
        name_year = {'name': q}

    cache_key = 'omdbapi.cache.%s' % q
    url = self.urls['search'] % (self.getApiKey(), try_url_encode({
        't': name_year.get('name'),
        'y': name_year.get('year', ''),
    }))
    cached = self.getCache(cache_key, url, timeout=3,
                           headers={'User-Agent': Env.getIdentifier()})
    if not cached:
        return []

    result = self.parseMovie(cached)
    if not (result.get('titles') and len(result.get('titles')) > 0):
        return []

    log.info('Found: %s',
             result['titles'][0] + ' (' + str(result.get('year')) + ')')
    return [result]
def notifyClients(self, message, client_names):
    """Push a notification to each named Plex client.

    On a delivery failure, refresh the client list when it looks stale;
    otherwise mark the run as unsuccessful. Returns overall success.
    """
    success = True

    for client_name in client_names:
        client = self.server.clients.get(client_name)

        delivered = False
        if client and client['found']:
            delivered = fire_event('notify.plex.notifyClient', client, message, single=True)

        if delivered:
            continue

        if self.server.staleClients() or not client:
            log.info(
                'Failed to send notification to client "%s". '
                'Client list is stale, updating the client list and retrying.',
                client_name)
            self.server.updateClients(self.getClientNames())
        else:
            log.warning(
                'Failed to send notification to client %s, skipping this time',
                client_name)
            success = False

    return success
def refresh(self, id='', **kwargs):
    """API view: queue refresh jobs for the given (comma-separated) media ids."""
    ids = split_string(id)

    # Keep only the ids for which a refresh handler could be created
    handlers = [handler
                for handler in (self.createRefreshHandler(media_id) for media_id in ids)
                if handler]

    fire_event('notify.frontend', type='media.busy', data={'_id': ids})
    fire_event_async('schedule.queue', handlers=handlers)

    return {
        'success': True,
    }
def get(self, media_id):
    """Fetch a media document by internal id or IMDB id.

    Attaches the category document and the media's releases. Returns None
    (after logging) when the record does not exist.
    """
    try:
        db = get_db()

        imdb_key = get_imdb(str(media_id))
        media = (db.get('media', 'imdb-%s' % imdb_key, with_doc=True)['doc']
                 if imdb_key else db.get('id', media_id))

        if media:
            # Attach category
            try:
                media['category'] = db.get('id', media.get('category_id'))
            except:
                pass

            media['releases'] = fire_event('release.for_media', media['_id'], single=True)
            return media
    except (RecordNotFound, RecordDeleted):
        log.error('Media with id "%s" not found', media_id)
    except:
        raise