def checkLibrary(self, result):
    result = mergeDicts(copy.deepcopy(self.default_info), copy.deepcopy(result))

    if result and result.get('imdb'):
        return mergeDicts(result, self.getLibraryTags(result['imdb']))

    return result
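# Every snippet in this section leans on a mergeDicts(a, b) helper. As a rough reference
# only: the behaviour these callers rely on is a recursive merge in which keys from the
# second dict win and nested dicts are merged rather than replaced. The sketch below
# (merge_dicts_sketch) is an illustrative approximation under that assumption, not the
# project's actual implementation.
def merge_dicts_sketch(a, b):
    """Return a new dict: b merged on top of a, recursing into nested dicts."""
    result = dict(a)
    for key, value in b.items():
        if key in result and isinstance(result[key], dict) and isinstance(value, dict):
            result[key] = merge_dicts_sketch(result[key], value)
        else:
            result[key] = value
    return result

# Example: defaults are filled in, nested keys are merged, explicit values win.
# merge_dicts_sketch({'imdb': None, 'info': {'year': 0}},
#                    {'imdb': 'tt0133093', 'info': {'plot': '...'}})
# -> {'imdb': 'tt0133093', 'info': {'year': 0, 'plot': '...'}}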
def addOptions(self, section_name, options):
    if not self.options.get(section_name):
        self.options[section_name] = options
    else:
        self.options[section_name] = mergeDicts(self.options[section_name], options)
def create(self, release):
    if self.isDisabled(): return

    log.info('Creating %s metadata.' % self.getName())

    # Update library to get latest info
    try:
        updated_library = fireEvent('library.update', release['library']['identifier'], force = True, single = True)
        release['library'] = mergeDicts(release['library'], updated_library)
    except:
        log.error('Failed to update movie, before creating metadata: %s' % traceback.format_exc())

    root = self.getRootName(release)
    movie_info = release['library'].get('info')

    for file_type in ['nfo', 'thumbnail', 'fanart']:
        try:
            # Get file path
            name = getattr(self, 'get' + file_type.capitalize() + 'Name')(root)

            if name and self.conf('meta_' + file_type):

                # Get file content
                content = getattr(self, 'get' + file_type.capitalize())(movie_info = movie_info, data = release)
                if content:
                    log.debug('Creating %s file: %s' % (file_type, name))
                    if os.path.isfile(content):
                        shutil.copy2(content, name)
                    else:
                        self.createFile(name, content)
        except:
            log.error('Unable to create %s file: %s' % (file_type, traceback.format_exc()))
def combineOnIMDB(self, results):

    temp = {}
    order = []

    # Combine on imdb id
    for item in results:
        random_string = randomString()
        imdb = item.get('imdb', random_string)
        imdb = imdb if imdb else random_string

        if not temp.get(imdb):
            temp[imdb] = self.getLibraryTags(imdb)
            order.append(imdb)

        if item.get('via_imdb'):
            if order.count(imdb):
                order.remove(imdb)
            order.insert(0, imdb)

        # Merge dicts
        temp[imdb] = mergeDicts(temp[imdb], item)

    # Make it a list again
    temp_list = [temp[x] for x in order]

    return temp_list
def call(self, request_params, use_json=True, **kwargs):

    url = cleanHost(self.conf('host'), ssl=self.conf('ssl')) + 'api?' + tryUrlencode(mergeDicts(request_params, {
        'apikey': self.conf('api_key'),
        'output': 'json'
    }))

    data = self.urlopen(url, timeout=60, show_error=False, verify_ssl=False, headers={'User-Agent': Env.getIdentifier()}, **kwargs)
    if use_json:
        d = json.loads(data)
        if d.get('error'):
            log.error('Error getting data from SABNZBd: %s', d.get('error'))
            return {}

        return d.get(request_params['mode']) or d
    else:
        return data
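# Illustration of the URL the call() above builds: the caller's request_params are merged
# with the fixed 'apikey'/'output' pair and urlencoded onto '<host>/api?'. This standalone
# sketch uses the standard library instead of cleanHost/tryUrlencode, and the host and
# api key values in the usage comment are made up for the example.
try:
    from urllib.parse import urlencode  # Python 3
except ImportError:
    from urllib import urlencode  # Python 2

def build_sab_url_sketch(host, api_key, request_params):
    # Merge the per-call parameters with the fixed ones, then urlencode everything.
    params = dict(request_params)
    params.update({'apikey': api_key, 'output': 'json'})
    return host.rstrip('/') + '/api?' + urlencode(params)

# build_sab_url_sketch('http://localhost:8080', 'secret', {'mode': 'queue'})
# -> 'http://localhost:8080/api?mode=queue&apikey=secret&output=json'  (key order may vary)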
def fillResult(self, result):

    defaults = {
        'id': 0,
        'type': self.provider.type,
        'provider': self.provider.getName(),
        'download': self.provider.loginDownload if self.provider.urls.get('login') else self.provider.download,
        'url': '',
        'name': '',
        'age': 0,
        'size': 0,
        'description': '',
        'score': 0
    }

    return mergeDicts(defaults, result)
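# Why fillResult() merges onto defaults: downstream scoring and snatching code can then
# assume every key exists, while anything the provider did set takes precedence. A
# self-contained illustration with made-up values; for a flat result dict the merge
# behaves like dict.update on a copy of the defaults.
def fill_result_sketch(defaults, provider_result):
    filled = dict(defaults)
    filled.update(provider_result)
    return filled

# fill_result_sketch({'id': 0, 'score': 0, 'name': '', 'size': 0},
#                    {'name': 'Example.Movie.2010.720p.x264', 'size': 4500})
# -> {'id': 0, 'score': 0, 'name': 'Example.Movie.2010.720p.x264', 'size': 4500}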
def addOptions(self, section_name, options):
    # no additional actions (related to ro-rw options) are required here
    if not self.options.get(section_name):
        self.options[section_name] = options
    else:
        self.options[section_name] = mergeDicts(self.options[section_name], options)
def search(self, q = '', types = None, **kwargs):

    # Make sure types is the correct instance
    if isinstance(types, (str, unicode)):
        types = [types]
    elif isinstance(types, (list, tuple, set)):
        types = list(types)

    imdb_identifier = getImdb(q)

    if not types:
        if imdb_identifier:
            result = fireEvent('movie.info', identifier = imdb_identifier, merge = True)
            result = {result['type']: [result]}
        else:
            result = fireEvent('info.search', q = q, merge = True)
    else:
        result = {}
        for media_type in types:
            if imdb_identifier:
                result[media_type] = fireEvent('%s.info' % media_type, identifier = imdb_identifier)
            else:
                result[media_type] = fireEvent('%s.search' % media_type, q = q)

    return mergeDicts({
        'success': True,
    }, result)
def search(self, group):

    movie_name = getTitle(group['library'])

    url = self.urls['api'] % self.movieUrlName(movie_name)
    try:
        data = self.getCache('hdtrailers.%s' % group['library']['identifier'], url, show_error = False)
    except HTTPError:
        log.debug('No page found for: %s', movie_name)
        data = None

    result_data = {'480p':[], '720p':[], '1080p':[]}

    if not data:
        return result_data

    did_alternative = False
    for provider in self.providers:
        results = self.findByProvider(data, provider)

        # Find alternative
        if results.get('404') and not did_alternative:
            results = self.findViaAlternative(group)
            did_alternative = True

        result_data = mergeDicts(result_data, results)

    return result_data
def search(self, group):

    movie_name = getTitle(group)

    url = self.urls['api'] % self.movieUrlName(movie_name)
    try:
        data = self.getCache('hdtrailers.%s' % group['identifier'], url, show_error=False)
    except HTTPError:
        log.debug('No page found for: %s', movie_name)
        data = None

    result_data = {'480p': [], '720p': [], '1080p': []}

    if not data:
        return result_data

    did_alternative = False
    for provider in self.providers:
        results = self.findByProvider(data, provider)

        # Find alternative
        if results.get('404') and not did_alternative:
            results = self.findViaAlternative(group)
            did_alternative = True

        result_data = mergeDicts(result_data, results)

    return result_data
def addOptions(self, section_name, options):

    if not self.options.get(section_name):
        self.options[section_name] = options
    else:
        options['groups'] = self.options[section_name].get('groups') + options.get('groups')
        self.options[section_name] = mergeDicts(self.options[section_name], options)
def search(self, q='', types=None, **kwargs):

    # Make sure types is the correct instance
    if isinstance(types, (str, unicode)):
        types = [types]
    elif isinstance(types, (list, tuple, set)):
        types = list(types)

    imdb_identifier = getImdb(q)

    if not types:
        if imdb_identifier:
            result = fireEvent('movie.info', identifier=imdb_identifier, merge=True)
            result = {result['type']: [result]}
        else:
            result = fireEvent('info.search', q=q, merge=True)
    else:
        result = {}
        for media_type in types:
            if imdb_identifier:
                result[media_type] = fireEvent('%s.info' % media_type, identifier=imdb_identifier)
            else:
                result[media_type] = fireEvent('%s.search' % media_type, q=q)

    return mergeDicts({
        'success': True,
    }, result)
def create(self, message=None, group=None): if self.isDisabled(): return if not group: group = {} log.info('Creating %s metadata.', self.getName()) # Update library to get latest info try: updated_library = fireEvent('library.update.movie', group['library']['identifier'], force=True, single=True) group['library'] = mergeDicts(group['library'], updated_library) except: log.error('Failed to update movie, before creating metadata: %s', traceback.format_exc()) root_name = self.getRootName(group) meta_name = os.path.basename(root_name) root = os.path.dirname(root_name) movie_info = group['library'].get('info') for file_type in ['nfo', 'thumbnail', 'fanart']: try: # Get file path name = getattr(self, 'get' + file_type.capitalize() + 'Name')( meta_name, root) if name and (self.conf('meta_' + file_type) or self.conf('meta_' + file_type) is None): # Get file content content = getattr(self, 'get' + file_type.capitalize())( movie_info=movie_info, data=group) if content: log.debug('Creating %s file: %s', (file_type, name)) if os.path.isfile(content): shutil.copy2(content, name) shutil.copyfile(content, name) # Try and copy stats seperately try: shutil.copystat(content, name) except: pass else: self.createFile(name, content) group['renamed_files'].append(name) try: os.chmod(name, Env.getPermission('file')) except: log.debug('Failed setting permissions for %s: %s', (name, traceback.format_exc())) except: log.error('Unable to create %s file: %s', (file_type, traceback.format_exc()))
def fillResult(self, result):

    defaults = {
        'id': 0,
        'status': 'busy',
        'downloader': self.provider.getName(),
    }

    return mergeDicts(defaults, result)
def addOptions(self, section_name, options):

    if not self.options.get(section_name):
        self.options[section_name] = options
    else:
        options['groups'] = self.options[section_name].get('groups') + options.get('groups')
        self.options[section_name] = mergeDicts(self.options[section_name], options)
def fillResult(self, result):

    defaults = {
        'id': 0,
        'status': 'busy',
        'downloader': self.provider.getName(),
        'folder': '',
    }

    return mergeDicts(defaults, result)
def single(self, identifier = ''):

    db = get_db()
    quality_dict = {}

    quality = db.get('quality', identifier, with_doc = True)['doc']
    if quality:
        quality_dict = mergeDicts(self.getQuality(quality['identifier']), quality)

    return quality_dict
def all(self):

    db = get_session()

    qualities = db.query(Quality).all()

    temp = []
    for quality in qualities:
        q = mergeDicts(self.getQuality(quality.identifier), quality.to_dict())
        temp.append(q)

    return temp
def call(self, params, use_json = True): url = cleanHost(self.conf('host')) + "api?" + tryUrlencode(mergeDicts(params, { 'apikey': self.conf('api_key'), 'output': 'json' })) data = self.urlopen(url, timeout = 60, show_error = False) if use_json: return json.loads(data)[params['mode']] else: return data
def create(self, message = None, group = None): if self.isDisabled(): return if not group: group = {} log.info('Creating %s metadata.', self.getName()) # Update library to get latest info try: updated_library = fireEvent('library.update.movie', group['library']['identifier'], extended = True, single = True) group['library'] = mergeDicts(group['library'], updated_library) except: log.error('Failed to update movie, before creating metadata: %s', traceback.format_exc()) root_name = self.getRootName(group) meta_name = os.path.basename(root_name) root = os.path.dirname(root_name) movie_info = group['library'].get('info') for file_type in ['nfo', 'thumbnail', 'fanart']: try: # Get file path name = getattr(self, 'get' + file_type.capitalize() + 'Name')(meta_name, root) if name and (self.conf('meta_' + file_type) or self.conf('meta_' + file_type) is None): # Get file content content = getattr(self, 'get' + file_type.capitalize())(movie_info = movie_info, data = group) if content: log.debug('Creating %s file: %s', (file_type, name)) if os.path.isfile(content): content = sp(content) name = sp(name) shutil.copy2(content, name) shutil.copyfile(content, name) # Try and copy stats seperately try: shutil.copystat(content, name) except: pass else: self.createFile(name, content) group['renamed_files'].append(name) try: os.chmod(sp(name), Env.getPermission('file')) except: log.debug('Failed setting permissions for %s: %s', (name, traceback.format_exc())) except: log.error('Unable to create %s file: %s', (file_type, traceback.format_exc()))
def updateReleaseDate(self, identifier):

    db = get_session()
    library = db.query(Library).filter_by(identifier = identifier).first()

    if library.info.get('release_date', {}).get('expires', 0) < time.time():
        dates = fireEvent('movie.release_date', identifier = identifier, merge = True)
        library.info = mergeDicts(library.info, {'release_date': dates})
        db.commit()

    dates = library.info.get('release_date', {})

    db.remove()

    return dates
def single(self, identifier = ''):

    db = get_db()
    quality_dict = {}

    try:
        quality = db.get('quality', identifier, with_doc = True)['doc']
    except RecordNotFound:
        log.error("Unable to find '%s' in the quality DB", identifier)
        quality = None

    if quality:
        quality_dict = mergeDicts(self.getQuality(quality['identifier']), quality)

    return quality_dict
def fillResult(self, result): defaults = { "id": 0, "type": self.provider.type, "provider": self.provider.getName(), "download": self.provider.download, "url": "", "name": "", "age": 0, "size": 0, "description": "", "score": 0, } return mergeDicts(defaults, result)
def fillResult(self, result):

    defaults = {
        'id': 0,
        'type': self.provider.type,
        'provider': self.provider.getName(),
        'download': self.provider.download,
        'url': '',
        'name': '',
        'age': 0,
        'size': 0,
        'description': '',
        'score': 0
    }

    return mergeDicts(defaults, result)
def all(self):

    if self.cached_qualities:
        return self.cached_qualities

    db = get_db()

    temp = []
    for quality in self.qualities:
        quality_doc = db.get('quality', quality.get('identifier'), with_doc = True)['doc']
        q = mergeDicts(quality, quality_doc)
        temp.append(q)

    if len(temp) == len(self.qualities):
        self.cached_qualities = temp

    return temp
def call(self, request_params, use_json = True, **kwargs):

    url = cleanHost(self.conf('host')) + 'api?' + tryUrlencode(mergeDicts(request_params, {
        'apikey': self.conf('api_key'),
        'output': 'json'
    }))

    data = self.urlopen(url, timeout = 60, show_error = False, headers = {'User-Agent': Env.getIdentifier()}, **kwargs)
    if use_json:
        d = json.loads(data)
        if d.get('error'):
            log.error('Error getting data from SABNZBd: %s', d.get('error'))
            return {}

        return d.get(request_params['mode']) or d
    else:
        return data
def call(self, params, use_json = True):

    url = cleanHost(self.conf('host')) + 'api?' + tryUrlencode(mergeDicts(params, {
        'apikey': self.conf('api_key'),
        'output': 'json'
    }))

    data = self.urlopen(url, timeout = 60, show_error = False)
    if use_json:
        d = json.loads(data)
        if d.get('error'):
            log.error('Error getting data from SABNZBd: %s', d.get('error'))
            return {}

        return d[params['mode']]
    else:
        return data
def updateReleaseDate(self, identifier):

    db = get_session()
    library = db.query(Library).filter_by(identifier = identifier).first()

    if not library.info:
        library_dict = self.update(identifier, force = True)
        dates = library_dict.get('info', {}).get('release_date')
    else:
        dates = library.info.get('release_date')

    if dates and dates.get('expires', 0) < time.time() or not dates:
        dates = fireEvent('movie.release_date', identifier = identifier, merge = True)
        library.info = mergeDicts(library.info, {'release_date': dates})
        db.commit()

    return dates
def search(self, q = '', types = None, **kwargs):

    # Make sure types is the correct instance
    if isinstance(types, (str, unicode)):
        types = [types]
    elif isinstance(types, (list, tuple, set)):
        types = list(types)

    if not types:
        result = fireEvent('info.search', q = q, merge = True)
    else:
        result = {}
        for media_type in types:
            # Pass the query along, otherwise the per-type search has nothing to search for
            result[media_type] = fireEvent('%s.search' % media_type, q = q)

    return mergeDicts({
        'success': True,
    }, result)
def all(self):

    if self.cached_qualities:
        return self.cached_qualities

    db = get_db()

    qualities = db.all('quality', with_doc = True)

    temp = []
    for quality in qualities:
        quality = quality['doc']
        q = mergeDicts(self.getQuality(quality.get('identifier')), quality)
        temp.append(q)

    self.cached_qualities = temp
    return temp
def create(self, release):
    if self.isDisabled(): return

    log.info('Creating %s metadata.', self.getName())

    # Update library to get latest info
    try:
        updated_library = fireEvent('library.update', release['library']['identifier'], force=True, single=True)
        release['library'] = mergeDicts(release['library'], updated_library)
    except:
        log.error('Failed to update movie, before creating metadata: %s', traceback.format_exc())

    root_name = self.getRootName(release)
    meta_name = os.path.basename(root_name)
    root = os.path.dirname(root_name)

    movie_info = release['library'].get('info')

    for file_type in ['nfo', 'thumbnail', 'fanart']:
        try:
            # Get file path
            name = getattr(self, 'get' + file_type.capitalize() + 'Name')(meta_name, root)

            if name and self.conf('meta_' + file_type):

                # Get file content
                content = getattr(self, 'get' + file_type.capitalize())(movie_info=movie_info, data=release)
                if content:
                    log.debug('Creating %s file: %s', (file_type, name))
                    if os.path.isfile(content):
                        shutil.copy2(content, name)
                    else:
                        self.createFile(name, content)
        except:
            log.error('Unable to create %s file: %s', (file_type, traceback.format_exc()))
def fillResult(self, result):

    defaults = {
        'id': 0,
        'protocol': self.provider.protocol,
        'type': self.provider.type,
        'provider': self.provider.getName(),
        'download': self.provider.loginDownload if self.provider.urls.get('login') else self.provider.download,
        'seed_ratio': Env.setting('seed_ratio', section = self.provider.getName().lower(), default = ''),
        'seed_time': Env.setting('seed_time', section = self.provider.getName().lower(), default = ''),
        'url': '',
        'name': '',
        'age': 0,
        'size': 0,
        'description': '',
        'score': 0
    }

    return mergeDicts(defaults, result)
def updateReleaseDate(self, identifier):

    db = get_session()
    library = db.query(Library).filter_by(identifier=identifier).first()

    if not library.info:
        library_dict = self.update(identifier, force=True)
        dates = library_dict.get('info', {}).get('release_date')
    else:
        dates = library.info.get('release_date')

    if dates and dates.get('expires', 0) < time.time() or not dates:
        dates = fireEvent('movie.release_date', identifier=identifier, merge=True)
        library.info = mergeDicts(library.info, {'release_date': dates})
        db.commit()

    return dates
def combineOnIMDB(self, results):

    temp = {}
    order = []

    # Combine on imdb id
    for item in results:
        imdb = item.get('imdb', 'random-%s' % time.time())

        if not temp.get(imdb):
            temp[imdb] = self.getLibraryTags(imdb)
            order.append(imdb)

        # Merge dicts
        temp[imdb] = mergeDicts(temp[imdb], item)

    # Make it a list again
    temp_list = [temp[x] for x in order]

    return temp_list
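# What combineOnIMDB() achieves: provider results that report the same imdb id collapse
# into one dict, with later providers filling in or overriding fields, while results
# without an imdb id stay separate. Standalone sketch with made-up data; it ignores the
# library-tag lookup and via_imdb ordering details of the methods above and uses a flat
# dict.update where the real code uses mergeDicts.
def combine_on_imdb_sketch(results):
    combined = {}
    order = []
    for item in results:
        key = item.get('imdb') or id(item)  # unique key when no imdb id is known
        if key not in combined:
            combined[key] = {}
            order.append(key)
        combined[key].update(item)
    return [combined[key] for key in order]

# combine_on_imdb_sketch([{'imdb': 'tt0133093', 'titles': ['The Matrix']},
#                         {'imdb': 'tt0133093', 'year': 1999}])
# -> [{'imdb': 'tt0133093', 'titles': ['The Matrix'], 'year': 1999}]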
def fillResult(self, result): defaults = { "id": 0, "protocol": self.provider.protocol, "type": self.provider.type, "provider": self.provider.getName(), "download": self.provider.loginDownload if self.provider.urls.get("login") else self.provider.download, "seed_ratio": Env.setting("seed_ratio", section=self.provider.getName().lower(), default=""), "seed_time": Env.setting("seed_time", section=self.provider.getName().lower(), default=""), "url": "", "name": "", "age": 0, "size": 0, "description": "", "score": 0, } return mergeDicts(defaults, result)
def search(self, group):

    movie_name = group['library']['titles'][0]['title']

    url = self.urls['api'] % self.movieUrlName(movie_name)
    data = self.getCache('hdtrailers.%s' % group['library']['identifier'], url)

    result_data = {}

    did_alternative = False
    for provider in self.providers:
        results = self.findByProvider(data, provider)

        # Find alternative
        if results.get('404') and not did_alternative:
            results = self.findViaAlternative(group)
            did_alternative = True

        result_data = mergeDicts(result_data, results)

    return result_data
def combineOnIMDB(self, results):

    temp = {}
    unique = 1

    # Combine on imdb id
    for item in results:
        imdb = item.get('imdb')

        if imdb:
            if not temp.get(imdb):
                temp[imdb] = self.getLibraryTags(imdb)

            # Merge dicts
            temp[imdb] = mergeDicts(temp[imdb], item)
        else:
            temp[unique] = item
            unique += 1

    # Make it a list again
    temp_list = [temp[x] for x in temp]

    return temp_list
def search(self, group): movie_name = getTitle(group["library"]) url = self.urls["api"] % self.movieUrlName(movie_name) data = self.getCache("hdtrailers.%s" % group["library"]["identifier"], url) result_data = {"480p": [], "720p": [], "1080p": []} if not data: return result_data did_alternative = False for provider in self.providers: results = self.findByProvider(data, provider) # Find alternative if results.get("404") and not did_alternative: results = self.findViaAlternative(group) did_alternative = True result_data = mergeDicts(result_data, results) return result_data
def search(self, group):

    movie_name = getTitle(group['library'])

    url = self.urls['api'] % self.movieUrlName(movie_name)
    data = self.getCache('hdtrailers.%s' % group['library']['identifier'], url)

    result_data = {'480p':[], '720p':[], '1080p':[]}

    if not data:
        return result_data

    did_alternative = False
    for provider in self.providers:
        results = self.findByProvider(data, provider)

        # Find alternative
        if results.get('404') and not did_alternative:
            results = self.findViaAlternative(group)
            did_alternative = True

        result_data = mergeDicts(result_data, results)

    return result_data
def search(self, movie, quality): results = [] if self.isDisabled(): return results movie_title = getTitle(movie['library']) quality_id = quality['identifier'] log.info('Searching for %s at quality %s' % (movie_title, quality_id)) params = mergeDicts( self.quality_search_params[quality_id].copy(), { 'order_by': 'relevance', 'order_way': 'descending', 'searchstr': movie['library']['identifier'] }) # Do login for the cookies if not self.login_opener and not self.login(): return results try: url = '%s?json=noredirect&%s' % (self.urls['torrent'], tryUrlencode(params)) txt = self.urlopen(url, opener=self.login_opener) res = json.loads(txt) except: log.error( 'Search on PassThePopcorn.me (%s) failed (could not decode JSON)' % params) return [] try: if not 'Movies' in res: log.info( "PTP search returned nothing for '%s' at quality '%s' with search parameters %s" % (movie_title, quality_id, params)) return [] authkey = res['AuthKey'] passkey = res['PassKey'] for ptpmovie in res['Movies']: if not 'Torrents' in ptpmovie: log.debug('Movie %s (%s) has NO torrents' % (ptpmovie['Title'], ptpmovie['Year'])) continue log.debug('Movie %s (%s) has %d torrents' % (ptpmovie['Title'], ptpmovie['Year'], len(ptpmovie['Torrents']))) for torrent in ptpmovie['Torrents']: torrent_id = tryInt(torrent['Id']) torrentdesc = '%s %s %s' % (torrent['Resolution'], torrent['Source'], torrent['Codec']) if 'GoldenPopcorn' in torrent and torrent['GoldenPopcorn']: torrentdesc += ' HQ' if 'Scene' in torrent and torrent['Scene']: torrentdesc += ' Scene' if 'RemasterTitle' in torrent and torrent['RemasterTitle']: # eliminate odd characters... torrentdesc += self.htmlToASCII( ' %s' % torrent['RemasterTitle']) torrentdesc += ' (%s)' % quality_id torrent_name = re.sub( '[^A-Za-z0-9\-_ \(\).]+', '', '%s (%s) - %s' % (movie_title, ptpmovie['Year'], torrentdesc)) def extra_check(item): return self.torrentMeetsQualitySpec(item, type) def extra_score(item): return 50 if torrent['GoldenPopcorn'] else 0 new = { 'id': torrent_id, 'type': 'torrent', 'provider': self.getName(), 'name': torrent_name, 'description': '', 'url': '%s?action=download&id=%d&authkey=%s&torrent_pass=%s' % (self.urls['torrent'], torrent_id, authkey, passkey), 'detail_url': self.urls['detail'] % torrent_id, 'date': tryInt( time.mktime( parse(torrent['UploadTime']).timetuple())), 'size': tryInt(torrent['Size']) / (1024 * 1024), 'provider': self.getName(), 'seeders': tryInt(torrent['Seeders']), 'leechers': tryInt(torrent['Leechers']), 'extra_score': extra_score, 'extra_check': extra_check, 'download': self.loginDownload, } new['score'] = fireEvent('score.calculate', new, movie, single=True) if fireEvent('searcher.correct_movie', nzb=new, movie=movie, quality=quality): results.append(new) self.found(new) return results except: log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc())) return []
def _search(self, media, quality, results): movie_title = getTitle(media) quality_id = quality['identifier'] params = mergeDicts(self.quality_search_params[quality_id].copy(), { 'order_by': 'relevance', 'order_way': 'descending', 'searchstr': getIdentifier(media) }) url = '%s?json=noredirect&%s' % (self.urls['torrent'], tryUrlencode(params)) res = self.getJsonData(url) try: if not 'Movies' in res: return authkey = res['AuthKey'] passkey = res['PassKey'] for ptpmovie in res['Movies']: if not 'Torrents' in ptpmovie: log.debug('Movie %s (%s) has NO torrents', (ptpmovie['Title'], ptpmovie['Year'])) continue log.debug('Movie %s (%s) has %d torrents', (ptpmovie['Title'], ptpmovie['Year'], len(ptpmovie['Torrents']))) for torrent in ptpmovie['Torrents']: torrent_id = tryInt(torrent['Id']) torrentdesc = '%s %s %s' % (torrent['Resolution'], torrent['Source'], torrent['Codec']) torrentscore = 0 if 'GoldenPopcorn' in torrent and torrent['GoldenPopcorn']: torrentdesc += ' HQ' if self.conf('prefer_golden'): torrentscore += 5000 if 'Scene' in torrent and torrent['Scene']: torrentdesc += ' Scene' if self.conf('prefer_scene'): torrentscore += 2000 if 'RemasterTitle' in torrent and torrent['RemasterTitle']: torrentdesc += self.htmlToASCII(' %s' % torrent['RemasterTitle']) torrentdesc += ' (%s)' % quality_id torrent_name = re.sub('[^A-Za-z0-9\-_ \(\).]+', '', '%s (%s) - %s' % (movie_title, ptpmovie['Year'], torrentdesc)) def extra_check(item): return self.torrentMeetsQualitySpec(item, quality_id) results.append({ 'id': torrent_id, 'name': torrent_name, 'Source': torrent['Source'], 'Checked': 'true' if torrent['Checked'] else 'false', 'Resolution': torrent['Resolution'], 'url': '%s?action=download&id=%d&authkey=%s&torrent_pass=%s' % (self.urls['torrent'], torrent_id, authkey, passkey), 'detail_url': self.urls['detail'] % torrent_id, 'date': tryInt(time.mktime(parse(torrent['UploadTime']).timetuple())), 'size': tryInt(torrent['Size']) / (1024 * 1024), 'seeders': tryInt(torrent['Seeders']), 'leechers': tryInt(torrent['Leechers']), 'score': torrentscore, 'extra_check': extra_check, }) except: log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc()))
def scan(self): if self.isDisabled(): return if self.renaming_started is True: log.info( 'Renamer is disabled to avoid infinite looping of the same error.' ) return # Check to see if the "to" folder is inside the "from" folder. if not os.path.isdir(self.conf('from')) or not os.path.isdir( self.conf('to')): log.debug('"To" and "From" have to exist.') return elif self.conf('from') in self.conf('to'): log.error( 'The "to" can\'t be inside of the "from" folder. You\'ll get an infinite loop.' ) return groups = fireEvent('scanner.scan', folder=self.conf('from'), single=True) self.renaming_started = True destination = self.conf('to') folder_name = self.conf('folder_name') file_name = self.conf('file_name') trailer_name = self.conf('trailer_name') nfo_name = self.conf('nfo_name') separator = self.conf('separator') # Statusses done_status = fireEvent('status.get', 'done', single=True) active_status = fireEvent('status.get', 'active', single=True) downloaded_status = fireEvent('status.get', 'downloaded', single=True) snatched_status = fireEvent('status.get', 'snatched', single=True) db = get_session() for group_identifier in groups: group = groups[group_identifier] rename_files = {} remove_files = [] remove_releases = [] movie_title = getTitle(group['library']) # Add _UNKNOWN_ if no library item is connected if not group['library'] or not movie_title: self.tagDir(group, 'unknown') continue # Rename the files using the library data else: group['library'] = fireEvent( 'library.update', identifier=group['library']['identifier'], single=True) if not group['library']: log.error( 'Could not rename, no library item to work with: %s', group_identifier) continue library = group['library'] movie_title = getTitle(library) # Find subtitle for renaming fireEvent('renamer.before', group) # Remove weird chars from moviename movie_name = re.sub(r"[\x00\/\\:\*\?\"<>\|]", '', movie_title) # Put 'The' at the end name_the = movie_name if movie_name[:4].lower() == 'the ': name_the = movie_name[4:] + ', The' replacements = { 'ext': 'mkv', 'namethe': name_the.strip(), 'thename': movie_name.strip(), 'year': library['year'], 'first': name_the[0].upper(), 'quality': group['meta_data']['quality']['label'], 'quality_type': group['meta_data']['quality_type'], 'video': group['meta_data'].get('video'), 'audio': group['meta_data'].get('audio'), 'group': group['meta_data']['group'], 'source': group['meta_data']['source'], 'resolution_width': group['meta_data'].get('resolution_width'), 'resolution_height': group['meta_data'].get('resolution_height'), 'imdb_id': library['identifier'], } for file_type in group['files']: # Move nfo depending on settings if file_type is 'nfo' and not self.conf('rename_nfo'): log.debug('Skipping, renaming of %s disabled', file_type) if self.conf('cleanup'): for current_file in group['files'][file_type]: remove_files.append(current_file) continue # Subtitle extra if file_type is 'subtitle_extra': continue # Move other files multiple = len( group['files']['movie']) > 1 and not group['is_dvd'] cd = 1 if multiple else 0 for current_file in sorted(list( group['files'][file_type])): # Original filename replacements['original'] = os.path.splitext( os.path.basename(current_file))[0] replacements['original_folder'] = fireEvent( 'scanner.remove_cptag', group['dirname'], single=True) # Extension replacements['ext'] = getExt(current_file) # cd # replacements['cd'] = ' cd%d' % cd if cd else '' replacements['cd_nr'] = cd # Naming final_folder_name = self.doReplace( folder_name, replacements) final_file_name = 
self.doReplace( file_name, replacements) replacements['filename'] = final_file_name[:-( len(getExt(final_file_name)) + 1)] # Group filename without cd extension replacements['cd'] = '' replacements['cd_nr'] = '' # Meta naming if file_type is 'trailer': final_file_name = self.doReplace( trailer_name, replacements) elif file_type is 'nfo': final_file_name = self.doReplace( nfo_name, replacements) # Seperator replace if separator: final_file_name = final_file_name.replace( ' ', separator) # Move DVD files (no structure renaming) if group['is_dvd'] and file_type is 'movie': found = False for top_dir in [ 'video_ts', 'audio_ts', 'bdmv', 'certificate' ]: has_string = current_file.lower().find( os.path.sep + top_dir + os.path.sep) if has_string >= 0: structure_dir = current_file[ has_string:].lstrip(os.path.sep) rename_files[current_file] = os.path.join( destination, final_folder_name, structure_dir) found = True break if not found: log.error( 'Could not determine dvd structure for: %s', current_file) # Do rename others else: if file_type is 'leftover': if self.conf('move_leftover'): rename_files[current_file] = os.path.join( destination, final_folder_name, os.path.basename(current_file)) elif file_type not in ['subtitle']: rename_files[current_file] = os.path.join( destination, final_folder_name, final_file_name) # Check for extra subtitle files if file_type is 'subtitle': # rename subtitles with or without language rename_files[current_file] = os.path.join( destination, final_folder_name, final_file_name) sub_langs = group['subtitle_language'].get( current_file, []) rename_extras = self.getRenameExtras( extra_type='subtitle_extra', replacements=replacements, folder_name=folder_name, file_name=file_name, destination=destination, group=group, current_file=current_file) # Don't add language if multiple languages in 1 file if len(sub_langs) > 1: rename_files[current_file] = os.path.join( destination, final_folder_name, final_file_name) elif len(sub_langs) == 1: sub_name = final_file_name.replace( replacements['ext'], '%s.%s' % (sub_langs[0], replacements['ext'])) rename_files[current_file] = os.path.join( destination, final_folder_name, sub_name) rename_files = mergeDicts(rename_files, rename_extras) # Filename without cd etc if file_type is 'movie': rename_extras = self.getRenameExtras( extra_type='movie_extra', replacements=replacements, folder_name=folder_name, file_name=file_name, destination=destination, group=group, current_file=current_file) rename_files = mergeDicts(rename_files, rename_extras) group['filename'] = self.doReplace( file_name, replacements)[:-(len(getExt(final_file_name)) + 1)] group['destination_dir'] = os.path.join( destination, final_folder_name) if multiple: cd += 1 # Before renaming, remove the lower quality files library = db.query(Library).filter_by( identifier=group['library']['identifier']).first() remove_leftovers = True # Add it to the wanted list before we continue if len(library.movies) == 0: profile = db.query(Profile).filter_by( core=True, label=group['meta_data']['quality']['label']).first() fireEvent('movie.add', params={ 'identifier': group['library']['identifier'], 'profile_id': profile.id }, search_after=False) db.expire_all() library = db.query(Library).filter_by( identifier=group['library']['identifier']).first() for movie in library.movies: # Mark movie "done" onces it found the quality with the finish check try: if movie.status_id == active_status.get( 'id') and movie.profile: for profile_type in movie.profile.types: if profile_type.quality_id == group[ 
'meta_data']['quality'][ 'id'] and profile_type.finish: movie.status_id = done_status.get('id') db.commit() except Exception, e: log.error('Failed marking movie finished: %s %s', (e, traceback.format_exc())) # Go over current movie releases for release in movie.releases: # When a release already exists if release.status_id is done_status.get('id'): # This is where CP removes older, lesser quality releases if release.quality.order > group['meta_data'][ 'quality']['order']: log.info('Removing lesser quality %s for %s.', (movie.library.titles[0].title, release.quality.label)) for current_file in release.files: remove_files.append(current_file) remove_releases.append(release) # Same quality, but still downloaded, so maybe repack/proper/unrated/directors cut etc elif release.quality.order is group['meta_data'][ 'quality']['order']: log.info( 'Same quality release already exists for %s, with quality %s. Assuming repack.', (movie.library.titles[0].title, release.quality.label)) for current_file in release.files: remove_files.append(current_file) remove_releases.append(release) # Downloaded a lower quality, rename the newly downloaded files/folder to exclude them from scan else: log.info( 'Better quality release already exists for %s, with quality %s', (movie.library.titles[0].title, release.quality.label)) # Add _EXISTS_ to the parent dir self.tagDir(group, 'exists') # Notify on rename fail download_message = 'Renaming of %s (%s) canceled, exists in %s already.' % ( movie.library.titles[0].title, group['meta_data']['quality']['label'], release.quality.label) fireEvent('movie.renaming.canceled', message=download_message, data=group) remove_leftovers = False break elif release.status_id is snatched_status.get('id'): if release.quality.id is group['meta_data'][ 'quality']['id']: log.debug('Marking release as downloaded') release.status_id = downloaded_status.get('id') db.commit() # Remove leftover files if self.conf('cleanup') and not self.conf( 'move_leftover') and remove_leftovers: log.debug('Removing leftover files') for current_file in group['files']['leftover']: remove_files.append(current_file) elif not remove_leftovers: # Don't remove anything break # Remove files delete_folders = [] for src in remove_files: if isinstance(src, File): src = src.path if rename_files.get(src): log.debug('Not removing file that will be renamed: %s', src) continue log.info('Removing "%s"', src) try: if os.path.isfile(src): os.remove(src) parent_dir = os.path.normpath(os.path.dirname(src)) if delete_folders.count( parent_dir) == 0 and os.path.isdir( parent_dir) and destination != parent_dir: delete_folders.append(parent_dir) except: log.error('Failed removing %s: %s', (src, traceback.format_exc())) self.tagDir(group, 'failed_remove') # Delete leftover folder from older releases for delete_folder in delete_folders: self.deleteEmptyFolder(delete_folder, show_error=False) # Rename all files marked group['renamed_files'] = [] for src in rename_files: if rename_files[src]: dst = rename_files[src] log.info('Renaming "%s" to "%s"', (src, dst)) # Create dir self.makeDir(os.path.dirname(dst)) try: self.moveFile(src, dst) group['renamed_files'].append(dst) except: log.error( 'Failed moving the file "%s" : %s', (os.path.basename(src), traceback.format_exc())) self.tagDir(group, 'failed_rename') # Remove matching releases for release in remove_releases: log.debug('Removing release %s', release.identifier) try: db.delete(release) except: log.error('Failed removing %s: %s', (release.identifier, traceback.format_exc())) if 
group['dirname'] and group['parentdir']: try: log.info('Deleting folder: %s', group['parentdir']) self.deleteEmptyFolder(group['parentdir']) except: log.error('Failed removing %s: %s', (group['parentdir'], traceback.format_exc())) # Search for trailers etc fireEventAsync('renamer.after', group) # Notify on download download_message = 'Downloaded %s (%s)' % (movie_title, replacements['quality']) fireEventAsync('movie.downloaded', message=download_message, data=group) # Break if CP wants to shut down if self.shuttingDown(): break
def _search(self, movie, quality, results): movie_title = getTitle(movie['library']) quality_id = quality['identifier'] params = mergeDicts(self.quality_search_params[quality_id].copy(), { 'order_by': 'relevance', 'order_way': 'descending', 'searchstr': movie['library']['identifier'] }) # Do login for the cookies if not self.login_opener and not self.login(): return try: url = '%s?json=noredirect&%s' % (self.urls['torrent'], tryUrlencode(params)) txt = self.urlopen(url, opener = self.login_opener) res = json.loads(txt) except: log.error('Search on PassThePopcorn.me (%s) failed (could not decode JSON)', params) return try: if not 'Movies' in res: return authkey = res['AuthKey'] passkey = res['PassKey'] for ptpmovie in res['Movies']: if not 'Torrents' in ptpmovie: log.debug('Movie %s (%s) has NO torrents', (ptpmovie['Title'], ptpmovie['Year'])) continue log.debug('Movie %s (%s) has %d torrents', (ptpmovie['Title'], ptpmovie['Year'], len(ptpmovie['Torrents']))) for torrent in ptpmovie['Torrents']: torrent_id = tryInt(torrent['Id']) torrentdesc = '%s %s %s' % (torrent['Resolution'], torrent['Source'], torrent['Codec']) if 'GoldenPopcorn' in torrent and torrent['GoldenPopcorn']: torrentdesc += ' HQ' if 'Scene' in torrent and torrent['Scene']: torrentdesc += ' Scene' if 'RemasterTitle' in torrent and torrent['RemasterTitle']: torrentdesc += self.htmlToASCII(' %s' % torrent['RemasterTitle']) torrentdesc += ' (%s)' % quality_id torrent_name = re.sub('[^A-Za-z0-9\-_ \(\).]+', '', '%s (%s) - %s' % (movie_title, ptpmovie['Year'], torrentdesc)) def extra_check(item): return self.torrentMeetsQualitySpec(item, type) results.append({ 'id': torrent_id, 'name': torrent_name, 'url': '%s?action=download&id=%d&authkey=%s&torrent_pass=%s' % (self.urls['torrent'], torrent_id, authkey, passkey), 'detail_url': self.urls['detail'] % torrent_id, 'date': tryInt(time.mktime(parse(torrent['UploadTime']).timetuple())), 'size': tryInt(torrent['Size']) / (1024 * 1024), 'seeders': tryInt(torrent['Seeders']), 'leechers': tryInt(torrent['Leechers']), 'score': 50 if torrent['GoldenPopcorn'] else 0, 'extra_check': extra_check, 'download': self.loginDownload, }) except: log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc()))
def checkLibrary(self, result):

    if result and result.get('imdb'):
        return mergeDicts(result, self.getLibraryTags(result['imdb']))

    return result
def fireEvent(name, *args, **kwargs): if name not in events: return #log.debug('Firing event %s', name) try: options = { 'is_after_event': False, # Fire after event 'on_complete': False, # onComplete event 'single': False, # Return single handler 'merge': False, # Merge items 'in_order': False, # Fire them in specific order, waits for the other to finish } # Do options for x in options: try: val = kwargs[x] del kwargs[x] options[x] = val except: pass if len(events[name]) == 1: single = None try: single = events[name][0]['handler'](*args, **kwargs) except: log.error('Failed running single event: %s', traceback.format_exc()) # Don't load thread for single event result = { 'single': (single is not None, single), } else: e = Event(name=name, threads=10, exc_info=True, traceback=True, lock=threading.RLock()) for event in events[name]: e.handle(event['handler'], priority=event['priority']) # Make sure only 1 event is fired at a time when order is wanted kwargs['event_order_lock'] = threading.RLock( ) if options['in_order'] or options['single'] else None kwargs['event_return_on_result'] = options['single'] # Fire result = e(*args, **kwargs) result_keys = result.keys() result_keys.sort(key=natsortKey) if options['single'] and not options['merge']: results = None # Loop over results, stop when first not None result is found. for r_key in result_keys: r = result[r_key] if r[0] is True and r[1] is not None: results = r[1] break elif r[1]: errorHandler(r[1]) else: log.debug('Assume disabled eventhandler for: %s', name) else: results = [] for r_key in result_keys: r = result[r_key] if r[0] == True and r[1]: results.append(r[1]) elif r[1]: errorHandler(r[1]) # Merge if options['merge'] and len(results) > 0: # Dict if isinstance(results[0], dict): results.reverse() merged = {} for result in results: merged = mergeDicts(merged, result, prepend_list=True) results = merged # Lists elif isinstance(results[0], list): merged = [] for result in results: if result not in merged: merged += result results = merged modified_results = fireEvent('result.modify.%s' % name, results, single=True) if modified_results: log.debug('Return modified results for %s', name) results = modified_results if not options['is_after_event']: fireEvent('%s.after' % name, is_after_event=True) if options['on_complete']: options['on_complete']() return results except Exception: log.error('%s: %s', (name, traceback.format_exc()))
def fireEvent(name, *args, **kwargs): if not events.get(name): return #log.debug('Firing event %s', name) try: options = { 'is_after_event': False, # Fire after event 'on_complete': False, # onComplete event 'single': False, # Return single handler 'merge': False, # Merge items 'in_order': False, # Fire them in specific order, waits for the other to finish } # Do options for x in options: try: val = kwargs[x] del kwargs[x] options[x] = val except: pass e = events[name] # Lock this event e.lock.acquire() e.asynchronous = False # Make sure only 1 event is fired at a time when order is wanted kwargs['event_order_lock'] = threading.RLock() if options['in_order'] or options['single'] else None kwargs['event_return_on_result'] = options['single'] # Fire result = e(*args, **kwargs) # Release lock for this event e.lock.release() if options['single'] and not options['merge']: results = None # Loop over results, stop when first not None result is found. for r_key in sorted(result.iterkeys(), cmp = natcmp): r = result[r_key] if r[0] is True and r[1] is not None: results = r[1] break elif r[1]: errorHandler(r[1]) else: log.debug('Assume disabled eventhandler for: %s', name) else: results = [] for r_key in sorted(result.iterkeys(), cmp = natcmp): r = result[r_key] if r[0] == True and r[1]: results.append(r[1]) elif r[1]: errorHandler(r[1]) # Merge if options['merge'] and len(results) > 0: # Dict if type(results[0]) == dict: merged = {} for result in results: merged = mergeDicts(merged, result) results = merged # Lists elif type(results[0]) == list: merged = [] for result in results: merged += result results = merged modified_results = fireEvent('result.modify.%s' % name, results, single = True) if modified_results: log.debug('Return modified results for %s', name) results = modified_results if not options['is_after_event']: fireEvent('%s.after' % name, is_after_event = True) if options['on_complete']: options['on_complete']() return results except KeyError, e: pass
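# What the 'merge' option in the fireEvent() variants above produces: each registered
# handler returns a dict, and the results are folded together so the caller gets one
# combined dict back (this is how 'movie.info' / 'info.search' aggregate several
# providers). Standalone, simplified sketch with made-up handler output; the real code
# also handles handler ordering, error results, and list de-duplication via mergeDicts.
def merge_handler_results_sketch(handler_results):
    merged = {}
    for result in handler_results:
        for key, value in result.items():
            if isinstance(merged.get(key), dict) and isinstance(value, dict):
                merged[key].update(value)
            elif isinstance(merged.get(key), list) and isinstance(value, list):
                merged[key] = merged[key] + value
            else:
                merged[key] = value
    return merged

# merge_handler_results_sketch([{'titles': ['The Matrix']},
#                               {'titles': ['Matrix, The'], 'year': 1999}])
# -> {'titles': ['The Matrix', 'Matrix, The'], 'year': 1999}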
def scan(self, movie_folder = None, download_info = None): if self.isDisabled(): return if self.renaming_started is True: log.info('Renamer is already running, if you see this often, check the logs above for errors.') return # Check to see if the "to" folder is inside the "from" folder. if movie_folder and not os.path.isdir(movie_folder) or not os.path.isdir(self.conf('from')) or not os.path.isdir(self.conf('to')): l = log.debug if movie_folder else log.error l('Both the "To" and "From" have to exist.') return elif self.conf('from') in self.conf('to'): log.error('The "to" can\'t be inside of the "from" folder. You\'ll get an infinite loop.') return elif (movie_folder and movie_folder in [self.conf('to'), self.conf('from')]): log.error('The "to" and "from" folders can\'t be inside of or the same as the provided movie folder.') return self.renaming_started = True # make sure the movie folder name is included in the search folder = None files = [] if movie_folder: log.info('Scanning movie folder %s...', movie_folder) movie_folder = movie_folder.rstrip(os.path.sep) folder = os.path.dirname(movie_folder) # Get all files from the specified folder try: for root, folders, names in os.walk(movie_folder): files.extend([os.path.join(root, name) for name in names]) except: log.error('Failed getting files from %s: %s', (movie_folder, traceback.format_exc())) db = get_session() # Extend the download info with info stored in the downloaded release download_info = self.extendDownloadInfo(download_info) groups = fireEvent('scanner.scan', folder = folder if folder else self.conf('from'), files = files, download_info = download_info, return_ignored = False, single = True) destination = self.conf('to') folder_name = self.conf('folder_name') file_name = self.conf('file_name') trailer_name = self.conf('trailer_name') nfo_name = self.conf('nfo_name') separator = self.conf('separator') # Statusses done_status, active_status, downloaded_status, snatched_status = \ fireEvent('status.get', ['done', 'active', 'downloaded', 'snatched'], single = True) for group_identifier in groups: group = groups[group_identifier] rename_files = {} remove_files = [] remove_releases = [] movie_title = getTitle(group['library']) # Add _UNKNOWN_ if no library item is connected if not group['library'] or not movie_title: self.tagDir(group, 'unknown') continue # Rename the files using the library data else: group['library'] = fireEvent('library.update', identifier = group['library']['identifier'], single = True) if not group['library']: log.error('Could not rename, no library item to work with: %s', group_identifier) continue library = group['library'] movie_title = getTitle(library) # Find subtitle for renaming fireEvent('renamer.before', group) # Remove weird chars from moviename movie_name = re.sub(r"[\x00\/\\:\*\?\"<>\|]", '', movie_title) # Put 'The' at the end name_the = movie_name if movie_name[:4].lower() == 'the ': name_the = movie_name[4:] + ', The' replacements = { 'ext': 'mkv', 'namethe': name_the.strip(), 'thename': movie_name.strip(), 'year': library['year'], 'first': name_the[0].upper(), 'quality': group['meta_data']['quality']['label'], 'quality_type': group['meta_data']['quality_type'], 'video': group['meta_data'].get('video'), 'audio': group['meta_data'].get('audio'), 'group': group['meta_data']['group'], 'source': group['meta_data']['source'], 'resolution_width': group['meta_data'].get('resolution_width'), 'resolution_height': group['meta_data'].get('resolution_height'), 'imdb_id': library['identifier'], 'cd': '', 
'cd_nr': '', } for file_type in group['files']: # Move nfo depending on settings if file_type is 'nfo' and not self.conf('rename_nfo'): log.debug('Skipping, renaming of %s disabled', file_type) if self.conf('cleanup'): for current_file in group['files'][file_type]: remove_files.append(current_file) continue # Subtitle extra if file_type is 'subtitle_extra': continue # Move other files multiple = len(group['files'][file_type]) > 1 and not group['is_dvd'] cd = 1 if multiple else 0 for current_file in sorted(list(group['files'][file_type])): current_file = toUnicode(current_file) # Original filename replacements['original'] = os.path.splitext(os.path.basename(current_file))[0] replacements['original_folder'] = fireEvent('scanner.remove_cptag', group['dirname'], single = True) # Extension replacements['ext'] = getExt(current_file) # cd # replacements['cd'] = ' cd%d' % cd if multiple else '' replacements['cd_nr'] = cd if multiple else '' # Naming final_folder_name = self.doReplace(folder_name, replacements).lstrip('. ') final_file_name = self.doReplace(file_name, replacements).lstrip('. ') replacements['filename'] = final_file_name[:-(len(getExt(final_file_name)) + 1)] # Meta naming if file_type is 'trailer': final_file_name = self.doReplace(trailer_name, replacements, remove_multiple = True).lstrip('. ') elif file_type is 'nfo': final_file_name = self.doReplace(nfo_name, replacements, remove_multiple = True).lstrip('. ') # Seperator replace if separator: final_file_name = final_file_name.replace(' ', separator) # Move DVD files (no structure renaming) if group['is_dvd'] and file_type is 'movie': found = False for top_dir in ['video_ts', 'audio_ts', 'bdmv', 'certificate']: has_string = current_file.lower().find(os.path.sep + top_dir + os.path.sep) if has_string >= 0: structure_dir = current_file[has_string:].lstrip(os.path.sep) rename_files[current_file] = os.path.join(destination, final_folder_name, structure_dir) found = True break if not found: log.error('Could not determine dvd structure for: %s', current_file) # Do rename others else: if file_type is 'leftover': if self.conf('move_leftover'): rename_files[current_file] = os.path.join(destination, final_folder_name, os.path.basename(current_file)) elif file_type not in ['subtitle']: rename_files[current_file] = os.path.join(destination, final_folder_name, final_file_name) # Check for extra subtitle files if file_type is 'subtitle': remove_multiple = False if len(group['files']['movie']) == 1: remove_multiple = True sub_langs = group['subtitle_language'].get(current_file, []) # rename subtitles with or without language sub_name = self.doReplace(file_name, replacements, remove_multiple = remove_multiple) rename_files[current_file] = os.path.join(destination, final_folder_name, sub_name) rename_extras = self.getRenameExtras( extra_type = 'subtitle_extra', replacements = replacements, folder_name = folder_name, file_name = file_name, destination = destination, group = group, current_file = current_file, remove_multiple = remove_multiple, ) # Don't add language if multiple languages in 1 subtitle file if len(sub_langs) == 1: sub_name = final_file_name.replace(replacements['ext'], '%s.%s' % (sub_langs[0], replacements['ext'])) rename_files[current_file] = os.path.join(destination, final_folder_name, sub_name) rename_files = mergeDicts(rename_files, rename_extras) # Filename without cd etc elif file_type is 'movie': rename_extras = self.getRenameExtras( extra_type = 'movie_extra', replacements = replacements, folder_name = folder_name, file_name = 
file_name, destination = destination, group = group, current_file = current_file ) rename_files = mergeDicts(rename_files, rename_extras) group['filename'] = self.doReplace(file_name, replacements, remove_multiple = True)[:-(len(getExt(final_file_name)) + 1)] group['destination_dir'] = os.path.join(destination, final_folder_name) if multiple: cd += 1 # Before renaming, remove the lower quality files library = db.query(Library).filter_by(identifier = group['library']['identifier']).first() remove_leftovers = True # Add it to the wanted list before we continue if len(library.movies) == 0: profile = db.query(Profile).filter_by(core = True, label = group['meta_data']['quality']['label']).first() fireEvent('movie.add', params = {'identifier': group['library']['identifier'], 'profile_id': profile.id}, search_after = False) db.expire_all() library = db.query(Library).filter_by(identifier = group['library']['identifier']).first() for movie in library.movies: # Mark movie "done" onces it found the quality with the finish check try: if movie.status_id == active_status.get('id') and movie.profile: for profile_type in movie.profile.types: if profile_type.quality_id == group['meta_data']['quality']['id'] and profile_type.finish: movie.status_id = done_status.get('id') movie.last_edit = int(time.time()) db.commit() except Exception, e: log.error('Failed marking movie finished: %s %s', (e, traceback.format_exc())) # Go over current movie releases for release in movie.releases: # When a release already exists if release.status_id is done_status.get('id'): # This is where CP removes older, lesser quality releases if release.quality.order > group['meta_data']['quality']['order']: log.info('Removing lesser quality %s for %s.', (movie.library.titles[0].title, release.quality.label)) for current_file in release.files: remove_files.append(current_file) remove_releases.append(release) # Same quality, but still downloaded, so maybe repack/proper/unrated/directors cut etc elif release.quality.order is group['meta_data']['quality']['order']: log.info('Same quality release already exists for %s, with quality %s. Assuming repack.', (movie.library.titles[0].title, release.quality.label)) for current_file in release.files: remove_files.append(current_file) remove_releases.append(release) # Downloaded a lower quality, rename the newly downloaded files/folder to exclude them from scan else: log.info('Better quality release already exists for %s, with quality %s', (movie.library.titles[0].title, release.quality.label)) # Add exists tag to the .ignore file self.tagDir(group, 'exists') # Notify on rename fail download_message = 'Renaming of %s (%s) canceled, exists in %s already.' 
% (movie.library.titles[0].title, group['meta_data']['quality']['label'], release.quality.label) fireEvent('movie.renaming.canceled', message = download_message, data = group) remove_leftovers = False break elif release.status_id is snatched_status.get('id'): if release.quality.id is group['meta_data']['quality']['id']: log.debug('Marking release as downloaded') try: release.status_id = downloaded_status.get('id') release.last_edit = int(time.time()) except Exception, e: log.error('Failed marking release as finished: %s %s', (e, traceback.format_exc())) db.commit() # Remove leftover files if self.conf('cleanup') and not self.conf('move_leftover') and remove_leftovers and \ not (self.conf('file_action') != 'move' and self.downloadIsTorrent(download_info)): log.debug('Removing leftover files') for current_file in group['files']['leftover']: remove_files.append(current_file) elif not remove_leftovers: # Don't remove anything break