def _downloadPropers(self, properList):
    """
    Snatch each proper in properList, skipping any whose original download
    cannot be found in recent history or that already appears in history.

    :param properList: iterable of proper search results to snatch
    """
    for cur_proper in properList:
        cutoff = datetime.datetime.today() - datetime.timedelta(days=30)

        # make sure the episode has been downloaded before (last 30 days)
        my_db = db.DBConnection()
        history_rows = my_db.select(
            "SELECT resource FROM history " +
            "WHERE showid = ? AND season = ? AND episode = ? AND quality = ? AND date >= ? " +
            "AND action IN (" + ",".join([str(x) for x in Quality.SNATCHED]) + ")",
            [cur_proper.indexerid, cur_proper.season, cur_proper.episode,
             cur_proper.quality, cutoff.strftime(history.dateFormat)])

        # without an original history entry the quality to use is unknown
        if not history_rows:
            logger.log(
                u"Unable to find an original history entry for proper " + cur_proper.name +
                " so I'm not downloading it.")
            continue

        # skip the proper when the same release already exists in history
        wanted = self._genericName(helpers.remove_non_release_groups(cur_proper.name))
        if any(self._genericName(helpers.remove_non_release_groups(row["resource"])) == wanted
               for row in history_rows):
            logger.log(u"This proper is already in history, skipping it", logger.DEBUG)
            continue

        # build the result object for the search machinery and snatch it
        ep_obj = cur_proper.show.getEpisode(cur_proper.season, cur_proper.episode)
        result = cur_proper.provider.getResult([ep_obj])
        result.show = cur_proper.show
        result.url = cur_proper.url
        result.name = cur_proper.name
        result.quality = cur_proper.quality
        result.release_group = cur_proper.release_group
        result.version = cur_proper.version

        search.snatchEpisode(result, SNATCHED_PROPER)
def _downloadPropers(self, properList):
    """
    Download proper (snatch it)

    :param properList: iterable of propers to snatch
    """
    for cur_proper in properList:
        cutoff = datetime.datetime.today() - datetime.timedelta(days=30)

        # only proceed when the episode was snatched/downloaded in the last 30 days
        my_db = db.DBConnection()
        history_rows = my_db.select(
            "SELECT resource FROM history " +
            "WHERE showid = ? AND season = ? AND episode = ? AND quality = ? AND date >= ? " +
            "AND action IN (" + ",".join([str(x) for x in Quality.SNATCHED + Quality.DOWNLOADED]) + ")",
            [cur_proper.indexerid, cur_proper.season, cur_proper.episode,
             cur_proper.quality, cutoff.strftime(History.date_format)])

        # without an original history entry the quality to use is unknown
        if not history_rows:
            logger.log(
                u"Unable to find an original history entry for proper " + cur_proper.name +
                " so I'm not downloading it.")
            continue

        # skip the proper when the same release already exists in history
        wanted = self._genericName(helpers.remove_non_release_groups(cur_proper.name))
        if any(self._genericName(helpers.remove_non_release_groups(row["resource"])) == wanted
               for row in history_rows):
            logger.log(u"This proper is already in history, skipping it", logger.DEBUG)
            continue

        # build the search result and snatch it
        ep_obj = cur_proper.show.getEpisode(cur_proper.season, cur_proper.episode)
        result = cur_proper.provider.getResult([ep_obj])
        result.show = cur_proper.show
        result.url = cur_proper.url
        result.name = cur_proper.name
        result.quality = cur_proper.quality
        result.release_group = cur_proper.release_group
        result.version = cur_proper.version
        result.content = cur_proper.content

        snatchEpisode(result, SNATCHED_PROPER)

        # throttle between snatches according to the configured cpu preset
        time.sleep(cpu_presets[sickbeard.CPU_PRESET])
def _download_propers(proper_list):
    """
    Snatch each proper in proper_list after validating it against history
    and the locally known show list.

    :param proper_list: iterable of proper search results to snatch
    """
    for cur_proper in proper_list:
        cutoff = datetime.datetime.today() - datetime.timedelta(days=30)

        # only propers for episodes snatched within the last 30 days are wanted
        my_db = db.DBConnection()
        history_rows = my_db.select(
            'SELECT resource FROM history ' +
            'WHERE showid = ? AND season = ? AND episode = ? AND quality = ? AND date >= ? ' +
            'AND action IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')',
            [cur_proper.indexerid, cur_proper.season, cur_proper.episode,
             cur_proper.quality, cutoff.strftime(history.dateFormat)])

        # without an original history entry the quality for the proper is unknown = skip
        if not history_rows:
            logger.log(u'Skipping download because cannot find an original history entry for proper ' + cur_proper.name)
            continue

        # the show must be known locally
        show_obj = helpers.findCertainShow(sickbeard.showList, cur_proper.indexerid)
        if None is show_obj:
            logger.log(u'Unable to find the show with indexerid ' + str(
                cur_proper.indexerid) + ' so unable to download the proper', logger.ERROR)
            continue

        # skip the proper when the same release already exists in history
        wanted = _generic_name(helpers.remove_non_release_groups(cur_proper.name, show_obj.is_anime))
        if any(wanted == _generic_name(helpers.remove_non_release_groups(row['resource']))
               for row in history_rows):
            logger.log(u'This proper is already in history, skipping it', logger.DEBUG)
            continue

        ep_obj = show_obj.getEpisode(cur_proper.season, cur_proper.episode)

        # build the result object and snatch it
        result = cur_proper.provider.get_result([ep_obj], cur_proper.url)
        if None is result:
            continue
        result.name = cur_proper.name
        result.quality = cur_proper.quality
        result.version = cur_proper.version

        search.snatch_episode(result, SNATCHED_PROPER)
def _downloadPropers(properList):
    """
    Download (snatch) each proper in properList.

    Skips propers without an original snatch in the last 30 days of history,
    propers already present in history, and propers whose show is unknown.

    :param properList: iterable of proper search results to snatch
    """
    for curProper in properList:
        historyLimit = datetime.datetime.today() - datetime.timedelta(days=30)

        # make sure the episode has been downloaded before
        myDB = db.DBConnection()
        historyResults = myDB.select(
            'SELECT resource FROM history '
            'WHERE showid = ? AND season = ? AND episode = ? AND quality = ? AND date >= ? '
            'AND action IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')',
            [curProper.indexerid, curProper.season, curProper.episode, curProper.quality,
             historyLimit.strftime(history.dateFormat)])

        # if we didn't download this episode in the first place we don't know
        # what quality to use for the proper so we can't do it
        if not historyResults:
            logger.log(
                u'Unable to find an original history entry for proper ' + curProper.name +
                ' so I\'m not downloading it.')
            continue

        # make sure that none of the existing history downloads are the same
        # proper we're trying to download
        clean_proper_name = _genericName(helpers.remove_non_release_groups(curProper.name))
        isSame = False
        for curResult in historyResults:
            # if the result exists in history already we need to skip it
            if _genericName(helpers.remove_non_release_groups(curResult['resource'])) == clean_proper_name:
                isSame = True
                break
        if isSame:
            logger.log(u'This proper is already in history, skipping it', logger.DEBUG)
            continue

        # get the episode object; fixed: identity test instead of '== None' (PEP 8)
        showObj = helpers.findCertainShow(sickbeard.showList, curProper.indexerid)
        if showObj is None:
            logger.log(u'Unable to find the show with indexerid ' + str(
                curProper.indexerid) + ' so unable to download the proper', logger.ERROR)
            continue
        epObj = showObj.getEpisode(curProper.season, curProper.episode)

        # make the result object
        result = curProper.provider.get_result([epObj], curProper.url)
        if None is result:
            continue
        result.name = curProper.name
        result.quality = curProper.quality
        result.version = curProper.version

        # snatch it
        search.snatchEpisode(result, SNATCHED_PROPER)
def _finalize(self, parse_result):
    """
    Store parse result if it is complete and final.

    Records the release group, whether the release is a proper/repack, and —
    when the parse carries a full identification — the cleaned release name.

    :param parse_result: Result of parsers
    """
    self.release_group = parse_result.release_group

    # remember whether it's a proper
    # fixed: identity test instead of '!= None' (PEP 8); value is still a bool
    if parse_result.extra_info:
        self.is_proper = re.search(r'(^|[\. _-])(proper|repack)([\. _-]|$)',
                                   parse_result.extra_info, re.I) is not None

    # if the result is complete then set release name
    if parse_result.series_name and \
            ((parse_result.season_number is not None and parse_result.episode_numbers) or
             parse_result.air_date) and parse_result.release_group:
        if not self.release_name:
            self.release_name = helpers.remove_non_release_groups(
                helpers.remove_extension(ek(os.path.basename, parse_result.original_name)))
    else:
        # log exactly which field(s) blocked saving the release name
        logger.log(u"Parse result not sufficient (all following have to be set). will not save release name", logger.DEBUG)
        logger.log(u"Parse result(series_name): " + str(parse_result.series_name), logger.DEBUG)
        logger.log(u"Parse result(season_number): " + str(parse_result.season_number), logger.DEBUG)
        logger.log(u"Parse result(episode_numbers): " + str(parse_result.episode_numbers), logger.DEBUG)
        logger.log(u" or Parse result(air_date): " + str(parse_result.air_date), logger.DEBUG)
        logger.log(u"Parse result(release_group): " + str(parse_result.release_group), logger.DEBUG)
def parse(self, name):
    """Parse a release name into a ParseResult, using a cache of prior results.

    Parses the file name and its direct parent directory name separately,
    then combines the two partial results; raises InvalidNameException when
    no useful information could be extracted.

    :param name: release name or path to parse
    :return: the populated (and cached) ParseResult
    """
    name = self._unicodify(name)

    # serve a previously parsed result when one is cached for this name
    cached = name_parser_cache.get(name)
    if cached:
        return cached

    # break it into parts if there are any (dirname, file name, extension)
    dir_name, file_name = ek.ek(os.path.split, name)

    if self.is_file_name:
        base_file_name = helpers.remove_non_release_groups(helpers.remove_extension(file_name))
    else:
        base_file_name = file_name

    # use only the direct parent dir
    dir_name = ek.ek(os.path.basename, dir_name)

    # set up a result to use
    final_result = ParseResult(name)

    # try parsing the file name
    file_name_result = self._parse_string(base_file_name)

    # parse the dirname for extra info if needed
    dir_name_result = self._parse_string(dir_name)

    # build the ParseResult object: prefer file-name data for dates/numbers
    final_result.air_date = self._combine_results(file_name_result, dir_name_result, 'air_date')

    # season/episode numbers only apply when the result is not air-by-date
    if not final_result.air_date:
        final_result.season_number = self._combine_results(file_name_result, dir_name_result, 'season_number')
        final_result.episode_numbers = self._combine_results(file_name_result, dir_name_result, 'episode_numbers')

    final_result.is_proper = self._combine_results(file_name_result, dir_name_result, 'is_proper')

    # if the dirname has a release group/show name I believe it over the filename
    final_result.series_name = self._combine_results(dir_name_result, file_name_result, 'series_name')
    final_result.extra_info = self._combine_results(dir_name_result, file_name_result, 'extra_info')
    final_result.release_group = self._combine_results(dir_name_result, file_name_result, 'release_group')

    # record which regex(es) produced the winning fields
    final_result.which_regex = []
    if final_result == file_name_result:
        final_result.which_regex = file_name_result.which_regex
    elif final_result == dir_name_result:
        final_result.which_regex = dir_name_result.which_regex
    else:
        if file_name_result:
            final_result.which_regex += file_name_result.which_regex
        if dir_name_result:
            final_result.which_regex += dir_name_result.which_regex

    # if there's no useful info in it then raise an exception
    if final_result.season_number is None and \
            not final_result.episode_numbers and final_result.air_date is None and not final_result.series_name:
        raise InvalidNameException("Unable to parse " + name.encode(sickbeard.SYS_ENCODING, 'xmlcharrefreplace'))

    name_parser_cache.add(name, final_result)

    # return it
    return final_result
def _title_and_url(self, item):
    """
    Extract a cleaned release title and url from an RSS feed item.

    :param item: an ElementTree-like node with 'title' and 'link' children
    :return: (title, url) tuple; each may be None when extraction fails
    """
    title, url = None, None
    try:
        # BUGFIX: unescape the XML '&amp;' entity (the original replaced
        # '&' with itself, a no-op left over from entity mangling)
        url = str(item.findtext('link')).replace('&amp;', '&')
        title = ('%s' % item.findtext('title')).strip()
        title = re.sub(r'\s+', '.', title)

        # remove indexer specific release name parts, repeating until no
        # pattern matches (tags can be stacked, e.g. '-Obfuscated-postbot')
        r_found = True
        while r_found:
            r_found = False
            for pattern, repl in ((r'(?i)-Scrambled$', ''), (r'(?i)-BUYMORE$', ''),
                                  (r'(?i)-Obfuscated$', ''), (r'(?i)-postbot$', ''),
                                  (r'(?i)[-.]English$', '')):
                if re.search(pattern, title):
                    r_found = True
                    title = re.sub(pattern, repl, title)

        # split on the codec token and strip non release group tags from
        # the trailing (group) part
        parts = re.findall('(.*(?:(?:h.?|x)26[45]|vp9|av1|hevc|xvid|divx)[^-]*)(.*)', title, re.I)[0]
        title = '%s-%s' % (parts[0], remove_non_release_groups(parts[1].split('-')[1]))
    except (StandardError, Exception):
        # best-effort parsing: any failure leaves title/url as built so far
        pass
    return title, url
def test_remove_non_release_groups(self):
    """Anime names keep bracketed tags; non-anime names lose trailing ones."""
    cases = (
        (('[HorribleSubs] Hidan no Aria AA - 08 [1080p]', True),
         '[HorribleSubs] Hidan no Aria AA - 08 [1080p]'),
        (('The.Last.Man.On.Earth.S02E08.No.Bull.1080p.WEB-DL.DD5.1.H264-BTN[rartv]', False),
         'The.Last.Man.On.Earth.S02E08.No.Bull.1080p.WEB-DL.DD5.1.H264-BTN'),
    )
    for (name, is_anime), expected in cases:
        self.assertEqual(expected, helpers.remove_non_release_groups(name, is_anime))
def _test(self):
    """
    Generate tests

    :param self:
    :return: test to run
    """
    # every module-level test string must reduce to the single expected value
    for candidate in test_strings:
        self.assertEqual(helpers.remove_non_release_groups(candidate), TEST_RESULT)
def _downloadPropers(self, properList):
    """
    Download proper (snatch it)

    :param properList: iterable of propers to snatch
    """
    for cur_proper in properList:
        cutoff = datetime.datetime.today() - datetime.timedelta(days=30)

        # fetch recent snatched/downloaded history rows for this episode/quality
        main_db_con = db.DBConnection()
        history_rows = main_db_con.select(
            "SELECT resource FROM history " +
            "WHERE showid = ? AND season = ? AND episode = ? AND quality = ? AND date >= ? " +
            "AND action IN (" + ",".join([str(x) for x in Quality.SNATCHED + Quality.DOWNLOADED]) + ")",
            [cur_proper.indexerid, cur_proper.season, cur_proper.episode,
             cur_proper.quality, cutoff.strftime(History.date_format)])

        # skip the proper when the same release already exists in history
        wanted = self._genericName(helpers.remove_non_release_groups(cur_proper.name))
        if any(self._genericName(helpers.remove_non_release_groups(row["resource"])) == wanted
               for row in history_rows):
            logger.log(u"This proper is already in history, skipping it", logger.DEBUG)
            continue

        # build the search result and snatch it
        ep_obj = cur_proper.show.getEpisode(cur_proper.season, cur_proper.episode)
        result = cur_proper.provider.get_result([ep_obj])
        result.show = cur_proper.show
        result.url = cur_proper.url
        result.name = cur_proper.name
        result.quality = cur_proper.quality
        result.release_group = cur_proper.release_group
        result.version = cur_proper.version
        result.content = cur_proper.content

        snatchEpisode(result, SNATCHED_PROPER)

        # throttle between snatches according to the configured cpu preset
        time.sleep(cpu_presets[sickbeard.CPU_PRESET])
def test_remove_non_release_groups(self):
    """Verify anime/non-anime handling of non release group tag removal."""
    expectations = [
        ('[HorribleSubs] Hidan no Aria AA - 08 [1080p]', True,
         '[HorribleSubs] Hidan no Aria AA - 08 [1080p]'),
        ('The.Last.Man.On.Earth.S02E08.No.Bull.1080p.WEB-DL.DD5.1.H264-BTN[rartv]', False,
         'The.Last.Man.On.Earth.S02E08.No.Bull.1080p.WEB-DL.DD5.1.H264-BTN'),
    ]
    for name, is_anime, expected in expectations:
        self.assertEqual(
            expected, helpers.remove_non_release_groups(name, is_anime))
def refine(video, release_name=None, release_file=None, extension='release', **kwargs):
    """Refine a video by using the original release name.

    The release name is resolved in priority order:

    - the file video_name.<extension> next to the video, if it names a release
    - otherwise the release_file argument's contents
    - otherwise the release_name argument
    - if none of these yield a name, the video object is left untouched

    When a release name is found, the video object is enhanced with the
    guessit properties extracted from it. Several
    :class:`~subliminal.video.Video` attributes can be set:

    * :attr:`~subliminal.video.Video.title`
    * :attr:`~subliminal.video.Video.series`
    * :attr:`~subliminal.video.Video.season`
    * :attr:`~subliminal.video.Video.episode`
    * :attr:`~subliminal.video.Video.year`
    * :attr:`~subliminal.video.Video.format`
    * :attr:`~subliminal.video.Video.release_group`
    * :attr:`~subliminal.video.Video.resolution`
    * :attr:`~subliminal.video.Video.video_codec`
    * :attr:`~subliminal.video.Video.audio_codec`

    :param video: the video to refine.
    :param str release_name: the release name to be used.
    :param str release_file: the release file to be used
    :param str extension: the release file extension.
    """
    logger.debug(u'Starting release refiner [extension=%s, release_name=%s, release_file=%s]',
                 extension, release_name, release_file)
    dirpath, filename = os.path.split(video.name)
    if not dirpath:
        dirpath = '.'
    fileroot, fileext = os.path.splitext(filename)

    # prefer a sidecar release file beside the video, falling back to the args
    release_file = get_release_file(dirpath, fileroot, extension) or release_file
    release_name = remove_non_release_groups(get_release_name(release_file) or release_name)

    if not release_name:
        logger.debug(u'No release name for %s', video.name)
        return

    release_path = os.path.join(dirpath, release_name + fileext)
    logger.debug(u'Guessing using %s', release_path)
    guess = guessit(release_path)

    # pick the attribute map matching the guessed media type
    if guess.get('type') == 'movie':
        attributes = MOVIE_ATTRIBUTES
    else:
        attributes = EPISODE_ATTRIBUTES

    # copy every changed guessit property onto the video object
    for video_attr, guess_key in attributes.items():
        old_value = getattr(video, video_attr)
        new_value = guess.get(guess_key)
        if new_value and old_value != new_value:
            setattr(video, video_attr, new_value)
            logger.debug(u'Attribute %s changed from %s to %s', video_attr, old_value, new_value)
def _analyze_name(self, name, file=True):
    """
    Takes a name and tries to figure out a show, season, and episode from it.

    :param name: A string which we want to analyze to determine show info from (unicode)
    :return: (show, season, [episodes], quality, None) tuple; show/season may
        be None and episodes may be [] if none were found.
    """
    logger.log(u"Analyzing name " + repr(name))

    if not name:
        return (None, None, [], None, None)

    # strip the extension and any non release group tags before parsing
    name = helpers.remove_non_release_groups(helpers.remove_extension(name))

    # break the name into show name, season, and episode
    np = NameParser(file, tryIndexers=True, trySceneExceptions=True, convert=True)
    parse_result = np.parse(name)

    if parse_result.is_air_by_date:
        season, episodes = -1, [parse_result.air_date]
    else:
        season, episodes = parse_result.season_number, parse_result.episode_numbers

    # capture the tuple before _finalize consumes the parse result
    show = parse_result.show
    to_return = (show, season, episodes, parse_result.quality, None)
    self._finalize(parse_result)
    return to_return
def _analyze_name(self, name, file=True):
    """
    Takes a name and tries to figure out a show, season, and episode from it.

    :param name: A string which we want to analyze to determine show info from (unicode)
    :return: (show, season, [episodes], quality, None) tuple; show/season may
        be None and episodes may be [] if none were found.
    """
    logger.log(u"Analyzing name " + repr(name))

    if not name:
        return (None, None, [], None, None)

    # strip the extension and any non release group tags before parsing
    name = helpers.remove_non_release_groups(helpers.remove_extension(name))

    # break the name into show name, season, and episode
    np = NameParser(file, tryIndexers=True, convert=True)
    parse_result = np.parse(name)

    if parse_result.is_air_by_date:
        season, episodes = -1, [parse_result.air_date]
    elif parse_result.is_sports:
        # sports events are keyed by their air date, like air-by-date shows
        season, episodes = -1, [parse_result.sports_air_date]
    else:
        season, episodes = parse_result.season_number, parse_result.episode_numbers

    # capture the tuple before _finalize consumes the parse result
    show = parse_result.show
    to_return = (show, season, episodes, parse_result.quality, None)
    self._finalize(parse_result)
    return to_return
def _analyze_name(self, name, resource=True):
    """
    Takes a name and tries to figure out a show, season, and episode from it.

    :param name: A string which we want to analyze to determine show info from (unicode)
    :return: (show, season, [episodes], quality) tuple; show/season may be
        None and episodes may be [] if none were found.
    """
    logger.log(u'Analyzing name ' + repr(name))

    if not name:
        return (None, None, [], None)

    # strip the extension and any non release group tags before parsing
    name = helpers.remove_non_release_groups(helpers.remove_extension(name))

    # break the name into show name, season, and episode
    np = NameParser(resource, try_indexers=True, try_scene_exceptions=True, convert=True)
    parse_result = np.parse(name)
    self._log(u'Parsed %s<br />.. into %s' % (name, str(parse_result).decode('utf-8', 'xmlcharrefreplace')), logger.DEBUG)

    if parse_result.is_air_by_date:
        season, episodes = -1, [parse_result.air_date]
    else:
        season, episodes = parse_result.season_number, parse_result.episode_numbers

    # capture the tuple before _finalize consumes the parse result
    to_return = (parse_result.show, season, episodes, parse_result.quality)
    self._finalize(parse_result)
    return to_return
def _parse_string(self, name):
    """Match name against every compiled regex and return the best ParseResult.

    Each matching regex yields a scored candidate; the highest-scoring one
    wins. When a show object can be resolved, scene/absolute numbering is
    converted to indexer numbering before returning.

    :param name: release name to parse; falsy input returns None
    :return: the best ParseResult, or None when nothing matched
    """
    if not name:
        return

    matches = []
    initial_best_result = None
    # outer key selects a regex family (e.g. normal vs anime sets)
    for reg_ex in self.compiled_regexes:
        for (cur_regex_num, cur_regex_name, cur_regex) in self.compiled_regexes[reg_ex]:
            # anime regexes parse a name cleaned with anime-aware tag removal
            new_name = helpers.remove_non_release_groups(name, 'anime' in cur_regex_name)
            match = cur_regex.match(new_name)
            if not match:
                continue
            if 'garbage_name' == cur_regex_name:
                # names matching the garbage pattern are not parseable at all
                return

            result = ParseResult(new_name)
            result.which_regex = [cur_regex_name]
            # earlier regexes in the list are preferred (higher base score)
            result.score = 0 - cur_regex_num

            named_groups = match.groupdict().keys()

            if 'series_name' in named_groups:
                result.series_name = match.group('series_name')
                if result.series_name:
                    result.series_name = self.clean_series_name(result.series_name)
                    # split a trailing 'Part N'/'Pt N' off the series name into extra_info
                    name_parts = re.match('(?i)(.*)[ -]((?:part|pt)[ -]?\w+)$', result.series_name)
                    try:
                        result.series_name = name_parts.group(1)
                        result.extra_info = name_parts.group(2)
                    except (AttributeError, IndexError):
                        pass
                    result.score += 1

            if 'anime' in cur_regex_name and not (self.showObj and self.showObj.is_anime):
                # an anime regex matched but the known show is not anime: verify
                # against the show resolved from the parsed series name
                p_show = helpers.get_show(result.series_name, True)
                if p_show and self.showObj and p_show.indexerid != self.showObj.indexerid:
                    p_show = None
                if not p_show and self.showObj:
                    p_show = self.showObj
                if p_show and not p_show.is_anime:
                    continue

            if 'series_num' in named_groups and match.group('series_num'):
                result.score += 1

            if 'season_num' in named_groups:
                tmp_season = int(match.group('season_num'))
                # 'bare' matches of 19xx/20xx are almost certainly years, not seasons
                if 'bare' == cur_regex_name and tmp_season in (19, 20):
                    continue
                result.season_number = tmp_season
                result.score += 1

            def _process_epnum(captures, capture_names, grp_name, extra_grp_name, ep_numbers, parse_result):
                # resolve an episode number group (and optional multi-episode
                # 'extra_' range end) into parse_result.<prefix>episode_numbers
                ep_num = self._convert_number(captures.group(grp_name))
                extra_grp_name = 'extra_%s' % extra_grp_name
                ep_numbers = '%sepisode_numbers' % ep_numbers
                if extra_grp_name in capture_names and captures.group(extra_grp_name):
                    try:
                        # look up the episode to sanity check the range end
                        if hasattr(self.showObj, 'getEpisode'):
                            ep = self.showObj.getEpisode(parse_result.season_number, ep_num)
                        else:
                            tmp_show = helpers.get_show(parse_result.series_name, True, False)
                            if tmp_show and hasattr(tmp_show, 'getEpisode'):
                                ep = tmp_show.getEpisode(parse_result.season_number, ep_num)
                            else:
                                ep = None
                    except (StandardError, Exception):
                        ep = None

                    # leading digits of the episode title, used to detect names
                    # like '2' that make the 'extra' group a title, not a range
                    en = ep and ep.name and re.match(r'^\W*(\d+)', ep.name) or None
                    es = en and en.group(1) or None

                    extra_ep_num = self._convert_number(captures.group(extra_grp_name))
                    # accept the range only when it is plausible (< 10 eps) and
                    # not contradicted by the episode title's leading number
                    parse_result.__dict__[ep_numbers] = range(ep_num, extra_ep_num + 1) if not (
                        ep and es and es != captures.group(extra_grp_name)) and (
                        0 < extra_ep_num - ep_num < 10) else [ep_num]
                    parse_result.score += 1
                else:
                    parse_result.__dict__[ep_numbers] = [ep_num]
                    parse_result.score += 1
                return parse_result

            if 'ep_num' in named_groups:
                result = _process_epnum(match, named_groups, 'ep_num', 'ep_num', '', result)

            if 'ep_ab_num' in named_groups:
                result = _process_epnum(match, named_groups, 'ep_ab_num', 'ab_ep_num', 'ab_', result)

            if 'air_year' in named_groups and 'air_month' in named_groups and 'air_day' in named_groups:
                year = int(match.group('air_year'))
                try:
                    month = int(match.group('air_month'))
                except ValueError:
                    try:
                        # month may be a textual name, e.g. 'Jan'
                        month = time.strptime(match.group('air_month')[0:3], '%b').tm_mon
                    except ValueError as e:
                        raise InvalidNameException(ex(e))
                day = int(match.group('air_day'))
                # make an attempt to detect YYYY-DD-MM formats
                if 12 < month:
                    tmp_month = month
                    month = day
                    day = tmp_month
                try:
                    # expand 2-digit years: 01-27 -> 20xx, 28-99 -> 19xx
                    result.air_date = datetime.date(
                        year + ((1900, 2000)[0 < year < 28], 0)[1900 < year], month, day)
                except ValueError as e:
                    raise InvalidNameException(ex(e))

            if 'extra_info' in named_groups:
                tmp_extra_info = match.group('extra_info')
                # Show.S04.Special or Show.S05.Part.2.Extras is almost certainly not every episode in the season
                if tmp_extra_info and 'season_only' == cur_regex_name and re.search(
                        r'([. _-]|^)(special|extra)s?\w*([. _-]|$)', tmp_extra_info, re.I):
                    continue
                if tmp_extra_info:
                    if result.extra_info:
                        tmp_extra_info = '%s %s' % (result.extra_info, tmp_extra_info)
                    result.extra_info = tmp_extra_info
                result.score += 1

            if 'release_group' in named_groups:
                result.release_group = match.group('release_group')
                result.score += 1

            if 'version' in named_groups:
                # assigns version to anime file if detected using anime regex. Non-anime regex receives -1
                version = match.group('version')
                if version:
                    result.version = helpers.tryInt(version)
                else:
                    result.version = 1
            else:
                result.version = -1

            # 'Part N' releases without a season are assumed to be season 1
            if None is result.season_number and result.episode_numbers and not result.air_date and \
                    cur_regex_name in ['no_season', 'no_season_general', 'no_season_multi_ep'] and \
                    re.search(r'(?i)\bpart.?\d{1,2}\b', result.original_name):
                result.season_number = 1

            matches.append(result)

        if len(matches):
            # pick best match with highest score based on placement
            best_result = max(sorted(matches, reverse=True, key=lambda x: x.which_regex),
                              key=lambda x: x.score)

            show = None
            if not self.naming_pattern:
                # try and create a show object for this result
                show = helpers.get_show(best_result.series_name, self.try_scene_exceptions)

            # confirm passed in show object indexer id matches result show object indexer id
            if show and not self.testing:
                if self.showObj and show.indexerid != self.showObj.indexerid:
                    show = None
            elif not show and self.showObj:
                show = self.showObj
            best_result.show = show

            # anime shows must be matched by the anime regex family; retry with it
            if show and show.is_anime and 1 < len(self.compiled_regexes[1]) and 1 != reg_ex:
                continue

            # if this is a naming pattern test then return best result
            if not show or self.naming_pattern:
                if not show and not self.naming_pattern and not self.testing:
                    # ensure anime regex test but use initial best if show still not found
                    if 0 == reg_ex:
                        initial_best_result = best_result
                        matches = []  # clear non-anime match scores
                        continue
                    return initial_best_result
                return best_result

            # get quality
            new_name = helpers.remove_non_release_groups(name, show.is_anime)
            best_result.quality = common.Quality.nameQuality(new_name, show.is_anime)

            new_episode_numbers = []
            new_season_numbers = []
            new_absolute_numbers = []

            # if we have an air-by-date show then get the real season/episode numbers
            if best_result.is_air_by_date:
                season_number, episode_numbers = None, []

                airdate = best_result.air_date.toordinal()
                my_db = db.DBConnection()
                sql_result = my_db.select(
                    'SELECT season, episode, name FROM tv_episodes ' +
                    'WHERE showid = ? and indexer = ? and airdate = ?',
                    [show.indexerid, show.indexer, airdate])

                if sql_result:
                    season_number = int(sql_result[0]['season'])
                    episode_numbers = [int(sql_result[0]['episode'])]

                    if 1 < len(sql_result):
                        # multi-eps broadcast on this day: disambiguate via a
                        # part number in extra_info matched against ep titles
                        nums = {'1': 'one', '2': 'two', '3': 'three', '4': 'four', '5': 'five',
                                '6': 'six', '7': 'seven', '8': 'eight', '9': 'nine', '10': 'ten'}
                        patt = '(?i)(?:e(?:p(?:isode)?)?|part|pt)[. _-]?(%s)'
                        try:
                            src_num = str(re.findall(patt % '\w+', best_result.extra_info)[0])
                            # accept the part number as a digit or its word form
                            alt_num = nums.get(src_num) or list(nums.keys())[
                                list(nums.values()).index(src_num)]
                            re_partnum = re.compile(patt % ('%s|%s' % (src_num, alt_num)))
                            for ep_details in sql_result:
                                if re_partnum.search(ep_details['name']):
                                    season_number = int(ep_details['season'])
                                    episode_numbers = [int(ep_details['episode'])]
                                    break
                        except (StandardError, Exception):
                            pass

                # NOTE: 'and' binds tighter than 'or' here, so this fires when
                # (indexer_lookup and no season) or when no episodes were found
                if self.indexer_lookup and not season_number or not len(episode_numbers):
                    try:
                        lindexer_api_parms = sickbeard.indexerApi(show.indexer).api_params.copy()

                        if show.lang:
                            lindexer_api_parms['language'] = show.lang

                        t = sickbeard.indexerApi(show.indexer).indexer(**lindexer_api_parms)

                        ep_obj = t[show.indexerid].aired_on(best_result.air_date)[0]

                        season_number = int(ep_obj['seasonnumber'])
                        episode_numbers = [int(ep_obj['episodenumber'])]
                    except sickbeard.indexer_episodenotfound:
                        logger.log(u'Unable to find episode with date ' + str(best_result.air_date)
                                   + ' for show ' + show.name + ', skipping', logger.WARNING)
                        episode_numbers = []
                    except sickbeard.indexer_error as e:
                        logger.log(u'Unable to contact ' + sickbeard.indexerApi(show.indexer).name
                                   + ': ' + ex(e), logger.WARNING)
                        episode_numbers = []

                for epNo in episode_numbers:
                    s = season_number
                    e = epNo

                    if self.convert and show.is_scene:
                        (s, e) = scene_numbering.get_indexer_numbering(
                            show.indexerid, show.indexer, season_number, epNo)
                    new_episode_numbers.append(e)
                    new_season_numbers.append(s)

            elif show.is_anime and len(best_result.ab_episode_numbers) and not self.testing:
                scene_season = scene_exceptions.get_scene_exception_by_name(best_result.series_name)[1]
                for epAbsNo in best_result.ab_episode_numbers:
                    a = epAbsNo

                    if self.convert and show.is_scene:
                        a = scene_numbering.get_indexer_absolute_numbering(
                            show.indexerid, show.indexer, epAbsNo, True, scene_season)

                    (s, e) = helpers.get_all_episodes_from_absolute_number(show, [a])

                    new_absolute_numbers.append(a)
                    new_episode_numbers.extend(e)
                    new_season_numbers.append(s)

            elif best_result.season_number and len(best_result.episode_numbers) and not self.testing:
                for epNo in best_result.episode_numbers:
                    s = best_result.season_number
                    e = epNo

                    if self.convert and show.is_scene:
                        (s, e) = scene_numbering.get_indexer_numbering(
                            show.indexerid, show.indexer, best_result.season_number, epNo)
                    if show.is_anime:
                        a = helpers.get_absolute_number_from_season_and_episode(show, s, e)
                        if a:
                            new_absolute_numbers.append(a)
                    new_episode_numbers.append(e)
                    new_season_numbers.append(s)

            # need to do a quick sanity check here. It's possible that we now have episodes
            # from more than one season (by tvdb numbering), and this is just too much
            # for sickbeard, so we'd need to flag it.
            new_season_numbers = list(set(new_season_numbers))  # remove duplicates
            if 1 < len(new_season_numbers):
                raise InvalidNameException('Scene numbering results episodes from '
                                           'seasons %s, (i.e. more than one) and '
                                           'SickGear does not support this. '
                                           'Sorry.' % (str(new_season_numbers)))

            # I guess it's possible that we'd have duplicate episodes too, so lets
            # eliminate them
            new_episode_numbers = list(set(new_episode_numbers))
            new_episode_numbers.sort()

            # maybe even duplicate absolute numbers so why not do them as well
            new_absolute_numbers = list(set(new_absolute_numbers))
            new_absolute_numbers.sort()

            if len(new_absolute_numbers):
                best_result.ab_episode_numbers = new_absolute_numbers

            if len(new_season_numbers) and len(new_episode_numbers):
                best_result.episode_numbers = new_episode_numbers
                best_result.season_number = new_season_numbers[0]

            if self.convert and show.is_scene:
                logger.log(u'Converted parsed result %s into %s'
                           % (best_result.original_name,
                              str(best_result).decode('utf-8', 'xmlcharrefreplace')), logger.DEBUG)

            helpers.cpu_sleep()

            return best_result
def execute(self):
    """Run the v16 database migration.

    Marks season-0 (special) episodes that were WANTED as SKIPPED, then
    re-analyzes the history table to repair bad provider/release_group data
    and to back-fill tv_episodes.release_name from snatch/download pairs.
    Finishes by bumping the DB version and vacuuming the database.
    """
    backupDatabase(16)

    logger.log(u"Setting special episodes status to SKIPPED.")
    self.connection.action("UPDATE tv_episodes SET status = ? WHERE status = ? AND season = 0",
                           [common.SKIPPED, common.WANTED])

    fix_ep_rls_group = []
    fix_ep_release_name = []

    # re-analyze snatched data
    # NOTE(review): action % 100 = 2 appears to select snatch events (log text below
    # says "Found a snatch"), action % 100 = 4 the downloads - confirm against common.py
    logger.log(u"Analyzing history to correct bad data (this could take a moment, be patient)...")
    history_results = self.connection.select("SELECT * FROM history WHERE action % 100 = 2 ORDER BY date ASC")
    for cur_result in history_results:
        # find the associated download, if there isn't one then ignore it
        download_results = self.connection.select(
            "SELECT * FROM history WHERE action % 100 = 4 AND showid = ? AND season = ? AND episode = ? AND quality = ? AND date > ?",
            [cur_result["showid"], cur_result["season"], cur_result["episode"], cur_result["quality"],
             cur_result["date"]])
        # only continue if there was a download found (thats newer than the snatched)
        if not download_results:
            logger.log(u"Found a snatch in the history for " + cur_result["resource"]
                       + " but couldn't find the associated download, skipping it", logger.DEBUG)
            continue

        # take the snatched nzb, clean it up so we can store it for the corresponding tv_episodes entry
        clean_nzb_name = helpers.remove_non_release_groups(helpers.remove_extension(cur_result["resource"]))

        # fixed known bad release_group data
        if download_results[0]["provider"].upper() in ["-1", "RP", "NZBGEEK"] \
                or "." in download_results[0]["provider"]:
            try:
                np = NameParser(False)
                parse_result = np.parse(clean_nzb_name)
            except InvalidNameException:
                continue

            # leave off check for episode number so we can update season rip data as well?
            if parse_result.series_name and parse_result.season_number is not None and parse_result.release_group:
                fix_ep_rls_group.append(
                    ["UPDATE history SET provider = ? WHERE action = ? AND showid = ? AND season = ? AND episode = ? AND quality = ? AND date = ?",
                     [parse_result.release_group, download_results[0]["action"], download_results[0]["showid"],
                      download_results[0]["season"], download_results[0]["episode"],
                      download_results[0]["quality"], download_results[0]["date"]]])

        # find the associated episode on disk
        ep_results = self.connection.select(
            "SELECT episode_id, status, release_name FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ? AND location != ''",
            [cur_result["showid"], cur_result["season"], cur_result["episode"]])
        if not ep_results:
            logger.log(u"The episode " + cur_result["resource"]
                       + " was found in history but doesn't exist on disk anymore, skipping", logger.DEBUG)
            continue

        # skip items that appears to have a 'scene' name already to avoid replacing locally pp/manually moved items
        match = re.search(".(xvid|x264|h.?264|mpeg-?2)", ep_results[0]["release_name"], re.I)
        if match:
            continue

        # get the status/quality of the existing ep and make sure it's what we expect
        ep_status, ep_quality = common.Quality.splitCompositeStatus(int(ep_results[0]["status"]))
        if ep_status != common.DOWNLOADED:
            continue

        if ep_quality != int(cur_result["quality"]):
            continue

        # take the extension off the filename, it's not needed
        file_name = ek.ek(os.path.basename, download_results[0]["resource"])
        if '.' in file_name:
            file_name = file_name.rpartition('.')[0]

        # make sure this is actually a real release name and not a season pack or something
        for cur_name in (clean_nzb_name, file_name):
            logger.log(u"Checking if " + cur_name + " is actually a good release name", logger.DEBUG)
            try:
                np = NameParser(False)
                parse_result = np.parse(cur_name)
            except InvalidNameException:
                continue

            if parse_result.series_name and parse_result.season_number is not None \
                    and parse_result.episode_numbers and parse_result.release_group:
                # if all is well by this point we'll just put the release name into the database
                fix_ep_release_name.append(
                    ["UPDATE tv_episodes SET release_name = ? WHERE episode_id = ?",
                     [cur_name, ep_results[0]["episode_id"]]])
                break

    logger.log(u"Corrected " + str(len(fix_ep_release_name)) + " release names ("
               + str(len(fix_ep_rls_group)) + " release groups) out of the "
               + str(len(history_results)) + " releases analyzed.")
    if len(fix_ep_rls_group) > 0:
        self.connection.mass_action(fix_ep_rls_group)
    if len(fix_ep_release_name) > 0:
        self.connection.mass_action(fix_ep_release_name)

    # now cleanup all downloaded release groups in the history
    fix_ep_rls_group = []
    logger.log(u"Analyzing downloaded history release groups...")
    history_results = self.connection.select("SELECT * FROM history WHERE action % 100 = 4 ORDER BY date ASC")
    for cur_result in history_results:
        clean_provider = helpers.remove_non_release_groups(helpers.remove_extension(cur_result["provider"]))
        # take the data on the left of the _, fixes 'LOL_repost'
        if clean_provider and "_" in clean_provider:
            clean_provider = clean_provider.rsplit('_', 1)[0]
        if clean_provider != cur_result["provider"]:
            fix_ep_rls_group.append(
                ["UPDATE history SET provider = ? WHERE action = ? AND showid = ? AND season = ? AND episode = ? AND quality = ? AND date = ?",
                 [clean_provider, cur_result["action"], cur_result["showid"], cur_result["season"],
                  cur_result["episode"], cur_result["quality"], cur_result["date"]]])
    logger.log(u"Corrected " + str(len(fix_ep_rls_group)) + " release groups.")
    if len(fix_ep_rls_group) > 0:
        self.connection.mass_action(fix_ep_rls_group)

    self.incDBVersion()

    # cleanup and reduce db if any previous data was removed
    logger.log(u"Performing a vacuum on the database.", logger.DEBUG)
    self.connection.action("VACUUM")
def _test(self):
    """Verify each sample name cleans down to the expected release name."""
    for cur_string in test_strings:
        cleaned = remove_non_release_groups(cur_string)
        self.assertEqual(cleaned, test_result)
def _get_proper_list(aired_since_shows, recent_shows, recent_anime, proper_list=None):
    """Collect Proper/Repack release candidates worth downloading.

    Queries each active provider (or uses the pre-fetched `proper_list`) for
    Proper releases, then filters them against the local database: the episode
    must have been snatched/downloaded recently at the same quality, pass
    ignore/require word checks, not be a failed or already-seen release, and
    be a genuinely higher proper level than what is already on disk.

    :param aired_since_shows: search date passed to each provider's find_propers()
    :param recent_shows: (indexer, indexerid) entries for recently-aired shows
                         (membership-tested against parsed results)
    :param recent_anime: (indexer, indexerid) entries for recently-aired anime
    :param proper_list: optional dict of pre-fetched propers keyed by provider
                        id; when given, providers are not searched
    :return: values() of the de-duplicated {generic_name: proper} mapping
    """
    propers = {}

    my_db = db.DBConnection()
    # for each provider get a list of arbitrary Propers
    orig_thread_name = threading.currentThread().name
    providers = filter(lambda p: p.is_active(), sickbeard.providers.sortedProviderList())
    for cur_provider in providers:
        if not recent_anime and cur_provider.anime_only:
            continue

        if None is not proper_list:
            found_propers = proper_list.get(cur_provider.get_id(), [])
            if not found_propers:
                continue
        else:
            # temporarily rename the thread so provider searches are identifiable in logs
            threading.currentThread().name = '%s :: [%s]' % (orig_thread_name, cur_provider.name)

            logger.log('Searching for new PROPER releases')

            try:
                found_propers = cur_provider.find_propers(search_date=aired_since_shows, shows=recent_shows,
                                                          anime=recent_anime)
            except exceptions.AuthException as e:
                logger.log('Authentication error: %s' % ex(e), logger.ERROR)
                continue
            except Exception as e:
                logger.log('Error while searching %s, skipping: %s' % (cur_provider.name, ex(e)), logger.ERROR)
                logger.log(traceback.format_exc(), logger.ERROR)
                continue
            finally:
                threading.currentThread().name = orig_thread_name

        # if they haven't been added by a different provider than add the Proper to the list
        for cur_proper in found_propers:
            name = _generic_name(cur_proper.name)
            if name in propers:
                continue

            try:
                np = NameParser(False, try_scene_exceptions=True, showObj=cur_proper.parsed_show,
                                indexer_lookup=False)
                parse_result = np.parse(cur_proper.name)
            except (InvalidNameException, InvalidShowException, Exception):
                continue

            # get the show object
            cur_proper.parsed_show = (cur_proper.parsed_show
                                      or helpers.findCertainShow(sickbeard.showList, parse_result.show.indexerid))
            if None is cur_proper.parsed_show:
                logger.log('Skip download; cannot find show with indexerid [%s]' % cur_proper.indexerid,
                           logger.ERROR)
                continue

            cur_proper.indexer = cur_proper.parsed_show.indexer
            cur_proper.indexerid = cur_proper.parsed_show.indexerid

            # the parsed show must be one that aired recently
            if not (-1 != cur_proper.indexerid and parse_result.series_name and parse_result.episode_numbers
                    and (cur_proper.indexer, cur_proper.indexerid) in recent_shows + recent_anime):
                continue

            # only get anime Proper if it has release group and version
            if parse_result.is_anime and not parse_result.release_group and -1 == parse_result.version:
                logger.log('Ignored Proper with no release group and version in name [%s]' % cur_proper.name,
                           logger.DEBUG)
                continue

            if not show_name_helpers.pass_wordlist_checks(cur_proper.name, parse=False, indexer_lookup=False):
                logger.log('Ignored unwanted Proper [%s]' % cur_proper.name, logger.DEBUG)
                continue

            re_x = dict(re_prefix='.*', re_suffix='.*')
            result = show_name_helpers.contains_any(cur_proper.name, cur_proper.parsed_show.rls_ignore_words,
                                                    **re_x)
            if None is not result and result:
                logger.log('Ignored Proper containing ignore word [%s]' % cur_proper.name, logger.DEBUG)
                continue

            result = show_name_helpers.contains_any(cur_proper.name, cur_proper.parsed_show.rls_require_words,
                                                    **re_x)
            if None is not result and not result:
                logger.log('Ignored Proper for not containing any required word [%s]' % cur_proper.name,
                           logger.DEBUG)
                continue

            cur_size = getattr(cur_proper, 'size', None)
            if failed_history.has_failed(cur_proper.name, cur_size, cur_provider.name):
                continue

            cur_proper.season = parse_result.season_number if None is not parse_result.season_number else 1
            cur_proper.episode = parse_result.episode_numbers[0]

            # check if we actually want this Proper (if it's the right quality)
            sql_results = my_db.select(
                'SELECT release_group, status, version, release_name'
                ' FROM tv_episodes'
                ' WHERE showid = ? AND indexer = ? AND season = ? AND episode = ?'
                ' LIMIT 1',
                [cur_proper.indexerid, cur_proper.indexer, cur_proper.season, cur_proper.episode])
            if not sql_results:
                continue

            # only keep the Proper if we already retrieved the same quality ep (don't get better/worse ones)
            # check if we want this release: same quality as current, current has correct status
            # restrict other release group releases to Proper's
            old_status, old_quality = Quality.splitCompositeStatus(int(sql_results[0]['status']))
            cur_proper.quality = Quality.nameQuality(cur_proper.name, parse_result.is_anime)
            cur_proper.is_repack, cur_proper.properlevel = Quality.get_proper_level(
                parse_result.extra_info_no_name(), parse_result.version, parse_result.is_anime,
                check_is_repack=True)
            cur_proper.proper_level = cur_proper.properlevel  # local non global value
            old_release_group = sql_results[0]['release_group']
            try:
                same_release_group = parse_result.release_group.lower() == old_release_group.lower()
            except (StandardError, Exception):
                # release group may be None on either side; fall back to plain comparison
                same_release_group = parse_result.release_group == old_release_group
            if old_status not in SNATCHED_ANY + [DOWNLOADED, ARCHIVED] \
                    or cur_proper.quality != old_quality \
                    or (cur_proper.is_repack and not same_release_group):
                continue

            np = NameParser(False, try_scene_exceptions=True, showObj=cur_proper.parsed_show,
                            indexer_lookup=False)
            try:
                extra_info = np.parse(sql_results[0]['release_name']).extra_info_no_name()
            except (StandardError, Exception):
                extra_info = None
            # don't take Proper of the same level we already downloaded
            old_proper_level, old_is_internal, old_codec, old_extra_no_name, old_name = \
                get_old_proper_level(cur_proper.parsed_show, cur_proper.indexer, cur_proper.indexerid,
                                     cur_proper.season, parse_result.episode_numbers, old_status,
                                     cur_proper.quality, extra_info,
                                     parse_result.version, parse_result.is_anime)
            cur_proper.codec = _get_codec(parse_result.extra_info_no_name())
            if cur_proper.proper_level < old_proper_level:
                continue

            cur_proper.is_internal = (parse_result.extra_info_no_name()
                                      and re.search(r'\binternal\b', parse_result.extra_info_no_name(),
                                                    flags=re.I))
            if cur_proper.proper_level == old_proper_level:
                # NOTE(review): both branches end in `continue`; the inner condition
                # (codec upgrade / internal downgrade) currently has no effect - confirm intent
                if (('264' == cur_proper.codec and 'xvid' == old_codec)
                        or (old_is_internal and not cur_proper.is_internal)):
                    pass
                continue

            is_web = (old_quality in (Quality.HDWEBDL, Quality.FULLHDWEBDL, Quality.UHD4KWEB)
                      or (old_quality == Quality.SDTV
                          and re.search(r'\Wweb.?(dl|rip|.[hx]26[45])\W',
                                        str(sql_results[0]['release_name']), re.I)))

            if is_web:
                old_name = (old_name, sql_results[0]['release_name'])[old_name in ('', None)]
                old_webdl_type = get_webdl_type(old_extra_no_name, old_name)
                new_webdl_type = get_webdl_type(parse_result.extra_info_no_name(), cur_proper.name)
                if old_webdl_type != new_webdl_type:
                    logger.log('Ignored Proper webdl source [%s], does not match existing webdl source [%s] for [%s]'
                               % (old_webdl_type, new_webdl_type, cur_proper.name), logger.DEBUG)
                    continue

            # for webdls, prevent Propers from different groups
            log_same_grp = 'Ignored Proper from release group [%s] does not match existing group [%s] for [%s]' \
                           % (parse_result.release_group, old_release_group, cur_proper.name)
            if sickbeard.PROPERS_WEBDL_ONEGRP and is_web and not same_release_group:
                logger.log(log_same_grp, logger.DEBUG)
                continue

            # check if we actually want this Proper (if it's the right release group and a higher version)
            if parse_result.is_anime:
                old_version = int(sql_results[0]['version'])
                if not (-1 < old_version < parse_result.version):
                    continue
                if not same_release_group:
                    logger.log(log_same_grp, logger.DEBUG)
                    continue
                found_msg = 'Found anime Proper v%s to replace v%s' % (parse_result.version, old_version)
            else:
                found_msg = 'Found Proper [%s]' % cur_proper.name

            # make sure the episode has been downloaded before
            history_limit = datetime.datetime.today() - datetime.timedelta(days=30)
            history_results = my_db.select(
                'SELECT resource FROM history'
                ' WHERE showid = ?'
                ' AND season = ? AND episode = ? AND quality = ? AND date >= ?'
                ' AND (%s)' % ' OR '.join('action LIKE "%%%02d"' % x
                                          for x in SNATCHED_ANY + [DOWNLOADED, ARCHIVED]),
                [cur_proper.indexerid, cur_proper.season, cur_proper.episode, cur_proper.quality,
                 history_limit.strftime(history.dateFormat)])

            # skip if the episode has never downloaded, because a previous quality is required to match the Proper
            if not len(history_results):
                logger.log('Ignored Proper cannot find a recent history item for [%s]' % cur_proper.name,
                           logger.DEBUG)
                continue

            # make sure that none of the existing history downloads are the same Proper as the download candidate
            clean_proper_name = _generic_name(helpers.remove_non_release_groups(
                cur_proper.name, cur_proper.parsed_show.is_anime))
            is_same = False
            for hitem in history_results:
                # if the result exists in history already we need to skip it
                if clean_proper_name == _generic_name(helpers.remove_non_release_groups(
                        ek.ek(os.path.basename, hitem['resource']))):
                    is_same = True
                    break
            if is_same:
                logger.log('Ignored Proper already in history [%s]' % cur_proper.name)
                continue

            logger.log(found_msg, logger.DEBUG)

            # finish populating the Proper instance
            # cur_proper.show = cur_proper.parsed_show.indexerid
            cur_proper.provider = cur_provider
            cur_proper.extra_info = parse_result.extra_info
            cur_proper.extra_info_no_name = parse_result.extra_info_no_name
            cur_proper.release_group = parse_result.release_group
            cur_proper.is_anime = parse_result.is_anime
            cur_proper.version = parse_result.version

            propers[name] = cur_proper

        cur_provider.log_result('Propers', len(propers), '%s' % cur_provider.name)

    return propers.values()
def _genericName(name, **kwargs): if kwargs.pop('remove', True): name = remove_non_release_groups(name, clean_proper=kwargs.pop('clean_proper', False)) return name.replace('.', ' ').replace('-', ' ').replace('_', ' ').lower()
def _parse_string(self, name):
    """Parse a release name into a ParseResult.

    Runs every compiled regex set against `name`, scores each match, picks the
    best-scoring result, resolves it to a show object, and (unless this is a
    naming-pattern test) converts scene numbering to indexer numbering via the
    local database and/or the indexer API.

    :param name: release name to parse; returns None when falsy
    :return: best ParseResult, or None when nothing matched
    :raises InvalidNameException: on an invalid air date or when scene
        numbering maps episodes onto more than one season
    """
    if not name:
        return

    matches = []
    for regex in self.compiled_regexes:
        for (cur_regex_num, cur_regex_name, cur_regex) in self.compiled_regexes[regex]:
            match = cur_regex.match(name)

            if not match:
                continue

            result = ParseResult(name)
            result.which_regex = [cur_regex_name]
            # lower regex numbers are more specific patterns, so they score higher
            result.score = 0 - cur_regex_num

            named_groups = match.groupdict().keys()

            if 'series_name' in named_groups:
                result.series_name = match.group('series_name')
                if result.series_name:
                    result.series_name = self.clean_series_name(result.series_name)
                    result.score += 1

            if 'series_num' in named_groups and match.group('series_num'):
                result.score += 1

            if 'season_num' in named_groups:
                tmp_season = int(match.group('season_num'))
                # 'bare' matches of 19xx/20xx are almost certainly years, not seasons
                if 'bare' == cur_regex_name and tmp_season in (19, 20):
                    continue
                result.season_number = tmp_season
                result.score += 1

            if 'ep_num' in named_groups:
                ep_num = self._convert_number(match.group('ep_num'))
                if 'extra_ep_num' in named_groups and match.group('extra_ep_num'):
                    # multi-episode release: expand to the full inclusive range
                    result.episode_numbers = range(ep_num,
                                                   self._convert_number(match.group('extra_ep_num')) + 1)
                    result.score += 1
                else:
                    result.episode_numbers = [ep_num]
                result.score += 1

            if 'ep_ab_num' in named_groups:
                ep_ab_num = self._convert_number(match.group('ep_ab_num'))
                if 'extra_ab_ep_num' in named_groups and match.group('extra_ab_ep_num'):
                    result.ab_episode_numbers = range(ep_ab_num,
                                                      self._convert_number(match.group('extra_ab_ep_num')) + 1)
                    result.score += 1
                else:
                    result.ab_episode_numbers = [ep_ab_num]
                result.score += 1

            if 'air_year' in named_groups and 'air_month' in named_groups and 'air_day' in named_groups:
                year = int(match.group('air_year'))
                month = int(match.group('air_month'))
                day = int(match.group('air_day'))
                # make an attempt to detect YYYY-DD-MM formats
                if 12 < month:
                    tmp_month = month
                    month = day
                    day = tmp_month
                try:
                    result.air_date = datetime.date(year, month, day)
                except ValueError as e:
                    raise InvalidNameException(ex(e))

            if 'extra_info' in named_groups:
                tmp_extra_info = match.group('extra_info')
                # Show.S04.Special or Show.S05.Part.2.Extras is almost certainly not every episode in the season
                if tmp_extra_info and 'season_only' == cur_regex_name and re.search(
                        r'([. _-]|^)(special|extra)s?\w*([. _-]|$)', tmp_extra_info, re.I):
                    continue
                result.extra_info = tmp_extra_info
                result.score += 1

            if 'release_group' in named_groups:
                result.release_group = helpers.remove_non_release_groups(match.group('release_group'))
                result.score += 1

            if 'version' in named_groups:
                # assigns version to anime file if detected using anime regex. Non-anime regex receives -1
                version = match.group('version')
                if version:
                    result.version = version
                else:
                    result.version = 1
            else:
                result.version = -1

            matches.append(result)

        if len(matches):
            # pick best match with highest score based on placement
            best_result = max(sorted(matches, reverse=True, key=lambda x: x.which_regex),
                              key=lambda x: x.score)

            show = None
            if not self.naming_pattern:
                # try and create a show object for this result
                show = helpers.get_show(best_result.series_name, self.try_scene_exceptions)

            # confirm passed in show object indexer id matches result show object indexer id
            if show and not self.testing:
                if self.showObj and show.indexerid != self.showObj.indexerid:
                    show = None
            elif not show and self.showObj:
                show = self.showObj
            best_result.show = show

            # anime shows get another pass with the anime regex set before accepting this result
            if show and show.is_anime and 1 < len(self.compiled_regexes[1]) and 1 != regex:
                continue

            # if this is a naming pattern test then return best result
            if not show or self.naming_pattern:
                return best_result

            # get quality
            best_result.quality = common.Quality.nameQuality(name, show.is_anime)

            new_episode_numbers = []
            new_season_numbers = []
            new_absolute_numbers = []

            # if we have an air-by-date show then get the real season/episode numbers
            if best_result.is_air_by_date:
                airdate = best_result.air_date.toordinal()
                my_db = db.DBConnection()
                sql_result = my_db.select(
                    'SELECT season, episode FROM tv_episodes WHERE showid = ? and indexer = ? and airdate = ?',
                    [show.indexerid, show.indexer, airdate])

                season_number = None
                episode_numbers = []

                if sql_result:
                    season_number = int(sql_result[0][0])
                    episode_numbers = [int(sql_result[0][1])]

                if not season_number or not len(episode_numbers):
                    # local DB had no match; fall back to asking the indexer by air date
                    try:
                        lindexer_api_parms = sickbeard.indexerApi(show.indexer).api_params.copy()

                        if show.lang:
                            lindexer_api_parms['language'] = show.lang

                        t = sickbeard.indexerApi(show.indexer).indexer(**lindexer_api_parms)

                        ep_obj = t[show.indexerid].airedOn(best_result.air_date)[0]

                        season_number = int(ep_obj['seasonnumber'])
                        episode_numbers = [int(ep_obj['episodenumber'])]
                    except sickbeard.indexer_episodenotfound:
                        logger.log(u'Unable to find episode with date ' + str(best_result.air_date)
                                   + ' for show ' + show.name + ', skipping', logger.WARNING)
                        episode_numbers = []
                    except sickbeard.indexer_error as e:
                        logger.log(u'Unable to contact ' + sickbeard.indexerApi(show.indexer).name
                                   + ': ' + ex(e), logger.WARNING)
                        episode_numbers = []

                for epNo in episode_numbers:
                    s = season_number
                    e = epNo

                    if self.convert and show.is_scene:
                        (s, e) = scene_numbering.get_indexer_numbering(show.indexerid, show.indexer,
                                                                       season_number, epNo)
                    new_episode_numbers.append(e)
                    new_season_numbers.append(s)

            elif show.is_anime and len(best_result.ab_episode_numbers) and not self.testing:
                scene_season = scene_exceptions.get_scene_exception_by_name(best_result.series_name)[1]
                for epAbsNo in best_result.ab_episode_numbers:
                    a = epAbsNo

                    if self.convert and show.is_scene:
                        a = scene_numbering.get_indexer_absolute_numbering(show.indexerid, show.indexer,
                                                                           epAbsNo, True, scene_season)

                    (s, e) = helpers.get_all_episodes_from_absolute_number(show, [a])

                    new_absolute_numbers.append(a)
                    new_episode_numbers.extend(e)
                    new_season_numbers.append(s)

            elif best_result.season_number and len(best_result.episode_numbers) and not self.testing:
                for epNo in best_result.episode_numbers:
                    s = best_result.season_number
                    e = epNo

                    if self.convert and show.is_scene:
                        (s, e) = scene_numbering.get_indexer_numbering(show.indexerid, show.indexer,
                                                                       best_result.season_number, epNo)
                    if show.is_anime:
                        a = helpers.get_absolute_number_from_season_and_episode(show, s, e)
                        if a:
                            new_absolute_numbers.append(a)

                    new_episode_numbers.append(e)
                    new_season_numbers.append(s)

            # need to do a quick sanity check heregex. It's possible that we now have episodes
            # from more than one season (by tvdb numbering), and this is just too much
            # for sickbeard, so we'd need to flag it.
            new_season_numbers = list(set(new_season_numbers))  # remove duplicates
            if 1 < len(new_season_numbers):
                raise InvalidNameException('Scene numbering results episodes from '
                                           'seasons %s, (i.e. more than one) and '
                                           'SickGear does not support this. '
                                           'Sorry.' % (str(new_season_numbers)))

            # I guess it's possible that we'd have duplicate episodes too, so lets
            # eliminate them
            new_episode_numbers = list(set(new_episode_numbers))
            new_episode_numbers.sort()

            # maybe even duplicate absolute numbers so why not do them as well
            new_absolute_numbers = list(set(new_absolute_numbers))
            new_absolute_numbers.sort()

            if len(new_absolute_numbers):
                best_result.ab_episode_numbers = new_absolute_numbers

            if len(new_season_numbers) and len(new_episode_numbers):
                best_result.episode_numbers = new_episode_numbers
                best_result.season_number = new_season_numbers[0]

            if self.convert and show.is_scene:
                logger.log(u'Converted parsed result %s into %s'
                           % (best_result.original_name,
                              str(best_result).decode('utf-8', 'xmlcharrefreplace')),
                           logger.DEBUG)

            # CPU sleep
            time.sleep(cpu_presets[sickbeard.CPU_PRESET])

            return best_result
def _parse_string(self, name):
    """Parse a release name into a ParseResult (newer variant).

    Like the classic parser but additionally: strips non-release-group
    decorations per-regex, bails out on 'garbage_name' matches, splits a
    trailing 'Part N' off the series name, resolves multi-episode ranges via
    episode-title lookups, handles two-digit years and month names in air
    dates, and keeps a first-pass best result while retrying with the anime
    regex set when no show was found.

    :param name: release name to parse; returns None when falsy
    :return: best ParseResult, or None when nothing matched or the name is garbage
    :raises InvalidNameException: on an invalid air date or when scene
        numbering maps episodes onto more than one season
    """
    if not name:
        return

    matches = []
    initial_best_result = None
    for reg_ex in self.compiled_regexes:
        for (cur_regex_num, cur_regex_name, cur_regex) in self.compiled_regexes[reg_ex]:
            new_name = helpers.remove_non_release_groups(name, 'anime' in cur_regex_name)
            match = cur_regex.match(new_name)

            if not match:
                continue

            if 'garbage_name' == cur_regex_name:
                return

            result = ParseResult(new_name)
            result.which_regex = [cur_regex_name]
            # lower regex numbers are more specific patterns, so they score higher
            result.score = 0 - cur_regex_num

            named_groups = match.groupdict().keys()

            if 'series_name' in named_groups:
                result.series_name = match.group('series_name')
                if result.series_name:
                    result.series_name = self.clean_series_name(result.series_name)
                    # split a trailing 'Part N' / 'Pt N' off into extra_info
                    name_parts = re.match('(?i)(.*)[ -]((?:part|pt)[ -]?\w+)$', result.series_name)
                    try:
                        result.series_name = name_parts.group(1)
                        result.extra_info = name_parts.group(2)
                    except (AttributeError, IndexError):
                        pass
                    result.score += 1

            # an anime regex matched but the known show isn't anime: reject this match
            if 'anime' in cur_regex_name and not (self.showObj and self.showObj.is_anime):
                p_show = helpers.get_show(result.series_name, True)
                if p_show and self.showObj and p_show.indexerid != self.showObj.indexerid:
                    p_show = None
                if not p_show and self.showObj:
                    p_show = self.showObj
                if p_show and not p_show.is_anime:
                    continue

            if 'series_num' in named_groups and match.group('series_num'):
                result.score += 1

            if 'season_num' in named_groups:
                tmp_season = int(match.group('season_num'))
                # 'bare' matches of 19xx/20xx are almost certainly years, not seasons
                if 'bare' == cur_regex_name and tmp_season in (19, 20):
                    continue
                result.season_number = tmp_season
                result.score += 1

            def _process_epnum(captures, capture_names, grp_name, extra_grp_name, ep_numbers, parse_result):
                # Resolve a (possibly ranged) episode capture into parse_result.
                # When the extra group is present, expand to a range only if the
                # episode title doesn't start with the extra number (which would
                # indicate a numbered episode title, not a multi-episode release)
                # and the span is a sane size (< 10).
                ep_num = self._convert_number(captures.group(grp_name))
                extra_grp_name = 'extra_%s' % extra_grp_name
                ep_numbers = '%sepisode_numbers' % ep_numbers
                if extra_grp_name in capture_names and captures.group(extra_grp_name):
                    try:
                        if hasattr(self.showObj, 'getEpisode'):
                            ep = self.showObj.getEpisode(parse_result.season_number, ep_num)
                        else:
                            tmp_show = helpers.get_show(parse_result.series_name, True, False)
                            if tmp_show and hasattr(tmp_show, 'getEpisode'):
                                ep = tmp_show.getEpisode(parse_result.season_number, ep_num)
                            else:
                                ep = None
                    except (StandardError, Exception):
                        ep = None
                    en = ep and ep.name and re.match(r'^\W*(\d+)', ep.name) or None
                    es = en and en.group(1) or None

                    extra_ep_num = self._convert_number(captures.group(extra_grp_name))
                    parse_result.__dict__[ep_numbers] = range(ep_num, extra_ep_num + 1) if not (
                        ep and es and es != captures.group(extra_grp_name)) and (
                        0 < extra_ep_num - ep_num < 10) else [ep_num]
                    parse_result.score += 1
                else:
                    parse_result.__dict__[ep_numbers] = [ep_num]
                parse_result.score += 1
                return parse_result

            if 'ep_num' in named_groups:
                result = _process_epnum(match, named_groups, 'ep_num', 'ep_num', '', result)

            if 'ep_ab_num' in named_groups:
                result = _process_epnum(match, named_groups, 'ep_ab_num', 'ab_ep_num', 'ab_', result)

            if 'air_year' in named_groups and 'air_month' in named_groups and 'air_day' in named_groups:
                year = int(match.group('air_year'))
                try:
                    month = int(match.group('air_month'))
                except ValueError:
                    # month may be a name ('Jan', 'January', ...)
                    try:
                        month = time.strptime(match.group('air_month')[0:3], '%b').tm_mon
                    except ValueError as e:
                        raise InvalidNameException(ex(e))
                day = int(match.group('air_day'))
                # make an attempt to detect YYYY-DD-MM formats
                if 12 < month:
                    tmp_month = month
                    month = day
                    day = tmp_month
                try:
                    # expand 2-digit years: 1-27 -> 2000s, 28-99 -> 1900s; 4-digit years pass through
                    result.air_date = datetime.date(
                        year + ((1900, 2000)[0 < year < 28], 0)[1900 < year], month, day)
                except ValueError as e:
                    raise InvalidNameException(ex(e))

            if 'extra_info' in named_groups:
                tmp_extra_info = match.group('extra_info')
                # Show.S04.Special or Show.S05.Part.2.Extras is almost certainly not every episode in the season
                if tmp_extra_info and 'season_only' == cur_regex_name and re.search(
                        r'([. _-]|^)(special|extra)s?\w*([. _-]|$)', tmp_extra_info, re.I):
                    continue
                if tmp_extra_info:
                    if result.extra_info:
                        tmp_extra_info = '%s %s' % (result.extra_info, tmp_extra_info)
                    result.extra_info = tmp_extra_info
                result.score += 1

            if 'release_group' in named_groups:
                result.release_group = match.group('release_group')
                result.score += 1

            if 'version' in named_groups:
                # assigns version to anime file if detected using anime regex. Non-anime regex receives -1
                version = match.group('version')
                if version:
                    result.version = helpers.tryInt(version)
                else:
                    result.version = 1
            else:
                result.version = -1

            # 'Part N' names with no season are assumed to be season 1
            if None is result.season_number and result.episode_numbers and not result.air_date and \
                    cur_regex_name in ['no_season', 'no_season_general', 'no_season_multi_ep'] and \
                    re.search(r'(?i)\bpart.?\d{1,2}\b', result.original_name):
                result.season_number = 1

            matches.append(result)

        if len(matches):
            # pick best match with highest score based on placement
            best_result = max(sorted(matches, reverse=True, key=lambda x: x.which_regex),
                              key=lambda x: x.score)

            show = None
            if not self.naming_pattern:
                # try and create a show object for this result
                show = helpers.get_show(best_result.series_name, self.try_scene_exceptions)

            # confirm passed in show object indexer id matches result show object indexer id
            if show and not self.testing:
                if self.showObj and show.indexerid != self.showObj.indexerid:
                    show = None
            elif not show and self.showObj:
                show = self.showObj
            best_result.show = show
            if not best_result.series_name and getattr(show, 'name', None):
                best_result.series_name = show.name

            # anime shows get another pass with the anime regex set before accepting this result
            if show and show.is_anime and 1 < len(self.compiled_regexes[1]) and 1 != reg_ex:
                continue

            # if this is a naming pattern test then return best result
            if not show or self.naming_pattern:
                if not show and not self.naming_pattern and not self.testing:
                    # ensure anime regex test but use initial best if show still not found
                    if 0 == reg_ex:
                        initial_best_result = best_result
                        matches = []  # clear non-anime match scores
                        continue
                    return initial_best_result
                return best_result

            # get quality
            new_name = helpers.remove_non_release_groups(name, show.is_anime)
            best_result.quality = common.Quality.nameQuality(new_name, show.is_anime)

            new_episode_numbers = []
            new_season_numbers = []
            new_absolute_numbers = []

            # if we have an air-by-date show then get the real season/episode numbers
            if best_result.is_air_by_date:
                season_number, episode_numbers = None, []

                airdate = best_result.air_date.toordinal()
                my_db = db.DBConnection()
                sql_result = my_db.select(
                    'SELECT season, episode, name FROM tv_episodes ' +
                    'WHERE showid = ? and indexer = ? and airdate = ?',
                    [show.indexerid, show.indexer, airdate])

                if sql_result:
                    season_number = int(sql_result[0]['season'])
                    episode_numbers = [int(sql_result[0]['episode'])]

                    if 1 < len(sql_result):
                        # multi-eps broadcast on this day
                        # try to match 'part N' in the release against the episode title,
                        # accepting either digit ('2') or word ('two') forms
                        nums = {'1': 'one', '2': 'two', '3': 'three', '4': 'four', '5': 'five',
                                '6': 'six', '7': 'seven', '8': 'eight', '9': 'nine', '10': 'ten'}
                        patt = '(?i)(?:e(?:p(?:isode)?)?|part|pt)[. _-]?(%s)'
                        try:
                            src_num = str(re.findall(patt % '\w+', best_result.extra_info)[0])
                            alt_num = nums.get(src_num) or list(nums.keys())[
                                list(nums.values()).index(src_num)]
                            re_partnum = re.compile(patt % ('%s|%s' % (src_num, alt_num)))
                            for ep_details in sql_result:
                                if re_partnum.search(ep_details['name']):
                                    season_number = int(ep_details['season'])
                                    episode_numbers = [int(ep_details['episode'])]
                                    break
                        except (StandardError, Exception):
                            pass

                if self.indexer_lookup and not season_number or not len(episode_numbers):
                    # local DB had no match; fall back to asking the indexer by air date
                    try:
                        lindexer_api_parms = sickbeard.indexerApi(show.indexer).api_params.copy()

                        if show.lang:
                            lindexer_api_parms['language'] = show.lang

                        t = sickbeard.indexerApi(show.indexer).indexer(**lindexer_api_parms)

                        ep_obj = t[show.indexerid].aired_on(best_result.air_date)[0]

                        season_number = int(ep_obj['seasonnumber'])
                        episode_numbers = [int(ep_obj['episodenumber'])]
                    except sickbeard.indexer_episodenotfound:
                        logger.log(u'Unable to find episode with date ' + str(best_result.air_date)
                                   + ' for show ' + show.name + ', skipping', logger.WARNING)
                        episode_numbers = []
                    except sickbeard.indexer_error as e:
                        logger.log(u'Unable to contact ' + sickbeard.indexerApi(show.indexer).name
                                   + ': ' + ex(e), logger.WARNING)
                        episode_numbers = []

                for epNo in episode_numbers:
                    s = season_number
                    e = epNo

                    if self.convert and show.is_scene:
                        (s, e) = scene_numbering.get_indexer_numbering(show.indexerid, show.indexer,
                                                                       season_number, epNo)
                    new_episode_numbers.append(e)
                    new_season_numbers.append(s)

            elif show.is_anime and len(best_result.ab_episode_numbers) and not self.testing:
                scene_season = scene_exceptions.get_scene_exception_by_name(best_result.series_name)[1]
                for epAbsNo in best_result.ab_episode_numbers:
                    a = epAbsNo

                    if self.convert and show.is_scene:
                        a = scene_numbering.get_indexer_absolute_numbering(show.indexerid, show.indexer,
                                                                           epAbsNo, True, scene_season)

                    (s, e) = helpers.get_all_episodes_from_absolute_number(show, [a])

                    new_absolute_numbers.append(a)
                    new_episode_numbers.extend(e)
                    new_season_numbers.append(s)

            elif best_result.season_number and len(best_result.episode_numbers) and not self.testing:
                for epNo in best_result.episode_numbers:
                    s = best_result.season_number
                    e = epNo

                    if self.convert and show.is_scene:
                        (s, e) = scene_numbering.get_indexer_numbering(show.indexerid, show.indexer,
                                                                       best_result.season_number, epNo)
                    if show.is_anime:
                        a = helpers.get_absolute_number_from_season_and_episode(show, s, e)
                        if a:
                            new_absolute_numbers.append(a)

                    new_episode_numbers.append(e)
                    new_season_numbers.append(s)

            # need to do a quick sanity check heregex. It's possible that we now have episodes
            # from more than one season (by tvdb numbering), and this is just too much
            # for sickbeard, so we'd need to flag it.
            new_season_numbers = list(set(new_season_numbers))  # remove duplicates
            if 1 < len(new_season_numbers):
                raise InvalidNameException('Scene numbering results episodes from '
                                           'seasons %s, (i.e. more than one) and '
                                           'SickGear does not support this. '
                                           'Sorry.' % (str(new_season_numbers)))

            # I guess it's possible that we'd have duplicate episodes too, so lets
            # eliminate them
            new_episode_numbers = list(set(new_episode_numbers))
            new_episode_numbers.sort()

            # maybe even duplicate absolute numbers so why not do them as well
            new_absolute_numbers = list(set(new_absolute_numbers))
            new_absolute_numbers.sort()

            if len(new_absolute_numbers):
                best_result.ab_episode_numbers = new_absolute_numbers

            if len(new_season_numbers) and len(new_episode_numbers):
                best_result.episode_numbers = new_episode_numbers
                best_result.season_number = new_season_numbers[0]

            if self.convert and show.is_scene:
                logger.log(u'Converted parsed result %s into %s'
                           % (best_result.original_name,
                              str(best_result).decode('utf-8', 'xmlcharrefreplace')),
                           logger.DEBUG)

            helpers.cpu_sleep()

            return best_result
def _parse_string(self, name):
    """Parse a release name into a ParseResult (show, season, episodes, etc.).

    Tries every compiled regex, scores each successful match, then takes the
    best-scoring result, resolves it to a show object and converts scene
    numbering to indexer numbering where needed.

    :param name: release name to parse (unicode)
    :return: the best ParseResult, or None when *name* is empty
    :raises InvalidNameException: on an invalid air date or when the parsed
        episodes span more than one season
    """
    if not name:
        return

    matches = []
    # self.compiled_regexes maps a regex-set id to (num, name, compiled) tuples
    for regex in self.compiled_regexes:
        for (cur_regex_num, cur_regex_name, cur_regex) in self.compiled_regexes[regex]:
            match = cur_regex.match(name)

            if not match:
                continue

            result = ParseResult(name)
            result.which_regex = [cur_regex_name]
            # earlier regexes in the list are preferred: lower num -> higher base score
            result.score = 0 - cur_regex_num

            named_groups = match.groupdict().keys()

            if 'series_name' in named_groups:
                result.series_name = match.group('series_name')
                if result.series_name:
                    result.series_name = self.clean_series_name(result.series_name)
                    result.score += 1

            if 'series_num' in named_groups and match.group('series_num'):
                result.score += 1

            if 'season_num' in named_groups:
                tmp_season = int(match.group('season_num'))
                # the 'bare' regex easily mistakes 19xx/20xx years for seasons
                if 'bare' == cur_regex_name and tmp_season in (19, 20):
                    continue
                result.season_number = tmp_season
                result.score += 1

            if 'ep_num' in named_groups:
                ep_num = self._convert_number(match.group('ep_num'))
                if 'extra_ep_num' in named_groups and match.group('extra_ep_num'):
                    # multi-episode release: expand to the full inclusive range
                    result.episode_numbers = range(ep_num, self._convert_number(match.group('extra_ep_num')) + 1)
                    result.score += 1
                else:
                    result.episode_numbers = [ep_num]
                result.score += 1

            if 'ep_ab_num' in named_groups:
                ep_ab_num = self._convert_number(match.group('ep_ab_num'))
                if 'extra_ab_ep_num' in named_groups and match.group('extra_ab_ep_num'):
                    result.ab_episode_numbers = range(ep_ab_num, self._convert_number(match.group('extra_ab_ep_num')) + 1)
                    result.score += 1
                else:
                    result.ab_episode_numbers = [ep_ab_num]
                result.score += 1

            if 'air_year' in named_groups and 'air_month' in named_groups and 'air_day' in named_groups:
                year = int(match.group('air_year'))
                month = int(match.group('air_month'))
                day = int(match.group('air_day'))

                # make an attempt to detect YYYY-DD-MM formats
                if 12 < month:
                    tmp_month = month
                    month = day
                    day = tmp_month

                try:
                    result.air_date = datetime.date(year, month, day)
                except ValueError as e:
                    raise InvalidNameException(ex(e))

            if 'extra_info' in named_groups:
                tmp_extra_info = match.group('extra_info')

                # Show.S04.Special or Show.S05.Part.2.Extras is almost certainly not every episode in the season
                if tmp_extra_info and 'season_only' == cur_regex_name and re.search(
                        r'([. _-]|^)(special|extra)s?\w*([. _-]|$)', tmp_extra_info, re.I):
                    continue
                result.extra_info = tmp_extra_info
                result.score += 1

            if 'release_group' in named_groups:
                result.release_group = helpers.remove_non_release_groups(match.group('release_group'))
                result.score += 1

            if 'version' in named_groups:
                # assigns version to anime file if detected using anime regex. Non-anime regex receives -1
                version = match.group('version')
                if version:
                    result.version = version
                else:
                    result.version = 1
            else:
                result.version = -1

            matches.append(result)

        if len(matches):
            # pick best match with highest score based on placement
            best_result = max(sorted(matches, reverse=True, key=lambda x: x.which_regex), key=lambda x: x.score)

            show = None
            if not self.naming_pattern:
                # try and create a show object for this result
                show = helpers.get_show(best_result.series_name, self.try_scene_exceptions)

            # confirm passed in show object indexer id matches result show object indexer id
            if show and not self.testing:
                if self.showObj and show.indexerid != self.showObj.indexerid:
                    show = None
            elif not show and self.showObj:
                show = self.showObj
            best_result.show = show

            # anime shows get another pass with the next regex set before settling
            if show and show.is_anime and 1 < len(self.compiled_regexes[1]) and 1 != regex:
                continue

            # if this is a naming pattern test then return best result
            if not show or self.naming_pattern:
                return best_result

            # get quality
            best_result.quality = common.Quality.nameQuality(name, show.is_anime)

            new_episode_numbers = []
            new_season_numbers = []
            new_absolute_numbers = []

            # if we have an air-by-date show then get the real season/episode numbers
            if best_result.is_air_by_date:
                airdate = best_result.air_date.toordinal()
                my_db = db.DBConnection()
                sql_result = my_db.select(
                    'SELECT season, episode FROM tv_episodes WHERE showid = ? and indexer = ? and airdate = ?',
                    [show.indexerid, show.indexer, airdate])

                season_number = None
                episode_numbers = []

                if sql_result:
                    season_number = int(sql_result[0][0])
                    episode_numbers = [int(sql_result[0][1])]

                if not season_number or not len(episode_numbers):
                    # not in the local DB: fall back to asking the indexer by air date
                    try:
                        lindexer_api_parms = sickbeard.indexerApi(show.indexer).api_params.copy()

                        if show.lang:
                            lindexer_api_parms['language'] = show.lang

                        t = sickbeard.indexerApi(show.indexer).indexer(**lindexer_api_parms)

                        ep_obj = t[show.indexerid].airedOn(best_result.air_date)[0]

                        season_number = int(ep_obj['seasonnumber'])
                        episode_numbers = [int(ep_obj['episodenumber'])]
                    except sickbeard.indexer_episodenotfound:
                        logger.log(u'Unable to find episode with date ' + str(best_result.air_date)
                                   + ' for show ' + show.name + ', skipping', logger.WARNING)
                        episode_numbers = []
                    except sickbeard.indexer_error as e:
                        logger.log(u'Unable to contact ' + sickbeard.indexerApi(show.indexer).name
                                   + ': ' + ex(e), logger.WARNING)
                        episode_numbers = []

                for epNo in episode_numbers:
                    s = season_number
                    e = epNo

                    if self.convert and show.is_scene:
                        (s, e) = scene_numbering.get_indexer_numbering(show.indexerid, show.indexer,
                                                                       season_number, epNo)
                    new_episode_numbers.append(e)
                    new_season_numbers.append(s)

            elif show.is_anime and len(best_result.ab_episode_numbers) and not self.testing:
                # anime: map absolute numbers to (season, episode) pairs
                scene_season = scene_exceptions.get_scene_exception_by_name(best_result.series_name)[1]
                for epAbsNo in best_result.ab_episode_numbers:
                    a = epAbsNo

                    if self.convert and show.is_scene:
                        a = scene_numbering.get_indexer_absolute_numbering(show.indexerid, show.indexer,
                                                                           epAbsNo, True, scene_season)

                    (s, e) = helpers.get_all_episodes_from_absolute_number(show, [a])

                    new_absolute_numbers.append(a)
                    new_episode_numbers.extend(e)
                    new_season_numbers.append(s)

            elif best_result.season_number and len(best_result.episode_numbers) and not self.testing:
                for epNo in best_result.episode_numbers:
                    s = best_result.season_number
                    e = epNo

                    if self.convert and show.is_scene:
                        (s, e) = scene_numbering.get_indexer_numbering(show.indexerid, show.indexer,
                                                                       best_result.season_number, epNo)
                    if show.is_anime:
                        a = helpers.get_absolute_number_from_season_and_episode(show, s, e)
                        if a:
                            new_absolute_numbers.append(a)

                    new_episode_numbers.append(e)
                    new_season_numbers.append(s)

            # need to do a quick sanity check here. It's possible that we now have episodes
            # from more than one season (by tvdb numbering), and this is just too much
            # for sickbeard, so we'd need to flag it.
            new_season_numbers = list(set(new_season_numbers))  # remove duplicates
            if 1 < len(new_season_numbers):
                raise InvalidNameException('Scene numbering results episodes from '
                                           'seasons %s, (i.e. more than one) and '
                                           'SickGear does not support this. '
                                           'Sorry.' % (str(new_season_numbers)))

            # I guess it's possible that we'd have duplicate episodes too, so lets
            # eliminate them
            new_episode_numbers = list(set(new_episode_numbers))
            new_episode_numbers.sort()

            # maybe even duplicate absolute numbers so why not do them as well
            new_absolute_numbers = list(set(new_absolute_numbers))
            new_absolute_numbers.sort()

            if len(new_absolute_numbers):
                best_result.ab_episode_numbers = new_absolute_numbers

            if len(new_season_numbers) and len(new_episode_numbers):
                best_result.episode_numbers = new_episode_numbers
                best_result.season_number = new_season_numbers[0]

            if self.convert and show.is_scene:
                logger.log(u'Converted parsed result %s into %s'
                           % (best_result.original_name,
                              str(best_result).decode('utf-8', 'xmlcharrefreplace')), logger.DEBUG)

            # CPU sleep
            time.sleep(cpu_presets[sickbeard.CPU_PRESET])

    # NOTE(review): if no regex ever matched, best_result is unbound here and
    # this raises NameError instead of returning None — confirm callers rely
    # on InvalidNameException/None semantics before changing.
    return best_result
def subtitles_download_in_pp():  # pylint: disable=too-many-locals, too-many-branches, too-many-statements
    """Search the Post-Process folder for media that still needs subtitles.

    Unpacks rar archives (when enabled), strips non-release-group tags from
    file names so subtitles match, deletes subtitle files in unwanted
    languages, downloads the best-scoring subtitles via subliminal, and
    finally starts post-processing when at least one video has all the
    needed subtitles.
    """
    logger.log(u'Checking for needed subtitles in Post-Process folder', logger.INFO)

    providers = enabled_service_list()
    pool = SubtitleProviderPool()

    # Search for all wanted languages
    languages = {from_code(language) for language in wanted_languages()}
    if not languages:
        return

    # Dict of language exceptions to use with subliminal
    language_exceptions = {'pt-br': 'pob'}

    run_post_process = False
    # Check if PP folder is set
    if sickbeard.TV_DOWNLOAD_DIR and os.path.isdir(sickbeard.TV_DOWNLOAD_DIR):
        # First pass: unrar archives so their media files become visible below
        for root, _, files in os.walk(sickbeard.TV_DOWNLOAD_DIR, topdown=False):
            rar_files = [rar_file for rar_file in files if isRarFile(rar_file)]
            if rar_files and sickbeard.UNPACK:
                video_files = [video_file for video_file in files if isMediaFile(video_file)]
                if u'_UNPACK' not in root and (not video_files or root == sickbeard.TV_DOWNLOAD_DIR):
                    logger.log(u'Found rar files in post-process folder: {0}'.format(rar_files), logger.DEBUG)
                    result = processTV.ProcessResult()
                    processTV.unRAR(root, rar_files, False, result)
            elif rar_files and not sickbeard.UNPACK:
                logger.log(u'Unpack is disabled. Skipping: {0}'.format(rar_files), logger.WARNING)

        for root, _, files in os.walk(sickbeard.TV_DOWNLOAD_DIR, topdown=False):
            for filename in sorted(files):
                try:
                    # Remove non release groups from video file. Needed to match subtitles
                    new_filename = remove_non_release_groups(filename)
                    if new_filename != filename:
                        # FIX: os.walk yields bare file names relative to root;
                        # rename must use full paths or it resolves against the CWD.
                        os.rename(os.path.join(root, filename), os.path.join(root, new_filename))
                        filename = new_filename
                except Exception as error:
                    logger.log(u"Couldn't remove non release groups from video file. Error: {0}".format(ex(error)),
                               logger.DEBUG)

                # Delete unwanted subtitles before downloading new ones
                if sickbeard.SUBTITLES_MULTI and sickbeard.SUBTITLES_KEEP_ONLY_WANTED and \
                        filename.rpartition('.')[2] in subtitle_extensions:
                    # language code sits between the basename and the extension
                    subtitle_language = filename.rsplit('.', 2)[1].lower()
                    if len(subtitle_language) == 2 and subtitle_language in language_converters['opensubtitles'].codes:
                        subtitle_language = Language.fromcode(subtitle_language, 'alpha2').opensubtitles
                    elif subtitle_language in language_exceptions:
                        subtitle_language = language_exceptions.get(subtitle_language, subtitle_language)
                    elif subtitle_language not in language_converters['opensubtitles'].codes:
                        subtitle_language = 'unknown'
                    if subtitle_language not in sickbeard.SUBTITLES_LANGUAGES:
                        try:
                            os.remove(os.path.join(root, filename))
                            logger.log(u"Deleted '{0}' because we don't want subtitle language '{1}'. We only want '{2}' language(s)".format(
                                filename, subtitle_language, ','.join(sickbeard.SUBTITLES_LANGUAGES)), logger.DEBUG)
                        except Exception as error:
                            logger.log(u"Couldn't delete subtitle: {0}. Error: {1}".format(filename, ex(error)),
                                       logger.DEBUG)

                if isMediaFile(filename) and processTV.subtitles_enabled(filename):
                    try:
                        video = get_video(os.path.join(root, filename), subtitles=False, embedded_subtitles=False)
                        subtitles_list = pool.list_subtitles(video, languages)
                        for provider in providers:
                            if provider in pool.discarded_providers:
                                logger.log(u'Could not search in {0} provider. Discarding for now'.format(provider),
                                           logger.DEBUG)

                        if not subtitles_list:
                            logger.log(u'No subtitles found for {0}'.format(os.path.join(root, filename)),
                                       logger.DEBUG)
                            continue

                        logger.log(u'Found subtitle(s) canditate(s) for {0}'.format(filename), logger.INFO)
                        hearing_impaired = sickbeard.SUBTITLES_HEARING_IMPAIRED
                        user_score = 213 if sickbeard.SUBTITLES_PERFECT_MATCH else 198
                        found_subtitles = pool.download_best_subtitles(
                            subtitles_list, video, languages=languages, hearing_impaired=hearing_impaired,
                            min_score=user_score, only_one=not sickbeard.SUBTITLES_MULTI)

                        for subtitle in subtitles_list:
                            score = subliminal.score.compute_score(
                                subtitle, video, hearing_impaired=sickbeard.SUBTITLES_HEARING_IMPAIRED)
                            logger.log(u'[{0}] Subtitle score for {1} is: {2} (min={3})'.format(
                                subtitle.provider_name, subtitle.id, score, user_score), logger.DEBUG)

                        downloaded_languages = set()
                        for subtitle in found_subtitles:
                            logger.log(u'Found subtitle for {0} in {1} provider with language {2}'.format(
                                os.path.join(root, filename), subtitle.provider_name,
                                subtitle.language.opensubtitles), logger.INFO)
                            subliminal.save_subtitles(video, found_subtitles, directory=root,
                                                      single=not sickbeard.SUBTITLES_MULTI)

                            subtitles_multi = not sickbeard.SUBTITLES_MULTI
                            subtitle_path = subliminal.subtitle.get_subtitle_path(
                                video.name, None if subtitles_multi else subtitle.language)
                            if root is not None:
                                subtitle_path = os.path.join(root, os.path.split(subtitle_path)[1])
                            sickbeard.helpers.chmodAsParent(subtitle_path)
                            sickbeard.helpers.fixSetGroupID(subtitle_path)

                            downloaded_languages.add(subtitle.language.opensubtitles)

                        # Don't run post processor unless at least one file has all of the needed subtitles
                        if not needs_subtitles(downloaded_languages):
                            run_post_process = True
                    except Exception as error:
                        logger.log(u'Error occurred when downloading subtitles for: {0}. Error: {1}'.format(
                            os.path.join(root, filename), ex(error)))
    if run_post_process:
        logger.log(u'Starting post-process with default settings now that we found subtitles')
        processTV.processDir(sickbeard.TV_DOWNLOAD_DIR)
def subtitles_download_in_pp():  # pylint: disable=too-many-locals, too-many-branches
    """Search the Post-Process folder for media that still needs subtitles.

    Strips non-release-group tags from file names so subtitles match,
    downloads the best-scoring subtitles via subliminal, and starts
    post-processing when at least one video has all the needed subtitles.
    """
    logger.log(u'Checking for needed subtitles in Post-Process folder', logger.INFO)

    providers = enabled_service_list()
    provider_configs = {'addic7ed': {'username': sickbeard.ADDIC7ED_USER, 'password': sickbeard.ADDIC7ED_PASS},
                        'legendastv': {'username': sickbeard.LEGENDASTV_USER, 'password': sickbeard.LEGENDASTV_PASS},
                        'opensubtitles': {'username': sickbeard.OPENSUBTITLES_USER,
                                          'password': sickbeard.OPENSUBTITLES_PASS}}
    pool = subliminal.api.ProviderPool(providers=providers, provider_configs=provider_configs)

    # Search for all wanted languages
    languages = {from_code(language) for language in wanted_languages()}
    if not languages:
        return

    run_post_process = False
    # Check if PP folder is set
    if sickbeard.TV_DOWNLOAD_DIR and ek(os.path.isdir, sickbeard.TV_DOWNLOAD_DIR):
        for root, _, files in ek(os.walk, sickbeard.TV_DOWNLOAD_DIR, topdown=False):
            for video_filename in sorted(files):
                try:
                    # Remove non release groups from video file. Needed to match subtitles
                    new_video_filename = remove_non_release_groups(video_filename)
                    if new_video_filename != video_filename:
                        # FIX: os.walk yields bare file names relative to root;
                        # rename must use full paths or it resolves against the CWD.
                        os.rename(ek(os.path.join, root, video_filename),
                                  ek(os.path.join, root, new_video_filename))
                        video_filename = new_video_filename
                except Exception as error:
                    logger.log(u'Could not remove non release groups from video file. Error: %r' % ex(error),
                               logger.DEBUG)

                if isMediaFile(video_filename):
                    try:
                        video = subliminal.scan_video(os.path.join(root, video_filename),
                                                      subtitles=False, embedded_subtitles=False)
                        subtitles_list = pool.list_subtitles(video, languages)

                        if not subtitles_list:
                            logger.log(u'No subtitles found for %s' % ek(os.path.join, root, video_filename),
                                       logger.DEBUG)
                            continue

                        logger.log(u'Found subtitle(s) canditate(s) for %s' % video_filename, logger.INFO)
                        hearing_impaired = sickbeard.SUBTITLES_HEARING_IMPAIRED
                        user_score = 132 if sickbeard.SUBTITLES_PERFECT_MATCH else 111
                        found_subtitles = pool.download_best_subtitles(
                            subtitles_list, video, languages=languages, hearing_impaired=hearing_impaired,
                            min_score=user_score, only_one=not sickbeard.SUBTITLES_MULTI)

                        for subtitle in subtitles_list:
                            matches = subtitle.get_matches(video, hearing_impaired=False)
                            score = subliminal.subtitle.compute_score(matches, video)
                            logger.log(u"[%s] Subtitle score for %s is: %s (min=%s)"
                                       % (subtitle.provider_name, subtitle.id, score, user_score), logger.DEBUG)

                        downloaded_languages = set()
                        for subtitle in found_subtitles:
                            logger.log(u"Found subtitle for %s in %s provider with language %s"
                                       % (os.path.join(root, video_filename), subtitle.provider_name,
                                          subtitle.language.opensubtitles), logger.DEBUG)
                            subliminal.save_subtitles(video, found_subtitles, directory=root,
                                                      single=not sickbeard.SUBTITLES_MULTI)

                            subtitles_multi = not sickbeard.SUBTITLES_MULTI
                            subtitle_path = subliminal.subtitle.get_subtitle_path(
                                video.name, None if subtitles_multi else subtitle.language)
                            if root is not None:
                                subtitle_path = ek(os.path.join, root, ek(os.path.split, subtitle_path)[1])
                            sickbeard.helpers.chmodAsParent(subtitle_path)
                            sickbeard.helpers.fixSetGroupID(subtitle_path)

                            downloaded_languages.add(subtitle.language.opensubtitles)

                        # Don't run post processor unless at least one file has all of the needed subtitles
                        if not needs_subtitles(downloaded_languages):
                            run_post_process = True
                    except Exception as error:
                        logger.log(u"Error occurred when downloading subtitles for: %s. Error: %r"
                                   % (os.path.join(root, video_filename), ex(error)))
    if run_post_process:
        logger.log(u"Starting post-process with default settings now that we found subtitles")
        processTV.processDir(sickbeard.TV_DOWNLOAD_DIR)
def subtitles_download_in_pp():  # pylint: disable=too-many-locals, too-many-branches
    """Search the Post-Process folder for media that still needs subtitles.

    Unpacks rar archives (when enabled), strips non-release-group tags from
    file names so subtitles match, downloads the best-scoring subtitles via
    subliminal, and starts post-processing when at least one video has all
    the needed subtitles.
    """
    logger.log(u'Checking for needed subtitles in Post-Process folder', logger.INFO)

    providers = enabled_service_list()
    provider_configs = {'addic7ed': {'username': sickbeard.ADDIC7ED_USER, 'password': sickbeard.ADDIC7ED_PASS},
                        'legendastv': {'username': sickbeard.LEGENDASTV_USER, 'password': sickbeard.LEGENDASTV_PASS},
                        'opensubtitles': {'username': sickbeard.OPENSUBTITLES_USER,
                                          'password': sickbeard.OPENSUBTITLES_PASS}}
    pool = ProviderPool(providers=providers, provider_configs=provider_configs)

    # Search for all wanted languages
    languages = {from_code(language) for language in wanted_languages()}
    if not languages:
        return

    run_post_process = False
    # Check if PP folder is set
    if sickbeard.TV_DOWNLOAD_DIR and os.path.isdir(sickbeard.TV_DOWNLOAD_DIR):
        # First pass: unrar archives so their media files become visible below
        for root, _, files in os.walk(sickbeard.TV_DOWNLOAD_DIR, topdown=False):
            rar_files = [x for x in files if isRarFile(x)]
            if rar_files and sickbeard.UNPACK:
                video_files = [x for x in files if isMediaFile(x)]
                if u'_UNPACK' not in root and (not video_files or root == sickbeard.TV_DOWNLOAD_DIR):
                    logger.log(u'Found rar files in post-process folder: {}'.format(rar_files), logger.DEBUG)
                    result = processTV.ProcessResult()
                    processTV.unRAR(root, rar_files, False, result)
            elif rar_files and not sickbeard.UNPACK:
                logger.log(u'Unpack is disabled. Skipping: {}'.format(rar_files), logger.WARNING)

        for root, _, files in os.walk(sickbeard.TV_DOWNLOAD_DIR, topdown=False):
            for video_filename in sorted(files):
                try:
                    # Remove non release groups from video file. Needed to match subtitles
                    new_video_filename = remove_non_release_groups(video_filename)
                    if new_video_filename != video_filename:
                        # FIX: os.walk yields bare file names relative to root;
                        # rename must use full paths or it resolves against the CWD.
                        os.rename(os.path.join(root, video_filename), os.path.join(root, new_video_filename))
                        video_filename = new_video_filename
                except Exception as error:
                    logger.log(u'Couldn\'t remove non release groups from video file. Error: {}'.format(ex(error)),
                               logger.DEBUG)

                if isMediaFile(video_filename):
                    try:
                        video = subliminal.scan_video(os.path.join(root, video_filename),
                                                      subtitles=False, embedded_subtitles=False)
                        subtitles_list = pool.list_subtitles(video, languages)
                        for provider in providers:
                            if provider in pool.discarded_providers:
                                logger.log(u'Could not search in {} provider. Discarding for now'.format(provider),
                                           logger.DEBUG)

                        if not subtitles_list:
                            logger.log(u'No subtitles found for {}'.format(os.path.join(root, video_filename)),
                                       logger.DEBUG)
                            continue

                        logger.log(u'Found subtitle(s) canditate(s) for {}'.format(video_filename), logger.INFO)
                        hearing_impaired = sickbeard.SUBTITLES_HEARING_IMPAIRED
                        user_score = 213 if sickbeard.SUBTITLES_PERFECT_MATCH else 204
                        found_subtitles = pool.download_best_subtitles(
                            subtitles_list, video, languages=languages, hearing_impaired=hearing_impaired,
                            min_score=user_score, only_one=not sickbeard.SUBTITLES_MULTI)

                        for subtitle in subtitles_list:
                            score = subliminal.score.compute_score(
                                subtitle, video, hearing_impaired=sickbeard.SUBTITLES_HEARING_IMPAIRED)
                            logger.log(u'[{}] Subtitle score for {} is: {} (min={})'.format(
                                subtitle.provider_name, subtitle.id, score, user_score), logger.DEBUG)

                        downloaded_languages = set()
                        for subtitle in found_subtitles:
                            logger.log(u'Found subtitle for {} in {} provider with language {}'.format(
                                os.path.join(root, video_filename), subtitle.provider_name,
                                subtitle.language.opensubtitles), logger.DEBUG)
                            subliminal.save_subtitles(video, found_subtitles, directory=root,
                                                      single=not sickbeard.SUBTITLES_MULTI)

                            subtitles_multi = not sickbeard.SUBTITLES_MULTI
                            subtitle_path = subliminal.subtitle.get_subtitle_path(
                                video.name, None if subtitles_multi else subtitle.language)
                            if root is not None:
                                subtitle_path = os.path.join(root, os.path.split(subtitle_path)[1])
                            sickbeard.helpers.chmodAsParent(subtitle_path)
                            sickbeard.helpers.fixSetGroupID(subtitle_path)

                            downloaded_languages.add(subtitle.language.opensubtitles)

                        # Don't run post processor unless at least one file has all of the needed subtitles
                        if not needs_subtitles(downloaded_languages):
                            run_post_process = True
                    except Exception as error:
                        logger.log(u'Error occurred when downloading subtitles for: {}. Error: {}'.format(
                            os.path.join(root, video_filename), ex(error)))
    if run_post_process:
        logger.log(u'Starting post-process with default settings now that we found subtitles')
        processTV.processDir(sickbeard.TV_DOWNLOAD_DIR)
def subtitles_download_in_pp():  # pylint: disable=too-many-locals, too-many-branches, too-many-statements
    """Search the Post-Process folder for media that still needs subtitles.

    Unpacks rar archives (when enabled), strips non-release-group tags from
    file names so subtitles match, deletes subtitle files in unwanted
    languages, downloads the best-scoring subtitles via subliminal, and
    starts post-processing when at least one video has all the needed
    subtitles.
    """
    logger.log(u'Checking for needed subtitles in Post-Process folder', logger.INFO)

    providers = enabled_service_list()
    pool = SubtitleProviderPool()

    # Search for all wanted languages
    languages = {from_code(language) for language in wanted_languages()}
    if not languages:
        return

    # Dict of language exceptions to use with subliminal
    language_exceptions = {'pt-br': 'pob'}

    run_post_process = False
    # Check if PP folder is set
    if sickbeard.TV_DOWNLOAD_DIR and os.path.isdir(sickbeard.TV_DOWNLOAD_DIR):
        # First pass: unrar archives so their media files become visible below
        for dirpath, dirnames_, files in os.walk(sickbeard.TV_DOWNLOAD_DIR, topdown=False):
            rar_files = [rar_file for rar_file in files if isRarFile(rar_file)]
            if rar_files and sickbeard.UNPACK:
                video_files = [video_file for video_file in files if isMediaFile(video_file)]
                if u'_UNPACK' not in dirpath and (not video_files or dirpath == sickbeard.TV_DOWNLOAD_DIR):
                    logger.log(u'Found rar files in post-process folder: {0}'.format(rar_files), logger.DEBUG)
                    result = processTV.ProcessResult()
                    processTV.unRAR(dirpath, rar_files, False, result)
            elif rar_files and not sickbeard.UNPACK:
                logger.log(u'Unpack is disabled. Skipping: {0}'.format(rar_files), logger.WARNING)

        for dirpath, dirnames_, files in os.walk(sickbeard.TV_DOWNLOAD_DIR, topdown=False):
            for filename in sorted(files):
                try:
                    # Remove non release groups from video file. Needed to match subtitles
                    new_filename = remove_non_release_groups(filename)
                    if new_filename != filename:
                        # FIX: os.walk yields bare file names relative to dirpath;
                        # rename must use full paths or it resolves against the CWD.
                        os.rename(os.path.join(dirpath, filename), os.path.join(dirpath, new_filename))
                        filename = new_filename
                except Exception as error:
                    logger.log(u"Couldn't remove non release groups from video file. Error: {0}".format(ex(error)),
                               logger.DEBUG)

                # Delete unwanted subtitles before downloading new ones
                if sickbeard.SUBTITLES_MULTI and sickbeard.SUBTITLES_KEEP_ONLY_WANTED and \
                        filename.rpartition('.')[2] in subtitle_extensions:
                    # language code sits between the basename and the extension
                    subtitle_language = filename.rsplit('.', 2)[1].lower()
                    if len(subtitle_language) == 2 and subtitle_language in language_converters['opensubtitles'].codes:
                        subtitle_language = Language.fromcode(subtitle_language, 'alpha2').opensubtitles
                    elif subtitle_language in language_exceptions:
                        subtitle_language = language_exceptions.get(subtitle_language, subtitle_language)
                    elif subtitle_language not in language_converters['opensubtitles'].codes:
                        subtitle_language = 'unknown'
                    if subtitle_language not in sickbeard.SUBTITLES_LANGUAGES:
                        try:
                            os.remove(os.path.join(dirpath, filename))
                            logger.log(u"Deleted '{0}' because we don't want subtitle language '{1}'. We only want '{2}' language(s)".format(
                                filename, subtitle_language, ','.join(sickbeard.SUBTITLES_LANGUAGES)), logger.DEBUG)
                        except Exception as error:
                            logger.log(u"Couldn't delete subtitle: {0}. Error: {1}".format(filename, ex(error)),
                                       logger.DEBUG)

                if isMediaFile(filename) and processTV.subtitles_enabled(filename):
                    try:
                        video = get_video(os.path.join(dirpath, filename), subtitles=False, embedded_subtitles=False)
                        subtitles_list = pool.list_subtitles(video, languages)
                        for provider in providers:
                            if provider in pool.discarded_providers:
                                logger.log(u'Could not search in {0} provider. Discarding for now'.format(provider),
                                           logger.DEBUG)

                        if not subtitles_list:
                            logger.log(u'No subtitles found for {0}'.format(os.path.join(dirpath, filename)),
                                       logger.DEBUG)
                            continue

                        logger.log(u'Found subtitle(s) canditate(s) for {0}'.format(filename), logger.INFO)
                        hearing_impaired = sickbeard.SUBTITLES_HEARING_IMPAIRED
                        user_score = 213 if sickbeard.SUBTITLES_PERFECT_MATCH else 198
                        found_subtitles = pool.download_best_subtitles(
                            subtitles_list, video, languages=languages, hearing_impaired=hearing_impaired,
                            min_score=user_score, only_one=not sickbeard.SUBTITLES_MULTI)

                        for subtitle in subtitles_list:
                            score = subliminal.score.compute_score(
                                subtitle, video, hearing_impaired=sickbeard.SUBTITLES_HEARING_IMPAIRED)
                            logger.log(u'[{0}] Subtitle score for {1} is: {2} (min={3})'.format(
                                subtitle.provider_name, subtitle.id, score, user_score), logger.DEBUG)

                        downloaded_languages = set()
                        for subtitle in found_subtitles:
                            logger.log(u'Found subtitle for {0} in {1} provider with language {2}'.format(
                                os.path.join(dirpath, filename), subtitle.provider_name,
                                subtitle.language.opensubtitles), logger.INFO)
                            subliminal.save_subtitles(video, found_subtitles, directory=dirpath,
                                                      single=not sickbeard.SUBTITLES_MULTI)

                            subtitles_multi = not sickbeard.SUBTITLES_MULTI
                            subtitle_path = subliminal.subtitle.get_subtitle_path(
                                video.name, None if subtitles_multi else subtitle.language)
                            if dirpath is not None:
                                subtitle_path = os.path.join(dirpath, os.path.split(subtitle_path)[1])
                            sickbeard.helpers.chmodAsParent(subtitle_path)
                            sickbeard.helpers.fixSetGroupID(subtitle_path)

                            downloaded_languages.add(subtitle.language.opensubtitles)

                        # Don't run post processor unless at least one file has all of the needed subtitles
                        if not needs_subtitles(downloaded_languages):
                            run_post_process = True
                    except Exception as error:
                        logger.log(u'Error occurred when downloading subtitles for: {0}. Error: {1}'.format(
                            os.path.join(dirpath, filename), ex(error)))
    if run_post_process:
        logger.log(u'Starting post-process with default settings now that we found subtitles')
        processTV.processDir(sickbeard.TV_DOWNLOAD_DIR)
def execute(self):
    """Database migration step (backs up at version 16, then bumps the version).

    Marks season-0 WANTED episodes as SKIPPED, then re-analyzes the history
    table to repair bad provider/release-group values and to back-fill
    tv_episodes.release_name from verified release names. Finishes with a
    VACUUM to reclaim space.
    """
    backupDatabase(16)

    logger.log(u"Setting special episodes status to SKIPPED.")
    self.connection.action("UPDATE tv_episodes SET status = ? WHERE status = ? AND season = 0",
                           [common.SKIPPED, common.WANTED])

    # queued SQL fix-ups, applied in bulk via mass_action at the end
    fix_ep_rls_group = []
    fix_ep_release_name = []

    # re-analyze snatched data
    logger.log(u"Analyzing history to correct bad data (this could take a moment, be patient)...")
    # action % 100 == 2 are snatch entries — NOTE(review): assumes the
    # history action encoding used elsewhere in this codebase; confirm.
    history_results = self.connection.select("SELECT * FROM history WHERE action % 100 = 2 ORDER BY date ASC")
    for cur_result in history_results:
        # find the associated download, if there isn't one then ignore it
        download_results = self.connection.select(
            "SELECT * FROM history WHERE action % 100 = 4 AND showid = ? AND season = ? AND episode = ? AND quality = ? AND date > ?",
            [cur_result["showid"], cur_result["season"], cur_result["episode"],
             cur_result["quality"], cur_result["date"]])
        # only continue if there was a download found (thats newer than the snatched)
        if not download_results:
            logger.log(u"Found a snatch in the history for " + cur_result["resource"]
                       + " but couldn't find the associated download, skipping it", logger.DEBUG)
            continue

        # take the snatched nzb, clean it up so we can store it for the corresponding tv_episodes entry
        clean_nzb_name = helpers.remove_non_release_groups(helpers.remove_extension(cur_result["resource"]))

        # fixed known bad release_group data
        if download_results[0]["provider"].upper() in ["-1", "RP", "NZBGEEK"] \
                or "." in download_results[0]["provider"]:
            try:
                np = NameParser(False)
                parse_result = np.parse(clean_nzb_name)
            except InvalidNameException:
                continue

            # leave off check for episode number so we can update season rip data as well?
            if parse_result.series_name and parse_result.season_number is not None and parse_result.release_group:
                fix_ep_rls_group.append(
                    ["UPDATE history SET provider = ? WHERE action = ? AND showid = ? AND season = ? AND episode = ? AND quality = ? AND date = ?",
                     [parse_result.release_group, download_results[0]["action"], download_results[0]["showid"],
                      download_results[0]["season"], download_results[0]["episode"],
                      download_results[0]["quality"], download_results[0]["date"]]])

        # find the associated episode on disk
        ep_results = self.connection.select(
            "SELECT episode_id, status, release_name FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ? AND location != ''",
            [cur_result["showid"], cur_result["season"], cur_result["episode"]])
        if not ep_results:
            logger.log(u"The episode " + cur_result["resource"]
                       + " was found in history but doesn't exist on disk anymore, skipping", logger.DEBUG)
            continue

        # skip items that appears to have a 'scene' name already to avoid replacing locally pp/manually moved items
        match = re.search(".(xvid|x264|h.?264|mpeg-?2)", ep_results[0]["release_name"], re.I)
        if match:
            continue

        # get the status/quality of the existing ep and make sure it's what we expect
        ep_status, ep_quality = common.Quality.splitCompositeStatus(int(ep_results[0]["status"]))
        if ep_status != common.DOWNLOADED:
            continue

        if ep_quality != int(cur_result["quality"]):
            continue

        # take the extension off the filename, it's not needed
        file_name = ek.ek(os.path.basename, download_results[0]["resource"])
        if '.' in file_name:
            file_name = file_name.rpartition('.')[0]

    # make sure this is actually a real release name and not a season pack or something
        for cur_name in (clean_nzb_name, file_name):
            logger.log(u"Checking if " + cur_name + " is actually a good release name", logger.DEBUG)
            try:
                np = NameParser(False)
                parse_result = np.parse(cur_name)
            except InvalidNameException:
                continue

            if parse_result.series_name and parse_result.season_number is not None \
                    and parse_result.episode_numbers and parse_result.release_group:
                # if all is well by this point we'll just put the release name into the database
                fix_ep_release_name.append(
                    ["UPDATE tv_episodes SET release_name = ? WHERE episode_id = ?",
                     [cur_name, ep_results[0]["episode_id"]]])
                break

    logger.log(u"Corrected " + str(len(fix_ep_release_name)) + " release names ("
               + str(len(fix_ep_rls_group)) + " release groups) out of the "
               + str(len(history_results)) + " releases analyzed.")
    if len(fix_ep_rls_group) > 0:
        self.connection.mass_action(fix_ep_rls_group)
    if len(fix_ep_release_name) > 0:
        self.connection.mass_action(fix_ep_release_name)

    # now cleanup all downloaded release groups in the history
    fix_ep_rls_group = []
    logger.log(u"Analyzing downloaded history release groups...")
    history_results = self.connection.select("SELECT * FROM history WHERE action % 100 = 4 ORDER BY date ASC")
    for cur_result in history_results:
        clean_provider = helpers.remove_non_release_groups(helpers.remove_extension(cur_result["provider"]))
        # take the data on the left of the _, fixes 'LOL_repost'
        if clean_provider and "_" in clean_provider:
            clean_provider = clean_provider.rsplit('_', 1)[0]
        if clean_provider != cur_result["provider"]:
            fix_ep_rls_group.append(
                ["UPDATE history SET provider = ? WHERE action = ? AND showid = ? AND season = ? AND episode = ? AND quality = ? AND date = ?",
                 [clean_provider, cur_result["action"], cur_result["showid"], cur_result["season"],
                  cur_result["episode"], cur_result["quality"], cur_result["date"]]])

    logger.log(u"Corrected " + str(len(fix_ep_rls_group)) + " release groups.")
    if len(fix_ep_rls_group) > 0:
        self.connection.mass_action(fix_ep_rls_group)

    self.incDBVersion()

    # cleanup and reduce db if any previous data was removed
    logger.log(u"Performing a vacuum on the database.", logger.DEBUG)
    self.connection.action("VACUUM")
def _analyze_name(self, name, file_name=True):
    """
    Takes a name and tries to figure out a show, season, and episode from it.

    name: A string which we want to analyze to determine show info from (unicode)
    file_name: unused in the lookup itself; kept for interface compatibility with callers

    Returns a (tvdb_id, season, [episodes], quality) tuple. tvdb_id, season, quality may be None
    and episodes may be [] if none were found.
    """

    logger.log(u"Analyzing name " + repr(name))

    to_return = (None, None, [], None)

    if not name:
        return to_return

    # strip the file extension and scene-group cruft before parsing
    name = helpers.remove_non_release_groups(helpers.remove_extension(name))

    # parse the name to break it into show name, season, and episode
    np = NameParser(False)
    parse_result = np.parse(name)
    self._log(u"Parsed " + name + " into " + str(parse_result).decode('utf-8', 'xmlcharrefreplace'), logger.DEBUG)

    if parse_result.air_by_date:
        # air-by-date shows are flagged with season -1 and the air date standing in for the episode
        season = -1
        episodes = [parse_result.air_date]
    else:
        season = parse_result.season_number
        episodes = parse_result.episode_numbers

    to_return = (None, season, episodes, None)

    # do a scene reverse-lookup to get a list of all possible names
    name_list = show_name_helpers.sceneToNormalShowNames(parse_result.series_name)

    if not name_list:
        return (None, season, episodes, None)

    # try finding name in DB
    for cur_name in name_list:
        self._log(u"Looking up " + cur_name + u" in the DB", logger.DEBUG)
        db_result = helpers.searchDBForShow(cur_name)
        if db_result:
            self._log(u"Lookup successful, using tvdb id " + str(db_result[0]), logger.DEBUG)
            self._finalize(parse_result)
            return (int(db_result[0]), season, episodes, None)

    # try finding name in scene exceptions
    for cur_name in name_list:
        self._log(u"Checking scene exceptions for a match on " + cur_name, logger.DEBUG)
        scene_id = scene_exceptions.get_scene_exception_by_name(cur_name)
        if scene_id:
            self._log(u"Scene exception lookup got tvdb id " + str(scene_id) + u", using that", logger.DEBUG)
            self._finalize(parse_result)
            return (scene_id, season, episodes, None)

    # try finding name on TVDB
    for cur_name in name_list:
        try:
            t = tvdb_api.Tvdb(custom_ui=classes.ShowListUI,
                              **sickbeard.TVDB_API_PARMS)
            self._log(u"Looking up name " + cur_name + u" on TVDB", logger.DEBUG)
            showObj = t[cur_name]
        except tvdb_exceptions.tvdb_exception:
            # if none found, search on all languages.
            # BUGFIX: the previous code executed an unconditional `continue` after this
            # inner try, which threw away a *successful* all-languages lookup; now we
            # only move on to the next candidate name when the retry itself fails.
            try:
                # There's gotta be a better way of doing this but we don't wanna
                # change the language value elsewhere
                ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()
                ltvdb_api_parms['search_all_languages'] = True
                t = tvdb_api.Tvdb(custom_ui=classes.ShowListUI,
                                  **ltvdb_api_parms)
                self._log(u"Looking up name " + cur_name + u" in all languages on TVDB", logger.DEBUG)
                showObj = t[cur_name]
            except (tvdb_exceptions.tvdb_exception, IOError):
                continue
        except IOError:
            continue

        self._log(u"Lookup successful, using tvdb id " + str(showObj["id"]), logger.DEBUG)
        self._finalize(parse_result)
        return (int(showObj["id"]), season, episodes, None)

    self._finalize(parse_result)
    return to_return
def _parse_string(self, name):  # pylint: disable=too-many-locals, too-many-branches, too-many-statements
    """
    Run every compiled release-name regex against `name` and build the best ParseResult.

    name: release name to parse (e.g. u"Show.Name.S01E02.720p.HDTV-GROUP")

    Returns a ParseResult — with show object, quality, and scene-converted
    season/episode/absolute numbering filled in when a show can be resolved —
    or None when `name` is empty or no regex matched.
    Raises InvalidNameException when scene-number conversion produces episodes
    spanning more than one season.
    """
    if not name:
        return

    matches = []
    bestResult = None

    # Remove non release groups from filename
    name = remove_non_release_groups(name)

    for (cur_regex_num, cur_regex_name, cur_regex) in self.compiled_regexes:
        match = cur_regex.match(name)

        if not match:
            continue

        result = ParseResult(name)
        result.which_regex = [cur_regex_name]
        # earlier regexes in the list are preferred: lower index -> higher base score,
        # then each successfully captured group bumps the score by 1 below
        result.score = 0 - cur_regex_num

        named_groups = match.groupdict().keys()

        if 'series_name' in named_groups:
            result.series_name = match.group('series_name')
            if result.series_name:
                result.series_name = self.clean_series_name(result.series_name)
                result.score += 1

        if 'series_num' in named_groups and match.group('series_num'):
            result.score += 1

        if 'season_num' in named_groups:
            tmp_season = int(match.group('season_num'))
            # the 'bare' regex can misread a 19xx/20xx year prefix as a season; skip those
            if cur_regex_name == 'bare' and tmp_season in (19, 20):
                continue
            result.season_number = tmp_season
            result.score += 1

        if 'ep_num' in named_groups:
            ep_num = self._convert_number(match.group('ep_num'))
            if 'extra_ep_num' in named_groups and match.group('extra_ep_num'):
                # multi-episode release: expand the inclusive episode range
                result.episode_numbers = range(ep_num, self._convert_number(match.group('extra_ep_num')) + 1)
                result.score += 1
            else:
                result.episode_numbers = [ep_num]
            result.score += 1

        if 'ep_ab_num' in named_groups:
            # anime releases number episodes absolutely rather than per-season
            ep_ab_num = self._convert_number(match.group('ep_ab_num'))
            if 'extra_ab_ep_num' in named_groups and match.group('extra_ab_ep_num'):
                result.ab_episode_numbers = range(ep_ab_num, self._convert_number(match.group('extra_ab_ep_num')) + 1)
                result.score += 1
            else:
                result.ab_episode_numbers = [ep_ab_num]
            result.score += 1

        if 'air_date' in named_groups:
            air_date = match.group('air_date')
            try:
                result.air_date = dateutil.parser.parse(air_date, fuzzy_with_tokens=True)[0].date()
                result.score += 1
            except Exception:
                # unparseable date means this regex's interpretation is wrong; try the next one
                continue

        if 'extra_info' in named_groups:
            tmp_extra_info = match.group('extra_info')

            # Show.S04.Special or Show.S05.Part.2.Extras is almost certainly not every episode in the season
            if tmp_extra_info and cur_regex_name == 'season_only' and re.search(
                    r'([. _-]|^)(special|extra)s?\w*([. _-]|$)', tmp_extra_info, re.I):
                continue
            result.extra_info = tmp_extra_info
            result.score += 1

        if 'release_group' in named_groups:
            result.release_group = match.group('release_group')
            result.score += 1

        if 'version' in named_groups:
            # assigns version to anime file if detected using anime regex. Non-anime regex receives -1
            version = match.group('version')
            if version:
                result.version = version
            else:
                result.version = 1
        else:
            result.version = -1

        matches.append(result)

    if matches:
        # pick best match with highest score based on placement
        bestResult = max(sorted(matches, reverse=True, key=lambda x: x.which_regex), key=lambda x: x.score)

        show = None
        if not self.naming_pattern:
            # try and create a show object for this result
            show = helpers.get_show(bestResult.series_name, self.tryIndexers)

        # confirm passed in show object indexer id matches result show object indexer id
        if show:
            if self.showObj and show.indexerid != self.showObj.indexerid:
                show = None
            bestResult.show = show
        elif not show and self.showObj:
            bestResult.show = self.showObj

        # if this is a naming pattern test or result doesn't have a show object then return best result
        if not bestResult.show or self.naming_pattern:
            return bestResult

        # get quality
        bestResult.quality = common.Quality.nameQuality(name, bestResult.show.is_anime)

        new_episode_numbers = []
        new_season_numbers = []
        new_absolute_numbers = []

        # if we have an air-by-date show then get the real season/episode numbers
        if bestResult.is_air_by_date:
            airdate = bestResult.air_date.toordinal()
            main_db_con = db.DBConnection()
            sql_result = main_db_con.select(
                "SELECT season, episode FROM tv_episodes WHERE showid = ? and indexer = ? and airdate = ?",
                [bestResult.show.indexerid, bestResult.show.indexer, airdate])

            season_number = None
            episode_numbers = []

            if sql_result:
                season_number = int(sql_result[0][0])
                episode_numbers = [int(sql_result[0][1])]

            if season_number is None or not episode_numbers:
                # airdate not in our DB yet; fall back to asking the indexer directly
                try:
                    lINDEXER_API_PARMS = sickbeard.indexerApi(bestResult.show.indexer).api_params.copy()

                    if bestResult.show.lang:
                        lINDEXER_API_PARMS['language'] = bestResult.show.lang

                    t = sickbeard.indexerApi(bestResult.show.indexer).indexer(**lINDEXER_API_PARMS)

                    epObj = t[bestResult.show.indexerid].airedOn(bestResult.air_date)[0]

                    season_number = int(epObj["seasonnumber"])
                    episode_numbers = [int(epObj["episodenumber"])]
                except sickbeard.indexer_episodenotfound:
                    logger.log(u"Unable to find episode with date " + str(bestResult.air_date) + " for show " + bestResult.show.name + ", skipping", logger.WARNING)
                    episode_numbers = []
                except sickbeard.indexer_error as e:
                    logger.log(u"Unable to contact " + sickbeard.indexerApi(bestResult.show.indexer).name + ": " + ex(e), logger.WARNING)
                    episode_numbers = []

            for epNo in episode_numbers:
                s = season_number
                e = epNo

                # translate indexer numbering to scene numbering when the show uses it
                if bestResult.show.is_scene:
                    (s, e) = scene_numbering.get_indexer_numbering(bestResult.show.indexerid,
                                                                   bestResult.show.indexer,
                                                                   season_number,
                                                                   epNo)
                new_episode_numbers.append(e)
                new_season_numbers.append(s)

        elif bestResult.show.is_anime and bestResult.ab_episode_numbers:
            # anime: map absolute episode numbers back to season/episode pairs
            scene_season = scene_exceptions.get_scene_exception_by_name(bestResult.series_name)[1]
            for epAbsNo in bestResult.ab_episode_numbers:
                a = epAbsNo

                if bestResult.show.is_scene:
                    a = scene_numbering.get_indexer_absolute_numbering(bestResult.show.indexerid,
                                                                       bestResult.show.indexer, epAbsNo,
                                                                       True, scene_season)

                (s, e) = helpers.get_all_episodes_from_absolute_number(bestResult.show, [a])

                new_absolute_numbers.append(a)
                new_episode_numbers.extend(e)
                new_season_numbers.append(s)

        elif bestResult.season_number and bestResult.episode_numbers:
            for epNo in bestResult.episode_numbers:
                s = bestResult.season_number
                e = epNo

                if bestResult.show.is_scene:
                    (s, e) = scene_numbering.get_indexer_numbering(bestResult.show.indexerid,
                                                                   bestResult.show.indexer,
                                                                   bestResult.season_number,
                                                                   epNo)
                if bestResult.show.is_anime:
                    a = helpers.get_absolute_number_from_season_and_episode(bestResult.show, s, e)
                    if a:
                        new_absolute_numbers.append(a)

                new_episode_numbers.append(e)
                new_season_numbers.append(s)

        # need to do a quick sanity check here. It's possible that we now have episodes
        # from more than one season (by tvdb numbering), and this is just too much
        # for sickbeard, so we'd need to flag it.
        new_season_numbers = list(set(new_season_numbers))  # remove duplicates
        if len(new_season_numbers) > 1:
            raise InvalidNameException("Scene numbering results episodes from "
                                       "seasons %s, (i.e. more than one) and "
                                       "sickrage does not support this. "
                                       "Sorry." % (str(new_season_numbers)))

        # I guess it's possible that we'd have duplicate episodes too, so lets
        # eliminate them
        new_episode_numbers = list(set(new_episode_numbers))
        new_episode_numbers.sort()

        # maybe even duplicate absolute numbers so why not do them as well
        new_absolute_numbers = list(set(new_absolute_numbers))
        new_absolute_numbers.sort()

        if new_absolute_numbers:
            bestResult.ab_episode_numbers = new_absolute_numbers

        if new_season_numbers and new_episode_numbers:
            bestResult.episode_numbers = new_episode_numbers
            bestResult.season_number = new_season_numbers[0]

        if bestResult.show.is_scene:
            logger.log(
                u"Converted parsed result " + bestResult.original_name + " into " + str(bestResult).decode('utf-8', 'xmlcharrefreplace'),
                logger.DEBUG)

    # CPU sleep
    time.sleep(0.02)

    return bestResult