def prepare_files(self, path, files, force=False):
    """
    Prepare files for post-processing.

    Separate the Rar and Video files. -> self.video_files
    Extract the rar files. -> self.rar_content
    Collect new video files. -> self.video_in_rar
    List unwanted files -> self.unwanted_files

    :param path: Path to start looking for rar/video files.
    :param files: Array of files.
    """
    # Partition the listing; a file that is both media and rar counts as media.
    video_files = []
    rar_files = []
    for candidate in files:
        if helpers.is_media_file(candidate):
            video_files.append(candidate)
        elif helpers.is_rar_file(candidate):
            rar_files.append(candidate)

    rar_content = []
    video_in_rar = []
    if rar_files:
        # Extract the archives and pick up any media they contained.
        rar_content = self.unrar(path, rar_files, force)
        files.extend(rar_content)
        video_in_rar = [extracted for extracted in rar_content
                        if helpers.is_media_file(extracted)]
        video_files.extend(video_in_rar)

    self.log_and_output('Post-processing files: {files}', level=logging.DEBUG, files=files)
    self.log_and_output('Post-processing video files: {video_files}', level=logging.DEBUG, video_files=video_files)

    if rar_content:
        self.log_and_output('Post-processing rar content: {rar_content}', level=logging.DEBUG, rar_content=rar_content)
        self.log_and_output('Post-processing video in rar: {video_in_rar}', level=logging.DEBUG, video_in_rar=video_in_rar)

    # Everything that is neither a wanted video nor an allowed extension.
    unwanted_files = []
    for candidate in files:
        if candidate in video_files:
            continue
        if helpers.get_extension(candidate) in self.allowed_extensions:
            continue
        unwanted_files.append(candidate)
    if unwanted_files:
        self.log_and_output('Found unwanted files: {unwanted_files}', level=logging.DEBUG, unwanted_files=unwanted_files)

    self.video_files = video_files
    self.rar_content = rar_content
    self.video_in_rar = video_in_rar
    self.unwanted_files = unwanted_files
def unpack_rar_files(dirpath):
    """Unpack any existing rar files present in the specified dirpath.

    :param dirpath: the directory path to be used
    :type dirpath: str
    """
    from medusa import process_tv
    for root, _, files in os.walk(dirpath, topdown=False):
        # Skip folders that are being used for unpacking
        if u'_UNPACK' in root.upper():
            continue
        archives = [name for name in files if is_rar_file(name)]
        if not archives:
            continue
        if not app.UNPACK:
            logger.warning(u'Unpack is disabled. Skipping: %s', archives)
            continue
        # Only unpack when no media is present yet, or directly in the PP dir.
        videos = [name for name in files if is_media_file(name)]
        if not videos or root == app.TV_DOWNLOAD_DIR:
            logger.debug(u'Found rar files in post-process folder: %s', archives)
            process_tv.ProcessResult(app.TV_DOWNLOAD_DIR).unrar(root, archives, False)
def should_process(self, path, failed=False):
    """
    Determine if a directory should be processed.

    :param path: Path we want to verify
    :param failed: (optional) Mark the directory as failed
    :return: True if the directory is valid for processing, otherwise False
    :rtype: Boolean
    """
    if not self._is_valid_folder(path, failed):
        return False

    folder = os.path.basename(path)
    if helpers.is_hidden_folder(path) or folder in self.IGNORED_FOLDERS:
        self.log('Ignoring folder: {0}'.format(folder), logger.DEBUG)
        self.missedfiles.append('{0}: Hidden or ignored folder'.format(path))
        return False

    for root, subdirs, filenames in os.walk(path):
        # Every subfolder must pass validation before we look at the files.
        if not all(self._is_valid_folder(os.path.join(root, subdir), failed)
                   for subdir in subdirs):
            return False
        if any(helpers.is_media_file(name) or helpers.is_rar_file(name)
               for name in filenames):
            return True
        # Stop at first subdirectories if post-processing path
        if self.directory == path:
            break

    self.log('No processable items found in folder: {0}'.format(path), logger.DEBUG)
    return False
def save_subs(tv_episode, video, found_subtitles, video_path=None):
    """Save subtitles.

    :param tv_episode: the episode to download subtitles
    :type tv_episode: sickbeard.tv.Episode
    :param video:
    :type video: subliminal.Video
    :param found_subtitles:
    :type found_subtitles: list of subliminal.Subtitle
    :param video_path: the video path. If none, the episode location will be used
    :type video_path: str
    :return: a sorted list of the opensubtitles codes for the downloaded subtitles
    :rtype: list of str
    """
    video_path = video_path or tv_episode.location
    # Episode metadata forwarded to extra scripts and the history log.
    show_name = tv_episode.series.name
    season = tv_episode.season
    episode = tv_episode.episode
    episode_name = tv_episode.name
    show_indexerid = tv_episode.series.indexerid

    subtitles_dir = get_subtitles_dir(video_path)
    saved_subtitles = save_subtitles(video, found_subtitles,
                                     directory=_encode(subtitles_dir),
                                     single=not app.SUBTITLES_MULTI)

    for saved in saved_subtitles:
        logger.info(u'Found subtitle for %s in %s provider with language %s',
                    os.path.basename(video_path), saved.provider_name,
                    saved.language.opensubtitles)
        subtitle_path = compute_subtitle_path(saved, video_path, subtitles_dir)
        helpers.chmod_as_parent(subtitle_path)
        helpers.fix_set_group_id(subtitle_path)

        if app.SUBTITLES_EXTRA_SCRIPTS and is_media_file(video_path):
            subtitle_path = compute_subtitle_path(saved, video_path, subtitles_dir)
            run_subs_extra_scripts(video_path=video_path,
                                   subtitle_path=subtitle_path,
                                   subtitle_language=saved.language,
                                   show_name=show_name,
                                   season=season,
                                   episode=episode,
                                   episode_name=episode_name,
                                   show_indexerid=show_indexerid)

        if app.SUBTITLES_HISTORY:
            logger.debug(
                u'Logging to history downloaded subtitle from provider %s and language %s',
                saved.provider_name, saved.language.opensubtitles)
            history.log_subtitle(tv_episode, saved)

    # Refresh the subtitles property
    if tv_episode.location:
        tv_episode.refresh_subtitles()

    return sorted({saved.language.opensubtitles for saved in saved_subtitles})
def should_process(self, path):
    """
    Determine if a directory should be processed.

    :param path: Path we want to verify
    :return: True if the directory is valid for processing, otherwise False
    :rtype: Boolean
    """
    if not self._is_valid_folder(path):
        return False

    folder = os.path.basename(path)
    if helpers.is_hidden_folder(path) or folder in self.IGNORED_FOLDERS:
        self.log_and_output('Ignoring folder: {folder}', level=logging.DEBUG, folder=folder)
        self.missed_files.append('{0}: Hidden or ignored folder'.format(path))
        return False

    for root, subdirs, filenames in os.walk(path):
        # Every subfolder must pass validation before we look at the files.
        if not all(self._is_valid_folder(os.path.join(root, subdir))
                   for subdir in subdirs):
            return False
        if any(helpers.is_media_file(name) or helpers.is_rar_file(name)
               for name in filenames):
            return True
        # Stop at first subdirectories if post-processing path
        if self.directory == path and not self.resource_name:
            break

    self.log_and_output('No processable items found in folder: {path}', level=logging.DEBUG, path=path)
    return False
def test_is_media_file(self):
    # TODO: Add unicode tests
    # TODO: Add MAC OS resource fork tests
    # TODO: Add RARBG release tests
    # RARBG release intros should be ignored
    # MAC OS's "resource fork" files should be ignored
    # Extras should be ignored
    # and the file extension should be in the list of media extensions

    base_name = 'Show.Name.S01E01.HDTV.x264-RLSGROUP'

    # Every known media extension must be accepted...
    extension_tests = {}
    for ext in media_extensions:
        extension_tests['.'.join((base_name, ext))] = True
    # ...while non-media and subtitle extensions must be rejected.
    other_extensions = ['txt', 'sfv', 'srr', 'rar', 'nfo', 'zip']
    for ext in other_extensions + subtitle_extensions:
        extension_tests['.'.join((base_name, ext))] = False

    # Samples should be ignored
    sample_tests = {
        # Samples should be ignored, valid samples will return False
        'Show.Name.S01E01.HDTV.sample.mkv': False,  # default case
        'Show.Name.S01E01.HDTV.sAmPle.mkv': False,  # Ignore case
        'Show.Name.S01E01.HDTV.samples.mkv': True,  # sample should not be plural
        'Show.Name.S01E01.HDTVsample.mkv': True,  # no separation, can't identify as sample
        'Sample.Show.Name.S01E01.HDTV.mkv': False,  # location doesn't matter
        'Show.Name.Sample.S01E01.HDTV.sample.mkv': False,  # location doesn't matter
        'Show.Name.S01E01.HDTV.sample1.mkv': False,  # numbered samples are ok
        'Show.Name.S01E01.HDTV.sample12.mkv': False,  # numbered samples are ok
        'Show.Name.S01E01.HDTV.sampleA.mkv': True,  # samples should not be indexed alphabetically
    }

    # Non-string junk must be rejected, never raise.
    edge_cases = {
        None: False,
        '': False,
        0: False,
        1: False,
        42: False,
        123189274981274: False,
        12.23: False,
        ('this', 'is', 'a tuple'): False,
    }

    for test_group in (extension_tests, sample_tests, edge_cases):
        for name, expected in test_group.items():
            self.assertEqual(helpers.is_media_file(name), expected, name)
def subtitleMissedPP(self):
    # Render the "missed subtitles in post-process folder" management page.
    t = PageTemplate(rh=self, filename='manage_subtitleMissedPP.mako')
    app.RELEASES_IN_PP = []

    for root, _, files in os.walk(app.TV_DOWNLOAD_DIR, topdown=False):
        # Skip folders that are being used for unpacking
        if u'_UNPACK' in root.upper():
            continue

        for filename in sorted(files):
            if not is_media_file(filename):
                continue

            video_path = os.path.join(root, filename)
            video_date = datetime.datetime.fromtimestamp(os.stat(video_path).st_ctime)
            video_age = datetime.datetime.today() - video_date

            tv_episode = Episode.from_filepath(video_path)
            if not tv_episode:
                logger.log(u"Filename '{0}' cannot be parsed to an episode".format(filename), logger.DEBUG)
                continue

            # Only snatched/downloaded episodes are interesting here.
            ep_status = tv_episode.status
            if ep_status in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST):
                status = 'snatched'
            elif ep_status == DOWNLOADED:
                status = 'downloaded'
            else:
                continue

            if not tv_episode.series.subtitles:
                continue

            # Skip releases that already have associated subtitle files.
            related_files = PostProcessor(video_path).list_associated_files(video_path, subtitles_only=True)
            if related_files:
                continue

            # Express the age in the largest sensible unit.
            if video_age.days > 0:
                age_value, age_unit = video_age.days, 'd'
            elif video_age.seconds // 3600 > 0:
                age_value, age_unit = video_age.seconds // 3600, 'h'
            else:
                age_value, age_unit = video_age.seconds // 60, 'm'

            app.RELEASES_IN_PP.append({'release': video_path,
                                       'seriesid': tv_episode.series.indexerid,
                                       'show_name': tv_episode.series.name,
                                       'season': tv_episode.season,
                                       'episode': tv_episode.episode,
                                       'status': status,
                                       'age': age_value,
                                       'age_unit': age_unit,
                                       'date': video_date,
                                       'indexername': tv_episode.series.indexer_name})

    return t.render(releases_in_pp=app.RELEASES_IN_PP,
                    controller='manage', action='subtitleMissedPP')
def prepare_files(self, path, files, force=False):
    """Prepare files for post-processing."""
    # Split the listing into videos and rar archives; media takes precedence.
    video_files = []
    rar_files = []
    for name in files:
        if helpers.is_media_file(name):
            video_files.append(name)
        elif helpers.is_rar_file(name):
            rar_files.append(name)

    rar_content = []
    video_in_rar = []
    if rar_files:
        # Extract archives, then fold any extracted media into the video list.
        rar_content = self.unrar(path, rar_files, force)
        files.extend(rar_content)
        video_in_rar = [name for name in rar_content if helpers.is_media_file(name)]
        video_files.extend(video_in_rar)

    self._log('Post-processing files: {0}'.format(files), logger.DEBUG)
    self._log('Post-processing video files: {0}'.format(video_files), logger.DEBUG)

    if rar_content:
        self._log('Post-processing rar content: {0}'.format(rar_content), logger.DEBUG)
        self._log('Post-processing video in rar: {0}'.format(video_in_rar), logger.DEBUG)

    # Anything that is neither a wanted video nor an allowed extension.
    unwanted_files = []
    for name in files:
        if name not in video_files and helpers.get_extension(name) not in self.allowed_extensions:
            unwanted_files.append(name)
    if unwanted_files:
        self._log('Found unwanted files: {0}'.format(unwanted_files), logger.DEBUG)

    self.video_files = video_files
    self.rar_content = rar_content
    self.video_in_rar = video_in_rar
    self.unwanted_files = unwanted_files
def should_process(self, path, failed=False):
    """
    Determine if a directory should be processed.

    :param path: Path we want to verify
    :param failed: (optional) Mark the directory as failed
    :return: True if the directory is valid for processing, otherwise False
    :rtype: Boolean
    """
    folder = os.path.basename(path)
    if folder in self.IGNORED_FOLDERS:
        return False

    # Marker prefixes left by previous processing attempts.
    if folder.startswith('_FAILED_'):
        self._log('The directory name indicates it failed to extract.', logger.DEBUG)
        failed = True
    elif folder.startswith('_UNDERSIZED_'):
        self._log('The directory name indicates that it was previously rejected for being undersized.', logger.DEBUG)
        failed = True
    elif folder.upper().startswith('_UNPACK'):
        self._log('The directory name indicates that this release is in the process of being unpacked.', logger.DEBUG)
        self.missedfiles.append('{0}: Being unpacked'.format(folder))
        return False

    if failed:
        self.process_failed(path)
        self.missedfiles.append('{0}: Failed download'.format(folder))
        return False

    if helpers.is_hidden_folder(path):
        self._log('Ignoring hidden folder: {0}'.format(folder), logger.DEBUG)
        self.missedfiles.append('{0}: Hidden folder'.format(folder))
        return False

    # Walk the tree looking for anything processable.
    for _root, _dirs, filenames in os.walk(path):
        if any(helpers.is_media_file(name) or helpers.is_rar_file(name)
               for name in filenames):
            return True

    self._log('No processable items found in folder: {0}'.format(path), logger.DEBUG)
    return False
def prepare_files(self, path, files, force=False):
    """Prepare files for post-processing."""
    video_files = []
    rar_files = []
    # One pass over the listing: media first, otherwise rar.
    for entry in files:
        if helpers.is_media_file(entry):
            video_files.append(entry)
        elif helpers.is_rar_file(entry):
            rar_files.append(entry)

    rar_content = []
    video_in_rar = []
    if rar_files:
        rar_content = self.unrar(path, rar_files, force)
        files.extend(rar_content)
        video_in_rar = [entry for entry in rar_content
                        if helpers.is_media_file(entry)]
        video_files.extend(video_in_rar)

    self.log('Post-processing files: {0}'.format(files), logger.DEBUG)
    self.log('Post-processing video files: {0}'.format(video_files), logger.DEBUG)

    if rar_content:
        self.log('Post-processing rar content: {0}'.format(rar_content), logger.DEBUG)
        self.log('Post-processing video in rar: {0}'.format(video_in_rar), logger.DEBUG)

    # Leftovers that are neither wanted videos nor allowed extensions.
    unwanted_files = [entry for entry in files
                      if entry not in video_files
                      and helpers.get_extension(entry) not in self.allowed_extensions]
    if unwanted_files:
        self.log('Found unwanted files: {0}'.format(unwanted_files), logger.DEBUG)

    self.video_files = video_files
    self.rar_content = rar_content
    self.video_in_rar = video_in_rar
    self.unwanted_files = unwanted_files
def save_subs(tv_episode, video, found_subtitles, video_path=None):
    """Save subtitles.

    :param tv_episode: the episode to download subtitles
    :type tv_episode: sickbeard.tv.Episode
    :param video:
    :type video: subliminal.Video
    :param found_subtitles:
    :type found_subtitles: list of subliminal.Subtitle
    :param video_path: the video path. If none, the episode location will be used
    :type video_path: str
    :return: a sorted list of the opensubtitles codes for the downloaded subtitles
    :rtype: list of str
    """
    video_path = video_path or tv_episode.location
    # Episode metadata forwarded to extra scripts and the history log.
    show_name = tv_episode.series.name
    season = tv_episode.season
    episode = tv_episode.episode
    episode_name = tv_episode.name
    show_indexerid = tv_episode.series.indexerid

    subtitles_dir = get_subtitles_dir(video_path)
    saved_subtitles = save_subtitles(video,
                                     found_subtitles,
                                     directory=subtitles_dir,
                                     single=not app.SUBTITLES_MULTI,
                                     encoding='utf-8')

    for sub in saved_subtitles:
        logger.info(u'Found subtitle for %s in %s provider with language %s',
                    os.path.basename(video_path), sub.provider_name,
                    sub.language.opensubtitles)
        subtitle_path = compute_subtitle_path(sub, video_path, subtitles_dir)
        helpers.chmod_as_parent(subtitle_path)
        helpers.fix_set_group_id(subtitle_path)

        if app.SUBTITLES_EXTRA_SCRIPTS and is_media_file(video_path):
            subtitle_path = compute_subtitle_path(sub, video_path, subtitles_dir)
            run_subs_extra_scripts(video_path=video_path,
                                   subtitle_path=subtitle_path,
                                   subtitle_language=sub.language,
                                   show_name=show_name,
                                   season=season,
                                   episode=episode,
                                   episode_name=episode_name,
                                   show_indexerid=show_indexerid)

        if app.SUBTITLES_HISTORY:
            logger.debug(u'Logging to history downloaded subtitle from provider %s and language %s',
                         sub.provider_name, sub.language.opensubtitles)
            history.log_subtitle(tv_episode, sub)

    # Refresh the subtitles property
    if tv_episode.location:
        tv_episode.refresh_subtitles()

    return sorted({sub.language.opensubtitles for sub in saved_subtitles})
def test_is_media_file(self):
    # TODO: Add unicode tests
    # TODO: Add MAC OS resource fork tests
    # TODO: Add RARBG release tests
    # RARBG release intros should be ignored
    # MAC OS's "resource fork" files should be ignored
    # Extras should be ignored
    # and the file extension should be in the list of media extensions

    temp_name = 'Show.Name.S01E01.HDTV.x264-RLSGROUP'

    # Test all valid media extensions
    extension_tests = dict.fromkeys(
        ('.'.join((temp_name, ext)) for ext in media_extensions), True)
    # ...and some invalid ones
    other_extensions = ['txt', 'sfv', 'srr', 'rar', 'nfo', 'zip']
    extension_tests.update(dict.fromkeys(
        ('.'.join((temp_name, ext)) for ext in other_extensions + subtitle_extensions),
        False))

    # Samples should be ignored
    sample_tests = {
        # Samples should be ignored, valid samples will return False
        'Show.Name.S01E01.HDTV.sample.mkv': False,  # default case
        'Show.Name.S01E01.HDTV.sAmPle.mkv': False,  # Ignore case
        'Show.Name.S01E01.HDTV.samples.mkv': True,  # sample should not be plural
        'Show.Name.S01E01.HDTVsample.mkv': True,  # no separation, can't identify as sample
        'Sample.Show.Name.S01E01.HDTV.mkv': False,  # location doesn't matter
        'Show.Name.Sample.S01E01.HDTV.sample.mkv': False,  # location doesn't matter
        'Show.Name.S01E01.HDTV.sample1.mkv': False,  # numbered samples are ok
        'Show.Name.S01E01.HDTV.sample12.mkv': False,  # numbered samples are ok
        'Show.Name.S01E01.HDTV.sampleA.mkv': True,  # samples should not be indexed alphabetically
    }

    # Non-string junk must be rejected, never raise.
    edge_cases = {
        None: False,
        '': False,
        0: False,
        1: False,
        42: False,
        123189274981274: False,
        12.23: False,
        ('this', 'is', 'a tuple'): False,
    }

    for test_group in (extension_tests, sample_tests, edge_cases):
        for name, expected in iteritems(test_group):
            self.assertEqual(helpers.is_media_file(name), expected, name)
def unpack_rar_files(dirpath):
    """Unpack any existing rar files present in the specified dirpath.

    :param dirpath: the directory path to be used
    :type dirpath: str
    """
    from medusa import process_tv
    for root, _, files in os.walk(dirpath, topdown=False):
        # Skip folders that are being used for unpacking
        if u'_UNPACK' in root.upper():
            continue
        archives = [name for name in files if is_rar_file(name)]
        if not archives:
            continue
        if app.UNPACK:
            # Unpack only if no media exists yet, or we are in the PP root.
            videos = [name for name in files if is_media_file(name)]
            if not videos or root == app.TV_DOWNLOAD_DIR:
                logger.debug(u'Found rar files in post-process folder: %s', archives)
                process_tv.ProcessResult(app.TV_DOWNLOAD_DIR).unrar(root, archives, False)
        else:
            logger.warning(u'Unpack is disabled. Skipping: %s', archives)
def remove_ratio_reached(self):
    """Remove all Medusa torrents that ratio was reached.

    It loops in all hashes returned from client and check if it is in the snatch history
    if its then it checks if we already processed media from the torrent (episode status `Downloaded`)
    If is a RARed torrent then we don't have a media file so we check if that hash is from an
    episode that has a `Downloaded` status

    0 = Torrent is stopped
    1 = Queued to check files
    2 = Checking files
    3 = Queued to download
    4 = Downloading
    5 = Queued to seed
    6 = Seeding

    isFinished = whether seeding finished (based on seed ratio)
    IsStalled = Based on Tranmission setting "Transfer is stalled when inactive for"
    """
    log.info('Checking Transmission torrent status.')

    return_params = {
        'fields': ['name', 'hashString', 'percentDone', 'status', 'isStalled',
                   'errorString', 'seedRatioLimit', 'isFinished', 'uploadRatio',
                   'seedIdleLimit', 'files', 'activityDate']
    }
    post_data = json.dumps({'arguments': return_params, 'method': 'torrent-get'})

    if not self._request(method='post', data=post_data):
        log.debug('Could not connect to Transmission. Check logs')
        return

    try:
        reply = json.loads(self.response.content)
    except ValueError:
        log.warning('Unexpected data received from Transmission: {resp}',
                    {'resp': self.response.content})
        return

    if not reply['result'] == 'success':
        log.debug('Nothing in queue or error')
        return

    found_torrents = False
    for item in reply['arguments']['torrents']:
        # Check if that hash was sent by Medusa
        if not is_info_hash_in_history(str(item['hashString'])):
            continue
        found_torrents = True

        eligible_for_removal = False
        for part in item['files']:
            # Need to check only the media file or the .rar file to avoid checking all .r0* files in history
            if not (is_media_file(part['name']) or get_extension(part['name']) == 'rar'):
                continue
            # Check if media was processed
            # OR check hash in case of RARed torrents
            if is_already_processed_media(part['name']) or is_info_hash_processed(str(item['hashString'])):
                eligible_for_removal = True

        # Don't need to check status if we are not going to remove it.
        if not eligible_for_removal:
            log.info('Torrent not yet post-processed. Skipping: {torrent}',
                     {'torrent': item['name']})
            continue

        status = 'busy'
        error_string = item.get('errorString')
        if item.get('isStalled') and item['percentDone'] != 1:
            status = 'stalled'
        elif error_string and 'unregistered torrent' in error_string.lower():
            status = 'unregistered'
        elif item['status'] == 0:
            status = 'stopped'
            if item['percentDone'] == 1:
                # Check if torrent is stopped because of idle timeout
                seed_timed_out = False
                if item['activityDate'] > 0 and item['seedIdleLimit'] > 0:
                    last_activity_date = datetime.fromtimestamp(item['activityDate'])
                    seed_timed_out = (datetime.now() - timedelta(
                        minutes=item['seedIdleLimit'])) > last_activity_date
                if item.get('isFinished') or seed_timed_out:
                    status = 'completed'
        elif item['status'] == 6:
            status = 'seeding'

        if status == 'completed':
            log.info(
                'Torrent completed and reached minimum'
                ' ratio: [{ratio:.3f}/{ratio_limit:.3f}] or'
                ' seed idle limit: [{seed_limit} min].'
                ' Removing it: [{name}]',
                ratio=item['uploadRatio'],
                ratio_limit=item['seedRatioLimit'],
                seed_limit=item['seedIdleLimit'],
                name=item['name']
            )
            self.remove_torrent(item['hashString'])
        elif status == 'stalled':
            log.warning('Torrent is stalled. Check it: [{name}]',
                        name=item['name'])
        elif status == 'unregistered':
            log.warning('Torrent was unregistered from tracker.'
                        ' Check it: [{name}]', name=item['name'])
        elif status == 'seeding':
            if float(item['uploadRatio']) < float(item['seedRatioLimit']):
                log.info(
                    'Torrent did not reach minimum'
                    ' ratio: [{ratio:.3f}/{ratio_limit:.3f}].'
                    ' Keeping it: [{name}]',
                    ratio=item['uploadRatio'],
                    ratio_limit=item['seedRatioLimit'],
                    name=item['name']
                )
            else:
                log.info(
                    'Torrent completed and reached minimum ratio but it'
                    ' was force started again. Current'
                    ' ratio: [{ratio:.3f}/{ratio_limit:.3f}].'
                    ' Keeping it: [{name}]',
                    ratio=item['uploadRatio'],
                    ratio_limit=item['seedRatioLimit'],
                    name=item['name']
                )
        elif status in ('stopped', 'busy'):
            log.info('Torrent is {status}. Keeping it: [{name}]',
                     status=status, name=item['name'])
        else:
            log.warning(
                'Torrent has an unmapped status. Keeping it: [{name}].'
                ' Report torrent info: {info}',
                name=item['name'],
                info=item
            )

    if not found_torrents:
        log.info('No torrents found that were snatched by Medusa')
def subtitles_download_in_pp():  # pylint: disable=too-many-locals, too-many-branches, too-many-statements
    """Check for needed subtitles in the post process folder."""
    from medusa import process_tv
    from medusa.tv import Episode

    logger.info(u'Checking for needed subtitles in Post-Process folder')

    # Check if PP folder is set
    if not app.TV_DOWNLOAD_DIR or not os.path.isdir(app.TV_DOWNLOAD_DIR):
        logger.warning(u'You must set a valid post-process folder in "Post Processing" settings')
        return

    # Search for all wanted languages
    if not wanted_languages():
        return

    SubtitlesFinder.unpack_rar_files(app.TV_DOWNLOAD_DIR)

    run_post_process = False
    for folder_path, _, folder_files in os.walk(app.TV_DOWNLOAD_DIR, topdown=False):
        # Skip folders that are being used for unpacking
        if u'_UNPACK' in folder_path.upper():
            continue

        for found_file in sorted(folder_files):
            # Delete unwanted subtitles before downloading new ones
            delete_unwanted_subtitles(folder_path, found_file)

            if not is_media_file(found_file):
                continue

            video_path = os.path.join(folder_path, found_file)
            tv_episode = Episode.from_filepath(video_path)
            if not tv_episode:
                logger.debug(u'%s cannot be parsed to an episode', found_file)
                continue

            if tv_episode.status not in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST):
                continue

            if not tv_episode.series.subtitles:
                logger.debug(u'Subtitle disabled for show: %s. Running post-process to PP it', found_file)
                run_post_process = True
                continue

            # Should not consider existing subtitles from db if it's a replacement
            new_release_name = remove_extension(found_file)
            if tv_episode.release_name and new_release_name != tv_episode.release_name:
                logger.debug(u"As this is a release replacement I'm not going to consider existing "
                             u'subtitles or release name from database to refine the new release')
                logger.debug(u"Replacing old release name '%s' with new release name '%s'",
                             tv_episode.release_name, new_release_name)
                tv_episode.subtitles = []
                tv_episode.release_name = new_release_name

            embedded_subtitles = bool(not app.IGNORE_EMBEDDED_SUBS and video_path.endswith('.mkv'))
            downloaded_languages = download_subtitles(tv_episode,
                                                      video_path=video_path,
                                                      subtitles=False,
                                                      embedded_subtitles=embedded_subtitles)

            # Don't run post processor unless at least one file has all of the needed subtitles OR
            # if user don't want to ignore embedded subtitles and wants to consider 'unknown' as wanted sub,
            # and .mkv has one.
            if not app.PROCESS_AUTOMATICALLY and not run_post_process:
                if not needs_subtitles(downloaded_languages):
                    run_post_process = True
                elif not app.IGNORE_EMBEDDED_SUBS:
                    embedded_subs = get_embedded_subtitles(video_path)
                    run_post_process = accept_unknown(embedded_subs) or accept_any(embedded_subs)

    if run_post_process:
        logger.info(u'Starting post-process with default settings now that we found subtitles')
        process_tv.ProcessResult(app.TV_DOWNLOAD_DIR, app.PROCESS_METHOD).process()
def read_torrent_status(torrent_data):
    """Read torrent status from Deluge and Deluged client."""
    found_torrents = False
    info_hash_to_remove = []

    for raw_hash, details in viewitems(torrent_data):
        info_hash = str(raw_hash)
        # Only torrents snatched by Medusa are considered.
        if not is_info_hash_in_history(info_hash):
            continue
        found_torrents = True

        to_remove = False
        for torrent_file in details['files']:
            # Need to check only the media file or the .rar file to avoid checking all .r0* files in history
            if not (is_media_file(torrent_file['path']) or get_extension(torrent_file['path']) == 'rar'):
                continue
            # Check if media was processed
            # OR check hash in case of RARed torrents
            if is_already_processed_media(torrent_file['path']) or is_info_hash_processed(info_hash):
                to_remove = True

        # Don't need to check status if we are not going to remove it.
        if not to_remove:
            log.info('Torrent not yet post-processed. Skipping: {torrent}',
                     {'torrent': details['name']})
            continue

        if details['is_finished']:
            status = 'completed'
        elif details['is_seed']:
            status = 'seeding'
        elif details['paused']:
            status = 'paused'
        else:
            status = details['state']

        if status == 'completed':
            log.info(
                'Torrent completed and reached minimum'
                ' ratio: [{ratio:.3f}/{ratio_limit:.3f}] or'
                ' seed idle limit'
                ' Removing it: [{name}]',
                ratio=details['ratio'],
                ratio_limit=details['stop_ratio'],
                name=details['name']
            )
            info_hash_to_remove.append(info_hash)
        elif status == 'seeding':
            if float(details['ratio']) < float(details['stop_ratio']):
                log.info(
                    'Torrent did not reach minimum'
                    ' ratio: [{ratio:.3f}/{ratio_limit:.3f}].'
                    ' Keeping it: [{name}]',
                    ratio=details['ratio'],
                    ratio_limit=details['stop_ratio'],
                    name=details['name']
                )
            else:
                log.info(
                    'Torrent completed and reached minimum ratio but it'
                    ' was force started again. Current'
                    ' ratio: [{ratio:.3f}/{ratio_limit:.3f}].'
                    ' Keeping it: [{name}]',
                    ratio=details['ratio'],
                    ratio_limit=details['stop_ratio'],
                    name=details['name']
                )
        else:
            log.info('Torrent is {status}. Keeping it: [{name}]',
                     status=status, name=details['name'])

    if not found_torrents:
        log.info('No torrents found that were snatched by Medusa')

    return info_hash_to_remove
def subtitles_download_in_pp():  # pylint: disable=too-many-locals, too-many-branches, too-many-statements
    """Check for needed subtitles in the post process folder."""
    from medusa import process_tv
    from medusa.tv import Episode

    logger.info(u'Checking for needed subtitles in Post-Process folder')

    # Check if PP folder is set
    if not app.TV_DOWNLOAD_DIR or not os.path.isdir(app.TV_DOWNLOAD_DIR):
        logger.warning(u'You must set a valid post-process folder in "Post Processing" settings')
        return

    # Search for all wanted languages
    if not wanted_languages():
        return

    SubtitlesFinder.unpack_rar_files(app.TV_DOWNLOAD_DIR)

    run_post_process = False
    for current_dir, _, dir_files in os.walk(app.TV_DOWNLOAD_DIR, topdown=False):
        # Skip folders that are being used for unpacking
        if u'_UNPACK' in current_dir.upper():
            continue

        for media_name in sorted(dir_files):
            # Delete unwanted subtitles before downloading new ones
            delete_unwanted_subtitles(current_dir, media_name)

            if not is_media_file(media_name):
                continue

            video_path = os.path.join(current_dir, media_name)
            tv_episode = Episode.from_filepath(video_path)
            if not tv_episode:
                logger.debug(u'%s cannot be parsed to an episode', media_name)
                continue

            if tv_episode.status not in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST):
                continue

            if not tv_episode.series.subtitles:
                logger.debug(u'Subtitle disabled for show: %s. Running post-process to PP it', media_name)
                run_post_process = True
                continue

            # Should not consider existing subtitles from db if it's a replacement
            new_release_name = remove_extension(media_name)
            if tv_episode.release_name and new_release_name != tv_episode.release_name:
                logger.debug(u"As this is a release replacement I'm not going to consider existing "
                             u'subtitles or release name from database to refine the new release')
                logger.debug(u"Replacing old release name '%s' with new release name '%s'",
                             tv_episode.release_name, new_release_name)
                tv_episode.subtitles = []
                tv_episode.release_name = new_release_name

            embedded_subtitles = bool(not app.IGNORE_EMBEDDED_SUBS and video_path.endswith('.mkv'))
            downloaded_languages = download_subtitles(tv_episode,
                                                      video_path=video_path,
                                                      subtitles=False,
                                                      embedded_subtitles=embedded_subtitles)

            # Don't run post processor unless at least one file has all of the needed subtitles OR
            # if user don't want to ignore embedded subtitles and wants to consider 'unknown' as wanted sub,
            # and .mkv has one.
            if not app.PROCESS_AUTOMATICALLY and not run_post_process:
                if not needs_subtitles(downloaded_languages):
                    run_post_process = True
                elif not app.IGNORE_EMBEDDED_SUBS:
                    embedded_subs = get_embedded_subtitles(video_path)
                    run_post_process = accept_unknown(embedded_subs) or accept_any(embedded_subs)

    if run_post_process:
        logger.info(u'Starting post-process with default settings now that we found subtitles')
        process_tv.ProcessResult(app.TV_DOWNLOAD_DIR, app.PROCESS_METHOD).process()
def read_torrent_status(torrent_data):
    """Read torrent status from Deluge and Deluged client."""
    found_torrents = False
    info_hash_to_remove = []

    for entry in viewitems(torrent_data):
        info_hash = str(entry[0])
        details = entry[1]
        # Only torrents snatched by Medusa are considered.
        if not is_info_hash_in_history(info_hash):
            continue
        found_torrents = True

        to_remove = False
        for torrent_file in details['files']:
            # Need to check only the media file or the .rar file to avoid checking all .r0* files in history
            file_path = torrent_file['path']
            if not (is_media_file(file_path) or get_extension(file_path) == 'rar'):
                continue
            # Check if media was processed
            # OR check hash in case of RARed torrents
            if is_already_processed_media(file_path) or is_info_hash_processed(info_hash):
                to_remove = True

        # Don't need to check status if we are not going to remove it.
        if not to_remove:
            log.info('Torrent not yet post-processed. Skipping: {torrent}',
                     {'torrent': details['name']})
            continue

        status = 'busy'
        if details['is_finished']:
            status = 'completed'
        elif details['is_seed']:
            status = 'seeding'
        elif details['paused']:
            status = 'paused'
        else:
            status = details['state']

        if status == 'completed':
            log.info(
                'Torrent completed and reached minimum'
                ' ratio: [{ratio:.3f}/{ratio_limit:.3f}] or'
                ' seed idle limit'
                ' Removing it: [{name}]',
                ratio=details['ratio'],
                ratio_limit=details['stop_ratio'],
                name=details['name'])
            info_hash_to_remove.append(info_hash)
        elif status == 'seeding':
            if float(details['ratio']) < float(details['stop_ratio']):
                log.info(
                    'Torrent did not reach minimum'
                    ' ratio: [{ratio:.3f}/{ratio_limit:.3f}].'
                    ' Keeping it: [{name}]',
                    ratio=details['ratio'],
                    ratio_limit=details['stop_ratio'],
                    name=details['name'])
            else:
                log.info(
                    'Torrent completed and reached minimum ratio but it'
                    ' was force started again. Current'
                    ' ratio: [{ratio:.3f}/{ratio_limit:.3f}].'
                    ' Keeping it: [{name}]',
                    ratio=details['ratio'],
                    ratio_limit=details['stop_ratio'],
                    name=details['name'])
        else:
            log.info('Torrent is {status}. Keeping it: [{name}]',
                     status=status, name=details['name'])

    if not found_torrents:
        log.info('No torrents found that were snatched by Medusa')

    return info_hash_to_remove