def get_episode_file_path(self, ep_obj):
    # type: (sickbeard.tv.TVEpisode) -> AnyStr
    """Return the show dir/.meta/<episode file>.txt path for Tivo episode metadata.

    pyTivo requires the metadata filename to include the original extension,
    i.e. for an episode named foo.avi the metadata file is foo.avi.txt.

    ep_obj: a TVEpisode object to get the path for
    """
    if not ek.ek(os.path.isfile, ep_obj.location):
        logger.log(u"Episode location doesn't exist: " + str(ep_obj.location), logger.DEBUG)
        return ''
    meta_dir = ek.ek(os.path.join, ek.ek(os.path.dirname, ep_obj.location), '.meta')
    meta_name = ek.ek(os.path.basename, ep_obj.location) + "." + self._ep_nfo_extension
    return ek.ek(os.path.join, meta_dir, meta_name)
def get_episode_file_path(self, ep_obj):
    # type: (sickbeard.tv.TVEpisode) -> AnyStr
    """Return the show dir/metadata/<episode>.xml path for a MediaBrowser episode metadata file.

    ep_obj: a TVEpisode object to get the path for
    """
    if not ek.ek(os.path.isfile, ep_obj.location):
        logger.log(u"Episode location doesn't exist: " + str(ep_obj.location), logger.DEBUG)
        return ''
    meta_dir = ek.ek(os.path.join, ek.ek(os.path.dirname, ep_obj.location), 'metadata')
    xml_name = helpers.replace_extension(ek.ek(os.path.basename, ep_obj.location),
                                         self._ep_nfo_extension)
    return ek.ek(os.path.join, meta_dir, xml_name)
def fileQuality(filename):
    """Determine the quality of a video file from its embedded metadata (frame height).

    :param filename: path of the video file to probe
    :type filename: AnyStr
    :return: a Quality constant (Quality.UNKNOWN when undeterminable)
    :rtype: int
    """
    # noinspection PyPep8Naming
    import encodingKludge as ek
    from exceptions_helper import ex
    from sickbeard import logger

    if ek.ek(os.path.isfile, filename):
        from hachoir.parser import createParser
        from hachoir.metadata import extractMetadata
        from hachoir.stream import InputStreamError

        parser = height = None
        msg = 'Hachoir can\'t parse file "%s" content quality because it found error: %s'
        try:
            parser = ek.ek(createParser, filename)
        except InputStreamError as e:
            logger.log(msg % (filename, ex(e)), logger.WARNING)
        except (BaseException, Exception) as e:
            logger.log(msg % (filename, ex(e)), logger.ERROR)
            logger.log(traceback.format_exc(), logger.ERROR)

        if parser:
            extract = None
            try:
                # full index scan of avi files is very slow, so disable it for them
                args = ({}, {'scan_index': False})['.avi' == filename[-4::].lower()]
                parser.parse_exif = False
                parser.parse_photoshop_content = False
                parser.parse_comments = False
                extract = extractMetadata(parser, **args)
            except (BaseException, Exception) as e:
                logger.log(msg % (filename, ex(e)), logger.WARNING)
            if extract:
                try:
                    height = extract.get('height')
                except (AttributeError, ValueError):
                    # fall back to scanning metadata groups for a video stream height
                    try:
                        for metadata in extract.iterGroups():
                            if re.search('(?i)video', metadata.header):
                                height = metadata.get('height')
                                break
                    except (AttributeError, ValueError):
                        pass
            # noinspection PyProtectedMember
            parser.stream._input.close()

        # fix: `height` is None when parsing/extraction fails; comparing
        # None >= int raises TypeError on py3, so guard before comparing
        if None is not height:
            tolerance = (lambda value, percent: int(round(value - (value * percent / 100.0))))
            if height >= tolerance(352, 5):
                if height <= tolerance(720, 2):
                    return Quality.SDTV
                return (Quality.HDTV, Quality.FULLHDTV)[height >= tolerance(1080, 1)]
    return Quality.UNKNOWN
def __init__(self):
    """Lazily initialise the class-level image cache paths once the app cache dir exists."""
    if None is ImageCache.base_dir and ek.ek(os.path.exists, sickbeard.CACHE_DIR):
        images_dir = ek.ek(os.path.join, sickbeard.CACHE_DIR, 'images')
        ImageCache.base_dir = ek.ek(os.path.abspath, images_dir)
        ImageCache.shows_dir = ek.ek(os.path.abspath,
                                     ek.ek(os.path.join, self.base_dir, 'shows'))
def _remove_zoneinfo_failed(filename):
    # type: (AnyStr) -> None
    """Delete a partially downloaded zoneinfo temp file, ignoring any failure."""
    try:
        ek.ek(os.remove, filename)
    except (BaseException, Exception):
        # best-effort cleanup; the file may already be gone or locked
        pass
def __init__(self, config=True, cache_dir=True, workers=4, multi=False, force=False,
             max_depth=3, autostart=False, plugins_config=None, files_mode=-1):
    """Set up the subtitle downloader: worker queues, config file and cache dir.

    :param config: True to use the default XDG config file, a path string for a
        custom one, or any falsy value to skip configuration entirely
    :param cache_dir: True for the default XDG cache dir, a path string for a
        custom one, or falsy to disable caching
    :param workers: number of worker threads
    :param multi: download multiple languages per file (.xx.srt naming)
    :param force: overwrite existing subtitle files
    :param max_depth: maximum directory recursion depth (0 = unlimited)
    :param autostart: start the worker threads immediately
    :param plugins_config: optional per-plugin configuration dict
    :param files_mode: chmod mode for written files (-1 = leave default)
    """
    # set default values
    self.multi = multi
    self.force = force
    self.max_depth = max_depth
    self.config = None
    self.config_file = None
    self.cache_dir = None
    self.taskQueue = Queue.Queue()
    self.resultQueue = Queue.Queue()
    self._languages = None
    self._plugins = self.listAPIPlugins()
    self.workers = workers
    self.plugins_config = plugins_config
    self.files_mode = files_mode
    if autostart:
        self.startWorkers()
    # handle configuration file preferences
    try:
        # NOTE: `== True` is intentional — True selects the default config
        # location, while any other truthy value is treated as a custom path
        if config == True:
            # default configuration file
            import xdg.BaseDirectory as bd
            self.config = ConfigParser.SafeConfigParser({"languages": "", "plugins": ""})
            self.config_file = ek.ek(os.path.join, bd.xdg_config_home, "subliminal", "config.ini")
            if not ek.ek(os.path.exists, self.config_file):
                # configuration file doesn't exist, create it
                self._createConfigFile()
            else:
                # configuration file exists, load it
                self._loadConfigFile()
        elif config:
            # custom configuration file
            self.config = ConfigParser.SafeConfigParser({"languages": "", "plugins": ""})
            self.config_file = config
            if not ek.ek(os.path.isfile, self.config_file):
                # custom configuration file doesn't exist, create it
                self._createConfigFile()
            else:
                self._loadConfigFile()
    except:
        # a usable config is mandatory: reset state, log, and re-raise
        self.config = None
        self.config_file = None
        logger.error(u"Failed to use the configuration file, continue without it")
        raise
    # handle cache directory preferences
    try:
        # same `== True` convention as for `config` above
        if cache_dir == True:
            # default cache directory
            import xdg.BaseDirectory as bd
            self.cache_dir = ek.ek(os.path.join, bd.xdg_config_home, "subliminal", "cache")
            if not ek.ek(os.path.exists, self.cache_dir):
                # cache directory doesn't exist, create it
                ek.ek(os.mkdir, self.cache_dir)
                logger.debug(u'Creating cache directory: %s' % self.cache_dir)
        elif cache_dir:
            # custom cache directory
            self.cache_dir = cache_dir
            if not ek.ek(os.path.isdir, self.cache_dir):
                # custom cache dir doesn't exist, create it
                ek.ek(os.mkdir, self.cache_dir)
                logger.debug(u'Creating cache directory: %s' % self.cache_dir)
    except:
        # cache is optional: unlike the config above, failure is swallowed
        self.cache_dir = None
        logger.error(u"Failed to use the cache directory, continue without it")
def remove_file_failed(filename):
    """Best-effort delete of the given file; all errors are swallowed.

    :param filename: filename
    :type filename: AnyStr
    """
    try:
        ek.ek(os.remove, filename)
    except (BaseException, Exception):
        # nothing useful to do if the delete fails
        pass
def _migrate_v16():
    """Remove obsolete image cache sub-folders (anidb/imdb/trakt) from the cache dir."""
    if sickbeard.CACHE_DIR and ek.ek(os.path.isdir, sickbeard.CACHE_DIR):
        cache_default = sickbeard.CACHE_DIR
        for obsolete in ('anidb', 'imdb', 'trakt'):
            # clear_cache() operates on sickbeard.CACHE_DIR, so point it at each dead path
            sickbeard.CACHE_DIR = '%s/images/%s' % (cache_default, obsolete)
            helpers.clear_cache(True)
            try:
                ek.ek(os.rmdir, sickbeard.CACHE_DIR)
            except OSError:
                pass
        # restore the real cache dir
        sickbeard.CACHE_DIR = cache_default
def backup_db(self, target, backup_filename=None):
    # type: (AnyStr, AnyStr) -> Tuple[bool, AnyStr]
    """
    backs up the db to target dir + optional filename

    Availability: SQLite 3.6.11 or higher
    New in version 3.7

    :param target: target dir
    :param backup_filename: optional backup filename (default is the source name)
    :return: success, message
    """
    if not db_supports_backup:
        logger.log('this python sqlite3 version doesn\'t support backups', logger.DEBUG)
        return False, 'this python sqlite3 version doesn\'t support backups'

    if not ek.ek(os.path.isdir, target):
        logger.log('Backup target invalid', logger.ERROR)
        return False, 'Backup target invalid'

    # default to the source db filename when no explicit name was given
    target_db = ek.ek(os.path.join, target,
                      (backup_filename, self.filename)[None is backup_filename])
    if ek.ek(os.path.exists, target_db):
        logger.log('Backup target file already exists', logger.ERROR)
        return False, 'Backup target file already exists'

    # progress callback invoked by sqlite3 for each copied batch of pages
    def progress(status, remaining, total):
        logger.log('Copied %s of %s pages...' % (total - remaining, total), logger.DEBUG)

    backup_con = None

    try:
        # copy into this DB; hold db_lock so no writer mutates the source mid-copy
        backup_con = sqlite3.connect(target_db, 20)
        with backup_con:
            with db_lock:
                self.connection.backup(backup_con, progress=progress)
        logger.log('%s backup successful' % self.filename, logger.DEBUG)
    except sqlite3.Error as error:
        logger.log("Error while taking backup: %s" % ex(error), logger.ERROR)
        return False, 'Backup failed'
    finally:
        # always close the target connection, even on failure
        if backup_con:
            try:
                backup_con.close()
            except (BaseException, Exception):
                pass

    return True, 'Backup successful'
def remove_zoneinfo(cls):
    """Delete every downloaded zoneinfo tarball under the zoneinfo directory."""
    zi_root = helpers.real_path(sickbeard.ZONEINFO_DIR)
    for (path, dirs, files) in ek.ek(os.walk, zi_root):
        for cur_file in files:
            if not cur_file.endswith('.tar.gz'):
                continue
            try:
                ek.ek(os.remove, ek.ek(os.path.join, path, cur_file))
            except (BaseException, Exception):
                # best effort; a missing or locked file is not fatal
                pass
def _finishEarly(self):
    """Abort the add-show job: drop any partially added show and tidy up."""
    if None is not self.show_obj:
        self.show_obj.delete_show()

    if self.new_show:
        # when adding a new show, delete the empty folder that was already created
        try:
            ek.ek(os.rmdir, self.showDir)
        except (BaseException, Exception):
            pass

    self.finish()
def getzoneinfofile_stream():
    """Return the zoneinfo tarball contents as a BytesIO stream, or None on I/O error.

    Prefers the downloaded file in sickbeard.ZONEINFO_DIR, falling back to the
    copy shipped alongside this module.
    """
    try:
        zonefile = ek.ek(os.path.join, sickbeard.ZONEINFO_DIR, ZONEFILENAME)
        if not ek.ek(os.path.isfile, zonefile):
            warnings.warn('Falling back to included zoneinfo file')
            zonefile = ek.ek(os.path.join, ek.ek(os.path.dirname, __file__), ZONEFILENAME)
        with open(zonefile, 'rb') as f:
            return BytesIO(f.read())
    except IOError as e:  # TODO  switch to FileNotFoundError?
        warnings.warn("I/O error({0}): {1}".format(e.errno, e.strerror))
    return None
def get_episode_thumb_path(self, ep_obj):
    # type: (sickbeard.tv.TVEpisode) -> AnyStr
    """Return the show dir/metadata/<episode>.jpg path for a MediaBrowser episode thumb.

    ep_obj: a TVEpisode object to get the path from
    """
    if ek.ek(os.path.isfile, ep_obj.location):
        thumb_name = sg_helpers.replace_extension(ek.ek(os.path.basename, ep_obj.location), 'jpg')
        meta_dir = ek.ek(os.path.join, ek.ek(os.path.dirname, ep_obj.location), 'metadata')
        return ek.ek(os.path.join, meta_dir, thumb_name)
def _thumbnails_dir(self, tvid, prodid):
    # type: (int, int) -> AnyStr
    """
    Builds up the full path to the thumbnails image cache directory

    :param tvid: TV info source ID to use in the file name
    :type tvid: int
    :param prodid: Show ID to use in the file name
    :type prodid: int or long
    :return: path
    :rtype: AnyStr
    """
    show_dir = ek.ek(os.path.join, self.shows_dir, '%s-%s' % (tvid, prodid), 'thumbnails')
    return ek.ek(os.path.abspath, show_dir)
def get_utc():
    # type: (...) -> Optional[datetime.tzinfo]
    """Return a UTC tzinfo, preferring bundled zoneinfo, else dateutil's Greenwich file."""
    if hasattr(sickbeard, 'ZONEINFO_DIR'):
        try:
            utc = tz.gettz('GMT', zoneinfo_priority=True)
        except (BaseException, Exception):
            utc = None
        if isinstance(utc, datetime.tzinfo):
            return utc

    # fall back to the Greenwich tz file shipped with dateutil's zoneinfo package
    tz_utc_file = ek.ek(os.path.join, ek.ek(os.path.dirname, zoneinfo.__file__), 'Greenwich')
    if ek.ek(os.path.isfile, tz_utc_file):
        return tz.tzfile(tz_utc_file)
def _fanart_dir(self, tvid=None, prodid=None):
    # type: (int, int) -> AnyStr
    """
    Builds up the full path to the fanart image cache directory

    :param tvid: TV info source ID to use in the file name
    :type tvid: int
    :param prodid: Show ID to use in the file name
    :type prodid: int or long
    :return: path, or None when either id is missing
    :rtype: AnyStr or None
    """
    if None in (tvid, prodid):
        return
    return ek.ek(os.path.abspath,
                 ek.ek(os.path.join, self.shows_dir, '%s-%s' % (tvid, prodid), 'fanart'))
def cleanup_old_db_backups(filename):
    """Remove rolled backup files (<name>.db.v#.r# style) that sit next to the given db file."""
    try:
        d, filename = ek.ek(os.path.split, filename)
        if not d:
            d = sickbeard.DATA_DIR
        is_backup_of = (lambda fn: fn.is_file() and filename in fn.name
                        and re.search(r'\.db(\.v\d+)?\.r\d+$', fn.name))
        for f in filter_iter(is_backup_of, ek.ek(scandir, d)):
            try:
                ek.ek(os.unlink, f.path)
            except (BaseException, Exception):
                pass
    except (BaseException, Exception):
        # purely housekeeping; never let cleanup failures propagate
        pass
def write_file(
        filepath,  # type: AnyStr
        data,  # type: Union[AnyStr, etree.Element, requests.Response]
        raw=False,  # type: bool
        xmltree=False,  # type: bool
        utf8=False,  # type: bool
        raise_exceptions=False  # type: bool
):  # type: (...) -> bool
    """
    Write data to a file, creating parent dirs as needed.

    :param filepath: filepath
    :param data: data to write; a requests.Response when raw, an ElementTree when xmltree,
        otherwise text or bytes
    :param raw: write binary content streamed from a response object
    :param xmltree: use xml tree serialisation
    :param utf8: use UTF8 (a BOM is written first, then content is appended)
    :param raise_exceptions: raise exceptions
    :return: success
    """
    result = False

    if make_dirs(ek.ek(os.path.dirname, filepath), False):
        try:
            if raw:
                # stream the response body to disk in 1kB chunks
                with ek.ek(io.FileIO, filepath, 'wb') as fh:
                    for chunk in data.iter_content(chunk_size=1024):
                        if chunk:
                            fh.write(chunk)
                            fh.flush()
                    ek.ek(os.fsync, fh.fileno())
            else:
                w_mode = 'w'
                if utf8:
                    # write the BOM in binary mode first, then re-open in
                    # append mode so the payload lands after the BOM
                    w_mode = 'a'
                    with ek.ek(io.FileIO, filepath, 'wb') as fh:
                        fh.write(codecs.BOM_UTF8)

                if xmltree:
                    with ek.ek(io.FileIO, filepath, w_mode) as fh:
                        if utf8:
                            data.write(fh, encoding='utf-8')
                        else:
                            data.write(fh)
                else:
                    if isinstance(data, text_type):
                        with ek.ek(io.open, filepath, w_mode, encoding='utf-8') as fh:
                            fh.write(data)
                    else:
                        with ek.ek(io.FileIO, filepath, w_mode) as fh:
                            fh.write(data)

            chmod_as_parent(filepath)

            result = True
        except (EnvironmentError, IOError) as e:
            logger.error('Unable to write file %s : %s' % (filepath, ex(e)))
            if raise_exceptions:
                raise e

    return result
def _remove_old_zoneinfo():
    # type: (...) -> None
    """Delete zoneinfo tarballs other than the currently active one."""
    if None is zoneinfo.ZONEFILENAME:
        return

    current_file = helpers.real_path(
        ek.ek(os.path.join, sickbeard.ZONEINFO_DIR, ek.ek(os.path.basename, zoneinfo.ZONEFILENAME)))
    for entry in chain.from_iterable([
            scantree(helpers.real_path(_dir), include=r'\.tar\.gz$', filter_kind=False)
            for _dir in (sickbeard.ZONEINFO_DIR, )]):  # type: DirEntry
        if current_file == entry.path:
            continue
        if remove_file_perm(entry.path, log_err=False):
            logger.log(u'Delete unneeded old zoneinfo File: %s' % entry.path)
        else:
            logger.log(u'Unable to delete: %s' % entry.path, logger.ERROR)
def has_file(image_file):
    # type: (AnyStr) -> bool
    """
    :param image_file: image file (may contain glob wildcards)
    :type image_file: AnyStr
    :return: true if a image_file exists
    :rtype: bool
    """
    result = []
    for filename in ek.ek(glob.glob, image_file):
        is_file = ek.ek(os.path.isfile, filename)
        result.append(is_file and filename)
        # fix: only log a cache hit for regular files (glob can match dirs too)
        if is_file:
            logger.log(u'Found cached %s' % filename, logger.DEBUG)

    not any(result) and logger.log(u'No cache for %s' % image_file, logger.DEBUG)
    return any(result)
def get_system_temp_dir():
    """
    :return: Returns the [system temp dir]/tvdb_api-u501 (or tvdb_api-myuser)
    :rtype: AnyStr
    """
    temp_root = tempfile.gettempdir()
    if hasattr(os, 'getuid'):
        return ek.ek(os.path.join, temp_root, 'SickGear-u%d' % os.getuid())
    # Windows has no uid; fall back to the login name
    try:
        return ek.ek(os.path.join, temp_root, 'SickGear-%s' % getpass.getuser())
    except ImportError:
        # no user name available at all
        return ek.ek(os.path.join, temp_root, 'SickGear')
def anidb_cache_dir():
    # type: (...) -> Optional[AnyStr]
    """Return the anidb cache directory path, creating it if needed (None on failure)."""
    cache_dir = ek.ek(os.path.join, sickbeard.CACHE_DIR or get_system_temp_dir(), 'anidb')
    return cache_dir if make_dirs(cache_dir) else None
def addDefaultShow(self, tvid, prod_id, name, status):
    """
    Adds a new show with the default settings

    :param tvid: TV info source ID
    :param prod_id: show ID at that source
    :param name: show name (used for the folder name)
    :param status: default episode status for the show
    """
    # fix: was `prodid` (undefined name -> NameError); the parameter is `prod_id`
    if not helpers.find_show_by_id({int(tvid): int(prod_id)}):
        logger.log(u"Adding show " + str(prod_id))
        root_dirs = sickbeard.ROOT_DIRS.split('|')

        try:
            # root_dirs[0] is the index of the default root dir in the remainder
            location = root_dirs[int(root_dirs[0]) + 1]
        except (BaseException, Exception):
            location = None

        if location:
            showPath = ek.ek(os.path.join, location, helpers.sanitize_filename(name))
            dir_exists = helpers.make_dir(showPath)
            if not dir_exists:
                logger.log(u"Unable to create the folder " + showPath + ", can't add the show",
                           logger.ERROR)
                return
            else:
                helpers.chmod_as_parent(showPath)

            sickbeard.show_queue_scheduler.action.addShow(
                int(tvid), int(prod_id), showPath, status,
                int(sickbeard.QUALITY_DEFAULT), int(sickbeard.FLATTEN_FOLDERS_DEFAULT),
                paused=sickbeard.TRAKT_START_PAUSED)
        else:
            logger.log(u"There was an error creating the show, no root directory setting found",
                       logger.ERROR)
            return
def _get_creds(self, retry=False):
    """Fetch stored credentials from the disk cache.

    On a corrupt cache db (ValueError) the cache db file is removed and the
    read is retried once; a second failure propagates the error.
    """
    with diskcache.Cache(directory=self._cachedir) as cache:
        try:
            return cache.get(self._CREDS_STORAGE_KEY)
        except ValueError as e:
            if retry:
                raise e
            cache.close()
            import encodingKludge as ek
            import os
            ek.ek(os.remove,
                  ek.ek(os.path.join, self._cachedir, diskcache.core.DBNAME))
            return self._get_creds(retry=True)
def _createConfigFile(self):
    """Create a configuration file specified in self.config_file"""
    folder = ek.ek(os.path.dirname, self.config_file)
    if not ek.ek(os.path.exists, folder):
        logger.info(u"Creating folder: %s" % folder)
        ek.ek(os.mkdir, folder)

    # seed the language list from the system locale
    self._loadLanguageFromSystem()

    # default section plus the per-plugin section(s)
    self.config.set("DEFAULT", "languages", ",".join(self._languages))
    self.config.set("DEFAULT", "plugins", ",".join(self._plugins))
    self.config.add_section("SubtitleSource")
    self.config.set("SubtitleSource", "key", "")

    self._writeConfigFile()
    logger.info(u"Creating configuration file: %s" % self.config_file)
    logger.debug(u"Languages in created configuration file: %s" % self._languages)
    logger.debug(u"Plugins in created configuration file: %s" % self._plugins)
def nameQuality(name, anime=False):
    """
    Return The quality from an episode File renamed by SickGear
    If no quality is achieved it will try sceneQuality regex

    :param name: name
    :type name: AnyStr
    :param anime: is anime
    :type anime: bool
    :return:
    :rtype: int
    """
    # noinspection PyPep8Naming
    import encodingKludge as ek
    name = ek.ek(os.path.basename, name)

    # if we have our exact text then assume we put it there
    for quality in sorted(iterkeys(Quality.qualityStrings), reverse=True):
        if Quality.UNKNOWN == quality:
            continue
        if Quality.NONE == quality:
            # last chance: fall back to scene naming conventions
            return Quality.sceneQuality(name, anime)

        if re.search(r'\W' + Quality.qualityStrings[quality].replace(' ', r'\W') + r'\W',
                     name, re.I):
            return quality
def long_path(path):
    # type: (AnyStr) -> AnyStr
    """add long path prefix for Windows"""
    prefix = '\\\\?\\'
    if 'nt' == os.name and 260 < len(path) \
            and not path.startswith(prefix) and ek.ek(os.path.isabs, path):
        return prefix + path
    return path
def has_episode_thumb(self, ep_obj):
    # type: (sickbeard.tv.TVEpisode) -> AnyStr
    """Return whether a thumb image file exists for the given episode."""
    thumb_path = self.get_episode_thumb_path(ep_obj)
    found = None is not thumb_path and ek.ek(os.path.isfile, thumb_path)
    if thumb_path:
        logger.log(u"Checking if " + thumb_path + " exists: " + str(found), logger.DEBUG)
    return found
def _has_season_banner(self, show_obj, season):
    # type: (sickbeard.tv.TVShow,int) -> AnyStr
    """Return whether a banner image file exists for the given show season."""
    banner_path = self.get_season_banner_path(show_obj, season)
    found = None is not banner_path and ek.ek(os.path.isfile, banner_path)
    if banner_path:
        logger.log(u"Checking if " + banner_path + " exists: " + str(found), logger.DEBUG)
    return found
def _main():
    """Kick off automatic post-processing of TV_DOWNLOAD_DIR after sanity checks."""
    dl_dir = sickbeard.TV_DOWNLOAD_DIR

    if not ek.ek(os.path.isdir, dl_dir):
        logger.log(u"Automatic post-processing attempted but dir %s doesn't exist" % dl_dir,
                   logger.ERROR)
        return

    if not ek.ek(os.path.isabs, dl_dir):
        logger.log(u'Automatic post-processing attempted but dir %s is relative '
                   '(and probably not what you really want to process)' % dl_dir,
                   logger.ERROR)
        return

    processTV.processDir(dl_dir, is_basedir=True)
def update_show_indexer_metadata(self, show_obj):
    # type: (sickbeard.tv.TVShow) -> bool
    """Refresh the <indexer> and <id> elements of a show's nfo file from show_obj."""
    if not (self.show_metadata and show_obj and self._has_show_metadata(show_obj)):
        return

    logger.log(u'Metadata provider %s updating show indexer metadata file for %s' % (
        self.name, show_obj.name), logger.DEBUG)

    nfo_file_path = self.get_show_file_path(show_obj)
    with ek.ek(io.open, nfo_file_path, 'r', encoding='utf8') as xmlFileObj:
        show_xml = etree.ElementTree(file=xmlFileObj)

    root = show_xml.getroot()
    # set (or create) the indexer/id elements with the current values
    for tag, value in (('indexer', str(show_obj.tvid)), ('id', str(show_obj.prodid))):
        node = show_xml.find(tag)
        if None is not node:
            node.text = '%s' % value
        else:
            etree.SubElement(root, tag).text = '%s' % value

    # Make it purdy
    sg_helpers.indent_xml(root)

    sg_helpers.write_file(nfo_file_path, show_xml, xmltree=True, utf8=True)

    return True
def get_season_banner_path(self, show_obj, season):
    # type: (sickbeard.tv.TVShow, int) -> Optional[AnyStr]
    """
    Season thumbs for MediaBrowser go in Show Dir/Season X/banner.jpg

    If no season folder exists, None is returned
    """
    dir_list = [x for x in ek.ek(os.listdir, show_obj.location)
                if ek.ek(os.path.isdir, ek.ek(os.path.join, show_obj.location, x))]

    season_dir = None
    for cur_dir in dir_list:
        # MediaBrowser 1.x only supports 'Specials'
        # MediaBrowser 2.x looks to only support 'Season 0'
        # MediaBrowser 3.x looks to mimic XBMC/Plex support
        if 0 == season and "Specials" == cur_dir:
            season_dir = cur_dir
            break

        match = re.match(r'^Season\s+(\d+)$', cur_dir, re.I)
        if match and season == int(match.group(1)):
            season_dir = cur_dir
            break

    if not season_dir:
        logger.log(u"Unable to find a season dir for season " + str(season), logger.DEBUG)
        return None

    logger.log(u"Using " + str(season_dir) + "/banner.jpg as season dir for season " + str(season),
               logger.DEBUG)

    return ek.ek(os.path.join, show_obj.location, season_dir, 'banner.jpg')
def _recursiveSearch(self, entry, depth=0):
    """Search files in the entry (file or directory).

    Returns a list of tuples (languages, filenames) describing the subtitle
    downloads still needed for each video file found.
    """
    if depth > self.max_depth and self.max_depth != 0:
        # we do not want to search the whole file system except if max_depth = 0
        return []
    if ek.ek(os.path.isfile, entry):  # a file? scan it
        if depth != 0:  # only check for valid format if recursing, trust the user
            mimetypes.add_type("video/x-matroska", ".mkv")
            mimetype = mimetypes.guess_type(entry)[0]
            if mimetype not in SUPPORTED_FORMATS:
                return []
        basepath = ek.fixStupidEncodings(ek.ek(os.path.splitext, entry)[0])
        # check for .xx.srt if needed
        if self.multi and self.languages:
            if self.force:
                # force: request every configured language regardless of existing files
                return [(self.languages, [ek.ek(os.path.normpath, entry)])]
            needed_languages = self.languages[:]
            for l in self.languages:
                if ek.ek(os.path.exists, basepath + '.%s.srt' % l):
                    logger.info(u"Skipping language %s for file %s as it already exists. Use the --force option to force the download" % (l, entry))
                    needed_languages.remove(l)
            if needed_languages:
                return [(needed_languages, [ek.ek(os.path.normpath, entry)])]
            return []
        # single subtitle download: .srt
        if self.force or not ek.ek(os.path.exists, basepath + '.srt'):
            return [(self.languages, [ek.ek(os.path.normpath, entry)])]
    if ek.ek(os.path.isdir, entry):  # a dir? recurse
        # TODO if hidden folder, don't keep going (how to handle windows/mac/linux ?)
        files = []
        for e in ek.ek(os.listdir, entry):
            files.extend(self._recursiveSearch(ek.ek(os.path.join, entry, e), depth + 1))
        # sort so groupby below sees equal language tuples adjacently
        files.sort()
        grouped_files = []
        for languages, group in groupby(files, lambda t: t[0]):
            filenames = []
            for t in group:
                filenames.extend(t[1])
            grouped_files.append((languages, filenames))
        return grouped_files
    return []  # anything else, nothing.