if options.config: broker.register(CONFIG_DIR, options.config) # create config object using user config values try: config = get_processed_app_config(broker[RESOURCES_DIR], broker[CONFIG_DIR]) except (ConfigurationError), e: print e exit(1) # sanitize tv series filter subsection names for # consistent lookups for name, filters in config['tv']['filter'].items(): del config['tv']['filter'][name] config['tv']['filter'][Series.sanitize_series_name(name)] = build_series_filters(config, filters) """ logging setup """ # initialize and retrieve logger for later use logging.config.fileConfig(open(os.path.join(broker[CONFIG_DIR], "logging.conf"))) logger = logging.getLogger("mediarover") """ post configuration setup """ broker.register(CONFIG_OBJECT, config) broker.register(METADATA_OBJECT, Metadata()) broker.register(EPISODE_FACTORY_OBJECT, EpisodeFactory()) broker.register(FILESYSTEM_FACTORY_OBJECT, FilesystemFactory()) broker.register(NOTIFICATION_OBJECT, Notification())
def build_watch_list(config, process_aliases=True):
	"""
	Use given config object and build a dictionary of watched series.

	Walks every directory under config['tv']['tv_root'], creating a Series
	object for each show directory found.  Series-level filters (skip,
	ignore, alias) from config['tv']['filter'] are applied as each show is
	discovered; missing filter sections are created with default values.

	arguments:
	  config           -- processed application config object
	  process_aliases  -- when True, also register each series under its
	                      sanitized alias names

	returns:  dict mapping sanitized series name (and aliases) -> Series
	raises:   FilesystemError when a tv root is missing or unreadable
	"""
	logger = logging.getLogger("mediarover.series")

	watched_list = {}
	skip_list = {}
	for root in config['tv']['tv_root']:

		# first things first, check that tv root directory exists and that we
		# have read access to it
		# FIX: message typo corrected ("rootectory" -> "directory") and the
		# path is now interpolated into the message rather than passed as a
		# stray second constructor argument (the %s was never filled in)
		if not os.access(root, os.F_OK):
			raise FilesystemError("TV root directory (%s) does not exist!" % root)
		if not os.access(root, os.R_OK):
			raise FilesystemError("Missing read access to tv root directory (%s)" % root)

		logger.info("begin processing tv directory: %s", root)

		# grab sorted list of shows
		dir_list = os.listdir(root)
		dir_list.sort()
		for name in dir_list:

			# skip hidden directories
			if name.startswith("."):
				continue

			dir = os.path.join(root, name)
			if os.path.isdir(dir):
				sanitized_name = Series.sanitize_series_name(name=name)

				# already seen this series and have determined that user wants to skip it
				if sanitized_name in skip_list:
					continue

				# we've already seen this series. Append new directory to list of series paths
				elif sanitized_name in watched_list:
					series = watched_list[sanitized_name]
					series.path.append(dir)

				# new series, create new Series object and add to the watched list
				else:
					series = Series(name, path=dir)
					additions = {sanitized_name: series}

					# locate and process any filters for current series. If no user defined
					# filters for current series exist, build dict using default values
					if sanitized_name in config['tv']['filter']:
						config['tv']['filter'][sanitized_name] = build_series_filters(dir, config['tv']['quality'], config['tv']['filter'][sanitized_name])
					else:
						config['tv']['filter'][sanitized_name] = build_series_filters(dir, config['tv']['quality'])

					# check filters to see if user wants this series skipped...
					if config['tv']['filter'][sanitized_name]["skip"]:
						skip_list[sanitized_name] = series
						logger.debug("found skip filter, ignoring series: %s", series.name)
						continue

					# set season ignore list for current series
					if len(config['tv']['filter'][sanitized_name]['ignore']):
						logger.debug("ignoring the following seasons of %s: %s", series.name, config['tv']['filter'][sanitized_name]['ignore'])
						series.ignores = config['tv']['filter'][sanitized_name]['ignore']

					# process series aliases. For each new alias, register series in watched_list
					if process_aliases and len(config['tv']['filter'][sanitized_name]['alias']) > 0:
						series.aliases = config['tv']['filter'][sanitized_name]['alias']
						count = 0
						for alias in series.aliases:
							sanitized_alias = Series.sanitize_series_name(name=alias)
							if sanitized_alias in watched_list:
								logger.warning("duplicate series alias found for '%s'! Duplicate aliases can/will result in incorrect downloads and improper sorting! You've been warned..." % series)
							additions[sanitized_alias] = series
							count += 1
						logger.debug("%d alias(es) identified for series '%s'" % (count, series))

					# finally, add additions to watched list
					logger.debug("watching series: %s", series)
					watched_list.update(additions)

	return watched_list
def __episode_sort(broker, options, **kwargs):
	"""
	SABnzbd post-processing entry point: locate a downloaded episode file
	and build an Episode object from the job title.

	Builds the watched-series list from disk (registering it with the
	broker), finds the largest non-ignored file in the completed job
	directory, then parses the job name with the appropriate factory.

	NOTE(review): `options` is accepted but not used in this visible span.
	raises: ConfigurationError, InvalidArgument, FailedDownload,
	        FilesystemError, InvalidJobTitle
	"""
	logger = logging.getLogger("mediarover.scripts.sabnzbd.episode")

	# ensure user has indicated a desired quality level if quality management is turned on
	config = broker['config']
	if config['tv']['quality']['managed'] and config['tv']['quality']['desired'] is None:
		raise ConfigurationError("when quality management is on you must indicate a desired quality level at [tv] [[quality]] desired =")

	"""
	  arguments:
	    1. The final directory of the job (full path)
	    2. The name of the NZB file
	    3. User modifiable job name
	    4. Newzbin report number (may be empty)
	    5. Newzbin or user-defined category
	    6. Group that the NZB was posted in e.g. alt.binaries.x
	    7. Status
	"""
	# pull the SABnzbd arguments out of kwargs, defaulting job/nzb from the
	# completed path when not explicitly supplied
	path = kwargs['path']
	job = kwargs.get('job', os.path.basename(path))
	nzb = kwargs.get('nzb', job + ".nzb")
	report_id = kwargs.get('report_id', '')
	category = kwargs.get('category', '')
	group = kwargs.get('group', '')
	status = kwargs.get('status', 0)

	tv_root = config['tv']['tv_root']

	# check to ensure we have the necessary data to proceed
	# status > 0 (or a _FAILED_ path prefix) indicates a failed download;
	# map the SABnzbd status codes to specific failure messages
	if path is None or path == "":
		raise InvalidArgument("path to completed job is missing or null")
	elif os.path.basename(path).startswith("_FAILED_") or int(status) > 0:
		if job is None or job == "":
			raise InvalidArgument("job name is missing or null")
		elif int(status) == 1:
			raise FailedDownload("download failed verification")
		elif int(status) == 2:
			raise FailedDownload("download failed unpack")
		elif int(status) == 3:
			raise FailedDownload("download failed verification and unpack")
		else:
			raise FailedDownload("download failed")

	# build the watched-series list (same scan as build_watch_list, but
	# requiring write access and applying the configured umask)
	watched_list = {}
	skip_list = {}
	for root in tv_root:

		# make sure tv root directory exists and that we have read and
		# write access to it
		if not os.path.exists(root):
			raise FilesystemError("TV root directory (%s) does not exist!", (root))
		if not os.access(root, os.R_OK | os.W_OK):
			raise FilesystemError("Missing read/write access to tv root directory (%s)", (root))

		logger.info("begin processing tv directory: %s", root)

		# set umask for files and directories created during this session
		os.umask(config['tv']['umask'])

		# get list of shows in root tv directory
		dir_list = os.listdir(root)
		dir_list.sort()
		for name in dir_list:

			# skip hidden directories
			if name.startswith("."):
				continue

			dir = os.path.join(root, name)
			if os.path.isdir(dir):
				sanitized_name = Series.sanitize_series_name(name=name)

				# already seen this series and have determined that user wants to skip it
				if sanitized_name in skip_list:
					continue

				# we've already seen this series. Append new directory to list of series paths
				elif sanitized_name in watched_list:
					series = watched_list[sanitized_name]
					series.path.append(dir)

				# new series, create new Series object and add to the watched list
				else:
					series = Series(name, path=dir)
					additions = {sanitized_name: series}

					# locate and process any filters for current series. If no user defined
					# filters for current series exist, build dict using default values
					if sanitized_name in config['tv']['filter']:
						config['tv']['filter'][sanitized_name] = build_series_filters(dir, config['tv']['quality'], config['tv']['filter'][sanitized_name])
					else:
						config['tv']['filter'][sanitized_name] = build_series_filters(dir, config['tv']['quality'])

					# check filters to see if user wants this series skipped...
					if config['tv']['filter'][sanitized_name]["skip"]:
						skip_list[sanitized_name] = series
						logger.debug("found skip filter, ignoring series: %s", series.name)
						continue

					# set season ignore list for current series
					if len(config['tv']['filter'][sanitized_name]['ignore']):
						logger.debug("ignoring the following seasons of %s: %s", series.name, config['tv']['filter'][sanitized_name]['ignore'])
						series.ignores = config['tv']['filter'][sanitized_name]['ignore']

					# process series aliases. For each new alias, register series in watched_list
					if config['tv']['filter'][sanitized_name]['alias']:
						series.aliases = config['tv']['filter'][sanitized_name]['alias'];
						count = 0
						for alias in series.aliases:
							sanitized_alias = Series.sanitize_series_name(name=alias)
							if sanitized_alias in watched_list:
								logger.warning("duplicate series alias found for '%s'! Duplicate aliases can/will result in incorrect downloads and improper sorting! You've been warned..." % series)
							additions[sanitized_alias] = series
							count += 1
						logger.debug("%d alias(es) identified for series '%s'" % (count, series))

					# finally, add additions to watched list
					logger.debug("watching series: %s", series)
					watched_list.update(additions)

	# register series dictionary with dependency broker
	broker.register('watched_series', watched_list)

	logger.info("watching %d tv show(s)", len(watched_list))
	logger.debug("finished processing watched tv")

	# extensions the scan below should never treat as the episode file
	ignored = [ext.lower() for ext in config['tv']['ignored_extensions']]

	# locate episode file in given download directory: walk the job
	# directory and keep the LARGEST file whose extension is not ignored
	orig_path = None
	filename = None
	extension = None
	size = 0
	for dirpath, dirnames, filenames in os.walk(path):
		for file in filenames:

			# check if current file's extension is in list
			# of ignored extensions
			(name, ext) = os.path.splitext(file)
			ext = ext.lstrip(".")
			if ext.lower() in ignored:
				continue

			# get size of current file (in bytes); bigger file wins
			stat = os.stat(os.path.join(dirpath, file))
			if stat.st_size > size:
				filename = file
				extension = ext
				size = stat.st_size

			logger.debug("identified possible download: filename => %s, size => %d", filename, size)

	if filename is None:
		raise FilesystemError("unable to find episode file in given download path %r" % path)

	# NOTE(review): orig_path joins the job root, not dirpath — assumes the
	# chosen file sits directly under `path`; confirm for nested downloads
	orig_path = os.path.join(path, filename)
	logger.info("found download file at '%s'", orig_path)

	# retrieve the proper factory object: newzbin jobs carry a report id,
	# everything else uses the generic episode factory
	if report_id is not None and report_id != "":
		factory = broker['newzbin']
	else:
		factory = broker['episode_factory']

	# build episode object using job name (Python 2 except syntax)
	try:
		episode = factory.create_episode(job)
	except (InvalidMultiEpisodeData, MissingParameterError), e:
		raise InvalidJobTitle("unable to parse job title and create Episode object: %s" % e)
def list(self, series=None, skip=None, ignore=None):
	"""
	CherryPy handler: render the tv filter list page, and on POST
	create or update the filter for the given series.

	arguments (from the request):
	  series -- series name to add/modify (POST only)
	  skip   -- when present, mark the series as skipped
	  ignore -- comma-separated list of season numbers to ignore

	returns: rendered "tv/filter/list.tmpl" template output
	NOTE(review): this method intentionally shadows the builtin `list`;
	the name is the URL path segment and cannot be changed.
	"""
	logger = logging.getLogger("mediarover.interface.tv.filter")
	vars = build_default_template_vars(self._config)

	# index existing config filters by sanitized series name so directory
	# names and user-typed names resolve to the same entry
	all_filters = {}
	for name in self._config['tv']['filter']:
		clean = Series.sanitize_series_name(name, self._config['tv']['ignore_series_metadata'])
		all_filters[clean] = {
			'name': name,
			'filters': self._config['tv']['filter'][name],
		}

	# build list of template filters
	vars['dir_list'] = []
	for root in self._config['tv']['tv_root']:
		for dir in os.listdir(root):
			if dir.startswith("."):
				continue
			if os.path.isdir(os.path.join(root, dir)):

				# look for existing filters. If none are found,
				# build list using default values
				clean = Series.sanitize_series_name(dir, self._config['tv']['ignore_series_metadata'])
				if clean in all_filters:
					filters = self._config['tv']['filter'][all_filters[clean]['name']]
				else:
					filters = build_series_filters(os.path.join(root, dir))

					# add current series to all_filters list
					all_filters[clean] = {'name': dir, 'filters': filters}

				# generate template filters. Add to all_filters
				# and template variables
				all_filters[clean]['template'] = self._build_template_filters(dir, os.path.join(root, dir), filters)
				vars['dir_list'].append(all_filters[clean]['template'])

	# add / modify filter
	if cherrypy.request.method == "POST":
		error = ""

		# parse ignore list
		# TODO check for non-integers
		if ignore in ("", None):
			ignore = []
		else:
			ignore = [int(i) for i in ignore.split(",")]

		# check if given series already exists. If it does, modify
		# its existing filters. If not, we need to create a directory
		# on disk
		clean = Series.sanitize_series_name(series, self._config['tv']['ignore_series_metadata'])
		if clean in all_filters and 'template' in all_filters[clean]:
			filters = all_filters[clean]['filters']
			template = all_filters[clean]['template']
			message = "Successfully updated filter for %s" % series

		# create new series directory on disk
		else:
			# before creating a directory on disk, check if there are filters for current series
			# if yes, this means that we have some stale filters. Delete them and proceed
			if clean in all_filters:
				logger.info("Found stale filters for '%s', deleting", series)
				del self._config['tv']['filter'][all_filters[clean]['name']]
				del all_filters[clean]

			# TODO trap exceptions
			path = os.path.join(self._config['tv']['tv_root'][0], series)
			try:
				os.makedirs(path, self._config['tv']['umask'])
			except IOError:
				raise
			else:
				# directory created: build default filters and register the
				# new series with the template variables
				filters = build_series_filters(path)
				template = self._build_template_filters(series, path, filters)
				all_filters[clean] = {'name': series, 'filters': filters, 'template': template}
				vars['dir_list'].append(template)
				message = "Successfully created filter for %s" % series

		# update current filter with new values; absence of the `skip`
		# form field on POST clears an existing skip flag
		if skip is not None:
			filters['skip'] = template['skip'] = True
		elif filters['skip'] == True:
			filters['skip'] = template['skip'] = False
		if len(ignore):
			filters['ignore'] = template['ignore'] = ignore

		# persist the updated filter (keyed by the raw series name)
		if error == "":
			self._config['tv']['filter'][series] = filters
			save_config(self._config)
			vars['message'] = message
		vars['error'] = error

	# sort filters for display
	self._sort_filters(vars['dir_list'], 0, len(vars['dir_list'])-1)

	t = Template(file=os.path.join(vars['template_dir'], "tv", "filter", "list.tmpl"), searchList=[vars])
	return t.respond()