Example #1
    def create_episode(self, string, **kwargs):

        # parse given string and extract episode attributes
        if MultiEpisode.handle(string):
            params = MultiEpisode.extract_from_string(string, **kwargs)
        elif DailyEpisode.handle(string):
            params = DailyEpisode.extract_from_string(string, **kwargs)
        elif SingleEpisode.handle(string):
            params = SingleEpisode.extract_from_string(string, **kwargs)
        else:
            raise InvalidEpisodeString("unable to identify episode type: %r" % string)

        # locate series object.  If series is unknown, create new series
        if type(params["series"]) is not Series:
            sanitized_series = Series.sanitize_series_name(name=params["series"])
            if sanitized_series in self.watched_series:
                params["series"] = self.watched_series[sanitized_series]
            else:
                params["series"] = Series(params["series"])
        else:
            sanitized_series = Series.sanitize_series_name(series=params["series"])

        if "quality" not in kwargs:
            if sanitized_series in self.config["tv"]["filter"]:
                params["quality"] = self.config["tv"]["filter"][sanitized_series]["quality"]["desired"]
            else:
                params["quality"] = self.config["tv"]["quality"]["desired"]

        if "start_episode" in params:
            return MultiEpisode(**params)
        elif "year" in params:
            return DailyEpisode(**params)
        else:
            return SingleEpisode(**params)
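
The factory above relies on each episode class exposing a handle() check that recognizes a naming pattern and an extract_from_string() that returns constructor parameters; the result type is then chosen by which keys ('start_episode', 'year') ended up in params. Below is a minimal, self-contained sketch of that dispatch pattern with a single illustrative stand-in parser (the SingleEpisodeStub class, its regex, and create_episode_stub are hypothetical, not mediarover's real classes):

import re

class SingleEpisodeStub(object):
    """Illustrative stand-in for a single-episode parser: matches 'Show.S01E02' style names."""
    pattern = re.compile(r"^(?P<series>.+?)[. ]S(?P<season>\d{1,2})E(?P<episode>\d{1,2})", re.IGNORECASE)

    @classmethod
    def handle(cls, string):
        return cls.pattern.search(string) is not None

    @classmethod
    def extract_from_string(cls, string, **kwargs):
        params = cls.pattern.search(string).groupdict()
        params.update(kwargs)
        return params

def create_episode_stub(string, **kwargs):
    # same shape as create_episode() above: ask each parser whether it
    # recognizes the string, take the first match, otherwise raise
    for candidate in (SingleEpisodeStub,):
        if candidate.handle(string):
            return candidate.extract_from_string(string, **kwargs)
    raise ValueError("unable to identify episode type: %r" % string)

print(create_episode_stub("Some.Show.S01E02.720p.HDTV", quality="high"))
# -> params dict like {'series': 'Some.Show', 'season': '01', 'episode': '02', 'quality': 'high'}

In the real factory the extracted params["series"] string is then swapped for a Series object and a default quality is filled in from config before the episode object is constructed.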
Example #2
	def create_episode(self, string, **kwargs):

		# parse given string and extract episode attributes
		if FilesystemMultiEpisode.handle(string):
			params = FilesystemMultiEpisode.extract_from_string(string, **kwargs)
		elif FilesystemDailyEpisode.handle(string):
			params = FilesystemDailyEpisode.extract_from_string(string, **kwargs)
		elif FilesystemSingleEpisode.handle(string):
			params = FilesystemSingleEpisode.extract_from_string(string, **kwargs)
		else:
			raise InvalidEpisodeString("unable to identify episode type: %r" % string)

		# locate series object.  If series is unknown, create new series
		if type(params['series']) is not Series:
			sanitized_series = Series.sanitize_series_name(name=params['series'])
			if sanitized_series in self.watched_series:
				params['series'] = self.watched_series[sanitized_series]
			else:
				params['series'] = Series(params['series'])
		else:
			sanitized_series = Series.sanitize_series_name(series=params['series'])

		if 'quality' not in kwargs:
			if sanitized_series in self.config['tv']['filter']:
				params['quality'] = self.config['tv']['filter'][sanitized_series]['quality']['desired']
			else:
				params['quality'] = self.config['tv']['quality']['desired']

		if 'start_episode' in params:
			return FilesystemMultiEpisode(**params)
		elif 'year' in params:
			return FilesystemDailyEpisode(**params)
		else:
			return FilesystemSingleEpisode(**params)
Example #3
	if options.config:
		broker.register(CONFIG_DIR, options.config)

	# create config object using user config values
	try:
		config = get_processed_app_config(broker[RESOURCES_DIR], broker[CONFIG_DIR])
	except (ConfigurationError), e:
		print e
		exit(1)

	# sanitize tv series filter subsection names for 
	# consistent lookups
	for name, filters in config['tv']['filter'].items():
		del config['tv']['filter'][name]
		config['tv']['filter'][Series.sanitize_series_name(name)] = build_series_filters(config, filters)

	""" logging setup """

	# initialize and retrieve logger for later use
	logging.config.fileConfig(open(os.path.join(broker[CONFIG_DIR], "logging.conf")))
	logger = logging.getLogger("mediarover")

	""" post configuration setup """

	broker.register(CONFIG_OBJECT, config)
	broker.register(METADATA_OBJECT, Metadata())
	broker.register(EPISODE_FACTORY_OBJECT, EpisodeFactory())
	broker.register(FILESYSTEM_FACTORY_OBJECT, FilesystemFactory())
	broker.register(NOTIFICATION_OBJECT, Notification())
Example #4
def __set_quality(broker, options, series_name=None, season_num=None, episode_num=None):
	logger = logging.getLogger("mediarover")

	help = """
Options:
(y)es    - process series and specify episode quality
(n)o     - skip to next series
(q)uit   - exit application"""

	series_help = """
Series Options:
(l)ow    - mark episodes as being of low quality
(m)edium - mark episodes as being of medium quality
(h)igh   - mark episodes as being of high quality"""

	config = broker[CONFIG_OBJECT]

	# build dict of watched series
	# register series dictionary with dependency broker
	series_lists = build_series_lists(config, process_aliases=False)
	broker.register(WATCHED_SERIES_LIST, series_lists[0])

	# build list of series to iterate over
	if series_name:
		names = [Series.sanitize_series_name(series_name)]
		if names[0] not in broker[WATCHED_SERIES_LIST]:
			print "ERROR: Unable to find series matching %r" % series_name
			exit(2)
		else:
			if season_num is not None:
				season_num = int(season_num)
			if episode_num is not None:
				episode_num = int(episode_num)
	else:
		names = broker[WATCHED_SERIES_LIST].keys()
		names.sort()

	displayed_series_help = 0
	quality_levels = [LOW, MEDIUM, HIGH]

	if options.series_prompt:
		print help

	for sanitized in names:
		series = broker[WATCHED_SERIES_LIST][sanitized]

		if options.series_prompt:
			answer = __query_user("Process '%s'? ([y]/n/q/?)" % series.name, ['y','n','q','?'], 'y', help)
			if answer == 'n':
				continue
			elif answer == 'q':
				exit(0)
		else:
			# ATTENTION: get files list now so that processing statement follows logging code 
			# resulting from filesystem scan
			series.files
			print "Processing '%s'..." % series.name

		# determine default quality for current series
		if config['tv']['filter'][sanitized]['desired_quality'] is not None:
			default = config['tv']['filter'][sanitized]['desired_quality']
		else:
			default = config['tv']['library']['quality']['desired']

		# if quality guessing is on, populate extension lists (if they weren't 
		# provided by user)
		if config['tv']['library']['quality']['managed'] and config['tv']['library']['quality']['guess']:
			if len(options.low) == 0:
				options.low = config['tv']['library']['quality']['extension'][LOW]
			if len(options.medium) == 0:
				options.medium = config['tv']['library']['quality']['extension'][MEDIUM]
			if len(options.high) == 0:
				options.high = config['tv']['library']['quality']['extension'][HIGH]

		low = list()
		medium = list()
		high = list()

		avg_sizes = dict()
		for file in series.files:
			if season_num:
				if file.episode.season != season_num:
					continue
				elif episode_num and file.episode.episode != episode_num:
					continue

			if hasattr(file.episode, 'episodes'):
				parts = file.episode.episodes
			else:
				parts = [file.episode]

			# first things first: check if user has chosen a quality level
			# for files with the current extension
			ext = file.extension
			if ext in options.low:
				low.extend(parts)
			elif ext in options.medium:
				medium.extend(parts)
			elif ext in options.high:
				high.extend(parts)

			# guess not, group files by average file size
			else:
				size = file.size
				for avg_size in avg_sizes.keys():
					difference = abs(float(avg_size)/float(size/len(parts)) - 1)

					# if the difference is 10% or less, update average value
					# and add current part(s) to list
					if difference <= 0.1:
						# add current file size to running total
						avg_sizes[avg_size]['total_size'] += size
						avg_sizes[avg_size]['episodes'].extend(parts)

						# calculate new average size and update dict
						new_avg = avg_sizes[avg_size]['total_size'] / len(avg_sizes[avg_size]['episodes'])
						avg_sizes[new_avg] = avg_sizes[avg_size]
						del avg_sizes[avg_size]
						break
					else:
						continue

				# no comparable size in current list, add and move on
				else:
					avg_sizes[size] = {'total_size': size, 'episodes': parts}

		# build quality prompt
		quality_prompt = list()
		for level in quality_levels:
			if level == default:
				quality_prompt.append("[%c]" % level[0])
			else:
				quality_prompt.append(level[0])
		quality_prompt.extend(['q','?'])
		quality_prompt = "/".join(quality_prompt)

		if not displayed_series_help:
			displayed_series_help += 1
			print series_help

		sizes = avg_sizes.keys()
		sizes.sort()
		for avg_size in sizes:
			approx_size = avg_size / (1024 * 1024)
			print "Found %d episode(s) with average size of %dMB" % (len(avg_sizes[avg_size]['episodes']), approx_size)
			answer = __query_user("Quality? (%s)" % quality_prompt, ['l','m','h','q','?'], default, series_help)
			if answer == 'q':
				exit(1)
			elif answer == 'l':
				quality = LOW
			elif answer == 'm':
				quality = MEDIUM
			else:
				quality = HIGH

			# set quality for all episodes in given size list
			for episode in avg_sizes[avg_size]['episodes']:
				episode.quality = quality
				broker[METADATA_OBJECT].add_episode(episode)

		# set quality for all episodes that were matched by extension
		extension_msg = "Setting quality of '%s' for %d episode(s) with extension found in %s"
		if len(low):
			quality = LOW
			print extension_msg % (quality, len(low), options.low)
			for episode in low:
				episode.quality = quality
				broker[METADATA_OBJECT].add_episode(episode)

		if len(medium):
			quality = MEDIUM
			print extension_msg % (quality, len(medium), options.medium)
			for episode in medium:
				episode.quality = quality
				broker[METADATA_OBJECT].add_episode(episode)

		if len(high):
			quality = HIGH
			print extension_msg % (quality, len(high), options.high)
			for episode in high:
				episode.quality = quality
				broker[METADATA_OBJECT].add_episode(episode)

	print "DONE"
Example #5
def __episode_sort(broker, options, **kwargs):

	logger = logging.getLogger("mediarover.scripts.sabnzbd.episode")

	# ensure user has indicated a desired quality level if quality management is turned on
	config = broker['config']
	if config['tv']['quality']['managed'] and config['tv']['quality']['desired'] is None:
		raise ConfigurationError("when quality management is on you must indicate a desired quality level at [tv] [[quality]] desired =")

	"""
	arguments:
	  1. The final directory of the job (full path)
	  2. The name of the NZB file
	  3. User modifiable job name
	  4. Newzbin report number (may be empty)
	  5. Newzbin or user-defined category
	  6. Group that the NZB was posted in e.g. alt.binaries.x
	  7. Status
	"""
	path = kwargs['path']
	job = kwargs.get('job', os.path.basename(path))
	nzb = kwargs.get('nzb', job + ".nzb")
	report_id = kwargs.get('report_id', '')
	category = kwargs.get('category', '')
	group = kwargs.get('group', '')
	status = kwargs.get('status', 0)

	tv_root = config['tv']['tv_root']

	# check to ensure we have the necessary data to proceed
	if path is None or path == "":
		raise InvalidArgument("path to completed job is missing or null")
	elif os.path.basename(path).startswith("_FAILED_") or int(status) > 0:
		if job is None or job == "":
			raise InvalidArgument("job name is missing or null")
		elif int(status) == 1:
			raise FailedDownload("download failed verification")
		elif int(status) == 2:
			raise FailedDownload("download failed unpack")
		elif int(status) == 3:
			raise FailedDownload("download failed verification and unpack")
		else:
			raise FailedDownload("download failed")

	watched_list = {}
	skip_list = {}
	for root in tv_root:

		# make sure tv root directory exists and that we have read and 
		# write access to it
		if not os.path.exists(root):
			raise FilesystemError("TV root directory (%s) does not exist!", (root))
		if not os.access(root, os.R_OK | os.W_OK):
			raise FilesystemError("Missing read/write access to tv root directory (%s)", (root))

		logger.info("begin processing tv directory: %s", root)

		# set umask for files and directories created during this session
		os.umask(config['tv']['umask'])

		# get list of shows in root tv directory
		dir_list = os.listdir(root)
		dir_list.sort()
		for name in dir_list:

			# skip hidden directories
			if name.startswith("."):
				continue

			dir = os.path.join(root, name)
			if os.path.isdir(dir):

				sanitized_name = Series.sanitize_series_name(name=name)

				# already seen this series and have determined that user wants to skip it
				if sanitized_name in skip_list:
					continue

				# we've already seen this series.  Append new directory to list of series paths
				elif sanitized_name in watched_list:
					series = watched_list[sanitized_name]
					series.path.append(dir)

				# new series, create new Series object and add to the watched list
				else:
					series = Series(name, path=dir)
					additions = {sanitized_name: series}

					# locate and process any filters for current series.  If no user defined filters for 
					# current series exist, build dict using default values
					if sanitized_name in config['tv']['filter']:
						config['tv']['filter'][sanitized_name] = build_series_filters(dir, config['tv']['quality'], config['tv']['filter'][sanitized_name])
					else:
						config['tv']['filter'][sanitized_name] = build_series_filters(dir, config['tv']['quality'])

					# check filters to see if user wants this series skipped...
					if config['tv']['filter'][sanitized_name]["skip"]:
						skip_list[sanitized_name] = series
						logger.debug("found skip filter, ignoring series: %s", series.name)
						continue

					# set season ignore list for current series
					if len(config['tv']['filter'][sanitized_name]['ignore']):
						logger.debug("ignoring the following seasons of %s: %s", series.name, config['tv']['filter'][sanitized_name]['ignore'])
						series.ignores = config['tv']['filter'][sanitized_name]['ignore']

					# process series aliases.  For each new alias, register series in watched_list
					if config['tv']['filter'][sanitized_name]['alias']:
						series.aliases = config['tv']['filter'][sanitized_name]['alias']
						count = 0
						for alias in series.aliases:
							sanitized_alias = Series.sanitize_series_name(name=alias)
							if sanitized_alias in watched_list:
								logger.warning("duplicate series alias found for '%s'! Duplicate aliases can/will result in incorrect downloads and improper sorting! You've been warned..." % series)
							additions[sanitized_alias] = series
							count += 1
						logger.debug("%d alias(es) identified for series '%s'" % (count, series))

					# finally, add additions to watched list
					logger.debug("watching series: %s", series)
					watched_list.update(additions)

	# register series dictionary with dependency broker
	broker.register('watched_series', watched_list)

	logger.info("watching %d tv show(s)", len(watched_list))
	logger.debug("finished processing watched tv")

	ignored = [ext.lower() for ext in config['tv']['ignored_extensions']]

	# locate episode file in given download directory
	orig_path = None
	filename = None
	extension = None
	size = 0
	for dirpath, dirnames, filenames in os.walk(path):
		for file in filenames:
			# check if current file's extension is in list
			# of ignored extensions
			(name, ext) = os.path.splitext(file)
			ext = ext.lstrip(".")
			if ext.lower() in ignored:
				continue

			# get size of current file (in bytes)
			stat = os.stat(os.path.join(dirpath, file))
			if stat.st_size > size:
				filename = file
				extension = ext
				size = stat.st_size
				logger.debug("identified possible download: filename => %s, size => %d", filename, size)

	if filename is None:
		raise FilesystemError("unable to find episode file in given download path %r" % path)

	orig_path = os.path.join(path, filename)
	logger.info("found download file at '%s'", orig_path)

	# retrieve the proper factory object
	if report_id is not None and report_id != "":
		factory = broker['newzbin']
	else:
		factory = broker['episode_factory']

	# build episode object using job name
	try:
		episode = factory.create_episode(job)
	except (InvalidMultiEpisodeData, MissingParameterError), e:
		raise InvalidJobTitle("unable to parse job title and create Episode object: %s" % e)
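
The os.walk loop above selects the largest file in the completed download whose extension is not on the ignored list; that file becomes the episode candidate handed to the factory. A standalone sketch of just that selection step (find_largest_media_file is a hypothetical helper name and the extension list is an illustrative placeholder, not mediarover's configured defaults):

import os

def find_largest_media_file(path, ignored_extensions):
    # walk the download directory and keep the biggest file whose
    # extension is not in the ignored list, as the sort script does above
    ignored = set(ext.lower() for ext in ignored_extensions)
    best_name, best_size = None, 0
    for dirpath, dirnames, filenames in os.walk(path):
        for name in filenames:
            ext = os.path.splitext(name)[1].lstrip(".").lower()
            if ext in ignored:
                continue
            size = os.stat(os.path.join(dirpath, name)).st_size
            if size > best_size:
                best_name, best_size = os.path.join(dirpath, name), size
    return best_name

# example usage (paths and extensions are placeholders):
# find_largest_media_file("/downloads/Some.Show.S01E02", ["nfo", "srr", "nzb"])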
Example #6
	if options.config:
		broker.register('config_dir', options.config)

	# create config object using user config values
	try:
		config = read_config(broker['resources_dir'], broker['config_dir'])
	except (ConfigurationError), e:
		print e
		exit(1)

	# sanitize tv series filter subsection names for 
	# consistent lookups
	for name, filters in config['tv']['filter'].items():
		del config['tv']['filter'][name]
		config['tv']['filter'][Series.sanitize_series_name(name=name)] = filters

	""" logging setup """

	# initialize and retrieve logger for later use
	logging.config.fileConfig(open(os.path.join(broker['config_dir'], "logging.conf")))
	logger = logging.getLogger("mediarover")

	""" post configuration setup """

	broker.register('config', config)
	broker.register('metadata_data_store', Metadata())
	broker.register('episode_factory', EpisodeFactory())
	broker.register('filesystem_factory', FilesystemFactory())

	# register source dependencies
Example #7
	def list(self, series=None, skip=None, ignore=None):
		logger = logging.getLogger("mediarover.interface.tv.filter")

		vars = build_default_template_vars(self._config)

		all_filters = {}
		for name in self._config['tv']['filter']:
			clean = Series.sanitize_series_name(name, self._config['tv']['ignore_series_metadata'])
			all_filters[clean] = {
				'name': name,
				'filters': self._config['tv']['filter'][name],
			}

		# build list of template filters
		vars['dir_list'] = []
		for root in self._config['tv']['tv_root']:
			for dir in os.listdir(root):
				if dir.startswith("."):
					continue

				if os.path.isdir(os.path.join(root, dir)):

					# look for existing filters.  If none are found, 
					# build list using default values
					clean = Series.sanitize_series_name(dir, self._config['tv']['ignore_series_metadata'])
					if clean in all_filters:
						filters = self._config['tv']['filter'][all_filters[clean]['name']]
					else:
						filters = build_series_filters(os.path.join(root, dir))
						
						# add current series to all_filters list
						all_filters[clean] = {
							'name': dir,
							'filters': filters
						}

					# generate template filters. Add to all_filters 
					# and template variables
					all_filters[clean]['template'] = self._build_template_filters(dir, os.path.join(root, dir), filters)
					vars['dir_list'].append(all_filters[clean]['template'])

		# add / modify filter
		if cherrypy.request.method == "POST":

			error = ""

			# parse ignore list
			# TODO check for non-integers
			if ignore in ("", None):
				ignore = []
			else:
				ignore = [int(i) for i in ignore.split(",")]

			# check if given series already exists.  If it does, modify 
			# its existing filters.  If not, we need to create a directory
			# on disk
			clean = Series.sanitize_series_name(series, self._config['tv']['ignore_series_metadata'])
			if clean in all_filters and 'template' in all_filters[clean]:
				filters = all_filters[clean]['filters']
				template = all_filters[clean]['template']
				message = "Successfully updated filter for %s" % series

			# create new series directory on disk
			else:
	
				# before creating a directory on disk, check if there are filters for current series
				# if yes, this means that we have some stale filters.  Delete them and proceed
				if clean in all_filters:
					logger.info("Found stale filters for '%s', deleting", series)
					del self._config['tv']['filter'][all_filters[clean]['name']]
					del all_filters[clean]

				# TODO trap exceptions
				path = os.path.join(self._config['tv']['tv_root'][0], series)
				try:
					os.makedirs(path, self._config['tv']['umask'])
				except IOError:
					raise
				else:
					filters = build_series_filters(path)
					template = self._build_template_filters(series, path, filters)
					all_filters[clean] = {
						'name': series,
						'filters': filters,
						'template': template
					}
					vars['dir_list'].append(template)
					message = "Successfully created filter for %s" % series

			# update current filter with new values
			if skip is not None:
				filters['skip'] = template['skip'] = True
			elif filters['skip'] == True:
				filters['skip'] = template['skip'] = False

			if len(ignore):
				filters['ignore'] = template['ignore'] = ignore

			if error == "":
				self._config['tv']['filter'][series] = filters
				save_config(self._config)

			vars['message'] = message
			vars['error'] = error

		# sort filters
		self._sort_filters(vars['dir_list'], 0, len(vars['dir_list'])-1)

		t = Template(file=os.path.join(vars['template_dir'], "tv", "filter", "list.tmpl"), searchList=[vars])
		return t.respond()
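
The TODO in the handler above notes that the comma-separated season ignore list is converted with a bare int() comprehension and never validated. A small sketch of the kind of helper the handler could call instead (parse_season_ignore_list is a hypothetical name, not part of mediarover):

def parse_season_ignore_list(raw):
    # turn "1, 2, 5" into [1, 2, 5]; reject anything that is not an integer
    if raw in ("", None):
        return []
    seasons = []
    for token in raw.split(","):
        token = token.strip()
        if not token.isdigit():
            raise ValueError("invalid season number in ignore list: %r" % token)
        seasons.append(int(token))
    return seasons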