Example #1
0
def __schedule(broker, options):
	"""
	Build the watched-series list and per-feed Source objects from the
	user's configuration.

	Registers the watched-series dict with the dependency broker (under
	WATCHED_SERIES_LIST), then creates one Source object per entry in the
	config [source] section using the provider factory registered with the
	broker.  Sources whose remote data cannot be retrieved or parsed are
	logged and skipped.

	:param broker: dependency broker holding the config object and the
	               provider factory objects (keyed by provider name)
	:param options: parsed command-line options; only ``dry_run`` is read here
	:raises ConfigurationError: if quality management is on without a desired
	        quality level, if no tv_root directory is declared, or if a
	        source lacks a quality flag while quality management is on
	"""

	logger = logging.getLogger("mediarover")

	# grab config object
	config = broker[CONFIG_OBJECT]

	# grab quality management flag.  This will determine if Media Rover
	# will actively manage the quality of filesystem episodes or not
	manage_quality = config['tv']['library']['quality']['managed']
	if manage_quality and config['tv']['library']['quality']['desired'] is None:
		raise ConfigurationError("when quality management is on you must indicate a desired quality level at [tv] [[quality]] desired =")

	# check if user has requested a dry-run
	if options.dry_run:
		logger.info("--dry-run flag detected!  No new downloads will be queued during execution!")

	tv_root = config['tv']['tv_root']
	if not len(tv_root):
		raise ConfigurationError("You must declare at least one tv_root directory!")

	# build dict of watched series
	series_lists = build_series_lists(config)
	logger.info("watching %d tv show(s)", len(series_lists[0]))

	# register series dictionary with dependency broker
	broker.register(WATCHED_SERIES_LIST, series_lists[0])

	logger.debug("finished processing watched tv")
	logger.info("begin processing sources")

	# grab list of source url's from config file and build appropriate Source objects
	sources = []
	for name, params in config['source'].items():
		logger.debug("found feed '%s'", name)

		# first things first: if manage_quality is True, make sure the user
		# has specified a quality for this source
		if manage_quality and params['quality'] is None:
			raise ConfigurationError("missing quality flag for source '%s'" % name)

		params['name'] = name
		params['priority'] = config[params['type']]['priority']

		# 'provider' selects which factory builds this source; it is not a
		# create_source() argument, so remove it before the call below
		provider = params['provider']
		del params['provider']

		# grab source object (factory registered with broker under provider name)
		factory = broker[provider]

		logger.debug("creating source for feed %r", name)
		try:
			source = factory.create_source(**params)
		except UrlRetrievalError, e:
			# remote feed could not be fetched -- skip this source
			logger.error("skipping source '%s', reason: %s" % (name, e))
			continue
		except InvalidRemoteData, e:
			# feed was fetched but could not be parsed -- skip this source
			logger.error("skipping source '%s', unable to process remote data: %s", name, e)
			continue
Example #2
0
def __set_quality(broker, options, series_name=None, season_num=None, episode_num=None):
	"""
	Interactively assign a quality level (low/medium/high) to filesystem
	episodes and record the result in the metadata store.

	Episodes are matched to a quality either directly by file extension
	(via the options.low/medium/high extension lists) or, failing that, by
	grouping files whose per-episode sizes fall within 10% of a bucket's
	running average and prompting the user once per size bucket.

	:param broker: dependency broker holding the config and metadata objects
	:param options: parsed command-line options; reads ``series_prompt`` and
	                the ``low``/``medium``/``high`` extension lists
	:param series_name: optional name of a single series to process
	:param season_num: optional season number filter (used with series_name)
	:param episode_num: optional episode number filter (used with season_num)
	"""
	logger = logging.getLogger("mediarover")

	top_help = """
Options:
(y)es    - process series and specify episode quality
(n)o     - skip to next series
(q)uit   - exit application"""

	series_help = """
Series Options:
(l)ow    - mark episodes as being of low quality
(m)edium - mark episodes as being of medium quality
(h)igh   - mark episodes as being of high quality"""

	config = broker[CONFIG_OBJECT]

	# build dict of watched series and register it with the dependency broker
	series_lists = build_series_lists(config, process_aliases=False)
	broker.register(WATCHED_SERIES_LIST, series_lists[0])

	# build list of series to iterate over: either the single requested
	# series, or every watched series in sorted order
	if series_name:
		names = [Series.sanitize_series_name(series_name)]
		if names[0] not in broker[WATCHED_SERIES_LIST]:
			print "ERROR: Unable to find series matching %r" % series_name
			exit(2)
		else:
			if season_num is not None:
				season_num = int(season_num)
			if episode_num is not None:
				episode_num = int(episode_num)
	else:
		names = broker[WATCHED_SERIES_LIST].keys()
		names.sort()

	displayed_series_help = 0
	quality_levels = [LOW, MEDIUM, HIGH]

	if options.series_prompt:
		print top_help

	for sanitized in names:
		series = broker[WATCHED_SERIES_LIST][sanitized]

		if options.series_prompt:
			answer = __query_user("Process '%s'? ([y]/n/q/?)" % series.name, ['y','n','q','?'], 'y', top_help)
			if answer == 'n':
				continue
			elif answer == 'q':
				exit(0)
		else:
			# ATTENTION: get files list now so that processing statement follows logging code
			# resulting from filesystem scan
			series.files
			print "Processing '%s'..." % series.name

		# determine default quality for current series: a per-series filter
		# value trumps the global library default
		if config['tv']['filter'][sanitized]['desired_quality'] is not None:
			default = config['tv']['filter'][sanitized]['desired_quality']
		else:
			default = config['tv']['library']['quality']['desired']

		# if quality guessing is on, populate extension lists (if they weren't
		# provided by user)
		if config['tv']['library']['quality']['managed'] and config['tv']['library']['quality']['guess']:
			if len(options.low) == 0:
				options.low = config['tv']['library']['quality']['extension'][LOW]
			if len(options.medium) == 0:
				options.medium = config['tv']['library']['quality']['extension'][MEDIUM]
			if len(options.high) == 0:
				options.high = config['tv']['library']['quality']['extension'][HIGH]

		# episodes matched directly by file extension
		low = list()
		medium = list()
		high = list()

		# buckets of episodes keyed by their running average file size (bytes)
		avg_sizes = dict()
		for epfile in series.files:
			# honour the optional season/episode filter.  NOTE: compare
			# against None so that season/episode number 0 still filters
			# (the old truthiness test silently ignored a 0 filter value)
			if season_num is not None:
				if epfile.episode.season != season_num:
					continue
				elif episode_num is not None and epfile.episode.episode != episode_num:
					continue

			# a multi-episode file contributes all of its parts
			if hasattr(epfile.episode, 'episodes'):
				parts = epfile.episode.episodes
			else:
				parts = [epfile.episode]

			# first things first: check if user has chosen a quality level
			# for files with the current extension
			ext = epfile.extension
			if ext in options.low:
				low.extend(parts)
			elif ext in options.medium:
				medium.extend(parts)
			elif ext in options.high:
				high.extend(parts)

			# guess not, group files by average file size
			else:
				size = epfile.size
				# Py2 .keys() returns a list copy, so mutating avg_sizes
				# inside the loop is safe
				for avg_size in avg_sizes.keys():
					difference = abs(float(avg_size)/float(size/len(parts)) - 1)

					# if the difference is 10% or less, fold the current
					# part(s) into this bucket and re-key it by the new average
					if difference <= 0.1:
						# BUGFIX: pop the bucket BEFORE re-inserting it.  The
						# previous code deleted the old key after assigning the
						# new one, which destroyed the bucket (losing all its
						# episodes) whenever the new average equalled the old key
						bucket = avg_sizes.pop(avg_size)
						bucket['total_size'] += size
						bucket['episodes'].extend(parts)

						# calculate new average size and re-key bucket
						new_avg = bucket['total_size'] / len(bucket['episodes'])
						avg_sizes[new_avg] = bucket
						break

				# no comparable size in current list, add and move on
				else:
					avg_sizes[size] = {'total_size': size, 'episodes': parts}

		# build quality prompt, marking the series default with brackets
		quality_prompt = list()
		for level in quality_levels:
			if level == default:
				quality_prompt.append("[%c]" % level[0])
			else:
				quality_prompt.append(level[0])
		quality_prompt.extend(['q','?'])
		quality_prompt = "/".join(quality_prompt)

		# show the per-series help text only once per run
		if not displayed_series_help:
			displayed_series_help += 1
			print series_help

		# prompt the user once per size bucket (smallest average first)
		sizes = avg_sizes.keys()
		sizes.sort()
		for avg_size in sizes:
			approx_size = avg_size / (1024 * 1024)
			print "Found %d episode(s) with average size of %dMB" % (len(avg_sizes[avg_size]['episodes']), approx_size)
			answer = __query_user("Quality? (%s)" % quality_prompt, ['l','m','h','q','?'], default, series_help)
			if answer == 'q':
				exit(1)
			elif answer == 'l':
				quality = LOW
			elif answer == 'm':
				quality = MEDIUM
			else:
				quality = HIGH

			# set quality for all episodes in given size list
			for episode in avg_sizes[avg_size]['episodes']:
				episode.quality = quality
				broker[METADATA_OBJECT].add_episode(episode)

		# set quality for all episodes that were matched by extension
		extension_msg = "Setting quality of '%s' for %d episode(s) with extension found in %s"
		for quality, matched, ext_list in ((LOW, low, options.low), (MEDIUM, medium, options.medium), (HIGH, high, options.high)):
			if len(matched):
				print extension_msg % (quality, len(matched), ext_list)
				for episode in matched:
					episode.quality = quality
					broker[METADATA_OBJECT].add_episode(episode)

	print "DONE"
Example #3
0
def __episode_sort(broker, options, **kwargs):
	"""
	Sort a completed download: locate the episode file in the download
	directory and build an Episode object from the job title.

	Keyword arguments mirror the post-processing arguments handed over by
	the download client (see the embedded argument list below): ``path``,
	``job`` and ``nzb`` are required; ``report_id``, ``category``, ``group``
	and ``status`` are optional.

	:raises ConfigurationError: quality management is on but no desired
	        quality level has been configured
	:raises InvalidArgument: completed-job path or job name is missing/null
	:raises FailedDownload: download client reported a failure (status > 0)
	:raises FilesystemError: no candidate episode file found under ``path``
	:raises InvalidJobTitle: job title could not be parsed into an Episode
	"""

	logger = logging.getLogger("mediarover.scripts.sabnzbd.episode")

	# ensure user has indicated a desired quality level if quality management is turned on
	config = broker[CONFIG_OBJECT]
	if config['tv']['library']['quality']['managed'] and config['tv']['library']['quality']['desired'] is None:
		raise ConfigurationError("when quality management is on you must indicate a desired quality level at [tv] [[quality]] desired =")

	"""
	arguments:
	  1. The final directory of the job (full path)
	  2. The name of the NZB file
	  3. User modifiable job name
	  4. Newzbin report number (may be empty)
	  5. Newzbin or user-defined category
	  6. Group that the NZB was posted in e.g. alt.binaries.x
	  7. Status
	"""
	path = kwargs['path']
	job = kwargs['job']
	nzb = kwargs['nzb']
	report_id = kwargs.get('report_id', '')
	category = kwargs.get('category', '')
	group = kwargs.get('group', '')
	status = kwargs.get('status', 0)

	tv_root = config['tv']['tv_root']

	# check to ensure we have the necessary data to proceed
	if path is None or path == "":
		raise InvalidArgument("path to completed job is missing or null")
	elif os.path.basename(path).startswith("_FAILED_") or int(status) > 0:
		# download client flagged the job as failed; map the status code to
		# a descriptive error (1: verification, 2: unpack, 3: both)
		if job is None or job == "":
			raise InvalidArgument("job name is missing or null")
		elif int(status) == 1:
			raise FailedDownload("download failed verification")
		elif int(status) == 2:
			raise FailedDownload("download failed unpack")
		elif int(status) == 3:
			raise FailedDownload("download failed verification and unpack")
		else:
			raise FailedDownload("download failed")

	# build dict of watched series
	# register series dictionary with dependency broker
	series_lists = build_series_lists(config)
	broker.register(WATCHED_SERIES_LIST, series_lists[0])
	broker.register(IGNORED_SERIES_LIST, series_lists[1])

	logger.info("watching %d tv show(s)", len(series_lists[0]))
	logger.debug("finished processing watched tv")

	# extensions to skip when hunting for the episode file (case-insensitive)
	ignored = [ext.lower() for ext in config['tv']['ignored_extensions']]

	# locate episode file in given download directory: walk the tree and
	# keep the largest file whose extension is not in the ignored list
	orig_path = None
	extension = None
	size = 0
	for dirpath, dirnames, filenames in os.walk(path):
		for file in filenames:
			# check if current file's extension is in list
			# of ignored extensions
			(name, ext) = os.path.splitext(file)
			ext = ext.lstrip(".")
			if ext.lower() in ignored:
				continue

			# get size of current file (in bytes)
			stat = os.stat(os.path.join(dirpath, file))
			if stat.st_size > size:
				orig_path = os.path.join(dirpath, file)
				extension = ext
				size = stat.st_size
				logger.debug("identified possible download: filename => %s, size => %d", file, size)

	if orig_path is None:
		# nothing usable found -- fire a sort-failed notification and bail out
		broker[NOTIFICATION_OBJECT].process(SORT_FAILED_NOTIFICATION, "unable to find episode file in given download path %r" % path)
		raise FilesystemError("unable to find episode file in given download path %r" % path)
	else:
		logger.info("found download file at '%s'", orig_path)

	# retrieve the proper factory object: prefer the source recorded when the
	# download was queued; otherwise fall back on the newzbin factory (when a
	# report id is present) or the generic episode factory
	in_progress = broker[METADATA_OBJECT].get_in_progress(job)
	if in_progress is None:
		if report_id == "":
			factory = broker[EPISODE_FACTORY_OBJECT]
		else:
			factory = broker[NEWZBIN_FACTORY_OBJECT]
	else:
		factory = broker[in_progress['source']]

	# build episode object using job name
	try:
		episode = factory.create_episode(job)
	except (InvalidMultiEpisodeData, MissingParameterError), e:
		raise InvalidJobTitle("unable to parse job title and create Episode object: %s" % e)