def update_show_indexer_metadata(self, show_obj):
    """Sync the show's NFO metadata file with the show's indexer id.

    Reads the existing show NFO, and rewrites (or inserts) its ``<id>``
    element so it matches ``show_obj.indexerid``.

    :param show_obj: TVShow object whose metadata file should be synced
    :return: True when the file already matches or was rewritten
             successfully; False when metadata is disabled, there is no
             metadata file for the show, or the file could not be written.
             (Previously the failure paths fell through returning an
             implicit None; False preserves truthiness for all callers.)
    """
    if not (self.show_metadata and show_obj and self._has_show_metadata(show_obj)):
        return False

    logger.debug("Metadata provider " + self.name + " updating show indexer info metadata file for " + show_obj.name)

    nfo_file_path = self.get_show_file_path(show_obj)
    try:
        # Parse the existing NFO; the handle is only needed while reading.
        with open(nfo_file_path, 'rb') as xmlFileObj:
            showXML = ElementTree.ElementTree(file=xmlFileObj)
            indexerid = showXML.find('id')
            root = showXML.getroot()
            if indexerid is not None:
                # Already correct -> nothing to write.
                if indexerid.text == str(show_obj.indexerid):
                    return True
                indexerid.text = str(show_obj.indexerid)
            else:
                ElementTree.SubElement(root, "id").text = str(show_obj.indexerid)

        # Make it purdy
        helpers.indentXML(root)

        showXML.write(nfo_file_path, encoding='UTF-8')
        helpers.chmodAsParent(nfo_file_path)

        return True
    except IOError as e:
        logger.error("Unable to write file to " + nfo_file_path + " - are you sure the folder is writable? " + str(e))
        return False
def _migrate_metadata(metadata, metadata_name, _use_banner):
    """Convert a 6-field '|'-separated metadata string to the 10-field format.

    Strings already in the 10-field format pass through unchanged; any other
    shape is considered malformed and replaced by an all-zero default.
    """
    fields = metadata.split("|")

    if len(fields) == 6:
        logger.info("Upgrading " + metadata_name + " metadata, old value: " + metadata)
        # Grow from 6 to 10 fields: one new flag in the middle, three at the end.
        fields.insert(4, "0")
        fields.extend(["0", "0", "0"])

        # swap show fanart, show poster
        fields[2], fields[3] = fields[3], fields[2]

        # if user was using _use_banner to override the poster, instead enable the banner option and deactivate poster
        if metadata_name == "XBMC" and _use_banner:
            fields[3], fields[4] = "0", fields[3]

        # write new format
        metadata = "|".join(fields)
        logger.info("Upgrading " + metadata_name + " metadata, new value: " + metadata)
    elif len(fields) == 10:
        metadata = "|".join(fields)
        logger.info("Keeping " + metadata_name + " metadata, value: " + metadata)
    else:
        logger.error("Skipping " + metadata_name + " metadata: '" + metadata + "', incorrect format")
        metadata = "0|0|0|0|0|0|0|0|0|0"
        logger.info("Setting " + metadata_name + " metadata, new value: " + metadata)

    return metadata
def _migrate_v4(self):
    """Update newznab providers so that the category IDs can be set independently via the config"""
    upgraded = []
    old_newznab_data = check_setting_str(self.config_obj, "Newznab", "newznab_data")

    if old_newznab_data:
        for cur_provider_data in old_newznab_data.split("!!!"):
            pieces = cur_provider_data.split("|")
            # Old format is exactly name|url|key|enabled.
            if len(pieces) != 4:
                logger.error("Skipping Newznab provider string: '" + cur_provider_data + "', incorrect format")
                continue
            name, url, key, enabled = pieces

            # Force a zeroed key for the bundled Sick Beard Index provider.
            if name == "Sick Beard Index":
                key = "0"

            # Assign the default category set; NZBs.org gets two extra categories.
            catIDs = "5030,5040,5060,5070,5090" if name == "NZBs.org" else "5030,5040,5060"

            upgraded.append("|".join([name, url, key, catIDs, enabled]))

        settings.NEWZNAB_DATA = "!!!".join(upgraded)
def check_setting_str(config, cfg_name, item_name, def_val="", silent=True, censor_log=False):
    """
    Checks config setting of string types

    :param config: config object
    :type config: ConfigObj()
    :param cfg_name: section name of config
    :param item_name: item name of section
    :param def_val: default value to return in case a value can't be retrieved from config
                    or if couldn't be converted (default: empty str)
    :param silent: don't log result to debug log (default: True)
    :param censor_log: overrides and adds this setting to logger censored items (default: False)

    :return: decrypted value of `config[cfg_name][item_name]` or `def_val` (see cases of def_val)
    :rtype: str
    """
    if not isinstance(def_val, str):
        logger.error(
            "{dom}:{key} default value is not the correct type. Expected {t}, got {dt}"
            .format(dom=cfg_name, key=item_name, t="string", dt=type(def_val)))

    # For passwords you must include the word `password` in the item_name and add `helpers.encrypt(ITEM_NAME, ENCRYPTION_VERSION)` in save_config()
    encryption_version = (0, settings.ENCRYPTION_VERSION)["password" in item_name]

    try:
        if not (check_section(config, cfg_name) and item_name in config[cfg_name]):
            raise ValueError
        my_val = helpers.decrypt(config[cfg_name][item_name], encryption_version)
        if str(my_val) == str(None) or not str(my_val):
            raise ValueError
    except (ValueError, IndexError, KeyError):
        # Value missing/empty/undecryptable: fall back to the default and
        # persist it (encrypted) back into the config object.
        my_val = def_val
        if cfg_name not in config:
            config[cfg_name] = {}
        config[cfg_name][item_name] = helpers.encrypt(my_val, encryption_version)

    # BUG FIX: membership was previously tested against `.items()`, i.e. against
    # ((section, item), value) pairs, which a (section, item) key tuple can never
    # equal — so already-censored settings were never refreshed. Keys of
    # `logger.censored_items` are (cfg_name, item_name) tuples, as the assignment
    # below shows, so test key membership.
    if (censor_log or (cfg_name, item_name) in logger.censored_items) and not item_name.endswith("custom_url"):
        logger.censored_items[cfg_name, item_name] = my_val

    if not silent:
        logger.debug(item_name + " -> " + my_val)

    return str(my_val)
def check_setting_bool(config, cfg_name, item_name, def_val=False, silent=True):
    """
    Checks config setting of boolean type

    :param config: config object
    :type config: ConfigObj()
    :param cfg_name: section name of config
    :param item_name: item name of section
    :param def_val: default value to return in case a value can't be retrieved from config
                    or if couldn't be converted (default: False)
    :param silent: don't log result to debug log (default: True)

    :return: value of `config[cfg_name][item_name]` or `def_val` (see cases of def_val)
    :rtype: bool
    """
    try:
        if not isinstance(def_val, bool):
            logger.error(
                "{dom}:{key} default value is not the correct type. Expected {t}, got {dt}"
                .format(dom=cfg_name, key=item_name, t="bool", dt=type(def_val)))

        if not (check_section(config, cfg_name) and item_name in config[cfg_name]):
            raise ValueError

        # Normalise to a string first; empty or "None" means "no usable value".
        raw = str(config[cfg_name][item_name])
        if raw == str(None) or not raw:
            raise ValueError
        my_val = checkbox_to_value(raw)
    except (KeyError, IndexError, ValueError):
        # Missing/invalid: fall back to the default and persist it.
        my_val = bool(def_val)
        if cfg_name not in config:
            config[cfg_name] = {}
        config[cfg_name][item_name] = my_val

    if not silent:
        logger.debug(item_name + " -> " + str(my_val))

    return my_val
def write_ep_file(self, ep_obj):
    """
    Generates and writes ep_obj's metadata under the given path with the
    given filename root. Uses the episode's name with the extension in
    _ep_nfo_extension.

    ep_obj: TVEpisode object for which to create the metadata

    file_name_path: The file name to use for this metadata. Note that the extension
            will be automatically added based on _ep_nfo_extension. This should
            include an absolute path.

    Note that this method expects that _ep_data will return an ElementTree
    object. If your _ep_data returns data in another format you'll need to
    override this method.

    :return: True on success, False when there is no data to write or the
             file could not be written.
    """
    data = self._ep_data(ep_obj)

    if not data:
        return False

    nfo_file_path = self.get_episode_file_path(ep_obj)
    nfo_file_dir = os.path.dirname(nfo_file_path)

    try:
        if not os.path.isdir(nfo_file_dir):
            logger.debug("Metadata dir didn't exist, creating it at " + nfo_file_dir)
            os.makedirs(nfo_file_dir)
            helpers.chmodAsParent(nfo_file_dir)

        logger.debug("Writing episode nfo file to " + nfo_file_path)

        # Context manager guarantees the handle is closed even if write() raises
        # (the old open()/close() pair leaked the handle on error).
        with open(nfo_file_path, 'wb') as nfo_file:
            data.write(nfo_file, encoding='UTF-8')
        helpers.chmodAsParent(nfo_file_path)
    except IOError as e:
        logger.error("Unable to write file to " + nfo_file_path + " - are you sure the folder is writable? " + str(e))
        return False

    return True
def write_show_file(self, show_obj):
    """
    Generates and writes show_obj's metadata under the given path to the
    filename given by get_show_file_path()

    show_obj: TVShow object for which to create the metadata

    path: An absolute or relative path where we should put the file. Note that
            the file name will be the default show_file_name.

    Note that this method expects that _show_data will return an ElementTree
    object. If your _show_data returns data in another format you'll need to
    override this method.

    :return: True on success, False when there is no data to write or the
             file could not be written.
    """
    data = self._show_data(show_obj)

    if not data:
        return False

    nfo_file_path = self.get_show_file_path(show_obj)
    nfo_file_dir = os.path.dirname(nfo_file_path)

    try:
        if not os.path.isdir(nfo_file_dir):
            logger.debug("Metadata dir didn't exist, creating it at " + nfo_file_dir)
            os.makedirs(nfo_file_dir)
            helpers.chmodAsParent(nfo_file_dir)

        logger.debug("Writing show nfo file to " + nfo_file_path)

        # Context manager guarantees the handle is closed even if write() raises
        # (the old open()/close() pair leaked the handle on error).
        with open(nfo_file_path, 'wb') as nfo_file:
            data.write(nfo_file, encoding='UTF-8')
        helpers.chmodAsParent(nfo_file_path)
    except IOError as e:
        logger.error("Unable to write file to " + nfo_file_path + " - are you sure the folder is writable? " + str(e))
        return False

    return True
def _write_image(image_data, image_path, overwrite=False):
    """
    Saves the data in image_data to the location image_path. Returns True/False
    to represent success or failure.

    image_data: binary image data to write to file
    image_path: file location to save the image to
    overwrite: when True, replace an existing file instead of skipping it
    """
    # don't bother overwriting it
    if not overwrite and os.path.isfile(image_path):
        logger.debug("Image already exists, not downloading")
        return False

    image_dir = os.path.dirname(image_path)

    if not image_data:
        logger.debug(
            "Unable to retrieve image to save in {0}, skipping".format(
                image_path))
        return False

    try:
        if not os.path.isdir(image_dir):
            logger.debug("Metadata dir didn't exist, creating it at " + image_dir)
            os.makedirs(image_dir)
            helpers.chmodAsParent(image_dir)

        # Context manager closes the handle even if the write fails part-way
        # (the old open()/close() pair leaked the handle on error).
        with open(image_path, 'wb') as out_file:
            out_file.write(image_data)
        helpers.chmodAsParent(image_path)
    except IOError as e:
        logger.error("Unable to write image to " + image_path + " - are you sure the show folder is writable? " + str(e))
        return False

    return True
def check_setting_int(config, cfg_name, item_name, def_val=0, min_val=None, max_val=None, fallback_def=True, silent=True):
    """
    Checks config setting of integer type

    :param config: config object
    :type config: ConfigObj()
    :param cfg_name: section name of config
    :param item_name: item name of section
    :param def_val: default value returned when the setting is missing, cannot be
                    converted, or (with fallback_def) falls outside the bounds (default: 0)
    :param min_val: force value to be greater than or equal to `min_val` (optional)
    :param max_val: force value to be lesser than or equal to `max_val` (optional)
    :param fallback_def: if True, out-of-range values become `def_val`; otherwise they are
                         clamped to `min_val`/`max_val` respectively (default: True)
    :param silent: don't log result to debug log (default: True)

    :return: value of `config[cfg_name][item_name]`, clamped or defaulted as described
    :rtype: int
    """
    if not isinstance(def_val, int):
        logger.error("{dom}:{key} default value is not the correct type. Expected {t}, got {dt}".format(
            dom=cfg_name, key=item_name, t="int", dt=type(def_val)))
    if not (min_val is None or isinstance(min_val, int)):
        logger.error("{dom}:{key} min_val value is not the correct type. Expected {t}, got {dt}".format(
            dom=cfg_name, key=item_name, t="int", dt=type(min_val)))
    if not (max_val is None or isinstance(max_val, int)):
        logger.error("{dom}:{key} max_val value is not the correct type. Expected {t}, got {dt}".format(
            dom=cfg_name, key=item_name, t="int", dt=type(max_val)))

    try:
        if not (check_section(config, cfg_name) and check_section(config[cfg_name], item_name)):
            raise ValueError

        raw = config[cfg_name][item_name]
        # Accept boolean-ish strings as 1/0 before the integer conversion.
        lowered = str(raw).lower()
        if lowered == "true":
            raw = 1
        elif lowered == "false":
            raw = 0
        my_val = int(raw)

        # Enforce bounds: either substitute the default or clamp, per fallback_def.
        if isinstance(min_val, int) and my_val < min_val:
            my_val = config[cfg_name][item_name] = def_val if fallback_def else min_val
        if isinstance(max_val, int) and my_val > max_val:
            my_val = config[cfg_name][item_name] = def_val if fallback_def else max_val
    except (ValueError, IndexError, KeyError, TypeError):
        # Missing/unconvertible: fall back to the default and persist it.
        my_val = def_val
        if cfg_name not in config:
            config[cfg_name] = {}
        config[cfg_name][item_name] = my_val

    if not silent:
        logger.debug(item_name + " -> " + str(my_val))

    return my_val
def _sendPushalot(self, pushalot_authorizationtoken=None, event=None, message=None, force=False):
    """Send *event*/*message* through the Pushalot API.

    Returns the API's success flag, or False when the notifier is disabled
    and the call is not forced.
    """
    if not (settings.USE_PUSHALOT or force):
        return False

    token = pushalot_authorizationtoken or settings.PUSHALOT_AUTHORIZATIONTOKEN

    logger.debug("Pushalot event: {0}".format(event))
    logger.debug("Pushalot message: {0}".format(message))
    logger.debug("Pushalot api: {0}".format(token))

    post_data = {"AuthorizationToken": token, "Title": event or "", "Body": message or ""}

    response = sickchill.oldbeard.helpers.getURL(
        "https://pushalot.com/api/sendmessage", post_data=post_data, session=self.session,
        returns="json") or {}

    # A successful reply looks like:
    # {'Status': 200, 'Description': 'The request has been completed successfully.', 'Success': True}
    success = response.pop("Success", False)
    if success:
        logger.debug("Pushalot notifications sent.")
    else:
        logger.error("Pushalot notification failed: {0} {1}".format(
            response.get("Status", ""), response.get("Description", "Unknown")))

    return success
def _sendPushalot(self, pushalot_authorizationtoken=None, event=None, message=None, force=False):
    """Deliver a notification via Pushalot and report whether the API accepted it."""
    # Bail out early unless the notifier is enabled or explicitly forced.
    if not (settings.USE_PUSHALOT or force):
        return False

    auth_token = pushalot_authorizationtoken or settings.PUSHALOT_AUTHORIZATIONTOKEN

    logger.debug('Pushalot event: {0}'.format(event))
    logger.debug('Pushalot message: {0}'.format(message))
    logger.debug('Pushalot api: {0}'.format(auth_token))

    payload = {
        'AuthorizationToken': auth_token,
        'Title': event or '',
        'Body': message or '',
    }

    jdata = sickchill.oldbeard.helpers.getURL(
        'https://pushalot.com/api/sendmessage',
        post_data=payload,
        session=self.session,
        returns='json',
    ) or {}

    # Example success response:
    # {'Status': 200, 'Description': 'The request has been completed successfully.', 'Success': True}
    success = jdata.pop('Success', False)
    if not success:
        logger.error('Pushalot notification failed: {0} {1}'.format(
            jdata.get('Status', ''), jdata.get('Description', 'Unknown')))
    else:
        logger.debug('Pushalot notifications sent.')

    return success
def addNewShow(
    self,
    whichSeries=None,
    indexerLang=None,
    rootDir=None,
    defaultStatus=None,
    quality_preset=None,
    anyQualities=None,
    bestQualities=None,
    season_folders=None,
    subtitles=None,
    subtitles_sr_metadata=None,
    fullShowPath=None,
    other_shows=None,
    skipShow=None,
    providedIndexer=None,
    anime=None,
    scene=None,
    blacklist=None,
    whitelist=None,
    defaultStatusAfter=None,
):
    """
    Receive tvdb id, dir, and other options and create a show from them. If extra show dirs are
    provided then it forwards back to newShow, if not it goes to /home.

    whichSeries is either a bare indexer id, or a '|'-separated selection string of at least
    6 pieces where piece 1 is the indexer, piece 3 the indexer id and piece 4 the show name
    (pieces as produced by the add-show form — TODO confirm against the template).
    Checkbox-style parameters (scene, anime, season_folders, subtitles, ...) arrive as form
    values and are normalised with config.checkbox_to_value below.
    """
    if not indexerLang:
        indexerLang = settings.INDEXER_DEFAULT_LANGUAGE

    # grab our list of other dirs if given
    if not other_shows:
        other_shows = []
    elif not isinstance(other_shows, list):
        # A single extra dir arrives as a scalar; normalise to a list.
        other_shows = [other_shows]

    def finishAddShow():
        # if there are no extra shows then go home
        if not other_shows:
            return self.redirect("/home/")

        # peel off the next one
        next_show_dir = other_shows[0]
        rest_of_show_dirs = other_shows[1:]

        # go to add the next show
        return self.newShow(next_show_dir, rest_of_show_dirs)

    # if we're skipping then behave accordingly
    if skipShow:
        return finishAddShow()

    # sanity check on our inputs
    if (not rootDir and not fullShowPath) or not whichSeries:
        return _(
            "Missing params, no Indexer ID or folder: {show_to_add} and {root_dir}/{show_path}"
        ).format(show_to_add=whichSeries, root_dir=rootDir, show_path=fullShowPath)

    # figure out what show we're adding and where
    series_pieces = whichSeries.split("|")
    if (whichSeries and rootDir) or (whichSeries and fullShowPath and len(series_pieces) > 1):
        if len(series_pieces) < 6:
            logger.error(
                "Unable to add show due to show selection. Not enough arguments: {0}"
                .format((repr(series_pieces))))
            ui.notifications.error(
                _("Unknown error. Unable to add show due to problem with show selection."))
            return self.redirect("/addShows/existingShows/")

        indexer = int(series_pieces[1])
        indexer_id = int(series_pieces[3])
        # Show name was sent in UTF-8 in the form
        show_name = xhtml_unescape(series_pieces[4])
    else:
        # if no indexer was provided use the default indexer set in General settings
        if not providedIndexer:
            providedIndexer = settings.INDEXER_DEFAULT

        indexer = int(providedIndexer)
        indexer_id = int(whichSeries)
        # Derive the show name from the last path component of the given folder.
        show_name = os.path.basename(
            os.path.normpath(xhtml_unescape(fullShowPath)))

    # use the whole path if it's given, or else append the show name to the root dir to get the full show path
    if fullShowPath:
        show_dir = os.path.normpath(xhtml_unescape(fullShowPath))
        extra_check_dir = show_dir
    else:
        folder_name = show_name
        s = sickchill.indexer.series_by_id(indexerid=indexer_id, indexer=indexer, language=indexerLang)
        if settings.ADD_SHOWS_WITH_YEAR and s.firstAired:
            try:
                # Append "(YYYY)" to the folder name unless the year is already present.
                year = "({0})".format(
                    dateutil.parser.parse(s.firstAired).year)
                if year not in folder_name:
                    folder_name = "{0} {1}".format(s.seriesName, year)
            except (TypeError, ValueError):
                logger.info(
                    _("Could not append the show year folder for the show: {0}"
                      ).format(folder_name))

        show_dir = os.path.join(
            rootDir, sanitize_filename(xhtml_unescape(folder_name)))
        # Also check the un-yeared name so we don't collide with an existing folder.
        extra_check_dir = os.path.join(
            rootDir, sanitize_filename(xhtml_unescape(show_name)))

    # blanket policy - if the dir exists you should have used "add existing show" numbnuts
    if (os.path.isdir(show_dir) or os.path.isdir(extra_check_dir)) and not fullShowPath:
        ui.notifications.error(
            _("Unable to add show"),
            _("Folder {show_dir} exists already").format(show_dir=show_dir))
        return self.redirect("/addShows/existingShows/")

    # don't create show dir if config says not to
    if settings.ADD_SHOWS_WO_DIR:
        logger.info("Skipping initial creation of " + show_dir + " due to config.ini setting")
    else:
        dir_exists = helpers.makeDir(show_dir)
        if not dir_exists:
            logger.exception("Unable to create the folder " + show_dir + ", can't add the show")
            ui.notifications.error(
                _("Unable to add show"),
                _("Unable to create the folder {show_dir}, can't add the show"
                  ).format(show_dir=show_dir))
            # Don't redirect to default page because user wants to see the new show
            return self.redirect("/home/")
        else:
            helpers.chmodAsParent(show_dir)

    # prepare the inputs for passing along
    scene = config.checkbox_to_value(scene)
    anime = config.checkbox_to_value(anime)
    season_folders = config.checkbox_to_value(season_folders)
    subtitles = config.checkbox_to_value(subtitles)
    subtitles_sr_metadata = config.checkbox_to_value(subtitles_sr_metadata)

    if whitelist:
        whitelist = short_group_names(whitelist)
    if blacklist:
        blacklist = short_group_names(blacklist)

    if not anyQualities:
        anyQualities = []
    # A quality preset overrides any explicit "best" qualities.
    if not bestQualities or try_int(quality_preset, None):
        bestQualities = []
    if not isinstance(anyQualities, list):
        anyQualities = [anyQualities]
    if not isinstance(bestQualities, list):
        bestQualities = [bestQualities]
    newQuality = Quality.combineQualities([int(q) for q in anyQualities],
                                          [int(q) for q in bestQualities])

    # add the show
    settings.showQueueScheduler.action.add_show(
        indexer,
        indexer_id,
        showDir=show_dir,
        default_status=int(defaultStatus),
        quality=newQuality,
        season_folders=season_folders,
        lang=indexerLang,
        subtitles=subtitles,
        subtitles_sr_metadata=subtitles_sr_metadata,
        anime=anime,
        scene=scene,
        paused=None,
        blacklist=blacklist,
        whitelist=whitelist,
        default_status_after=int(defaultStatusAfter),
        root_dir=rootDir,
    )
    ui.notifications.message(
        _("Show added"),
        _("Adding the specified show into {show_dir}").format(
            show_dir=show_dir))

    return finishAddShow()
def run(self, force=False):
    """Background task: find downloaded episodes missing wanted subtitles and fetch them.

    Honours the USE_SUBTITLES setting and requires at least one enabled subtitle
    service. Unless *force* is True, each episode is re-searched on a back-off
    schedule based on its age (see the comment inside the loop).
    """
    if not settings.USE_SUBTITLES:
        return

    if not enabled_service_list():
        logger.warning(
            "Not enough services selected. At least 1 service is required to "
            "search subtitles in the background")
        return

    # Flag used elsewhere to indicate the scheduler job is running — TODO confirm reader.
    self.amActive = True

    def dhm(td):
        # Format a timedelta as e.g. "1 day, 2 hours, 3 minutes", omitting
        # zero-valued parts and singularising where needed.
        days = td.days
        hours = td.seconds // 60**2
        minutes = (td.seconds // 60) % 60
        ret = (("", "{0} days, ".format(days))[days > 0] +
               ("", "{0} hours, ".format(hours))[hours > 0] +
               ("", "{0} minutes".format(minutes))[minutes > 0])
        if days == 1:
            ret = ret.replace("days", "day")
        if hours == 1:
            ret = ret.replace("hours", "hour")
        if minutes == 1:
            ret = ret.replace("minutes", "minute")
        return ret.rstrip(", ")

    logger.info("Checking for missed subtitles")

    database = db.DBConnection()
    # Select downloaded episodes of subtitle-enabled shows whose subtitle list
    # doesn't yet cover the wanted languages; `age` is days since air date.
    sql_results = database.select(
        "SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.subtitles, e.subtitles_searchcount AS searchcount, e.subtitles_lastsearch AS lastsearch, e.location, (? - e.airdate) as age FROM tv_episodes AS e INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id) "
        "WHERE s.subtitles = 1 AND e.subtitles NOT LIKE ? "
        + ("AND e.season != 0 ", "")[settings.SUBTITLES_INCLUDE_SPECIALS]
        + "AND e.location != '' AND e.status IN ({}) ORDER BY age ASC".format(
            ",".join(["?"] * len(Quality.DOWNLOADED))),
        [datetime.datetime.now().toordinal(), wanted_languages(True)] + Quality.DOWNLOADED,
    )

    if not sql_results:
        logger.info("No subtitles to download")
        self.amActive = False
        return

    for ep_to_sub in sql_results:
        # Skip rows whose media file is gone from disk.
        if not os.path.isfile(ep_to_sub["location"]):
            logger.debug(
                "Episode file does not exist, cannot download subtitles for {0} {1}".format(
                    ep_to_sub["show_name"],
                    episode_num(ep_to_sub["season"], ep_to_sub["episode"])
                    or episode_num(ep_to_sub["season"], ep_to_sub["episode"], numbering="absolute"),
                ))
            continue

        if not needs_subtitles(ep_to_sub["subtitles"]):
            logger.debug(
                "Episode already has all needed subtitles, skipping {0} {1}".format(
                    ep_to_sub["show_name"],
                    episode_num(ep_to_sub["season"], ep_to_sub["episode"])
                    or episode_num(ep_to_sub["season"], ep_to_sub["episode"], numbering="absolute"),
                ))
            continue

        try:
            lastsearched = datetime.datetime.strptime(
                ep_to_sub["lastsearch"], dateTimeFormat)
        except ValueError:
            # Unparseable/empty timestamp: treat as "never searched".
            lastsearched = datetime.datetime.min

        try:
            if not force:
                now = datetime.datetime.now()
                days = int(ep_to_sub["age"])
                delay_time = datetime.timedelta(
                    hours=8 if days < 10 else 7 * 24 if days < 30 else 30 * 24)

                # Search every hour for the first 24 hours since aired, then every 8 hours until 10 days passes
                # After 10 days, search every 7 days, after 30 days search once a month
                # Will always try an episode regardless of age at least 2 times
                if lastsearched + delay_time > now and int(
                        ep_to_sub["searchcount"]) > 2 and days:
                    logger.debug(
                        "Subtitle search for {0} {1} delayed for {2}".format(
                            ep_to_sub["show_name"],
                            episode_num(ep_to_sub["season"], ep_to_sub["episode"])
                            or episode_num(ep_to_sub["season"], ep_to_sub["episode"], numbering="absolute"),
                            dhm(lastsearched + delay_time - now),
                        ))
                    continue

            logger.info(
                "Searching for missing subtitles of {0} {1}".format(
                    ep_to_sub["show_name"],
                    episode_num(ep_to_sub["season"], ep_to_sub["episode"])
                    or episode_num(ep_to_sub["season"], ep_to_sub["episode"], numbering="absolute"),
                ))

            show_object = Show.find(settings.showList, int(ep_to_sub["showid"]))
            if not show_object:
                logger.debug(
                    "Show with ID {0} not found in the database".format(
                        ep_to_sub["showid"]))
                continue

            episode_object = show_object.getEpisode(
                ep_to_sub["season"], ep_to_sub["episode"])
            # getEpisode appears to return an error string when the episode is
            # missing — NOTE(review): confirm against TVShow.getEpisode.
            if isinstance(episode_object, str):
                logger.debug("{0} {1} not found in the database".format(
                    ep_to_sub["show_name"],
                    episode_num(ep_to_sub["season"], ep_to_sub["episode"])
                    or episode_num(ep_to_sub["season"], ep_to_sub["episode"], numbering="absolute"),
                ))
                continue

            try:
                new_subtitles = episode_object.download_subtitles()
            except Exception as error:
                logger.error(
                    "Unable to find subtitles for {0} {1}. Error: {2}".format(
                        ep_to_sub["show_name"],
                        episode_num(ep_to_sub["season"], ep_to_sub["episode"])
                        or episode_num(ep_to_sub["season"], ep_to_sub["episode"], numbering="absolute"),
                        str(error),
                    ))
                continue

            if new_subtitles:
                logger.info("Downloaded {0} subtitles for {1} {2}".format(
                    ", ".join(new_subtitles),
                    ep_to_sub["show_name"],
                    episode_num(ep_to_sub["season"], ep_to_sub["episode"])
                    or episode_num(ep_to_sub["season"], ep_to_sub["episode"], numbering="absolute"),
                ))
        except Exception as error:
            # Catch-all so one bad episode doesn't abort the whole sweep.
            logger.error(
                "Error while searching subtitles for {0} {1}. Error: {2}".format(
                    ep_to_sub["show_name"],
                    episode_num(ep_to_sub["season"], ep_to_sub["episode"])
                    or episode_num(ep_to_sub["season"], ep_to_sub["episode"], numbering="absolute"),
                    str(error),
                ))
            continue

    logger.info("Finished checking for missed subtitles")
    self.amActive = False