def test_notify(self, username, blacklist_name=None):
    """
    Send a test notification to Trakt with the stored authentication info.

    :param username: Trakt username to query
    :param blacklist_name: slug of the Trakt list used to hide not-interested shows
    :return: a human-readable status message string
    """
    try:
        trakt_api = TraktAPI(sickbeard.SSL_VERIFY, sickbeard.TRAKT_TIMEOUT)
        trakt_api.validateAccount()
        # `blacklist_name` alone covers both None and empty string; the old
        # `and blacklist_name is not None` was redundant.
        if blacklist_name:
            trakt_lists = trakt_api.traktRequest("users/" + username + "/lists")
            for trakt_list in trakt_lists:
                if trakt_list['ids']['slug'] == blacklist_name:
                    return "Test notice sent successfully to Trakt"
            # loop fell through without a match (old code kept a `found` flag
            # that was never set to True)
            return "Trakt blacklist doesn't exists"
        else:
            return "Test notice sent successfully to Trakt"
    except (traktException, traktAuthException, traktServerBusy) as e:
        logger.log("Could not connect to Trakt service: {0}".format(ex(e)), logger.WARNING)
        return "Test notice failed to Trakt: {0}".format(ex(e))
def _notify_emby(self, message, host=None, emby_apikey=None):
    """Handles notifying Emby host via HTTP API

    :param message: notification text to display on the Emby dashboard
    :param host: Emby host, defaults to sickbeard.EMBY_HOST
    :param emby_apikey: API key, defaults to sickbeard.EMBY_APIKEY
    Returns:
        Returns True for no issue or False if there was an error
    """
    # fill in omitted parameters
    if not host:
        host = sickbeard.EMBY_HOST
    if not emby_apikey:
        emby_apikey = sickbeard.EMBY_APIKEY
    url = 'http://{0}/emby/Notifications/Admin'.format(host)
    values = {'Name': 'SickChill', 'Description': message, 'ImageUrl': sickbeard.LOGO_URL}
    # urllib on py3 requires a bytes request body; json.dumps returns str
    data = json.dumps(values).encode('utf-8')
    try:
        req = urllib.request.Request(url, data)
        req.add_header('X-MediaBrowser-Token', emby_apikey)
        req.add_header('Content-Type', 'application/json')
        response = urllib.request.urlopen(req)
        # decode so the str.replace below works (read() returns bytes)
        result = response.read().decode('utf-8', errors='replace')
        response.close()
        logger.log('EMBY: HTTP response: ' + result.replace('\n', ''), logger.DEBUG)
        return True
    except (urllib.error.URLError, IOError) as e:
        logger.log('EMBY: Warning: Couldn\'t contact Emby at ' + url + ' ' + ex(e), logger.WARNING)
        return False
def process_failed(process_path, release_name, result):
    """Process a download that did not complete correctly"""
    # failed-download handling is optional; bail out early when disabled
    if not sickbeard.USE_FAILED_DOWNLOADS:
        return

    processor = None
    try:
        processor = failedProcessor.FailedProcessor(process_path, release_name)
        result.result = processor.process()
        process_fail_message = ""
    except FailedPostProcessingFailedException as error:
        result.result = False
        process_fail_message = ex(error)

    if processor:
        result.output += processor.log

    # optionally clean up the folder once the failure has been recorded
    if sickbeard.DELETE_FAILED and result.result:
        if delete_folder(process_path, check_empty=False):
            result.output += log_helper("Deleted folder: {0}".format(process_path), logger.DEBUG)

    if result.result:
        result.output += log_helper("Failed Download Processing succeeded: ({0}, {1})".format(release_name, process_path))
    else:
        result.output += log_helper("Failed Download Processing failed: ({0}, {1}): {2}".format(release_name, process_path, process_fail_message), logger.WARNING)
def _api_call(self, apikey, params=None, results_per_page=1000, offset=0):
    """
    Query the provider's JSON-RPC getTorrents endpoint.

    :param apikey: provider API key
    :param params: search parameters dict (defaults to empty)
    :param results_per_page: page size for the request
    :param offset: result offset for paging
    :return: parsed JSON dict from the provider ({} or {'api-error': ...} on failure)
    """
    server = jsonrpclib.Server(self.urls['base_url'])
    parsed_json = {}
    try:
        parsed_json = server.getTorrents(apikey, params or {}, int(results_per_page), int(offset))
        time.sleep(cpu_presets[sickbeard.CPU_PRESET])
    except jsonrpclib.jsonrpc.ProtocolError as error:
        if error.message == (-32001, 'Invalid API Key'):
            logger.log("The API key you provided was rejected because it is invalid. Check your provider configuration.", logger.WARNING)
        elif error.message == (-32002, 'Call Limit Exceeded'):
            logger.log("You have exceeded the limit of 150 calls per hour, per API key which is unique to your user account", logger.WARNING)
        else:
            logger.log("JSON-RPC protocol error while accessing provider. Error: {0} ".format(repr(error)), logger.ERROR)
            parsed_json = {'api-error': ex(error)}
        return parsed_json
    except socket.timeout:
        logger.log("Timeout while accessing provider", logger.WARNING)
    except socket.error as error:
        # Note that sometimes timeouts are thrown as socket errors.
        # socket.error is OSError on py3 and is not subscriptable, so the old
        # `error[1]` raised TypeError inside the handler; format the exception
        # itself instead.
        logger.log("Socket error while accessing provider. Error: {0} ".format(ex(error)), logger.WARNING)
    except Exception as error:
        errorstring = str(error)
        # strip the angle brackets some transports wrap around the message
        if errorstring.startswith('<') and errorstring.endswith('>'):
            errorstring = errorstring[1:-1]
        logger.log("Unknown error while accessing provider. Error: {0} ".format(errorstring), logger.WARNING)
    return parsed_json
def _season_posters_dict(self, show_obj, season): """ Should return a dict like: result = {<season number>: {1: '<url 1>', 2: <url 2>, ...},} """ # This holds our resulting dictionary of season art result = {} indexer_lang = show_obj.lang try: # There's gotta be a better way of doing this but we don't wanna # change the language value elsewhere lINDEXER_API_PARMS = sickbeard.indexerApi(show_obj.indexer).api_params.copy() lINDEXER_API_PARMS['banners'] = True lINDEXER_API_PARMS['language'] = indexer_lang or sickbeard.INDEXER_DEFAULT_LANGUAGE if show_obj.dvdorder: lINDEXER_API_PARMS['dvdorder'] = True t = sickbeard.indexerApi(show_obj.indexer).indexer(**lINDEXER_API_PARMS) indexer_show_obj = t[show_obj.indexerid] except (sickbeard.indexer_error, IOError) as e: logger.log("Unable to look up show on " + sickbeard.indexerApi( show_obj.indexer).name + ", not downloading images: " + ex(e), logger.WARNING) logger.log("{0} may be experiencing some problems. Try again later.".format(sickbeard.indexerApi(show_obj.indexer).name), logger.DEBUG) return result # if we have no season banners then just finish if not getattr(indexer_show_obj, '_banners', None): return result if 'season' not in indexer_show_obj['_banners'] or 'season' not in indexer_show_obj['_banners']['season']: return result # Give us just the normal poster-style season graphics seasonsArtObj = indexer_show_obj['_banners']['season']['season'] # Returns a nested dictionary of season art with the season # number as primary key. It's really overkill but gives the option # to present to user via ui to pick down the road. result[season] = {} # find the correct season in the TVDB object and just copy the dict into our result dict for seasonArtID in seasonsArtObj.keys(): if int(seasonsArtObj[seasonArtID]['season']) == season and seasonsArtObj[seasonArtID]['language'] == ( indexer_lang or sickbeard.INDEXER_DEFAULT_LANGUAGE): result[season][seasonArtID] = seasonsArtObj[seasonArtID]['_bannerpath'] return result
def run(self, force=False):  # pylint: disable=unused-argument, too-many-locals, too-many-branches, too-many-statements
    """
    Daily updater: fetch the list of recently-updated TVDB series and queue an
    update for each matching show (a plain refresh for the rest).

    :param force: accepted for scheduler compatibility but unused here
    """
    # re-entrancy guard: skip if a previous run is still active
    if self.amActive:
        return
    self.amActive = True
    update_timestamp = time.mktime(datetime.datetime.now().timetuple())
    cache_db_con = db.DBConnection('cache.db')
    result = cache_db_con.select('SELECT `time` FROM lastUpdate WHERE provider = ?', ['theTVDB'])
    if result:
        last_update = int(result[0][0])
    else:
        # first run: seed the lastUpdate row with the epoch-minimum timestamp
        last_update = int(time.mktime(datetime.datetime.min.timetuple()))
        cache_db_con.action('INSERT INTO lastUpdate (provider, `time`) VALUES (?, ?)', ['theTVDB', last_update])
    network_timezones.update_network_dict()
    url = 'http://thetvdb.com/api/Updates.php?type=series&time={0}'.format(last_update)
    data = helpers.getURL(url, session=self.session, returns='text', hooks={'response': self.request_hook})
    if not data:
        logger.log('Could not get the recently updated show data from {0}. Retrying later. Url was: {1}'.format(sickbeard.indexerApi(INDEXER_TVDB).name, url))
        self.amActive = False
        return
    updated_shows = set()
    try:
        tree = etree.fromstring(data)
        for show in tree.findall('Series'):
            updated_shows.add(int(show.text))
    except SyntaxError:
        # unparseable XML: roll the timestamp back so the window is retried next run
        update_timestamp = last_update
    pi_list = []
    for cur_show in sickbeard.showList:
        # TVRage shut down; its shows can no longer be updated
        if int(cur_show.indexer) in [INDEXER_TVRAGE]:
            logger.log('Indexer is no longer available for show [{0}] '.format(cur_show.name), logger.WARNING)
            continue
        try:
            cur_show.nextEpisode()
            if sickbeard.indexerApi(cur_show.indexer).name == 'theTVDB':
                if cur_show.indexerid in updated_shows:
                    # full indexer update for shows TVDB reported as changed
                    pi_list.append(sickbeard.showQueueScheduler.action.update_show(cur_show, True))
                else:
                    # cheap disk refresh for everything else
                    pi_list.append(sickbeard.showQueueScheduler.action.refresh_show(cur_show, False))
        except (CantUpdateShowException, CantRefreshShowException) as error:
            logger.log('Automatic update failed: {0}'.format(ex(error)), logger.DEBUG)
    ui.ProgressIndicators.setIndicator('dailyUpdate', ui.QueueProgressIndicator('Daily Update', pi_list))
    # persist the high-water mark for the next run
    cache_db_con.action('UPDATE lastUpdate SET `time` = ? WHERE provider=?', [update_timestamp, 'theTVDB'])
    self.amActive = False
def _send_synologyNotifier(self, message, title):
    """Push a DSM desktop notification to all administrators via synodsmnotify."""
    notify_cmd = ["/usr/syno/bin/synodsmnotify", "@administrators", title, message]
    logger.log("Executing command " + str(notify_cmd))
    logger.log("Absolute path to command: " + ek(os.path.abspath, notify_cmd[0]), logger.DEBUG)
    try:
        proc = subprocess.Popen(notify_cmd, stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT, cwd=sickbeard.PROG_DIR)
        out, _ = proc.communicate()
        logger.log("Script result: " + str(out), logger.DEBUG)
    except OSError as error:
        logger.log("Unable to run synodsmnotify: " + ex(error))
def makeObject(self, cmd_arg, cur_path):
    """Run /usr/syno/bin/synoindex with cmd_arg against cur_path (no-op when disabled)."""
    if not sickbeard.USE_SYNOINDEX:
        return
    command = ['/usr/syno/bin/synoindex', cmd_arg, ek(os.path.abspath, cur_path)]
    logger.log("Executing command " + str(command), logger.DEBUG)
    logger.log("Absolute path to command: " + ek(os.path.abspath, command[0]), logger.DEBUG)
    try:
        proc = subprocess.Popen(command, stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT, cwd=sickbeard.PROG_DIR)
        output, _ = proc.communicate()
        logger.log("Script result: " + str(output), logger.DEBUG)
    except OSError as error:
        logger.log("Unable to run synoindex: " + ex(error), logger.ERROR)
def delete_folder(folder, check_empty=True):
    """
    Removes a folder from the filesystem

    :param folder: Path to folder to remove
    :param check_empty: Boolean, check if the folder is empty before removing it, defaults to True
    :return: True on success, False on failure
    """
    if not ek(os.path.isdir, folder):
        return False

    # never delete the configured download directory itself
    if sickbeard.TV_DOWNLOAD_DIR and helpers.real_path(folder) == helpers.real_path(sickbeard.TV_DOWNLOAD_DIR):
        return False

    if not check_empty:
        # recursive removal, contents and all
        try:
            logger.log("Deleting folder: " + folder)
            shutil.rmtree(folder)
        except (OSError, IOError) as error:
            logger.log("Warning: unable to delete folder: {0}: {1}".format(folder, ex(error)), logger.WARNING)
            return False
        return True

    # only delete when the folder has nothing left in it
    leftovers = ek(os.listdir, folder)
    if leftovers:
        logger.log("Not deleting folder {0} found the following files: {1}".format(folder, leftovers), logger.INFO)
        return False
    try:
        logger.log("Deleting folder (if it's empty): {0}".format(folder))
        ek(os.rmdir, folder)
    except (OSError, IOError) as error:
        logger.log("Warning: unable to delete folder: {0}: {1}".format(folder, ex(error)), logger.WARNING)
        return False
    return True
def save_nzb(nzb_name, nzb_string):
    """
    Save NZB to disk

    :param nzb_name: Filename/path to write to (".nzb" is appended)
    :param nzb_string: Content to write in file
    """
    target = nzb_name + ".nzb"
    try:
        with ek(open, target, 'w') as handle:
            handle.write(nzb_string)
    except EnvironmentError as error:
        logger.log("Unable to save NZB: " + ex(error), logger.ERROR)  # pylint: disable=no-member
def dumpHTML(data):
    """
    Dump raw torrent page data to CACHE_DIR/custom_torrent.html for debugging.

    :param data: bytes to write
    :return: True on success, False on failure
    """
    dumpName = ek(os.path.join, sickbeard.CACHE_DIR, 'custom_torrent.html')
    try:
        # context manager closes the handle even when write() raises; the old
        # open/write/close sequence leaked the file descriptor on error
        with io.open(dumpName, 'wb') as fileOut:
            fileOut.write(data)
        helpers.chmodAsParent(dumpName)
    except IOError as error:
        logger.log('Unable to save the file: {0}'.format(ex(error)), logger.ERROR)
        return False
    logger.log('Saved custom_torrent html dump {0} '.format(dumpName), logger.INFO)
    return True
def _process_upgrade(connection, upgrade_class):
    """Apply one schema upgrade if its test() says it is needed, then recurse into subclasses."""
    upgrader = upgrade_class(connection)
    # logger.log("Checking " + pretty_name(upgrade_class.__name__) + " database upgrade", logger.DEBUG)
    if not upgrader.test():
        logger.log("Database upgrade required: " + pretty_name(upgrade_class.__name__), logger.DEBUG)
        try:
            upgrader.execute()
        except Exception as error:
            logger.log("Error in " + str(upgrade_class.__name__) + ": " + ex(error), logger.ERROR)
            raise
        logger.log(upgrade_class.__name__ + " upgrade completed", logger.DEBUG)
    # else:
    #     logger.log(upgrade_class.__name__ + " upgrade not required", logger.DEBUG)

    # depth-first walk over every registered subclass upgrade
    for subclass in upgrade_class.__subclasses__():
        _process_upgrade(connection, subclass)
def update_library(self, ep_obj):
    """
    Sends a request to trakt indicating that the given episode is part of our library.

    ep_obj: The TVEpisode object to add to trakt
    """
    trakt_id = sickbeard.indexerApi(ep_obj.show.indexer).config['trakt_id']
    trakt_api = TraktAPI(sickbeard.SSL_VERIFY, sickbeard.TRAKT_TIMEOUT)

    if not sickbeard.USE_TRAKT:
        return

    try:
        # build the show payload Trakt expects
        show_entry = {
            'title': ep_obj.show.name,
            'year': ep_obj.show.startyear,
            'ids': {},
        }
        if trakt_id == 'tvdb_id':
            show_entry['ids']['tvdb'] = ep_obj.show.indexerid
        else:
            show_entry['ids']['tvrage'] = ep_obj.show.indexerid
        data = {'shows': [show_entry]}

        # drop the whole series from the watchlist first if configured
        if sickbeard.TRAKT_SYNC_WATCHLIST and sickbeard.TRAKT_REMOVE_SERIESLIST:
            trakt_api.traktRequest("sync/watchlist/remove", data, method='POST')

        # Add Season and Episode + Related Episodes
        episodes = [{'number': related.episode} for related in [ep_obj] + ep_obj.relatedEps]
        show_entry['seasons'] = [{'number': ep_obj.season, 'episodes': episodes}]

        if sickbeard.TRAKT_SYNC_WATCHLIST and sickbeard.TRAKT_REMOVE_WATCHLIST:
            trakt_api.traktRequest("sync/watchlist/remove", data, method='POST')

        # update library
        trakt_api.traktRequest("sync/collection", data, method='POST')
    except (traktException, traktAuthException, traktServerBusy) as e:
        logger.log("Could not connect to Trakt service: {0}".format(ex(e)), logger.WARNING)
def _send_slack(self, message=None):
    """
    Post a message to the configured Slack incoming webhook.

    :param message: text to post
    :return: True if succeeded, False otherwise
    """
    slack_webhook = self.SLACK_WEBHOOK_URL + sickbeard.SLACK_WEBHOOK.replace(self.SLACK_WEBHOOK_URL, '')
    logger.log("Sending slack message: " + message, logger.INFO)
    logger.log("Sending slack message to url: " + slack_webhook, logger.INFO)
    # Keep the message as text: json.dumps() on py3 raises TypeError for bytes,
    # so the old pre-encode to utf-8 broke the request; requests encodes the
    # JSON body itself.
    headers = {b"Content-Type": b"application/json"}
    try:
        r = requests.post(slack_webhook,
                          data=json.dumps(dict(text=message, username="******")),
                          headers=headers)
        r.raise_for_status()
    except Exception as e:
        logger.log("Error Sending Slack message: " + ex(e), logger.ERROR)
        return False
    return True
def update_library(self, show=None):
    """Handles updating the Emby Media Server host via HTTP API

    :param show: optional show object; when given, only that series is refreshed
    Returns:
        Returns True for no issue or False if there was an error
    """
    if sickbeard.USE_EMBY:
        if not sickbeard.EMBY_HOST:
            logger.log('EMBY: No host specified, check your settings', logger.DEBUG)
            return False
        if show:
            if show.indexer == 1:
                provider = 'tvdb'
            elif show.indexer == 2:
                logger.log('EMBY: TVRage Provider no longer valid', logger.WARNING)
                return False
            else:
                logger.log('EMBY: Provider unknown', logger.WARNING)
                return False
            query = '?{0}id={1}'.format(provider, show.indexerid)
        else:
            query = ''
        url = 'http://{0}/emby/Library/Series/Updated{1}'.format(sickbeard.EMBY_HOST, query)
        values = {}
        # urlencode returns str; urlopen on py3 requires a bytes body (an
        # empty bytes body still makes this a POST, matching the old intent)
        data = urllib.parse.urlencode(values).encode('ascii')
        try:
            req = urllib.request.Request(url, data)
            req.add_header('X-MediaBrowser-Token', sickbeard.EMBY_APIKEY)
            response = urllib.request.urlopen(req)
            # decode so the str.replace below works (read() returns bytes)
            result = response.read().decode('utf-8', errors='replace')
            response.close()
            logger.log('EMBY: HTTP response: ' + result.replace('\n', ''), logger.DEBUG)
            return True
        except (urllib.error.URLError, IOError) as e:
            logger.log('EMBY: Warning: Couldn\'t contact Emby at ' + url + ' ' + ex(e), logger.WARNING)
            return False
def _error_log_helper(self, exception, severity, local_variables, attempts, called_method):
    """Log a database query failure, but only on the first try and the final attempt."""
    if attempts not in (0, self.MAX_ATTEMPTS):
        return

    prefix = "Fatal" if severity == logger.ERROR else "Database"
    # noinspection PyUnresolvedReferences
    logger.log(
        _("{exception_severity} error executing query with {method} in database {db_location}: ").format(
            db_location=self.full_path, method=called_method, exception_severity=prefix
        ) + ex(exception),
        severity
    )
    # Lets print out all of the arguments so we can debug this better
    # noinspection PyUnresolvedReferences
    logger.log(_("If this happened in cache.db, you can safely stop SickChill, and delete the cache.db file without losing any data"))
    # noinspection PyUnresolvedReferences
    logger.log(
        _("Here is the arguments that were passed to this function (This is what the developers need to know): {local_variables:s}").format(
            local_variables=local_variables
        )
    )
def markFailed(epObj):
    """
    Mark an episode as failed

    :param epObj: Episode object to mark as failed
    :return: empty string
    """
    log_str = ""
    try:
        # hold the episode lock while swapping FAILED in, keeping the quality bits
        with epObj.lock:
            quality = Quality.splitCompositeStatus(epObj.status)[1]
            epObj.status = Quality.compositeStatus(FAILED, quality)
            epObj.saveToDB()
    except EpisodeNotFoundException as error:
        logger.log("Unable to get episode, please set its status manually: " + ex(error), logger.WARNING)
    return log_str
def getTrendingShows(self, traktList=None):
    """
    Display the new show page which collects a tvdb id, folder, and extra options and
    posts them to addNewShow
    """
    t = PageTemplate(rh=self, filename="trendingShows.mako")
    traktList = (traktList or "").lower()

    # fixed Trakt endpoints; date-based calendars are handled separately below
    static_endpoints = {
        "trending": "shows/trending",
        "popular": "shows/popular",
        "anticipated": "shows/anticipated",
        "collected": "shows/collected",
        "watched": "shows/watched",
        "played": "shows/played",
        "recommended": "recommendations/shows",
    }
    today = datetime.date.today().strftime("%Y-%m-%d")
    if traktList == "newshow":
        page_url = 'calendars/all/shows/new/{0}/30'.format(today)
    elif traktList == "newseason":
        page_url = 'calendars/all/shows/premieres/{0}/30'.format(today)
    else:
        # unknown/empty lists fall back to "anticipated", as before
        page_url = static_endpoints.get(traktList, "shows/anticipated")

    trending_shows = []
    black_list = False
    try:
        trending_shows, black_list = trakt_trending.fetch_trending_shows(traktList, page_url)
    except Exception as e:
        logger.log("Could not get trending shows: {0}".format(ex(e)), logger.WARNING)

    return t.render(black_list=black_list, trending_shows=trending_shows)
def _notifyTwilio(self, message='', force=False, allow_raise=False):
    """Send an SMS via Twilio; returns False when notifications are not enabled."""
    enabled = sickbeard.USE_TWILIO or force or self.number_regex.match(sickbeard.TWILIO_TO_NUMBER)
    if not enabled:
        return False

    logger.log('Sending Twilio SMS: ' + message, logger.DEBUG)
    try:
        self.client.messages.create(
            body=message,
            to=sickbeard.TWILIO_TO_NUMBER,
            from_=self.number.phone_number,
        )
    except twilio.TwilioRestException as error:
        logger.log('Twilio notification failed:' + ex(error), logger.ERROR)
        if allow_raise:
            raise error
    return True
def get_image_url(indexer_id):
    """
    Get poster image url from TVDB.

    :param indexer_id: TVDB show id (int or numeric string)
    :return: poster URL string, or None when the show/poster is unavailable
    """
    image_url = None
    try:
        lINDEXER_API_PARMS = sickbeard.indexerApi(INDEXER_TVDB).api_params.copy()
        lINDEXER_API_PARMS['banners'] = True
        t = sickbeard.indexerApi(INDEXER_TVDB).indexer(**lINDEXER_API_PARMS)
        indexer_show_obj = t[int(indexer_id)]
    except (sickbeard.indexer_error, IOError) as e:
        # format() accepts int or str ids; the old '+' concatenation raised
        # TypeError inside this handler when an int was passed in
        logger.log("Show id {0} not found on {1}, not downloading poster: {2}".format(
            indexer_id, sickbeard.indexerApi(INDEXER_TVDB).name, ex(e)), logger.DEBUG)
        return None
    if getattr(indexer_show_obj, 'poster', None):
        image_url = indexer_show_obj['poster'].replace('posters', '_cache/posters')
    return image_url
def refresh(indexer_id):
    """
    Try to refresh a show

    :param indexer_id: The unique id of the show to refresh
    :return: A tuple containing:
     - an error message if the show could not be refreshed, ``None`` otherwise
     - the show object that was refreshed, if it exists, ``None`` otherwise
    """
    error, show = Show._validate_indexer_id(indexer_id)
    if error is None:
        try:
            sickbeard.showQueueScheduler.action.refresh_show(show)
        except CantRefreshShowException as exception:
            return ex(exception), show
        return None, show
    return error, show
def moveObject(self, old_path, new_path):
    """Tell synoindex that a file moved from old_path to new_path (no-op when disabled)."""
    if not sickbeard.USE_SYNOINDEX:
        return
    command = [
        '/usr/syno/bin/synoindex',
        '-N',
        ek(os.path.abspath, new_path),
        ek(os.path.abspath, old_path),
    ]
    logger.log("Executing command " + str(command), logger.DEBUG)
    logger.log("Absolute path to command: " + ek(os.path.abspath, command[0]), logger.DEBUG)
    try:
        proc = subprocess.Popen(command, stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT, cwd=sickbeard.PROG_DIR)
        output, _ = proc.communicate()
        logger.log("Script result: " + str(output), logger.DEBUG)
    except OSError as error:
        logger.log("Unable to run synoindex: " + ex(error), logger.ERROR)
def _notifyTwilio(self, message='', force=False, allow_raise=False):
    """Deliver message as a Twilio SMS to the configured number."""
    if not (sickbeard.USE_TWILIO or force or self.number_regex.match(sickbeard.TWILIO_TO_NUMBER)):
        return False

    logger.log('Sending Twilio SMS: ' + message, logger.DEBUG)
    try:
        sms_kwargs = dict(
            body=message,
            to=sickbeard.TWILIO_TO_NUMBER,
            from_=self.number.phone_number,
        )
        self.client.messages.create(**sms_kwargs)
    except twilio.TwilioRestException as e:
        logger.log('Twilio notification failed:' + ex(e), logger.ERROR)
        if allow_raise:
            raise e
    return True
def _send_tweet(self, message=None):
    """
    Sends a tweet.

    :param message: Message to send
    :return: True if succeeded, False otherwise
    """
    api = twitter.Api(consumer_key=self.consumer_key, consumer_secret=self.consumer_hash,
                      access_token_key=sickbeard.TWITTER_USERNAME,
                      access_token_secret=sickbeard.TWITTER_PASSWORD)
    logger.log("Sending tweet: {}".format(message), logger.DEBUG)
    try:
        # Truncate by characters BEFORE encoding: slicing the utf-8 byte
        # string at 139 could split a multi-byte character and send invalid
        # utf-8 to Twitter.
        api.PostUpdate(message[:139].encode('utf8'))
    except Exception as e:
        logger.log("Error Sending Tweet: {}".format(ex(e)), logger.ERROR)
        return False
    return True
def _send_tweet(self, message=None):
    """
    Sends a tweet.

    :param message: Message to send
    :return: True if succeeded, False otherwise
    """
    api = twitter.Api(consumer_key=self.consumer_key, consumer_secret=self.consumer_hash,
                      access_token_key=sickbeard.TWITTER_USERNAME,
                      access_token_secret=sickbeard.TWITTER_PASSWORD)
    logger.log("Sending tweet: {}".format(message), logger.DEBUG)
    try:
        # Truncate by characters BEFORE encoding: slicing the utf-8 byte
        # string at 139 could split a multi-byte character and send invalid
        # utf-8 to Twitter.
        api.PostUpdate(message[:139].encode('utf8'))
    except Exception as e:
        logger.log("Error Sending Tweet: {}".format(ex(e)), logger.ERROR)
        return False
    return True
def refresh(indexer_id):
    """
    Try to refresh a show

    :param indexer_id: The unique id of the show to refresh
    :return: A tuple containing:
     - an error message if the show could not be refreshed, ``None`` otherwise
     - the show object that was refreshed, if it exists, ``None`` otherwise
    """
    validation_error, show = Show._validate_indexer_id(indexer_id)
    if validation_error is not None:
        return validation_error, show

    try:
        sickbeard.showQueueScheduler.action.refresh_show(show)
    except CantRefreshShowException as exception:
        return ex(exception), show

    return None, show
def _downloadResult(result):
    """
    Downloads a result to the appropriate black hole folder.

    :param result: SearchResult instance to download.
    :return: boolean, True on success
    """
    provider = result.provider
    if provider is None:
        logger.log("Invalid provider name - this is a coding error, report it please", logger.ERROR)
        return False

    # URL-based nzbs/torrents are fetched by the provider itself
    if result.resultType in (GenericProvider.NZB, GenericProvider.TORRENT):
        return provider.download_result(result)

    # raw nzb data gets written straight to the black hole folder
    if result.resultType == GenericProvider.NZBDATA:
        file_name = ek(os.path.join, sickbeard.NZB_DIR, result.name + ".nzb")
        logger.log("Saving NZB to " + file_name)
        try:
            with ek(open, file_name, 'w') as fileOut:
                fileOut.write(result.extraInfo[0])
            helpers.chmodAsParent(file_name)
        except EnvironmentError as e:
            logger.log("Error trying to save NZB to black hole: " + ex(e), logger.ERROR)
            return False
        return True

    logger.log("Invalid provider type - this is a coding error, report it please", logger.ERROR)
    return False
def _send_discord(self, message=None):
    """
    Post a message to the configured Discord webhook.

    :param message: text to post
    :return: True if succeeded, False otherwise
    """
    discord_webhook = sickbeard.DISCORD_WEBHOOK
    discord_name = sickbeard.DISCORD_NAME
    avatar_icon = sickbeard.DISCORD_AVATAR_URL
    discord_tts = bool(sickbeard.DISCORD_TTS)
    logger.log("Sending discord message: " + message, logger.INFO)
    logger.log("Sending discord message to url: " + discord_webhook, logger.INFO)
    # Keep the message as text: json.dumps() on py3 raises TypeError for bytes,
    # so the old pre-encode to utf-8 broke the request; requests encodes the
    # JSON body itself.
    headers = {b"Content-Type": b"application/json"}
    try:
        r = requests.post(discord_webhook,
                          data=json.dumps(dict(content=message, username=discord_name,
                                               avatar_url=avatar_icon, tts=discord_tts)),
                          headers=headers)
        r.raise_for_status()
    except Exception as e:
        logger.log("Error Sending Discord message: " + ex(e), logger.ERROR)
        return False
    return True
def _runbackup(self):
    """Back up the config/database into DATA_DIR/backup before an update; True on success."""
    # Do a system backup before update
    logger.log("Config backup in progress...")
    ui.notifications.message(_('Backup'), _('Config backup in progress...'))
    try:
        backup_dir = ek(os.path.join, sickbeard.DATA_DIR, 'backup')
        if not ek(os.path.isdir, backup_dir):
            ek(os.mkdir, backup_dir)

        backed_up = self._keeplatestbackup(backup_dir) and self._backup(backup_dir)
        if backed_up:
            logger.log("Config backup successful, updating...")
            ui.notifications.message(_('Backup'), _('Config backup successful, updating...'))
            return True

        logger.log("Config backup failed, aborting update", logger.ERROR)
        ui.notifications.message(_('Backup'), _('Config backup failed, aborting update'))
        return False
    except Exception as error:
        logger.log('Update: Config backup failed. Error: {0}'.format(ex(error)), logger.ERROR)
        ui.notifications.message(_('Backup'), _('Config backup failed, aborting update'))
        return False
def _process_upgrade(connection, upgrade_class):
    """Run a single database upgrade when required, then walk its subclass upgrades."""
    checker = upgrade_class(connection)
    # logger.log("Checking " + pretty_name(upgrade_class.__name__) + " database upgrade", logger.DEBUG)
    needs_upgrade = not checker.test()
    if needs_upgrade:
        logger.log("Database upgrade required: " + pretty_name(upgrade_class.__name__), logger.DEBUG)
        try:
            checker.execute()
        except Exception as e:
            logger.log("Error in " + str(upgrade_class.__name__) + ": " + ex(e), logger.ERROR)
            raise
        logger.log(upgrade_class.__name__ + " upgrade completed", logger.DEBUG)
    # else:
    #     logger.log(upgrade_class.__name__ + " upgrade not required", logger.DEBUG)

    for child in upgrade_class.__subclasses__():
        _process_upgrade(connection, child)
def _runbackup(self):
    """Take a config backup prior to updating; returns True when the backup succeeded."""
    # Do a system backup before update
    logger.log("Config backup in progress...")
    ui.notifications.message(_('Backup'), _('Config backup in progress...'))
    try:
        backupDir = ek(os.path.join, sickbeard.DATA_DIR, 'backup')
        if not ek(os.path.isdir, backupDir):
            ek(os.mkdir, backupDir)

        if not (self._keeplatestbackup(backupDir) and self._backup(backupDir)):
            logger.log("Config backup failed, aborting update", logger.ERROR)
            ui.notifications.message(_('Backup'), _('Config backup failed, aborting update'))
            return False

        logger.log("Config backup successful, updating...")
        ui.notifications.message(_('Backup'), _('Config backup successful, updating...'))
        return True
    except Exception as e:
        logger.log('Update: Config backup failed. Error: {0}'.format(ex(e)), logger.ERROR)
        ui.notifications.message(_('Backup'), _('Config backup failed, aborting update'))
        return False
def popularShows(self):
    """
    Fetches data from IMDB to show a list of popular shows.
    """
    t = PageTemplate(rh=self, filename="addShows_popularShows.mako")
    imdb_exception = None
    try:
        popular_shows = imdb_popular.fetch_popular_shows()
    except Exception as error:
        # Python 3 unbinds the `except ... as` name when the handler exits,
        # so the old code's later use of `e` raised NameError after a failure;
        # keep our own reference for the template.
        logger.log("Could not get popular shows: {0}".format(ex(error)), logger.WARNING)
        popular_shows = None
        imdb_exception = error

    return t.render(title=_("Popular Shows"), header=_("Popular Shows"),
                    popular_shows=popular_shows, imdb_exception=imdb_exception,
                    topmenu="home", controller="addShows", action="popularShows")
def _verify_download(self, file_name=None):
    """Return True only when file_name parses as a bittorrent file."""
    try:
        parser = createParser(file_name)
        if parser:
            # Access to a protected member of a client class
            mime_type = parser._getMimeType()
            # best-effort close of the underlying stream; errors here are irrelevant
            try:
                parser.stream._input.close()
            except Exception:
                pass
            if mime_type == 'application/x-bittorrent':
                return True
    except Exception as error:
        logger.log('Failed to validate torrent file: {0}'.format(ex(error)), logger.DEBUG)

    logger.log('Result is not a valid torrent file', logger.DEBUG)
    return False
def getFeed(url, params=None, request_hook=None):
    """Fetch url via request_hook and parse it as an RSS/XML feed; {'entries': []} on any failure."""
    try:
        data = request_hook(url, params=params, returns='text', timeout=30)
        if not data:
            raise Exception

        feed = parse(data, response_headers={'content-type': 'application/xml'})
        if not feed:
            logger.log('RSS error loading data: ' + url, logger.DEBUG)
        elif 'entries' in feed:
            return feed
        elif 'error' in feed.feed:
            err_code = feed.feed['error']['code']
            err_desc = feed.feed['error']['description']
            logger.log('RSS ERROR:[{0}] CODE:[{1}]'.format(err_desc, err_code), logger.DEBUG)
    except Exception as e:
        logger.log('RSS error: ' + ex(e), logger.DEBUG)

    return {'entries': []}
def run(self):
    """
    Runs the thread

    Main scheduler loop: once per second, decide whether the wrapped action
    is due (forced, or its interval elapsed — optionally gated to a start
    hour) and run it.
    """
    try:
        while not self.stop.is_set():
            if self.enable:
                current_time = datetime.datetime.now()
                should_run = False
                # Is self.force enable
                if self.force:
                    should_run = True
                # check if interval has passed
                elif current_time - self.lastRun >= self.cycleTime:
                    # check if wanting to start around certain time taking interval into account
                    if self.start_time is not None:
                        hour_diff = current_time.time().hour - self.start_time.hour
                        # only run within cycleTime hours after the start hour
                        if not hour_diff < 0 and hour_diff < self.cycleTime.seconds / 3600:
                            should_run = True
                        else:
                            # set lastRun to only check start_time after another cycleTime
                            self.lastRun = current_time
                    else:
                        should_run = True
                if should_run:
                    self.lastRun = current_time
                    if not self.silent:
                        logger.log("Starting new thread: " + self.name, logger.DEBUG)
                    self.action.run(self.force)
                    # a forced run only fires once; clear the flag afterwards
                    if self.force:
                        self.force = False
            time.sleep(1)
        # exiting thread
        self.stop.clear()
    except Exception as e:
        logger.log("Exception generated in thread " + self.name + ": " + ex(e), logger.ERROR)
        logger.log(repr(traceback.format_exc()), logger.DEBUG)
def _notify_emby(self, message, host=None, emby_apikey=None):
    """Handles notifying Emby host via HTTP API

    :param message: notification text to display on the Emby dashboard
    :param host: Emby host, defaults to sickbeard.EMBY_HOST
    :param emby_apikey: API key, defaults to sickbeard.EMBY_APIKEY
    Returns:
        Returns True for no issue or False if there was an error
    """
    # fill in omitted parameters
    if not host:
        host = sickbeard.EMBY_HOST
    if not emby_apikey:
        emby_apikey = sickbeard.EMBY_APIKEY
    url = '{0}/emby/Notifications/Admin'.format(host)
    values = {
        'Name': 'SickChill',
        'Description': message,
        'ImageUrl': sickbeard.LOGO_URL
    }
    # urllib on py3 requires a bytes request body; json.dumps returns str
    data = json.dumps(values).encode('utf-8')
    try:
        req = urllib.request.Request(url, data)
        req.add_header('X-MediaBrowser-Token', emby_apikey)
        req.add_header('Content-Type', 'application/json')
        response = urllib.request.urlopen(req)
        # decode so the str.replace below works (read() returns bytes)
        result = response.read().decode('utf-8', errors='replace')
        response.close()
        logger.log('EMBY: HTTP response: ' + result.replace('\n', ''), logger.DEBUG)
        return True
    except (urllib.error.URLError, IOError) as e:
        logger.log(
            'EMBY: Warning: Couldn\'t contact Emby at ' + url + ' ' + ex(e),
            logger.WARNING)
        return False
def _send_dm(self, message=None):
    """
    Sends a direct message.

    :param message: Message to send
    :return: True if succeeded, False otherwise
    """
    dmdest = sickbeard.TWITTER_DMTO
    api = twitter.Api(consumer_key=self.consumer_key, consumer_secret=self.consumer_hash,
                      access_token_key=sickbeard.TWITTER_USERNAME,
                      access_token_secret=sickbeard.TWITTER_PASSWORD)
    logger.log("Sending DM @{0}: {1}".format(dmdest, message), logger.DEBUG)
    try:
        # Truncate by characters BEFORE encoding: slicing the utf-8 byte
        # string at 139 could split a multi-byte character and send invalid
        # utf-8 to Twitter.
        api.PostDirectMessage(message[:139].encode('utf8'), screen_name=dmdest)
    except Exception as e:
        logger.log("Error Sending Tweet (DM): {}".format(ex(e)), logger.ERROR)
        return False
    return True
def _send_dm(self, message=None):
    """
    Sends a direct message.

    :param message: Message to send
    :return: True if succeeded, False otherwise
    """
    dmdest = sickbeard.TWITTER_DMTO
    api = twitter.Api(consumer_key=self.consumer_key, consumer_secret=self.consumer_hash,
                      access_token_key=sickbeard.TWITTER_USERNAME,
                      access_token_secret=sickbeard.TWITTER_PASSWORD)
    logger.log("Sending DM @{0}: {1}".format(dmdest, message), logger.DEBUG)
    try:
        # Truncate by characters BEFORE encoding: slicing the utf-8 byte
        # string at 139 could split a multi-byte character and send invalid
        # utf-8 to Twitter.
        api.PostDirectMessage(message[:139].encode('utf8'), screen_name=dmdest)
    except Exception as e:
        logger.log("Error Sending Tweet (DM): {}".format(ex(e)), logger.ERROR)
        return False
    return True
def delete(indexer_id, remove_files=False):
    """
    Try to delete a show

    :param indexer_id: The unique id of the show to delete
    :param remove_files: ``True`` to remove the files associated with the show, ``False`` otherwise
    :return: A tuple containing:
     - an error message if the show could not be deleted, ``None`` otherwise
     - the show object that was deleted, if it exists, ``None`` otherwise
    """
    validation_error, show = Show._validate_indexer_id(indexer_id)
    if validation_error is not None:
        return validation_error, show

    if show:
        try:
            sickbeard.showQueueScheduler.action.remove_show(show, bool(remove_files))
        except CantRemoveShowException as exception:
            return ex(exception), show

    return None, show
def _error_log_helper(self, exception, severity, local_variables, attempts, called_method):
    """Log a database query failure together with its calling context.

    :param exception: the exception raised by the query
    :param severity: logger severity level for this failure
    :param local_variables: the arguments that were passed to the failing call
    :param attempts: retry counter; used to suppress intermediate retries
    :param called_method: name of the database method that failed
    """
    if attempts in (0, self.MAX_ATTEMPTS):  # Only log the first try and the final failure
        # "Fatal" when the caller escalated to ERROR severity, otherwise "Database"
        prefix = ("Database", "Fatal")[severity == logger.ERROR]
        # noinspection PyUnresolvedReferences
        logger.log(
            _("{exception_severity} error executing query with {method} in database {db_location}: "
              ).format(db_location=self.full_path, method=called_method, exception_severity=prefix) + ex(exception),
            severity)

        # Lets print out all of the arguments so we can debug this better
        # noinspection PyUnresolvedReferences
        logger.log(
            _("If this happened in cache.db, you can safely stop SickChill, and delete the cache.db file without losing any data"))
        # noinspection PyUnresolvedReferences
        logger.log(
            _("Here is the arguments that were passed to this function (This is what the developers need to know): {local_variables:s}"
              ).format(local_variables=local_variables))
def process_media(process_path, video_files, release_name, process_method, force, is_priority, result):
    """
    Postprocess mediafiles

    :param process_path: Path to process in
    :param video_files: Filenames to look for and postprocess
    :param release_name: Name of NZB/Torrent file related
    :param process_method: auto/manual
    :param force: Postprocess currently postprocessing file
    :param is_priority: Boolean, is this a priority download
    :param result: Previous results
    """
    processor = None
    for cur_video_file in video_files:
        cur_video_file_path = ek(os.path.join, process_path, cur_video_file)

        # skip files a previous run already handled (unless forced)
        if already_processed(process_path, cur_video_file, force, result):
            result.output += log_helper("Skipping already processed file: {0}".format(cur_video_file), logger.DEBUG)
            continue

        try:
            processor = postProcessor.PostProcessor(cur_video_file_path, release_name, process_method, is_priority)
            result.result = processor.process()
            process_fail_message = ""
        except EpisodePostProcessingFailedException as e:
            result.result = False
            process_fail_message = ex(e)

        # surface the per-file processor log in the aggregate output
        if processor:
            result.output += processor.log

        if result.result:
            result.output += log_helper("Processing succeeded for {0}".format(cur_video_file_path))
        else:
            result.output += log_helper("Processing failed for {0}: {1}".format(cur_video_file_path, process_fail_message), logger.WARNING)
            result.missed_files.append("{0} : Processing failed: {1}".format(cur_video_file_path, process_fail_message))
            # one failed file marks the whole batch as not fully successful
            result.aggresult = False
def process_media(process_path, video_files, release_name, process_method, force, is_priority, result):  # pylint: disable=too-many-arguments
    """
    Postprocess mediafiles

    :param process_path: Path to process in
    :param video_files: Filenames to look for and postprocess
    :param release_name: Name of NZB/Torrent file related
    :param process_method: auto/manual
    :param force: Postprocess currently postprocessing file
    :param is_priority: Boolean, is this a priority download
    :param result: Previous results
    """
    processor = None
    for filename in video_files:
        file_path = ek(os.path.join, process_path, filename)

        # files already handled by an earlier run are skipped unless forced
        if already_processed(process_path, filename, force, result):
            result.output += log_helper("Skipping already processed file: {0}".format(filename), logger.DEBUG)
            continue

        fail_reason = ""
        try:
            processor = postProcessor.PostProcessor(file_path, release_name, process_method, is_priority)
            result.result = processor.process()
        except EpisodePostProcessingFailedException as error:
            result.result = False
            fail_reason = ex(error)

        # bubble the per-file processor log up into the aggregate output
        if processor:
            result.output += processor.log

        if result.result:
            result.output += log_helper("Processing succeeded for {0}".format(file_path))
            continue

        result.output += log_helper("Processing failed for {0}: {1}".format(file_path, fail_reason), logger.WARNING)
        result.missed_files.append("{0} : Processing failed: {1}".format(file_path, fail_reason))
        # a single failure marks the whole batch as not fully successful
        result.aggresult = False
def run_subs_extra_scripts(episode, subtitle, video, single=False):
    """Run every configured post-subtitle-download script for this subtitle.

    :param episode: episode the subtitle belongs to
    :param subtitle: downloaded subtitle object
    :param video: subliminal video object for the media file
    :param single: when True the subtitle path carries no language suffix
    """
    for script_name in sickbeard.SUBTITLES_EXTRA_SCRIPTS:
        # split on spaces while keeping quoted segments intact
        pieces = re.split("( |\\\".*?\\\"|'.*?')", script_name)
        script_cmd = [piece for piece in pieces if piece.strip()]
        script_cmd[0] = os.path.abspath(script_cmd[0])
        logger.log('Absolute path to script: {0}'.format(script_cmd[0]), logger.DEBUG)

        subtitle_path = subliminal.subtitle.get_subtitle_path(video.name, None if single else subtitle.language)

        inner_cmd = script_cmd + [
            video.name,
            subtitle_path,
            subtitle.language.opensubtitles,
            episode.show.name,
            str(episode.season),
            str(episode.episode),
            episode.name,
            str(episode.show.indexerid),
        ]

        # use subprocess to run the command and capture output
        logger.log('Executing command: {0}'.format(inner_cmd))
        try:
            process = subprocess.Popen(inner_cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                                       stderr=subprocess.STDOUT, cwd=sickbeard.PROG_DIR)
            stdout, _stderr = process.communicate()
            logger.log('Script result: {0}'.format(stdout), logger.DEBUG)
        except Exception as error:
            logger.log('Unable to run subs_extra_script: {0}'.format(ex(error)))
def revertEpisode(epObj):
    """Restore the episodes of a failed download to their original state"""
    failed_db_con = db.DBConnection('failed.db')
    sql_results = failed_db_con.select("SELECT episode, old_status FROM history WHERE showid=? AND season=?",
                                       [epObj.show.indexerid, epObj.season])

    # map episode number -> history row for quick lookup (rows use bytes keys)
    history_eps = {res[b"episode"]: res for res in sql_results}

    try:
        logger.log("Reverting episode ({0}, {1}): {2}".format(epObj.season, epObj.episode, epObj.name))
        with epObj.lock:
            if epObj.episode in history_eps:
                # a snatched status was recorded before the failure; restore it
                logger.log("Found in history")
                epObj.status = history_eps[epObj.episode][b'old_status']
            else:
                logger.log("Episode don't have a previous snatched status to revert. Setting it back to WANTED", logger.DEBUG)
                epObj.status = WANTED
            epObj.saveToDB()
    except EpisodeNotFoundException as e:
        logger.log("Unable to create episode, please set its status manually: " + ex(e), logger.WARNING)
def run_subs_extra_scripts(episode, subtitle, video, single=False):
    """Run every configured post-subtitle-download script for this subtitle.

    :param episode: episode the subtitle belongs to
    :param subtitle: downloaded subtitle object
    :param video: subliminal video object for the media file
    :param single: when True the subtitle path carries no language suffix
    """
    for script_name in sickbeard.SUBTITLES_EXTRA_SCRIPTS:
        # split on spaces while keeping quoted segments intact
        script_cmd = [piece for piece in re.split("( |\\\".*?\\\"|'.*?')", script_name) if piece.strip()]
        script_cmd[0] = os.path.abspath(script_cmd[0])
        logger.log('Absolute path to script: {0}'.format(script_cmd[0]), logger.DEBUG)

        subtitle_path = subliminal.subtitle.get_subtitle_path(video.name, None if single else subtitle.language)

        inner_cmd = script_cmd + [video.name, subtitle_path, subtitle.language.opensubtitles, episode.show.name,
                                  str(episode.season), str(episode.episode), episode.name,
                                  str(episode.show.indexerid)]

        # use subprocess to run the command and capture output
        logger.log('Executing command: {0}'.format(inner_cmd))
        try:
            process = subprocess.Popen(inner_cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                                       stderr=subprocess.STDOUT, cwd=sickbeard.PROG_DIR)
            stdout, stderr_ = process.communicate()
            logger.log('Script result: {0}'.format(stdout), logger.DEBUG)
        except Exception as error:
            logger.log('Unable to run subs_extra_script: {0}'.format(ex(error)))
def _send_slack(self, message=None):
    """Post a notification message to the configured Slack webhook.

    :param message: Text to post
    :return: True if the webhook accepted the message, False otherwise
    """
    # normalise the stored webhook so the base URL is never duplicated
    slack_webhook = self.SLACK_WEBHOOK_URL + sickbeard.SLACK_WEBHOOK.replace(self.SLACK_WEBHOOK_URL, '')

    logger.log("Sending slack message: " + message, logger.INFO)
    logger.log("Sending slack message to url: " + slack_webhook, logger.INFO)

    headers = {"Content-Type": "application/json"}
    try:
        # json.dumps cannot serialize bytes on Python 3, so the message must
        # remain text; requests handles the utf-8 encoding of the payload.
        r = requests.post(slack_webhook,
                          data=json.dumps(dict(text=message, username="******")),
                          headers=headers)
        r.raise_for_status()
    except Exception as e:
        logger.log("Error Sending Slack message: " + ex(e), logger.ERROR)
        return False

    return True
def delete(indexer_id, remove_files=False):
    """
    Try to delete a show

    :param indexer_id: The unique id of the show to delete
    :param remove_files: ``True`` to remove the files associated with the show, ``False`` otherwise
    :return: A tuple containing:
     - an error message if the show could not be deleted, ``None`` otherwise
     - the show object that was deleted, if it exists, ``None`` otherwise
    """
    error, show = Show._validate_indexer_id(indexer_id)

    # validation failed: report the error without touching the queue
    if error is not None:
        return error, show

    if show:
        try:
            # hand removal off to the show queue so it happens on the queue thread
            sickbeard.showQueueScheduler.action.remove_show(show, bool(remove_files))
        except CantRemoveShowException as exception:
            return ex(exception), show

    return None, show
def update_show_indexer_metadata(self, show_obj):
    """Update the indexer id stored in an existing show NFO file.

    Returns True when the file already contains the right id or was rewritten
    successfully; returns None implicitly when metadata is disabled, missing,
    or the write fails.
    """
    if self.show_metadata and show_obj and self._has_show_metadata(show_obj):
        logger.log(
            "Metadata provider " + self.name + " updating show indexer info metadata file for " + show_obj.name,
            logger.DEBUG)

        nfo_file_path = self.get_show_file_path(show_obj)
        # NOTE(review): assert is stripped under -O; relies on callers passing text paths
        assert isinstance(nfo_file_path, six.text_type)

        try:
            with io.open(nfo_file_path, 'rb') as xmlFileObj:
                showXML = etree.ElementTree(file=xmlFileObj)

            indexerid = showXML.find('id')

            root = showXML.getroot()
            if indexerid is not None:
                # id element exists: leave untouched if already correct, otherwise rewrite it
                if indexerid.text == str(show_obj.indexerid):
                    return True
                indexerid.text = str(show_obj.indexerid)
            else:
                etree.SubElement(root, "id").text = str(show_obj.indexerid)

            # Make it purdy
            helpers.indentXML(root)

            showXML.write(nfo_file_path, encoding='UTF-8')
            helpers.chmodAsParent(nfo_file_path)

            return True
        except IOError as e:
            logger.log(
                "Unable to write file to " + nfo_file_path + " - are you sure the folder is writable? " + ex(e),
                logger.ERROR)
def getFeed(url, params=None, request_hook=None):
    """Fetch *url* via *request_hook* and parse it as an RSS/XML feed.

    Returns the parsed feed when it contains entries; otherwise logs the
    problem and returns an empty feed (``{'entries': []}``).
    """
    try:
        data = request_hook(url, params=params, returns='text', timeout=30)
        if not data:
            raise Exception

        feed = parse(data, response_headers={'content-type': 'application/xml'})
        if feed:
            if 'entries' in feed:
                return feed
            if 'error' in feed.feed:
                # feed-level error payload from the provider
                error_info = feed.feed['error']
                logger.log('RSS ERROR:[{0}] CODE:[{1}]'.format(error_info['description'], error_info['code']),
                           logger.DEBUG)
        else:
            logger.log('RSS error loading data: ' + url, logger.DEBUG)
    except Exception as e:
        logger.log('RSS error: ' + ex(e), logger.DEBUG)

    return {'entries': []}
def _season_banners_dict(self, show_obj, season): """ Should return a dict like: result = {<season number>: {1: '<url 1>', 2: <url 2>, ...},} """ # This holds our resulting dictionary of season art result = {} indexer_lang = show_obj.lang try: # There's gotta be a better way of doing this but we don't wanna # change the language value elsewhere lINDEXER_API_PARMS = sickbeard.indexerApi( show_obj.indexer).api_params.copy() lINDEXER_API_PARMS['banners'] = True lINDEXER_API_PARMS[ 'language'] = indexer_lang or sickbeard.INDEXER_DEFAULT_LANGUAGE t = sickbeard.indexerApi( show_obj.indexer).indexer(**lINDEXER_API_PARMS) indexer_show_obj = t[show_obj.indexerid] except (sickbeard.indexer_error, IOError) as e: logger.log( "Unable to look up show on " + sickbeard.indexerApi(show_obj.indexer).name + ", not downloading images: " + ex(e), logger.WARNING) logger.log( "{0} may be experiencing some problems. Try again later.". format(sickbeard.indexerApi(show_obj.indexer).name), logger.DEBUG) return result # if we have no season banners then just finish if not getattr(indexer_show_obj, '_banners', None): return result # if we have no season banners then just finish if 'season' not in indexer_show_obj[ '_banners'] or 'seasonwide' not in indexer_show_obj[ '_banners']['season']: return result # Give us just the normal season graphics seasonsArtObj = indexer_show_obj['_banners']['season']['seasonwide'] # Returns a nested dictionary of season art with the season # number as primary key. It's really overkill but gives the option # to present to user via ui to pick down the road. result[season] = {} # find the correct season in the TVDB object and just copy the dict into our result dict for seasonArtID in seasonsArtObj.keys(): if int(seasonsArtObj[seasonArtID]['season'] ) == season and seasonsArtObj[seasonArtID]['language'] == ( indexer_lang or sickbeard.INDEXER_DEFAULT_LANGUAGE): result[season][seasonArtID] = seasonsArtObj[seasonArtID][ '_bannerpath'] return result
def _retrieve_show_image(self, image_type, show_obj, which=None):
    """
    Gets an image URL from theTVDB.com and TMDB.com, downloads it and returns the data.

    image_type: type of image to retrieve (currently supported: fanart, poster, banner)
    show_obj: a TVShow object to use when searching for the image
    which: optional, a specific numbered poster to look for

    Returns: the binary image data if available, or else None
    """
    image_url = None
    indexer_lang = show_obj.lang

    try:
        # There's gotta be a better way of doing this but we don't wanna
        # change the language value elsewhere
        lINDEXER_API_PARMS = sickbeard.indexerApi(show_obj.indexer).api_params.copy()
        lINDEXER_API_PARMS['banners'] = True
        lINDEXER_API_PARMS['language'] = indexer_lang or sickbeard.INDEXER_DEFAULT_LANGUAGE

        if show_obj.dvdorder:
            lINDEXER_API_PARMS['dvdorder'] = True

        t = sickbeard.indexerApi(show_obj.indexer).indexer(**lINDEXER_API_PARMS)
        indexer_show_obj = t[show_obj.indexerid]
    except (sickbeard.indexer_error, IOError) as e:
        logger.log("Unable to look up show on " + sickbeard.indexerApi(show_obj.indexer).name +
                   ", not downloading images: " + ex(e), logger.WARNING)
        logger.log("{0} may be experiencing some problems. Try again later.".format(
            sickbeard.indexerApi(show_obj.indexer).name), logger.DEBUG)
        return None

    if image_type not in ('fanart', 'poster', 'banner', 'poster_thumb', 'banner_thumb'):
        logger.log("Invalid image type " + str(image_type) + ", couldn't find it in the " +
                   sickbeard.indexerApi(show_obj.indexer).name + " object", logger.ERROR)
        return None

    if image_type == 'poster_thumb':
        if getattr(indexer_show_obj, 'poster', None):
            # thumbnail variants live under the indexer's _cache path
            image_url = re.sub('posters', '_cache/posters', indexer_show_obj['poster'])
        if not image_url:
            # Try and get images from Fanart.TV
            image_url = self._retrieve_show_images_from_fanart(show_obj, image_type)
        if not image_url:
            # Try and get images from TMDB
            image_url = self._retrieve_show_images_from_tmdb(show_obj, image_type)
    elif image_type == 'banner_thumb':
        if getattr(indexer_show_obj, 'banner', None):
            image_url = re.sub('graphical', '_cache/graphical', indexer_show_obj['banner'])
        if not image_url:
            # Try and get images from Fanart.TV
            image_url = self._retrieve_show_images_from_fanart(show_obj, image_type)
    else:
        if getattr(indexer_show_obj, image_type, None):
            image_url = indexer_show_obj[image_type]
        if not image_url:
            # Try and get images from Fanart.TV
            image_url = self._retrieve_show_images_from_fanart(show_obj, image_type)
        if not image_url:
            # Try and get images from TMDB
            image_url = self._retrieve_show_images_from_tmdb(show_obj, image_type)

    if image_url:
        image_data = metadata_helpers.getShowImage(image_url, which)
        return image_data

    return None
def retrieveShowMetadata(self, folder):
    """
    Used only when mass adding Existing Shows, using previously generated
    Show metadata to reduce the need to query TVDB.

    :param folder: show directory expected to contain this provider's metadata file
    :return: tuple of (indexer_id, show name, indexer), or (None, None, None)
        when the file is missing or unusable
    """
    empty_return = (None, None, None)

    # NOTE(review): assert is stripped under -O; relies on callers passing text paths
    assert isinstance(folder, six.text_type)

    metadata_path = ek(os.path.join, folder, self._show_metadata_filename)

    if not ek(os.path.isdir, folder) or not ek(os.path.isfile, metadata_path):
        logger.log("Can't load the metadata file from " + metadata_path + ", it doesn't exist", logger.DEBUG)
        return empty_return

    logger.log("Loading show info from metadata file in " + metadata_path, logger.DEBUG)

    try:
        with io.open(metadata_path, 'rb') as xmlFileObj:
            showXML = etree.ElementTree(file=xmlFileObj)

        # a usable NFO needs a title and at least one id element
        if showXML.findtext('title') is None or (showXML.findtext('tvdbid') is None and showXML.findtext('id') is None):
            logger.log("Invalid info in tvshow.nfo (missing name or id): {0} {1} {2}".format(
                showXML.findtext('title'), showXML.findtext('tvdbid'), showXML.findtext('id')))
            return empty_return

        name = showXML.findtext('title')

        indexer_id_text = showXML.findtext('tvdbid') or showXML.findtext('id')
        if indexer_id_text:
            indexer_id = try_int(indexer_id_text, None)
            if indexer_id is None or indexer_id < 1:
                logger.log("Invalid Indexer ID (" + str(indexer_id) + "), not using metadata file", logger.DEBUG)
                return empty_return
        else:
            logger.log("Empty <id> or <tvdbid> field in NFO, unable to find a ID, not using metadata file",
                       logger.DEBUG)
            return empty_return

        indexer = 1
        # reject NFOs whose episode guide points at TVRage for this id
        epg_url_text = showXML.findtext('episodeguide/url')
        if epg_url_text:
            epg_url = epg_url_text.lower()
            if str(indexer_id) in epg_url and 'tvrage' in epg_url:
                logger.log("Invalid Indexer ID (" + str(indexer_id) +
                           "), not using metadata file because it has TVRage info", logger.WARNING)
                return empty_return

    except Exception as e:
        logger.log("There was an error parsing your existing metadata file: '" + metadata_path + "' error: " + ex(e),
                   logger.WARNING)
        return empty_return

    return indexer_id, name, indexer
def _send_to_kodi_json(command, host=None, username=None, password=None, dest_app="KODI"):
    """Handles communication to KODI servers via JSONRPC

    Args:
        command: JSON-RPC command string, POSTed to the KODI JSON-RPC endpoint via HTTP
        host: KODI webserver host:port
        username: KODI webserver username
        password: KODI webserver password

    Returns:
        Returns response.result for successful commands or False if there was an error
    """
    # fill in omitted parameters
    if not username:
        username = sickbeard.KODI_USERNAME
    if not password:
        password = sickbeard.KODI_PASSWORD

    if not host:
        logger.log('No {0} host passed, aborting update'.format(dest_app), logger.WARNING)
        return False

    # Request needs the POST body as bytes
    command = command.encode('utf-8')
    logger.log("{0} JSON command: {1}".format(dest_app, command), logger.DEBUG)

    url = 'http://{0}/jsonrpc'.format(host)
    try:
        req = urllib.request.Request(url, command)
        req.add_header("Content-type", "application/json")
        # if we have a password, use authentication
        if password:
            # base64.encodestring required bytes and was removed in Python 3.9;
            # b64encode on the encoded credentials is the supported replacement.
            base64string = base64.b64encode('{0}:{1}'.format(username, password).encode('utf-8')).decode('ascii')
            authheader = "Basic {0}".format(base64string)
            req.add_header("Authorization", authheader)
            logger.log("Contacting {0} (with auth header) via url: {1}".format(dest_app, ss(url)), logger.DEBUG)
        else:
            logger.log("Contacting {0} via url: {1}".format(dest_app, ss(url)), logger.DEBUG)

        try:
            response = urllib.request.urlopen(req)
        except (http_client.BadStatusLine, urllib.error.URLError) as e:
            if sickbeard.KODI_ALWAYS_ON:
                logger.log("Error while trying to retrieve {0} API version for {1}: {2!r}".format(
                    dest_app, host, ex(e)), logger.WARNING)
            return False

        # parse the json result
        try:
            result = json.load(response)
            response.close()
            logger.log("{0} JSON response: {1}".format(dest_app, result), logger.DEBUG)
            return result  # need to return response for parsing
        except ValueError:
            logger.log("Unable to decode JSON: " + str(response.read()), logger.WARNING)
            return False

    except IOError as e:
        if sickbeard.KODI_ALWAYS_ON:
            logger.log("Warning: Couldn't contact {0} JSON API at {1}: {2!r}".format(
                dest_app, ss(url), ex(e)), logger.WARNING)
        return False
def _update_library(self, host=None, showName=None):  # pylint: disable=too-many-locals, too-many-return-statements
    """Handles updating KODI host via HTTP API

    Attempts to update the KODI video library for a specific tv show if passed,
    otherwise update the whole library if enabled.

    Args:
        host: KODI webserver host:port
        showName: Name of a TV show to specifically target the library update for

    Returns:
        Returns True or False
    """
    if not host:
        logger.log('No KODI host passed, aborting update', logger.WARNING)
        return False

    logger.log("Updating KODI library via HTTP method for host: " + host, logger.DEBUG)

    # if we're doing per-show
    if showName:
        logger.log("Updating library in KODI via HTTP method for show " + showName, logger.DEBUG)

        # look up the show's directory path(s) in KODI's own video database
        pathSql = ('select path.strPath from path, tvshow, tvshowlinkpath where '
                   'tvshow.c00 = "{}" and tvshowlinkpath.idShow = tvshow.idShow '
                   'and tvshowlinkpath.idPath = path.idPath').format(showName)

        # use this to get xml back for the path lookups
        xmlCommand = {
            'command': 'SetResponseFormat(webheader;false;webfooter;false;header;<xml>;footer;</xml>;opentag;<tag>;closetag;</tag>;closefinaltag;false)'}
        # sql used to grab path(s)
        sqlCommand = {'command': 'QueryVideoDatabase({0})'.format(pathSql)}
        # set output back to default
        resetCommand = {'command': 'SetResponseFormat()'}

        # set xml response format, if this fails then don't bother with the rest
        request = self._send_to_kodi(xmlCommand, host)
        if not request:
            return False

        sqlXML = self._send_to_kodi(sqlCommand, host)
        request = self._send_to_kodi(resetCommand, host)

        if not sqlXML:
            logger.log("Invalid response for " + showName + " on " + host, logger.DEBUG)
            return False

        encSqlXML = urllib.parse.quote(sqlXML, ':\\/<>')
        try:
            et = etree.fromstring(encSqlXML)
        except SyntaxError as e:
            logger.log("Unable to parse XML returned from KODI: " + ex(e), logger.ERROR)
            return False

        paths = et.findall('.//field')

        if not paths:
            logger.log("No valid paths found for " + showName + " on " + host, logger.DEBUG)
            return False

        for path in paths:
            # we do not need it double-encoded, gawd this is dumb
            # NOTE(review): .decode on the unquoted str fails on Python 3 — confirm this branch is exercised
            unEncPath = urllib.parse.unquote(path.text).decode(sickbeard.SYS_ENCODING)
            logger.log("KODI Updating " + showName + " on " + host + " at " + unEncPath, logger.DEBUG)

            updateCommand = {'command': 'ExecBuiltIn',
                             'parameter': 'KODI.updatelibrary(video, {0})'.format(unEncPath)}
            request = self._send_to_kodi(updateCommand, host)
            if not request:
                logger.log("Update of show directory failed on " + showName + " on " + host + " at " + unEncPath,
                           logger.WARNING)
                return False

            # sleep for a few seconds just to be sure kodi has a chance to finish each directory
            if len(paths) > 1:
                time.sleep(5)

    # do a full update if requested
    else:
        logger.log("Doing Full Library KODI update on host: " + host, logger.DEBUG)
        updateCommand = {'command': 'ExecBuiltIn', 'parameter': 'KODI.updatelibrary(video)'}
        request = self._send_to_kodi(updateCommand, host)

        if not request:
            logger.log("KODI Full Library update failed on: " + host, logger.WARNING)
            return False

    return True
def _send_to_kodi(command, host=None, username=None, password=None, dest_app="KODI"):  # pylint: disable=too-many-arguments
    """Handles communication to KODI servers via HTTP API

    Args:
        command: Dictionary of field/data pairs, encoded via urllib and passed to the KODI API via HTTP
        host: KODI webserver host:port
        username: KODI webserver username
        password: KODI webserver password

    Returns:
        Returns response.result for successful commands or False if there was an error
    """
    # fill in omitted parameters
    if not username:
        username = sickbeard.KODI_USERNAME
    if not password:
        password = sickbeard.KODI_PASSWORD

    if not host:
        logger.log('No {0} host passed, aborting update'.format(dest_app), logger.WARNING)
        return False

    # urlencode the command dict; encode any text values to utf-8 first
    for key in command:
        if isinstance(command[key], six.text_type):
            command[key] = command[key].encode('utf-8')

    enc_command = urllib.parse.urlencode(command)
    logger.log("{0} encoded API command: {1!r}".format(dest_app, enc_command), logger.DEBUG)

    # url = 'http://%s/xbmcCmds/xbmcHttp/?%s' % (host, enc_command)  # maybe need for old plex?
    url = 'http://{0}/kodiCmds/kodiHttp/?{1}'.format(host, enc_command)
    try:
        req = urllib.request.Request(url)
        # if we have a password, use authentication
        if password:
            # base64.encodestring required bytes and was removed in Python 3.9;
            # b64encode on the encoded credentials is the supported replacement.
            base64string = base64.b64encode('{0}:{1}'.format(username, password).encode('utf-8')).decode('ascii')
            authheader = "Basic {0}".format(base64string)
            req.add_header("Authorization", authheader)
            logger.log("Contacting {0} (with auth header) via url: {1}".format(dest_app, ss(url)), logger.DEBUG)
        else:
            logger.log("Contacting {0} via url: {1}".format(dest_app, ss(url)), logger.DEBUG)

        try:
            response = urllib.request.urlopen(req)
        except (http_client.BadStatusLine, urllib.error.URLError) as e:
            logger.log("Couldn't contact {0} HTTP at {1!r} : {2!r}".format(dest_app, url, ex(e)), logger.DEBUG)
            return False

        result = response.read().decode(sickbeard.SYS_ENCODING)
        response.close()

        logger.log("{0} HTTP response: {1}".format(dest_app, result.replace('\n', '')), logger.DEBUG)
        return result

    except Exception as e:
        logger.log("Couldn't contact {0} HTTP at {1!r} : {2!r}".format(dest_app, url, ex(e)), logger.DEBUG)
        return False
def searchForNeededEpisodes():
    """
    Check providers for details on wanted episodes

    :return: episodes we have a search hit for
    """
    foundResults = {}

    didSearch = False

    show_list = sickbeard.showList
    fromDate = datetime.date.fromordinal(1)
    episodes = []

    # gather wanted episodes from every unpaused show
    for curShow in show_list:
        if not curShow.paused:
            sickbeard.name_cache.buildNameCache(curShow)
            episodes.extend(wantedEpisodes(curShow, fromDate))

    if not episodes:
        # nothing wanted so early out, ie: avoid whatever abritrarily
        # complex thing a provider cache update entails, for example,
        # reading rss feeds
        logger.log("No episodes needed.", logger.INFO)
        return foundResults.values()

    origThreadName = threading.currentThread().name

    # only providers enabled for daily searches participate
    providers = [x for x in sickbeard.providers.sortedProviderList(sickbeard.RANDOMIZE_PROVIDERS)
                 if x.is_active and x.enable_daily and x.can_daily]

    # refresh each provider's cache before searching
    for curProvider in providers:
        threading.currentThread().name = origThreadName + " :: [" + curProvider.name + "]"
        curProvider.cache.update_cache()

    for curProvider in providers:
        threading.currentThread().name = origThreadName + " :: [" + curProvider.name + "]"
        try:
            curFoundResults = curProvider.search_rss(episodes)
        except AuthException as e:
            logger.log("Authentication error: " + ex(e), logger.WARNING)
            continue
        except Exception as e:
            logger.log("Error while searching " + curProvider.name + ", skipping: " + ex(e), logger.ERROR)
            logger.log(traceback.format_exc(), logger.DEBUG)
            continue

        didSearch = True

        # pick a single result for each episode, respecting existing results
        for curEp in curFoundResults:
            if not curEp.show or curEp.show.paused:
                logger.log("Skipping {0} because the show is paused ".format(curEp.pretty_name()), logger.DEBUG)
                continue

            bestResult = pickBestResult(curFoundResults[curEp], curEp.show)

            # if all results were rejected move on to the next episode
            if not bestResult:
                logger.log("All found results for " + curEp.pretty_name() + " were rejected.", logger.DEBUG)
                continue

            # if it's already in the list (from another provider) and the newly found quality
            # is no better then skip it
            if curEp in foundResults and bestResult.quality <= foundResults[curEp].quality:
                continue

            foundResults[curEp] = bestResult

    threading.currentThread().name = origThreadName

    if not didSearch:
        logger.log(
            "No NZB/Torrent providers found or enabled in the sickchill config for daily searches. Please check your settings.",
            logger.INFO)

    return foundResults.values()
def _ep_data(self, ep_obj):
    """
    Creates an elementTree XML structure for a MediaBrowser style episode.xml
    and returns the resulting data object.

    show_obj: a TVShow instance to create the NFO for
    """
    # root episode plus any episodes sharing the same file (multi-episode files)
    eps_to_write = [ep_obj] + ep_obj.relatedEps

    # people collected across all related episodes; written out once at the end
    persons_dict = {
        'Director': [],
        'GuestStar': [],
        'Writer': []
    }

    indexer_lang = ep_obj.show.lang

    try:
        lINDEXER_API_PARMS = sickbeard.indexerApi(ep_obj.show.indexer).api_params.copy()

        lINDEXER_API_PARMS['actors'] = True

        lINDEXER_API_PARMS['language'] = indexer_lang or sickbeard.INDEXER_DEFAULT_LANGUAGE

        if ep_obj.show.dvdorder:
            lINDEXER_API_PARMS['dvdorder'] = True

        t = sickbeard.indexerApi(ep_obj.show.indexer).indexer(**lINDEXER_API_PARMS)

        myShow = t[ep_obj.show.indexerid]
    except sickbeard.indexer_shownotfound as e:
        raise ShowNotFoundException(e.message)
    except sickbeard.indexer_error as e:
        logger.log("Unable to connect to " + sickbeard.indexerApi(ep_obj.show.indexer).name +
                   " while creating meta files - skipping - " + ex(e), logger.ERROR)
        return False

    rootNode = etree.Element("Item")

    # write an MediaBrowser XML containing info for all matching episodes
    for curEpToWrite in eps_to_write:

        try:
            myEp = myShow[curEpToWrite.season][curEpToWrite.episode]
        except (sickbeard.indexer_episodenotfound, sickbeard.indexer_seasonnotfound):
            logger.log("Metadata writer is unable to find episode {0:d}x{1:d} of {2} on {3}..."
                       "has it been removed? Should I delete from db?".format(
                           curEpToWrite.season, curEpToWrite.episode, curEpToWrite.show.name,
                           sickbeard.indexerApi(ep_obj.show.indexer).name))
            return None

        if curEpToWrite == ep_obj:
            # root (or single) episode

            # default to today's date for specials if firstaired is not set
            if ep_obj.season == 0 and not getattr(myEp, 'firstaired', None):
                myEp['firstaired'] = str(datetime.date.fromordinal(1))

            if not (getattr(myEp, 'episodename', None) and getattr(myEp, 'firstaired', None)):
                return None

            episode = rootNode

            if curEpToWrite.name:
                EpisodeName = etree.SubElement(episode, "EpisodeName")
                EpisodeName.text = curEpToWrite.name

            EpisodeNumber = etree.SubElement(episode, "EpisodeNumber")
            EpisodeNumber.text = str(ep_obj.episode)

            if ep_obj.relatedEps:
                # end marker updated later by the related-episode branch below
                EpisodeNumberEnd = etree.SubElement(episode, "EpisodeNumberEnd")
                EpisodeNumberEnd.text = str(curEpToWrite.episode)

            SeasonNumber = etree.SubElement(episode, "SeasonNumber")
            SeasonNumber.text = str(curEpToWrite.season)

            if not ep_obj.relatedEps and getattr(myEp, 'absolute_number', None):
                absolute_number = etree.SubElement(episode, "absolute_number")
                absolute_number.text = str(myEp['absolute_number'])

            if curEpToWrite.airdate != datetime.date.fromordinal(1):
                FirstAired = etree.SubElement(episode, "FirstAired")
                FirstAired.text = str(curEpToWrite.airdate)

            MetadataType = etree.SubElement(episode, "Type")
            MetadataType.text = "Episode"

            if curEpToWrite.description:
                Overview = etree.SubElement(episode, "Overview")
                Overview.text = curEpToWrite.description

            if not ep_obj.relatedEps:
                if getattr(myEp, 'rating', None):
                    Rating = etree.SubElement(episode, "Rating")
                    Rating.text = myEp['rating']

                if getattr(myShow, 'imdb_id', None):
                    IMDB_ID = etree.SubElement(episode, "IMDB_ID")
                    IMDB_ID.text = myShow['imdb_id']

                    IMDB = etree.SubElement(episode, "IMDB")
                    IMDB.text = myShow['imdb_id']

                    IMDbId = etree.SubElement(episode, "IMDbId")
                    IMDbId.text = myShow['imdb_id']

            indexerid = etree.SubElement(episode, "id")
            indexerid.text = str(curEpToWrite.indexerid)

            Persons = etree.SubElement(episode, "Persons")

            if getattr(myShow, '_actors', None):
                for actor in myShow['_actors']:
                    if not ('name' in actor and actor['name'].strip()):
                        continue

                    cur_actor = etree.SubElement(Persons, "Person")

                    cur_actor_name = etree.SubElement(cur_actor, "Name")
                    cur_actor_name.text = actor['name'].strip()

                    cur_actor_type = etree.SubElement(cur_actor, "Type")
                    cur_actor_type.text = "Actor"

                    if 'role' in actor and actor['role'].strip():
                        cur_actor_role = etree.SubElement(cur_actor, "Role")
                        cur_actor_role.text = actor['role'].strip()

            Language = etree.SubElement(episode, "Language")
            try:
                Language.text = myEp['language']
            except Exception:
                Language.text = sickbeard.INDEXER_DEFAULT_LANGUAGE  # tvrage api doesn't provide language so we must assume a value here

            thumb = etree.SubElement(episode, "filename")
            # TODO: See what this is needed for.. if its still needed
            # just write this to the NFO regardless of whether it actually exists or not
            # note: renaming files after nfo generation will break this, tough luck
            thumb_text = self.get_episode_thumb_path(ep_obj)
            if thumb_text:
                thumb.text = thumb_text

        else:
            # append data from (if any) related episodes
            EpisodeNumberEnd.text = str(curEpToWrite.episode)

            if curEpToWrite.name:
                if not EpisodeName.text:
                    EpisodeName.text = curEpToWrite.name
                else:
                    EpisodeName.text = EpisodeName.text + ", " + curEpToWrite.name

            if curEpToWrite.description:
                if not Overview.text:
                    Overview.text = curEpToWrite.description
                else:
                    Overview.text = Overview.text + "\r" + curEpToWrite.description

        # collect all directors, guest stars and writers
        if getattr(myEp, 'director', None):
            persons_dict['Director'] += [x.strip() for x in myEp['director'].split('|') if x.strip()]
        if getattr(myEp, 'gueststars', None):
            persons_dict['GuestStar'] += [x.strip() for x in myEp['gueststars'].split('|') if x.strip()]
        if getattr(myEp, 'writer', None):
            persons_dict['Writer'] += [x.strip() for x in myEp['writer'].split('|') if x.strip()]

    # fill in Persons section with collected directors, guest starts and writers
    for person_type, names in six.iteritems(persons_dict):
        # remove doubles
        names = list(set(names))
        for cur_name in names:
            Person = etree.SubElement(Persons, "Person")
            cur_person_name = etree.SubElement(Person, "Name")
            cur_person_name.text = cur_name
            cur_person_type = etree.SubElement(Person, "Type")
            cur_person_type.text = person_type

    # Make it purdy
    helpers.indentXML(rootNode)

    data = etree.ElementTree(rootNode)

    return data