def action_error(e):
    """Classify a sqlite3 error raised during a DB action.

    :param e: the sqlite3 exception instance
    :return: True when the error is transient ('unable to open database file'
        or 'database is locked') -- sleeps 1s first so the caller can retry;
        False otherwise, after logging at ERROR level
    """
    if 'unable to open database file' in e.args[0] or 'database is locked' in e.args[0]:
        logger.log(u'DB error: ' + ex(e), logger.WARNING)
        # give a competing writer a chance to release the db before the caller retries
        time.sleep(1)
        return True

    logger.log(u'DB error: ' + ex(e), logger.ERROR)
    # explicit False (was an implicit None) so the retry contract is symmetrical
    return False
def set_up_anidb_connection():
    """Create and authenticate the global anidb (adba) connection.

    :return: True when an authenticated connection is available, else False
    """
    if not sickbeard.USE_ANIDB:
        logger.log(u'Usage of anidb disabled. Skipping', logger.DEBUG)
        return False

    # NOTE(review): `and` only aborts when BOTH credentials are missing although
    # the message says "and/or" -- possibly intended `or`; confirm before changing
    if not sickbeard.ANIDB_USERNAME and not sickbeard.ANIDB_PASSWORD:
        logger.log(
            u'anidb username and/or password are not set. Aborting anidb lookup.', logger.DEBUG)
        return False

    if not sickbeard.ADBA_CONNECTION:
        # anidb_logger = (lambda x: logger.log('ANIDB: ' + str(x)), logger.DEBUG)
        sickbeard.ADBA_CONNECTION = adba.Connection(
            keepAlive=True)  # , log=anidb_logger)

    # first probe whether the existing (cached) connection is still authenticated
    auth = False
    try:
        auth = sickbeard.ADBA_CONNECTION.authed()
    except (BaseException, Exception) as e:
        logger.log(u'exception msg: ' + ex(e))
        pass

    if not auth:
        try:
            sickbeard.ADBA_CONNECTION.auth(sickbeard.ANIDB_USERNAME, sickbeard.ANIDB_PASSWORD)
        except (BaseException, Exception) as e:
            logger.log(u'exception msg: ' + ex(e))
            return False
    else:
        return True

    return sickbeard.ADBA_CONNECTION.authed()
def _notify(self, title, body, access_token=None, sound=None, **kwargs):
    """
    Sends a boxcar2 notification to the address provided

    title: The title of the message
    body: The message to send
    access_token: To send to this device
    sound: Sound profile to use

    returns: True if the message succeeded, False otherwise
    """
    access_token = self._choose(access_token, sickbeard.BOXCAR2_ACCESSTOKEN)
    sound = self._choose(sound, sickbeard.BOXCAR2_SOUND)

    # build up the URL and parameters
    # more info goes here -
    # https://boxcar.uservoice.com/knowledgebase/articles/306788-how-to-send-your-boxcar-account-a-notification
    # NOTE(review): body becomes bytes here; under py3 the '%s' interpolation below
    # would render as "b'...'" and urlopen below needs bytes for `data` -- verify on py3
    body = body.strip().encode('utf-8')

    data = urlencode({
        'user_credentials': access_token,
        'notification[title]': '%s - %s' % (title, body),
        'notification[long_message]': body,
        'notification[sound]': sound,
        'notification[source_name]': 'SickGear',
        'notification[icon_url]': self._sg_logo_url
    })

    # send the request to boxcar2
    result = None
    try:
        req = urllib.request.Request('https://new.boxcar.io/api/notifications')
        http_response_obj = urllib.request.urlopen(req, data)  # PY2 http_response_obj has no `with` context manager
        http_response_obj.close()
    except urllib.error.HTTPError as e:
        if not hasattr(e, 'code'):
            self._log_error(u'Notification failed: %s' % ex(e))
        else:
            result = 'Notification failed. Error code: %s' % e.code
            self._log_error(result)

            if 503 == e.code:
                result = 'Server too busy to handle the request at this time'
                self._log_warning(result)
            else:
                if 404 == e.code:
                    result = 'Access token is wrong/not associated to a device'
                    self._log_error(result)
                elif 401 == e.code:
                    result = 'Access token not recognized'
                    self._log_error(result)
                elif 400 == e.code:
                    result = 'Wrong data sent to Boxcar'
                    self._log_error(result)
    except urllib.error.URLError as e:
        self._log_error(u'Notification failed: %s' % ex(e))

    # truthy `result` means a failure message was recorded
    return self._choose((True, 'Failed to send notification: %s' % result)[bool(result)], not bool(result))
def fileQuality(filename):
    """Determine video quality from a media file's height metadata (via hachoir).

    :param filename: filename
    :type filename: AnyStr
    :return: a Quality constant (SDTV/HDTV/FULLHDTV) or Quality.UNKNOWN
    :rtype: int
    """
    # noinspection PyPep8Naming
    import encodingKludge as ek
    from exceptions_helper import ex
    from sickbeard import logger

    if ek.ek(os.path.isfile, filename):

        from hachoir.parser import createParser
        from hachoir.metadata import extractMetadata
        from hachoir.stream import InputStreamError

        parser = height = None
        msg = 'Hachoir can\'t parse file "%s" content quality because it found error: %s'
        try:
            parser = ek.ek(createParser, filename)
        except InputStreamError as e:
            logger.log(msg % (filename, ex(e)), logger.WARNING)
        except (BaseException, Exception) as e:
            logger.log(msg % (filename, ex(e)), logger.ERROR)
            logger.log(traceback.format_exc(), logger.ERROR)

        if parser:
            extract = None
            try:
                # skip the slow avi index scan; only the height is needed
                args = ({}, {'scan_index': False})['.avi' == filename[-4::].lower()]
                parser.parse_exif = False
                parser.parse_photoshop_content = False
                parser.parse_comments = False
                extract = extractMetadata(parser, **args)
            except (BaseException, Exception) as e:
                logger.log(msg % (filename, ex(e)), logger.WARNING)

            if extract:
                try:
                    height = extract.get('height')
                except (AttributeError, ValueError):
                    try:
                        for metadata in extract.iterGroups():
                            if re.search('(?i)video', metadata.header):
                                height = metadata.get('height')
                                break
                    except (AttributeError, ValueError):
                        pass

            # noinspection PyProtectedMember
            parser.stream._input.close()

            tolerance = (lambda value, percent: int(round(value - (value * percent / 100.0))))
            # guard: height stays None when extraction fails; comparing None
            # against an int raises TypeError under py3
            if None is not height and height >= tolerance(352, 5):
                if height <= tolerance(720, 2):
                    return Quality.SDTV
                return (Quality.HDTV, Quality.FULLHDTV)[height >= tolerance(1080, 1)]
    return Quality.UNKNOWN
def make_dirs(path, syno=True):
    """
    Creates any folders that are missing and assigns them the permissions of their parents

    :param path: path
    :type path: AnyStr
    :param syno: notify synoindex of each newly created folder
    :type syno: bool
    :return: success
    :rtype: bool
    """
    if not ek.ek(os.path.isdir, path):
        # Windows, create all missing folders
        if os.name in ('nt', 'ce'):
            try:
                logger.debug(u'Path %s doesn\'t exist, creating it' % path)
                ek.ek(os.makedirs, path)
            except (OSError, IOError) as e:
                logger.error(u'Failed creating %s : %s' % (path, ex(e)))
                return False

        # not Windows, create all missing folders and set permissions
        else:
            sofar = ''
            folder_list = path.split(os.path.sep)

            # look through each sub folder and make sure they all exist
            for cur_folder in folder_list:
                sofar += cur_folder + os.path.sep

                # if it exists then just keep walking down the line
                if ek.ek(os.path.isdir, sofar):
                    continue

                try:
                    logger.debug(u'Path %s doesn\'t exist, creating it' % sofar)
                    ek.ek(os.mkdir, sofar)
                    # use normpath to remove end separator, otherwise checks permissions against itself
                    chmod_as_parent(ek.ek(os.path.normpath, sofar))
                    # todo: reenable
                    if syno:
                        # do the library update for synoindex
                        NOTIFIERS.NotifierFactory().get('SYNOINDEX').addFolder(sofar)
                except (OSError, IOError) as e:
                    logger.error(u'Failed creating %s : %s' % (sofar, ex(e)))
                    return False

    return True
def _search_provider(cur_provider, provider_propers, aired_since_shows, recent_shows, recent_anime):
    # type: (GenericProvider, List, datetime.datetime, List[Tuple[int, int]], List[Tuple[int, int]]) -> None
    """Search one provider for proper releases, appending results to provider_propers (mutated in place)."""
    try:
        # we need to extend the referenced list from parameter to update the original var
        provider_propers.extend(cur_provider.find_propers(search_date=aired_since_shows, shows=recent_shows,
                                                          anime=recent_anime))
    except AuthException as e:
        logger.log('Authentication error: %s' % ex(e), logger.ERROR)
    except (BaseException, Exception) as e:
        logger.log('Error while searching %s, skipping: %s' % (cur_provider.name, ex(e)), logger.ERROR)
        logger.log(traceback.format_exc(), logger.ERROR)

    if not provider_propers:
        logger.log('No Proper releases found at [%s]' % cur_provider.name)
def response(self):
    """Fetch fanart json for this request, merged with items scraped from the web page.

    :return: dict keyed by image type, each a list of art items with a
        normalised 'lang' entry
    :raises ResponseFanartError: on any fetch/merge failure
    """
    try:
        response = requests.get(str(self))
        rjson = response.json()
        image_type = self._types or u'showbackground'
        rhtml = self.scrape_web(image_type)
        if not isinstance(rjson, dict) and 0 == len(rhtml[image_type]):
            raise Exception(response.text)

        if not isinstance(rjson, dict):
            rjson = {image_type: []}

        if 0 != len(rhtml[image_type]):
            # materialise the ids: a bare map() is a one-shot iterator under py3
            # and would be exhausted after the first filter membership test
            rjson_ids = list(map(lambda i: i['id'], rjson[image_type]))
            for item in filter(lambda i: i['id'] not in rjson_ids, rhtml[image_type]):
                rjson[image_type] += [item]

        for item in rjson[image_type]:
            item['lang'] = item.get('lang', '').lower()
            if item.get('lang') in ('00', ''):
                # adjust data of no language to a default 'en (default)'
                item['lang'] = u'en (default)'

        return rjson
    except (BaseException, Exception) as e:
        raise ResponseFanartError(ex(e))
def _sendmail(self, host, port, smtp_from, use_tls, user, pwd, to, msg, smtp_debug=False):
    """Deliver an email message over SMTP.

    Optionally negotiates STARTTLS and/or authenticates first. On any failure
    the error text is stored in self.last_err and False is returned.

    :return: True when the message was handed to the server, else False
    """
    use_tls = 1 == sickbeard.helpers.try_int(use_tls)
    login = any(user) and any(pwd)
    self._log_debug(u'Sendmail HOST: %s; PORT: %s; LOGIN: %s, TLS: %s, USER: %s, FROM: %s, TO: %s' % (
        host, port, login, use_tls, user, smtp_from, to))

    try:
        server = smtplib.SMTP(host, int(port))
        if smtp_debug:
            server.set_debuglevel(1)

        if use_tls or login:
            server.ehlo()
            self._log_debug(u'Sent initial EHLO command')

            if use_tls:
                server.starttls()
                server.ehlo()
                self._log_debug(u'Sent STARTTLS and EHLO command')

            if login:
                server.login(user, pwd)
                self._log_debug(u'Sent LOGIN command')

        server.sendmail(smtp_from, to, msg.as_string())
        server.quit()
    except (BaseException, Exception) as e:
        self.last_err = '%s' % ex(e)
        return False

    return True
def get_feed(self, url, **kwargs):
    """Fetch url via the provider and parse it as an RSS feed.

    :param url: feed url
    :return: parsed feedparser result (raw requests response attached as
        'rq_response'), or None on auth/fetch/parse failure
    """
    if self.provider:
        success, err_msg = self.provider.check_auth_cookie()
        if not success:
            return

    response = self.provider.get_url(url, **kwargs)
    if not self.provider.should_skip() and response:
        try:
            data = feedparser.parse(response)
            # keep the underlying HTTP response available to callers
            data['rq_response'] = self.provider.session.response
            if data and 'entries' in data:
                return data

            if data and 'error' in data.feed:
                err_code = data.feed['error']['code']
                err_desc = data.feed['error']['description']
                logger.log(u'RSS error:[%s] code:[%s]' % (err_desc, err_code), logger.DEBUG)
            else:
                logger.log(u'RSS error loading url: ' + url, logger.DEBUG)

        except (BaseException, Exception) as e:
            logger.log(u'RSS error: ' + ex(e), logger.DEBUG)
def compile_word_list(
        lookup_words,  # type: AnyStr
        re_prefix=r'(^|[\W_])',  # type: AnyStr
        re_suffix=r'($|[\W_])'  # type: AnyStr
):  # type: (...) -> List[AnyStr]
    """Compile a comma separated string (or a list) of words into case-insensitive regexes.

    A string prefixed with 'regex:' is treated as raw pattern text; otherwise
    each word is escaped and matched literally. Patterns failing to compile
    are logged at DEBUG and skipped.
    """
    compiled = []
    if lookup_words:
        as_literal = isinstance(lookup_words, list)
        if not as_literal:
            as_literal = not lookup_words.startswith('regex:')
            lookup_words = lookup_words[(6, 0)[as_literal]:].split(',')
        lookup_words = [w.strip() for w in lookup_words]
        for word in filter(lambda w: w, lookup_words):
            try:
                # !0 == regex and subject = s / 'what\'s the "time"' / what\'s\ the\ \"time\"
                if as_literal:
                    subject = re.escape(word)
                else:
                    subject = re.sub(r'([\" \'])', r'\\\1', word)
                compiled.append(re.compile('(?i)%s%s%s' % (re_prefix, subject, re_suffix)))
            except re.error as e:
                logger.log(
                    u'Failure to compile filter expression: %s ... Reason: %s' % (word, ex(e)), logger.DEBUG)

        discarded = len(lookup_words) - len(compiled)
        if discarded:
            logger.log(
                u'From %s expressions, %s was discarded during compilation' % (len(lookup_words), discarded),
                logger.DEBUG)

    return compiled
def push_anidb_mylist(filepath, anidb_episode):
    # type: (AnyStr, Any) -> Tuple[Optional[bool], Optional[Tuple[AnyStr, int]]]
    """Add a file to the anidb mylist.

    :param filepath: file path
    :type filepath: AnyStr
    :param anidb_episode: existing adba.Episode, or None to build one from filepath
    :type anidb_episode:
    :return: (success or None when no connection, (log message, log level) or None)
    """
    result, log = None, None
    if set_up_anidb_connection():
        if not anidb_episode:
            # seems like we could parse the name before, build the anidb object
            # build an anidb episode
            anidb_episode = adba.Episode(
                sickbeard.ADBA_CONNECTION,
                filePath=filepath,
                paramsF=['quality', 'anidb_file_name', 'crc32'],
                paramsA=['epno', 'english_name', 'short_name_list', 'other_name', 'synonym_list'])

        try:
            anidb_episode.add_to_mylist(state=1)  # status = 1 sets the status of the file to "internal HDD"
            log = ('Adding the file to the anidb mylist', logger.DEBUG)
            result = True
        except (BaseException, Exception) as e:
            log = (u'exception msg: %s' % ex(e), logger.MESSAGE)
            result = False

    return result, log
def action(self, query, args=None):
    # type: (AnyStr, Optional[Union[List, Tuple]]) -> Optional[Union[List, sqlite3.Cursor]]
    """Execute a single query, retrying up to 5 times on transient OperationalError.

    :param query: sql query string, or None to do nothing
    :param args: optional bound parameters
    :return: cursor of the executed query, or None when query is None
    """
    with db_lock:

        if None is query:
            return

        sqlResult = None
        attempt = 0

        while 5 > attempt:
            try:
                if None is args:
                    logger.log(self.filename + ': ' + query, logger.DB)
                    sqlResult = self.connection.execute(query)
                else:
                    logger.log(self.filename + ': ' + query + ' with args ' + str(args), logger.DB)
                    sqlResult = self.connection.execute(query, args)
                self.connection.commit()
                # get out of the connection attempt loop since we were successful
                break
            except sqlite3.OperationalError as e:
                # action_error sleeps and returns True only for retryable errors
                if not self.action_error(e):
                    raise
                attempt += 1
            except sqlite3.DatabaseError as e:
                logger.log(u'Fatal error executing query: ' + ex(e), logger.ERROR)
                raise

        return sqlResult
def updateCache(self, **kwargs):
    """Refresh this provider's cache: check auth, fetch new data, store parsed rows.

    :return: [] on authentication error, otherwise None
    """
    try:
        self._checkAuth()
    except AuthException as e:
        logger.log(u'Authentication error: ' + ex(e), logger.ERROR)
        return []

    if self.should_update():
        data = self._cache_data(**kwargs)

        # clear cache
        if data:
            self._clearCache()

        # parse data
        cl = []
        for item in data or []:
            title, url = self._title_and_url(item)
            ci = self._parseItem(title, url)
            if None is not ci:
                cl.append(ci)

        if 0 < len(cl):
            my_db = self.get_db()
            try:
                my_db.mass_action(cl)
            except (BaseException, Exception) as e:
                logger.log('Warning could not save cache value [%s], caught err: %s' % (cl, ex(e)))

        # set updated as time the attempt to fetch data is
        self.setLastUpdate()
def extractMetadata(parser, quality=QUALITY_NORMAL, **kwargs):
    """
    Create a Metadata class from a parser. Returns None
    if no metadata extractor does exist for the parser class.
    """
    try:
        extractor = extractors[parser.__class__]
    except KeyError:
        return None
    metadata = extractor(quality)
    meta_extract_error = True
    try:
        if 'scan_index' in kwargs:
            metadata.extract(parser, scan_index=kwargs['scan_index'])
        else:
            metadata.extract(parser)
        meta_extract_error = False
    except (BaseException, Exception) as err:
        error("Error during metadata extraction: %s" % ex(err))
    if meta_extract_error:
        # extraction failed: close the parser's underlying stream to avoid a leak
        try:
            # noinspection PyProtectedMember
            parser.stream._input.close()
        except (BaseException, Exception):
            pass
        return None
    if metadata:
        metadata.mime_type = parser.mime_type
        metadata.endian = endian_name[parser.endian]
    return metadata
def write_file(
        filepath,  # type: AnyStr
        data,  # type: Union[AnyStr, etree.Element, requests.Response]
        raw=False,  # type: bool
        xmltree=False,  # type: bool
        utf8=False,  # type: bool
        raise_exceptions=False  # type: bool
):  # type: (...) -> bool
    """
    :param filepath: filepath
    :param data: data to write
    :param raw: write binary or text
    :param xmltree: use xml tree
    :param utf8: use UTF8
    :param raise_exceptions: raise exceptions
    :return: success
    """
    result = False

    if make_dirs(ek.ek(os.path.dirname, filepath), False):
        try:
            if raw:
                # data is a streamed response; write it to disk chunk by chunk
                with ek.ek(io.FileIO, filepath, 'wb') as fh:
                    for chunk in data.iter_content(chunk_size=1024):
                        if chunk:
                            fh.write(chunk)
                            fh.flush()
                            ek.ek(os.fsync, fh.fileno())
            else:
                w_mode = 'w'
                if utf8:
                    # write a BOM first, then re-open in append mode for the payload
                    w_mode = 'a'
                    with ek.ek(io.FileIO, filepath, 'wb') as fh:
                        fh.write(codecs.BOM_UTF8)

                if xmltree:
                    with ek.ek(io.FileIO, filepath, w_mode) as fh:
                        if utf8:
                            data.write(fh, encoding='utf-8')
                        else:
                            data.write(fh)
                else:
                    if isinstance(data, text_type):
                        with ek.ek(io.open, filepath, w_mode, encoding='utf-8') as fh:
                            fh.write(data)
                    else:
                        with ek.ek(io.FileIO, filepath, w_mode) as fh:
                            fh.write(data)

            chmod_as_parent(filepath)

            result = True
        except (EnvironmentError, IOError) as e:
            logger.error('Unable to write file %s : %s' % (filepath, ex(e)))
            if raise_exceptions:
                raise e

    return result
def _cmdline_run(self, synoindex_cmd):
    """Execute the given synoindex command line, logging its output or failure."""
    self._log_debug(u'Executing command ' + str(synoindex_cmd))
    self._log_debug(u'Absolute path to command: ' + ek.ek(os.path.abspath, synoindex_cmd[0]))
    try:
        from sickbeard.helpers import cmdline_runner

        out, err, exit_status = cmdline_runner(synoindex_cmd)
        self._log_debug(u'Script result: %s' % out)
    except (BaseException, Exception) as e:
        self._log_error('Unable to run synoindex: %s' % ex(e))
def execute_events(self, event_type, *args, **kwargs):
    # type: (int, Tuple, Dict) -> None
    """Invoke every callback registered for event_type, logging (not raising) failures."""
    for event in self.events.get(event_type, []):
        try:
            event(*args, **kwargs)
        except (BaseException, Exception) as e:
            logger.log('Error executing Event: %s' % ex(e), logger.ERROR)
def pull_anidb_groups(show_name):
    # type: (AnyStr) -> Optional[Union[bool, List]]
    """Return release groups for show_name from anidb, or False on no connection/error.

    :param show_name: show name to look up
    """
    if set_up_anidb_connection():
        try:
            anime = create_anidb_obj(name=show_name)
            return anime.get_groups()
        except (BaseException, Exception) as e:
            logger.log(u'Anidb exception: %s' % ex(e), logger.DEBUG)
    return False
def __init__(self): db.DBConnection.__init__(self, 'cache.db') # Create the table if it's not already there try: if not self.hasTable('lastUpdate'): self.action('CREATE TABLE lastUpdate (provider TEXT, time NUMERIC)') except (BaseException, Exception) as e: if ex(e) != 'table lastUpdate already exists': raise e
def _notify(self, title, body, **kwargs):
    """Show a DSM notification to the administrators group via synodsmnotify."""
    synodsmnotify_cmd = ['/usr/syno/bin/synodsmnotify', '@administrators', title, body]
    self._log(u'Executing command ' + str(synodsmnotify_cmd))
    self._log_debug(u'Absolute path to command: ' + ek.ek(os.path.abspath, synodsmnotify_cmd[0]))
    try:
        from sickbeard.helpers import cmdline_runner

        out, err, exit_status = cmdline_runner(synodsmnotify_cmd)
        self._log_debug(u'Script result: %s' % out)
    except (BaseException, Exception) as e:
        self._log('Unable to run synodsmnotify: %s' % ex(e))
def _send_to_xbmc(self, command, host=None, username=None, password=None):
    """Handles communication to XBMC servers via HTTP API

    Args:
        command: Dictionary of field/data pairs, encoded via urllib and passed to the XBMC API via HTTP
        host: XBMC webserver host:port
        username: XBMC webserver username
        password: XBMC webserver password

    Returns:
        Returns response.result for successful commands or False if there was an error
    """
    if not host:
        self._log_debug(u'No host passed, aborting update')
        return False

    username = self._choose(username, sickbeard.XBMC_USERNAME)
    password = self._choose(password, sickbeard.XBMC_PASSWORD)

    # legacy py2/py3 shim: encode values to utf-8 bytes before urlencoding
    for key in command:
        if not PY2 or type(command[key]) == text_type:
            command[key] = command[key].encode('utf-8')

    enc_command = urlencode(command)
    self._log_debug(u'Encoded API command: ' + enc_command)

    url = 'http://%s/xbmcCmds/xbmcHttp/?%s' % (host, enc_command)
    try:
        req = urllib.request.Request(url)
        # if we have a password, use authentication
        if password:
            req.add_header('Authorization', 'Basic %s' % b64encodestring('%s:%s' % (username, password)))
            self._log_debug(u'Contacting (with auth header) via url: ' + fixStupidEncodings(url))
        else:
            self._log_debug(u'Contacting via url: ' + fixStupidEncodings(url))

        http_response_obj = urllib.request.urlopen(req)  # PY2 http_response_obj has no `with` context manager
        result = decode_str(http_response_obj.read(), sickbeard.SYS_ENCODING)
        http_response_obj.close()

        self._log_debug(u'HTTP response: ' + result.replace('\n', ''))
        return result

    except (urllib.error.URLError, IOError) as e:
        self._log_warning(u'Couldn\'t contact HTTP at %s %s' % (fixStupidEncodings(url), ex(e)))
        return False
def remove_event(self, event_type, method):
    # type: (int, Callable) -> None
    """Unregister method from event_type, dropping the key once its list is empty."""
    if not (isinstance(event_type, integer_types) and callable(method)):
        return
    registered = self.events.get(event_type, [])
    if method not in registered:
        return
    try:
        registered.remove(method)
        if 0 == len(registered):
            del self.events[event_type]
    except (BaseException, Exception) as e:
        logger.log('Error removing event method from queue: %s' % ex(e), logger.ERROR)
def create_show_metadata(self, show_obj, force=False):
    # type: (sickbeard.tv.TVShow, bool) -> bool
    """Write the show metadata file unless one already exists (or force is set).

    :return: True when a file was written
    """
    result = False
    if self.show_metadata and show_obj and (not self._has_show_metadata(show_obj) or force):
        logger.log('Metadata provider %s creating show metadata for %s' % (self.name, show_obj.name), logger.DEBUG)
        try:
            result = self.write_show_file(show_obj)
        except BaseTVinfoError as e:
            logger.log('Unable to find useful show metadata for %s on %s: %s' % (
                self.name, sickbeard.TVInfoAPI(show_obj.tvid).name, ex(e)), logger.WARNING)

    return result
def backup_db(self, target, backup_filename=None):
    # type: (AnyStr, AnyStr) -> Tuple[bool, AnyStr]
    """
    backups the db to target dir + optional filename

    Availability: SQLite 3.6.11 or higher
    New in version 3.7

    :param target: target dir
    :param backup_filename: optional backup filename (default is the source name)
    :return: success, message
    """
    if not db_supports_backup:
        logger.log('this python sqlite3 version doesn\'t support backups', logger.DEBUG)
        return False, 'this python sqlite3 version doesn\'t support backups'

    if not ek.ek(os.path.isdir, target):
        logger.log('Backup target invalid', logger.ERROR)
        return False, 'Backup target invalid'

    target_db = ek.ek(os.path.join, target, (backup_filename, self.filename)[None is backup_filename])
    if ek.ek(os.path.exists, target_db):
        logger.log('Backup target file already exists', logger.ERROR)
        return False, 'Backup target file already exists'

    def progress(status, remaining, total):
        # sqlite3 backup progress callback
        logger.log('Copied %s of %s pages...' % (total - remaining, total), logger.DEBUG)

    backup_con = None

    try:
        # copy into this DB
        backup_con = sqlite3.connect(target_db, 20)
        with backup_con:
            with db_lock:
                self.connection.backup(backup_con, progress=progress)
        logger.log('%s backup successful' % self.filename, logger.DEBUG)
    except sqlite3.Error as error:
        logger.log("Error while taking backup: %s" % ex(error), logger.ERROR)
        return False, 'Backup failed'
    finally:
        if backup_con:
            try:
                backup_con.close()
            except (BaseException, Exception):
                pass

    return True, 'Backup successful'
def create_episode_metadata(self, ep_obj, force=False):
    # type: (sickbeard.tv.TVEpisode, bool) -> bool
    """Write the episode metadata file unless one already exists (or force is set).

    :return: True when a file was written
    """
    result = False
    if self.episode_metadata and ep_obj and (not self.has_episode_metadata(ep_obj) or force):
        logger.log('Metadata provider %s creating episode metadata for %s' % (self.name, ep_obj.pretty_name()),
                   logger.DEBUG)
        try:
            result = self.write_ep_file(ep_obj)
        except BaseTVinfoError as e:
            logger.log('Unable to find useful episode metadata for %s on %s: %s' % (
                self.name, sickbeard.TVInfoAPI(ep_obj.show_obj.tvid).name, ex(e)), logger.WARNING)

    return result
def mass_action(self, querylist, log_transaction=False):
    # type: (List[Union[List[AnyStr], Tuple[AnyStr, List], Tuple[AnyStr]]], bool) -> Optional[Union[List, sqlite3.Cursor]]
    """Execute a list of queries in one transaction, retrying up to 5 times on transient errors.

    Each item is either [sql] or (sql, args).

    :return: list of fetchall() results (one per query), or None when querylist is None
    """
    from . import helpers

    with db_lock:

        if None is querylist:
            return

        sqlResult = []
        attempt = 0

        while 5 > attempt:
            try:
                affected = 0
                for qu in querylist:
                    cursor = self.connection.cursor()
                    if 1 == len(qu):
                        if log_transaction:
                            logger.log(qu[0], logger.DB)
                        sqlResult.append(cursor.execute(qu[0]).fetchall())
                    elif 1 < len(qu):
                        if log_transaction:
                            logger.log(qu[0] + ' with args ' + str(qu[1]), logger.DB)
                        sqlResult.append(cursor.execute(qu[0], qu[1]).fetchall())
                    affected += cursor.rowcount
                self.connection.commit()
                if 0 < affected:
                    logger.log(u'Transaction with %s queries executed affected %i row%s' % (
                        len(querylist), affected, helpers.maybe_plural(affected)), logger.DEBUG)
                return sqlResult
            except sqlite3.OperationalError as e:
                # roll back the partial transaction; retry only when action_error says so
                sqlResult = []
                if self.connection:
                    self.connection.rollback()
                if not self.action_error(e):
                    raise
                attempt += 1
            except sqlite3.DatabaseError as e:
                if self.connection:
                    self.connection.rollback()
                logger.log(u'Fatal error executing query: ' + ex(e), logger.ERROR)
                raise

        return sqlResult
def saveNZB(nzb_name, nzb_string):
    """Persist an nzb payload to disk as '<nzb_name>.nzb'.

    :param nzb_name: nzb name
    :type nzb_name: AnyStr
    :param nzb_string: nzb string
    :type nzb_string: AnyStr
    """
    target = nzb_name + '.nzb'
    try:
        with ek.ek(open, target, 'w') as nzb_fh:
            nzb_fh.write(nzb_string)
    except EnvironmentError as e:
        logger.log(u'Unable to save NZB: ' + ex(e), logger.ERROR)
def _notify(self, title, body, name=None, host=None, password=None, **kwargs):
    """Send a growl notification to each configured host; True when any send succeeds."""
    name = name or title or 'SickGear Notification'

    hosts = [h.strip() for h in self._choose(host, sickbeard.GROWL_HOST).split(',')]
    growl_hosts = []
    # each comma separated entry may be password@host:port
    host_re = re.compile(r'^(?:(?P<password>[^@]+?)@)?(?P<host>[^:]+?)(?::(?P<port>\d+))?$')
    for h in hosts:
        host_parts = host_re.match(h)
        if host_parts:
            host, port, password = host_parts.group('host'), host_parts.group('port'), host_parts.group('password')
            if host:
                # 23053 is the default growl GNTP port
                growl_hosts += [(host, 23053 if not port else int(port), None if not password else password)]

    opts = dict(title=title, name=name, app='SickGear', sticky=None, priority=None, icon=True, debug=False)

    success = False
    for pc in growl_hosts:
        opts['host'] = pc[0]
        opts['port'] = pc[1]
        opts['password'] = pc[2]
        try:
            if self._send_growl_msg(opts, body):
                success = True

        except (BaseException, Exception) as e:
            self._log_warning(u'Unable to send growl to %s:%s - %s' % (opts['host'], opts['port'], ex(e)))

    return success
def _send_to_plex(self, command, host, username=None, password=None):
    """Handles communication to Plex hosts via HTTP API

    Args:
        command: Dictionary of field/data pairs, encoded via urllib and passed to the legacy xbmcCmds HTTP API
        host: Plex host:port
        username: Plex API username
        password: Plex API password

    Returns:
        Returns True for successful commands or False if there was an error
    """
    if not host:
        self._log_error(u'No host specified, check your settings')
        return False

    # legacy py2/py3 shim: encode values to utf-8 bytes before urlencoding
    for key in command:
        if not PY2 or type(command[key]) == text_type:
            command[key] = command[key].encode('utf-8')

    enc_command = urlencode(command)
    self._log_debug(u'Encoded API command: ' + enc_command)

    url = 'http://%s/xbmcCmds/xbmcHttp/?%s' % (host, enc_command)
    try:
        req = urllib.request.Request(url)
        if password:
            req.add_header('Authorization', 'Basic %s' % b64encodestring('%s:%s' % (username, password)))
            self._log_debug(u'Contacting (with auth header) via url: ' + url)
        else:
            self._log_debug(u'Contacting via url: ' + url)

        http_response_obj = urllib.request.urlopen(req)  # PY2 http_response_obj has no `with` context manager
        result = decode_str(http_response_obj.read(), sickbeard.SYS_ENCODING)
        http_response_obj.close()

        self._log_debug(u'HTTP response: ' + result.replace('\n', ''))
        return True

    except (urllib.error.URLError, IOError) as e:
        self._log_warning(u'Couldn\'t contact Plex at ' + fixStupidEncodings(url) + ' ' + ex(e))
        return False
def folders_at_path(path, include_parent=False, include_files=False): """ Returns a list of dictionaries with the folders contained at the given path Give the empty string as the path to list the contents of the root path under Unix this means "/", on Windows this will be a list of drive letters) """ # walk up the tree until we find a valid path while path and not ek.ek(os.path.isdir, path): if path == ek.ek(os.path.dirname, path): path = '' break else: path = ek.ek(os.path.dirname, path) if '' == path: if 'nt' == os.name: entries = [{'currentPath': r'\My Computer'}] for letter in getWinDrives(): letter_path = '%s:\\' % letter entries.append({'name': letter_path, 'path': letter_path}) return entries else: path = '/' # fix up the path and find the parent path = ek.ek(os.path.abspath, ek.ek(os.path.normpath, path)) parent_path = ek.ek(os.path.dirname, path) # if we're at the root then the next step is the meta-node showing our drive letters if 'nt' == os.name and path == parent_path: parent_path = '' try: file_list = get_file_list(path, include_files) except OSError as e: logger.log('Unable to open %s: %r / %s' % (path, e, ex(e)), logger.WARNING) file_list = get_file_list(parent_path, include_files) file_list = sorted( file_list, key=lambda x: ek.ek(os.path.basename, x['name']).lower()) entries = [{'currentPath': path}] if include_parent and path != parent_path: entries.append({'name': '..', 'path': parent_path}) entries.extend(file_list) return entries