def delete_files(self, path, files, force=False):
    """
    Remove files from filesystem.

    :param path: path to process
    :param files: files we want to delete
    :param force: Boolean, force deletion, defaults to false
    """
    if not files:
        return

    if not self.result and force:
        self.log_and_output('Forcing deletion of files, even though last result was not successful.',
                            level=logging.DEBUG)
    elif not self.result:
        return

    # Delete all files that are not needed
    for cur_file in files:
        cur_file_path = os.path.join(path, cur_file)

        if not os.path.isfile(cur_file_path):
            continue  # Prevents an error when a not-wanted file is also an associated file

        self.log_and_output('Deleting file: {cur_file}', level=logging.DEBUG, **{'cur_file': cur_file})

        # Check the read-only attribute first
        file_attribute = os.stat(cur_file_path)[0]
        if not file_attribute & stat.S_IWRITE:
            # File is read-only, so make it writeable
            self.log_and_output('Changing read-only flag for file: {cur_file}',
                                level=logging.DEBUG, **{'cur_file': cur_file})
            try:
                os.chmod(cur_file_path, stat.S_IWRITE)
            except OSError as error:
                self.log_and_output('Cannot change permissions of {cur_file_path}: {error}',
                                    level=logging.DEBUG,
                                    **{'cur_file_path': cur_file_path, 'error': ex(error)})

        try:
            os.remove(cur_file_path)
        except OSError as error:
            self.log_and_output('Unable to delete file {cur_file}: {error}',
                                level=logging.DEBUG,
                                **{'cur_file': cur_file, 'error': ex(error)})

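# A minimal, self-contained sketch of the read-only handling above: check the
# S_IWRITE bit with os.stat, clear it with os.chmod, then os.remove. The helper
# name and the throwaway temp file are illustrative, not part of the codebase.
import os
import stat
import tempfile

def remove_even_if_readonly(file_path):
    """Best-effort delete that first strips a read-only flag, as delete_files does."""
    if not os.stat(file_path).st_mode & stat.S_IWRITE:
        os.chmod(file_path, stat.S_IWRITE)  # make it writeable first
    os.remove(file_path)

fd, demo_path = tempfile.mkstemp()
os.close(fd)
os.chmod(demo_path, stat.S_IREAD)  # simulate a read-only leftover file
remove_even_if_readonly(demo_path)
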
def validate_rss(self):
    """Validate the RSS feed."""
    try:
        add_cookie = self.add_cookies_from_ui()
        if not add_cookie.get('result'):
            return add_cookie

        data = self.cache._get_rss_data()['entries']
        if not data:
            return {'result': False,
                    'message': 'No items found in the RSS feed {0}'.format(self.url)}

        title, url = self._get_title_and_url(data[0])

        if not title:
            return {'result': False,
                    'message': 'Unable to get title from first item'}

        if not url:
            return {'result': False,
                    'message': 'Unable to get torrent url from first item'}

        if url.startswith('magnet:') and re.search(r'urn:btih:([\w]{32,40})', url):
            return {'result': True, 'message': 'RSS feed parsed correctly'}
        else:
            torrent_file = self.session.get_content(url)
            try:
                # `bencodepy` is monkeypatched in `medusa.init`
                BENCODE.decode(torrent_file, allow_extra_data=True)
            except Exception as error:
                self.dump_html(torrent_file)
                return {'result': False,
                        'message': 'Torrent link is not a valid torrent file: {0}'.format(ex(error))}

        return {'result': True, 'message': 'RSS feed parsed correctly'}

    except Exception as error:
        return {'result': False,
                'message': 'Error when trying to load RSS: {0}'.format(ex(error))}

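# A small, standalone sketch of the magnet-link check used above: a magnet URI
# is accepted when it carries a 32-40 character btih info-hash. The sample
# links below are made up for illustration.
import re

def looks_like_valid_magnet(url):
    """Return True when url is a magnet link with a plausible btih info-hash."""
    return bool(url.startswith('magnet:') and re.search(r'urn:btih:([\w]{32,40})', url))

print(looks_like_valid_magnet('magnet:?xt=urn:btih:' + 'a' * 40))  # True
print(looks_like_valid_magnet('http://example.com/file.torrent'))  # False
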
def mass_action(self, querylist=None, logTransaction=False, fetchall=False):
    """
    Execute multiple queries

    :param querylist: list of queries
    :param logTransaction: Boolean to wrap all in one transaction
    :param fetchall: Boolean, when using a select query force returning all results
    :return: list of results
    """
    # Remove falsy values. Materialize a list (not a generator) so a retry
    # after an OperationalError re-runs every query instead of an exhausted iterator.
    querylist = [q for q in querylist or [] if q]

    sql_results = []
    attempt = 0

    with db_locks[self.filename]:
        self._set_row_factory()
        while attempt < 5:
            try:
                for qu in querylist:
                    if len(qu) == 1:
                        if logTransaction:
                            logger.log(qu[0], logger.DEBUG)
                        sql_results.append(self._execute(qu[0], fetchall=fetchall))
                    elif len(qu) > 1:
                        if logTransaction:
                            logger.log(qu[0] + " with args " + str(qu[1]), logger.DEBUG)
                        sql_results.append(self._execute(qu[0], qu[1], fetchall=fetchall))
                self.connection.commit()
                logger.log(u"Transaction with " + str(len(sql_results)) + u" queries executed", logger.DEBUG)

                # finished
                break
            except sqlite3.OperationalError as e:
                sql_results = []
                if self.connection:
                    self.connection.rollback()
                if "unable to open database file" in e.args[0] or "database is locked" in e.args[0]:
                    logger.log(u"DB error: " + ex(e), logger.WARNING)
                    attempt += 1
                    time.sleep(1)
                else:
                    logger.log(u"DB error: " + ex(e), logger.ERROR)
                    raise
            except sqlite3.DatabaseError as e:
                sql_results = []
                if self.connection:
                    self.connection.rollback()
                logger.log(u"Fatal error executing query: " + ex(e), logger.ERROR)
                raise
            # time.sleep(0.02)

        return sql_results

def delete_folder(folder, check_empty=True):
    """
    Remove a folder from the filesystem.

    :param folder: Path to folder to remove
    :param check_empty: Boolean, check if the folder is empty before removing it, defaults to True
    :return: True on success, False on failure
    """
    # check if it's a folder
    if not folder or not os.path.isdir(folder):
        return False

    # check if it's a protected folder
    if helpers.real_path(folder) in (helpers.real_path(app.TV_DOWNLOAD_DIR),
                                     helpers.real_path(app.DEFAULT_CLIENT_PATH),
                                     helpers.real_path(app.TORRENT_PATH)):
        return False

    # check if the folder is empty, when requested
    if check_empty:
        check_files = os.listdir(folder)
        if check_files:
            log.info('Not deleting folder {folder}, found the following files: {check_files}',
                     {'folder': folder, 'check_files': check_files})
            return False

        try:
            log.info("Deleting folder (if it's empty): {folder}", {'folder': folder})
            os.rmdir(folder)
        except (OSError, IOError) as error:
            log.warning('Unable to delete folder: {folder}: {error}',
                        {'folder': folder, 'error': ex(error)})
            return False
    else:
        try:
            log.info('Deleting folder: {folder}', {'folder': folder})
            shutil.rmtree(folder)
        except (OSError, IOError) as error:
            log.warning('Unable to delete folder: {folder}: {error}',
                        {'folder': folder, 'error': ex(error)})
            return False

    return True

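# A runnable sketch of the two deletion modes above: os.rmdir only removes an
# empty directory, while shutil.rmtree removes a tree recursively. The temp
# directories exist only for this demonstration.
import os
import shutil
import tempfile

empty_dir = tempfile.mkdtemp()
os.rmdir(empty_dir)  # succeeds: the directory is empty

full_dir = tempfile.mkdtemp()
open(os.path.join(full_dir, 'leftover.txt'), 'w').close()
try:
    os.rmdir(full_dir)  # fails: directory not empty
except OSError:
    shutil.rmtree(full_dir)  # recursive delete, the check_empty=False path
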
def action(self, query, args=None, fetchall=False, fetchone=False):
    """
    Execute single query

    :param query: Query string
    :param args: Arguments to query string
    :param fetchall: Boolean to indicate all results must be fetched
    :param fetchone: Boolean to indicate one result must be fetched (to walk results for instance)
    :return: query results
    """
    if query is None:
        return

    sql_results = None
    attempt = 0

    with db_locks[self.filename]:
        self._set_row_factory()
        while attempt < 5:
            try:
                if args is None:
                    logger.log(self.filename + ': ' + query, logger.DB)
                else:
                    logger.log(self.filename + ': ' + query + ' with args ' + str(args), logger.DB)

                sql_results = self._execute(query, args, fetchall=fetchall, fetchone=fetchone)
                self.connection.commit()

                # get out of the connection attempt loop since we were successful
                break
            except sqlite3.OperationalError as e:
                if 'unable to open database file' in e.args[0] or 'database is locked' in e.args[0]:
                    logger.log(u'DB error: ' + ex(e), logger.WARNING)
                    attempt += 1
                    time.sleep(1)
                else:
                    logger.log(u'DB error: ' + ex(e), logger.ERROR)
                    raise
            except sqlite3.DatabaseError as e:
                logger.log(u'Fatal error executing query: ' + ex(e), logger.ERROR)
                raise
            # time.sleep(0.02)

        return sql_results

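# A self-contained sketch of the retry loop above: retry only the transient
# sqlite3 OperationalErrors ("database is locked" / "unable to open database
# file") a few times, re-raise everything else. Uses an in-memory database and
# an invented table name for the demo.
import sqlite3
import time

def execute_with_retry(connection, query, args=(), retries=5, delay=1):
    """Run one query, retrying only the transient locking errors."""
    for attempt in range(retries):
        try:
            cursor = connection.execute(query, args)
            connection.commit()
            return cursor.fetchall()
        except sqlite3.OperationalError as error:
            transient = ('database is locked' in error.args[0]
                         or 'unable to open database file' in error.args[0])
            if not transient or attempt == retries - 1:
                raise
            time.sleep(delay)

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE tv_episodes (showid INTEGER, status INTEGER)')
execute_with_retry(conn, 'INSERT INTO tv_episodes VALUES (?, ?)', (1, 4))
print(execute_with_retry(conn, 'SELECT * FROM tv_episodes'))
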
def mass_action(self, querylist=None, logTransaction=False, fetchall=False):
    """
    Execute multiple queries

    :param querylist: list of queries
    :param logTransaction: Boolean to wrap all in one transaction
    :param fetchall: Boolean, when using a select query force returning all results
    :return: list of results
    """
    # Remove falsy values. Materialize a list (not a generator) so a retry
    # after an OperationalError re-runs every query instead of an exhausted iterator.
    querylist = [q for q in querylist or [] if q]

    sql_results = []
    attempt = 0

    with db_locks[self.filename]:
        self._set_row_factory()
        while attempt < 5:
            try:
                for qu in querylist:
                    if len(qu) == 1:
                        if logTransaction:
                            logger.log(qu[0], logger.DEBUG)
                        sql_results.append(self._execute(qu[0], fetchall=fetchall))
                    elif len(qu) > 1:
                        if logTransaction:
                            logger.log(qu[0] + ' with args ' + str(qu[1]), logger.DEBUG)
                        sql_results.append(self._execute(qu[0], qu[1], fetchall=fetchall))
                self.connection.commit()
                logger.log(u'Transaction with ' + str(len(sql_results)) + u' queries executed', logger.DEBUG)

                # finished
                break
            except sqlite3.OperationalError as e:
                sql_results = []
                self._try_rollback()
                if 'unable to open database file' in e.args[0] or 'database is locked' in e.args[0]:
                    logger.log(u'DB error: ' + ex(e), logger.WARNING)
                    attempt += 1
                    time.sleep(1)
                else:
                    logger.log(u'DB error: ' + ex(e), logger.ERROR)
                    raise
            except sqlite3.DatabaseError as e:
                sql_results = []
                self._try_rollback()
                logger.log(u'Fatal error executing query: ' + ex(e), logger.ERROR)
                raise
            # time.sleep(0.02)

        return sql_results

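# An illustrative querylist for mass_action: each entry is either a 1-tuple
# (bare SQL) or a 2-tuple (SQL plus bound args). The table and column names
# here are hypothetical, not the real schema; plain sqlite3 stands in for the
# DBConnection wrapper so the sketch runs on its own.
import sqlite3

querylist = [
    ('CREATE TABLE IF NOT EXISTS history (showid INTEGER, action INTEGER)',),
    ('INSERT INTO history VALUES (?, ?)', (42, 1)),
    ('SELECT * FROM history WHERE showid = ?', (42,)),
]

conn = sqlite3.connect(':memory:')
for qu in querylist:
    conn.execute(*qu)  # unpack 1-tuples and 2-tuples alike
print(conn.execute('SELECT COUNT(*) FROM history').fetchone())  # (1,)
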
def process_failed(self, path):
    """Process a download that did not complete correctly."""
    if app.USE_FAILED_DOWNLOADS:
        try:
            processor = failed_processor.FailedProcessor(path, self.resource_name)
            self.result = processor.process()
            process_fail_message = ''
        except FailedPostProcessingFailedException as error:
            processor = None
            self.result = False
            process_fail_message = ex(error)

        if processor:
            self._output.append(processor.log)

        if app.DELETE_FAILED and self.result:
            if self.delete_folder(path, check_empty=False):
                self._log('Deleted folder: {0}'.format(path), logger.DEBUG)

        if self.result:
            self._log('Failed Download Processing succeeded: {0}, {1}'.format(self.resource_name, path))
        else:
            self._log('Failed Download Processing failed: {0}, {1}: {2}'.format(
                self.resource_name, path, process_fail_message), logger.WARNING)

def run_subs_scripts(video_path, scripts, *args):
    """Execute subtitle scripts.

    :param video_path: the video path
    :type video_path: str
    :param scripts: the script commands to be executed
    :type scripts: list of str
    :param args: the arguments to be passed to the script
    :type args: list of str
    """
    for script_name in scripts:
        script_cmd = [piece for piece in re.split("( |\\\".*?\\\"|'.*?')", script_name) if piece.strip()]
        script_cmd.extend(str(arg) for arg in args)

        logger.info(u'Running subtitle %s-script: %s', 'extra' if len(args) > 1 else 'pre', script_name)

        # use subprocess to run the command and capture output
        logger.info(u'Executing command: %s', script_cmd)
        try:
            process = subprocess.Popen(script_cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                                       stderr=subprocess.STDOUT, cwd=app.PROG_DIR)
            out, _ = process.communicate()  # @UnusedVariable
            logger.debug(u'Script result: %s', out)
        except Exception as error:
            logger.info(u'Unable to run subtitles script: %s', ex(error))

    invalidate_video_cache(video_path)

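# A standalone sketch of the command tokenization above: re.split with a
# capturing group keeps quoted chunks together, so a script path with spaces
# stays one argv element. The sample command line is invented.
import re

script_name = 'python "/opt/my scripts/subs.py" --verbose'
script_cmd = [piece for piece in re.split("( |\\\".*?\\\"|'.*?')", script_name) if piece.strip()]
print(script_cmd)  # ['python', '"/opt/my scripts/subs.py"', '--verbose']
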
def delete_unwanted_subtitles(dirpath, filename):
    """Delete unwanted subtitles for the given filename in the specified dirpath.

    :param dirpath: the directory path to be used
    :type dirpath: str
    :param filename: the subtitle filename
    :type filename: str
    """
    if not app.SUBTITLES_MULTI or not app.SUBTITLES_KEEP_ONLY_WANTED or \
            filename.rpartition('.')[2] not in subtitle_extensions:
        return

    code = filename.rsplit('.', 2)[1].lower().replace('_', '-')
    language = from_code(code, unknown='') or from_ietf_code(code)

    found_language = None
    try:
        found_language = language.opensubtitles
    except LanguageConvertError:
        logger.info(u"Unable to convert language code '%s' for: %s", code, filename)

    if found_language and found_language not in app.SUBTITLES_LANGUAGES:
        try:
            os.remove(os.path.join(dirpath, filename))
        except OSError as error:
            logger.info(u"Couldn't delete subtitle: %s. Error: %s", filename, ex(error))
        else:
            logger.debug(u"Deleted '%s' because we don't want subtitle language '%s'. "
                         u"We only want '%s' language(s)",
                         filename, language, ','.join(app.SUBTITLES_LANGUAGES))

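# A runnable sketch of the language-code extraction above: the token between
# the last two dots of a multi-language subtitle filename is normalized
# (lowercase, '_' -> '-') before being looked up. Filenames are examples only.
for filename in ('Show.S01E01.pt-BR.srt', 'Show.S01E01.en_US.srt'):
    code = filename.rsplit('.', 2)[1].lower().replace('_', '-')
    print(filename, '->', code)  # pt-br, en-us
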
def __init__(self, filename=None, suffix=None, row_type='dict'):
    self.filename = filename or app.APPLICATION_DB
    self.suffix = suffix
    self.row_type = row_type

    try:
        if self.filename not in db_cons or not db_cons[self.filename]:
            db_locks[self.filename] = threading.Lock()

            self.connection = sqlite3.connect(self.path, 20, check_same_thread=False)
            self.connection.text_factory = DBConnection._unicode_text_factory

            db_cons[self.filename] = self.connection
        else:
            self.connection = db_cons[self.filename]

        # start off row factory configured as before out of paranoia, but wait
        # to do so until other potential users of the shared connection are
        # done using it... technically not required as row factory is reset in
        # all the public methods after the lock has been acquired
        with db_locks[self.filename]:
            self._set_row_factory()

    except sqlite3.OperationalError:
        logger.log(u'Please check your database owner/permissions: {0}'.format(self.path),
                   logger.WARNING)
    except Exception as e:
        logger.log(u'DB error: ' + ex(e), logger.ERROR)
        raise

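# A minimal, standalone sketch of the connection sharing above: one sqlite3
# connection per database file, created with check_same_thread=False and
# guarded by a per-file threading.Lock so multiple threads can reuse it. The
# helper name is invented; the caches mirror db_cons/db_locks.
import sqlite3
import threading

db_cons = {}
db_locks = {}

def get_connection(path):
    """Return the shared connection for path, creating it on first use."""
    if path not in db_cons:
        db_locks[path] = threading.Lock()
        db_cons[path] = sqlite3.connect(path, 20, check_same_thread=False)
    return db_cons[path]

conn = get_connection(':memory:')
with db_locks[':memory:']:
    print(conn.execute('SELECT 1').fetchone())
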
def process_failed(self, path):
    """Process a download that did not complete correctly."""
    if app.USE_FAILED_DOWNLOADS:
        try:
            processor = failed_processor.FailedProcessor(path, self.resource_name)
            self.result = processor.process()
            process_fail_message = ''
        except FailedPostProcessingFailedException as error:
            processor = None
            self.result = False
            process_fail_message = ex(error)

        if processor:
            self._output.append(processor.output)

        if app.DELETE_FAILED and self.result:
            if self.delete_folder(path, check_empty=False):
                self.log('Deleted folder: {0}'.format(path), logger.DEBUG)

        if self.result:
            self.log('Failed Download Processing succeeded: {0}, {1}'.format(self.resource_name, path))
        else:
            self.log('Failed Download Processing failed: {0}, {1}: {2}'.format(
                self.resource_name, path, process_fail_message), logger.WARNING)

def run_subs_scripts(video_path, scripts, *args):
    """Execute subtitle scripts.

    :param video_path: the video path
    :type video_path: str
    :param scripts: the script commands to be executed
    :type scripts: list of str
    :param args: the arguments to be passed to the script
    :type args: list of str
    """
    for script_path in scripts:
        if not os.path.isfile(script_path):
            logger.warning(u'Subtitle script {0} is not a file.'.format(script_path))
            continue

        if not script_path.endswith('.py'):
            logger.warning(u'Subtitle script {0} is not a Python file.'.format(script_path))
            continue

        logger.info(u'Running subtitle %s-script: %s', 'extra' if len(args) > 1 else 'pre', script_path)
        script_cmd = [sys.executable, script_path] + [str(arg) for arg in args]

        # use subprocess to run the command and capture output
        logger.info(u'Executing command: %s', script_cmd)
        try:
            process = subprocess.Popen(script_cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                                       stderr=subprocess.STDOUT, cwd=app.PROG_DIR)
            out, _ = process.communicate()  # @UnusedVariable
            logger.debug(u'Script result: %s', out)
        except Exception as error:
            logger.info(u'Unable to run subtitles script: %r', ex(error))

    invalidate_video_cache(video_path)

def process_media(self, path, video_files, force=False, is_priority=None, ignore_subs=False):
    """
    Postprocess media files.

    :param path: Path to postprocess in
    :param video_files: Filenames to look for and postprocess
    :param force: Postprocess currently postprocessing file
    :param is_priority: Boolean, is this a priority download
    :param ignore_subs: True to ignore setting 'postpone if no subs'
    """
    self.postpone_processing = False

    for video in video_files:
        file_path = os.path.join(path, video)

        if not force and self.already_postprocessed(video):
            self.log_and_output('Skipping already processed file: {video}',
                                level=logging.DEBUG, **{'video': video})
            continue

        try:
            processor = post_processor.PostProcessor(file_path, self.resource_name,
                                                     self.process_method, is_priority)

            if app.POSTPONE_IF_NO_SUBS:
                if not self._process_postponed(processor, file_path, video, ignore_subs):
                    continue

            self.result = processor.process()
            process_fail_message = ''
        except EpisodePostProcessingFailedException as error:
            processor = None
            self.result = False
            process_fail_message = ex(error)

        if processor:
            self._output.append(processor.output)

        if self.result:
            self.log_and_output('Processing succeeded for {file_path}', **{'file_path': file_path})
        else:
            self.log_and_output('Processing failed for {file_path}: {process_fail_message}',
                                level=logging.WARNING,
                                **{'file_path': file_path, 'process_fail_message': process_fail_message})
            self.missedfiles.append('{0}: Processing failed: {1}'.format(file_path, process_fail_message))
            self.succeeded = False

def process_failed(self, path, resource_name=None):
    """Process a download that did not complete correctly."""
    try:
        processor = failed_processor.FailedProcessor(path, resource_name or self.resource_name, self.episodes)
        self.result = processor.process()
        process_fail_message = ''
    except FailedPostProcessingFailedException as error:
        processor = None
        self.result = False
        process_fail_message = ex(error)

    if processor:
        self._output.append(processor.output)

    if app.DELETE_FAILED and self.result:
        if self.delete_folder(path, check_empty=False):
            self.log_and_output('Deleted folder: {path}', level=logging.DEBUG, **{'path': path})

    if self.result:
        self.log_and_output('Failed Download Processing succeeded: {resource}, {path}',
                            **{'resource': self.resource_name, 'path': path})
    else:
        self.log_and_output('Failed Download Processing failed: {resource}, {path}: {process_fail_message}',
                            level=logging.WARNING,
                            **{'resource': self.resource_name, 'path': path,
                               'process_fail_message': process_fail_message})

def getFeed(url, params=None, request_hook=None):
    try:
        response = request_hook(url, params=params, timeout=30)
        if not response:
            raise Exception

        feed = parse(response.text, response_headers={'content-type': 'application/xml'})
        if feed:
            if 'entries' in feed:
                return feed
            elif 'error' in feed.feed:
                err_code = feed.feed['error']['code']
                err_desc = feed.feed['error']['description']
                log.debug(u'RSS ERROR:[{error}] CODE:[{code}]',
                          {'error': err_desc, 'code': err_code})
        else:
            log.debug(u'RSS error loading data: {}', url)

    except Exception as e:
        log.debug(u'RSS error: {}', ex(e))

    return {'entries': []}

def delete_folder(folder, check_empty=True):
    """
    Remove a folder from the filesystem.

    :param folder: Path to folder to remove
    :param check_empty: Boolean, check if the folder is empty before removing it, defaults to True
    :return: True on success, False on failure
    """
    # check if it's a folder
    if not os.path.isdir(folder):
        return False

    # check if it isn't TV_DOWNLOAD_DIR
    if app.TV_DOWNLOAD_DIR:
        if helpers.real_path(folder) == helpers.real_path(app.TV_DOWNLOAD_DIR):
            return False

    # check if the folder is empty, when requested
    if check_empty:
        check_files = os.listdir(folder)
        if check_files:
            logger.log('Not deleting folder {0}, found the following files: {1}'.format(folder, check_files),
                       logger.INFO)
            return False

        try:
            logger.log("Deleting folder (if it's empty): {0}".format(folder))
            os.rmdir(folder)
        except (OSError, IOError) as error:
            logger.log('Unable to delete folder: {0}: {1}'.format(folder, ex(error)), logger.WARNING)
            return False
    else:
        try:
            logger.log('Deleting folder: {0}'.format(folder))
            shutil.rmtree(folder)
        except (OSError, IOError) as error:
            logger.log('Unable to delete folder: {0}: {1}'.format(folder, ex(error)), logger.WARNING)
            return False

    return True

def write_ep_file(self, ep_obj):
    """
    Generates and writes ep_obj's metadata under the given path with the
    given filename root. Uses the episode's name with the extension in
    _ep_nfo_extension.

    ep_obj: Episode object for which to create the metadata

    file_name_path: The file name to use for this metadata. Note that the extension
            will be automatically added based on _ep_nfo_extension. This should
            include an absolute path.

    Note that this method expects that _ep_data will return an ElementTree
    object. If your _ep_data returns data in another format you'll need to
    override this method.
    """
    data = self._ep_data(ep_obj)

    if not data:
        return False

    nfo_file_path = self.get_episode_file_path(ep_obj)
    nfo_file_dir = os.path.dirname(nfo_file_path)

    if not (nfo_file_path and nfo_file_dir):
        log.debug(u'Unable to write episode nfo file because episode location is missing.')
        return False

    try:
        if not os.path.isdir(nfo_file_dir):
            log.debug('Metadata directory did not exist, creating it at {location}',
                      {'location': nfo_file_dir})
            os.makedirs(nfo_file_dir)
            helpers.chmod_as_parent(nfo_file_dir)

        log.debug('Writing episode nfo file to {location}', {'location': nfo_file_path})

        with io.open(nfo_file_path, 'wb') as nfo_file:
            # Write with an explicit encoding, b/c descriptions often contain
            # non-ASCII characters.
            data.write(nfo_file, encoding='utf-8', xml_declaration=True)

        helpers.chmod_as_parent(nfo_file_path)
    except IOError as e:
        log.error('Unable to write file to {location} - are you sure the folder is writable? {error}',
                  {'location': nfo_file_path, 'error': ex(e)})
        return False

    return True

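# A standalone sketch of the nfo write above: build a small ElementTree and
# write it to a binary file with an explicit encoding and XML declaration.
# The element names are invented for the demo, not the real nfo schema.
import tempfile
import xml.etree.ElementTree as etree

root = etree.Element('episodedetails')
etree.SubElement(root, 'title').text = 'Pilot'
tree = etree.ElementTree(root)

with tempfile.NamedTemporaryFile(suffix='.nfo', delete=False) as nfo_file:
    tree.write(nfo_file, encoding='utf-8', xml_declaration=True)
    nfo_path = nfo_file.name
print(open(nfo_path, 'rb').read())
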
def update_library(self, show=None):
    """
    Update the Emby Media Server host via HTTP API.

    :return: True for no issue or False if there was an error
    """
    if app.USE_EMBY:
        if not app.EMBY_HOST:
            log.debug('EMBY: No host specified, check your settings')
            return False

        if show:
            # EMBY only supports TVDB ids
            provider = 'tvdbid'
            if show.indexer == INDEXER_TVDBV2:
                tvdb_id = show.indexerid
            else:
                # Try using external ids to get a TVDB id
                tvdb_id = show.externals.get(mappings[INDEXER_TVDBV2], None)

            if tvdb_id is None:
                if show.indexer == INDEXER_TVRAGE:
                    log.warning('EMBY: TVRage indexer no longer valid')
                else:
                    log.warning(
                        'EMBY: Unable to find a TVDB ID for {series},'
                        ' and {indexer} indexer is unsupported',
                        {'series': show.name, 'indexer': indexer_id_to_name(show.indexer)}
                    )
                return False

            params = {
                provider: text_type(tvdb_id)
            }
        else:
            params = {}

        url = 'http://{host}/emby/Library/Series/Updated'.format(host=app.EMBY_HOST)
        try:
            resp = self.session.post(
                url=url,
                params=params,
                headers={'X-MediaBrowser-Token': app.EMBY_APIKEY}
            )
            resp.raise_for_status()

            if resp.content:
                # Use resp.text: resp.content is bytes and str.replace would fail on it
                log.debug('EMBY: HTTP response: {0}', resp.text.replace('\n', ''))

            log.info('EMBY: Successfully sent a "Series Library Updated" command.')
            return True
        except (HTTPError, RequestException) as error:
            log.warning('EMBY: Warning: Unable to contact Emby at {url}: {error}',
                        {'url': url, 'error': ex(error)})
            return False

def update_library(self, show=None):
    """
    Update the Emby Media Server host via HTTP API.

    :return: True for no issue or False if there was an error
    """
    if app.USE_EMBY:
        if not app.EMBY_HOST:
            log.debug('EMBY: No host specified, check your settings')
            return False

        if show:
            # EMBY only supports TVDB ids
            provider = 'tvdbid'
            if show.indexer == INDEXER_TVDBV2:
                tvdb_id = show.indexerid
            else:
                # Try using external ids to get a TVDB id
                tvdb_id = show.externals.get(mappings[INDEXER_TVDBV2], None)

            if tvdb_id is None:
                if show.indexer == INDEXER_TVRAGE:
                    log.warning('EMBY: TVRage indexer no longer valid')
                else:
                    log.warning(
                        'EMBY: Unable to find a TVDB ID for {series},'
                        ' and {indexer} indexer is unsupported',
                        {'series': show.name, 'indexer': indexer_id_to_name(show.indexer)}
                    )
                return False

            params = {
                provider: str(tvdb_id)
            }
        else:
            params = {}

        url = 'http://{host}/emby/Library/Series/Updated'.format(host=app.EMBY_HOST)
        try:
            resp = self.session.post(
                url=url,
                params=params,
                headers={'X-MediaBrowser-Token': app.EMBY_APIKEY}
            )
            resp.raise_for_status()

            if resp.content:
                # Use resp.text: resp.content is bytes and str.replace would fail on it
                log.debug('EMBY: HTTP response: {0}', resp.text.replace('\n', ''))

            log.info('EMBY: Successfully sent a "Series Library Updated" command.')
            return True
        except (HTTPError, RequestException) as error:
            log.warning('EMBY: Warning: Unable to contact Emby at {url}: {error}',
                        {'url': url, 'error': ex(error)})
            return False

def _sendGrowl(self, title='Medusa Notification', message=None, name=None, host=None,
               password=None, force=False):
    if not app.USE_GROWL and not force:
        return False

    if name is None:
        name = title

    if host is None:
        hostParts = app.GROWL_HOST.split(':')
    else:
        hostParts = host.split(':')

    if len(hostParts) != 2 or hostParts[1] == '':
        port = 23053
    else:
        port = int(hostParts[1])

    growlHosts = [(hostParts[0], port)]

    opts = {
        'name': name,
        'title': title,
        'app': 'Medusa',
        'sticky': None,
        'priority': None,
        'debug': False
    }

    if password is None:
        opts['password'] = app.GROWL_PASSWORD
    else:
        opts['password'] = password

    opts['icon'] = True

    for pc in growlHosts:
        opts['host'] = pc[0]
        opts['port'] = pc[1]
        log.debug(
            u'GROWL: Sending growl to {host}:{port} - {msg!r}',
            {'msg': message, 'host': opts['host'], 'port': opts['port']}
        )
        try:
            if self._send_growl(opts, message):
                return True
            else:
                if self._sendRegistration(host, password):
                    return self._send_growl(opts, message)
                else:
                    return False
        except Exception as error:
            log.warning(
                u'GROWL: Unable to send growl to {host}:{port} - {msg!r}',
                {'msg': ex(error), 'host': opts['host'], 'port': opts['port']}
            )
            return False

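# A runnable sketch of the host parsing above: split an optional ':port'
# suffix off a Growl host string, falling back to the default GNTP port 23053.
# The helper name and sample hosts are illustrative.
def split_growl_host(host, default_port=23053):
    """Return (hostname, port) from 'host' or 'host:port'."""
    host_parts = host.split(':')
    if len(host_parts) != 2 or host_parts[1] == '':
        return host_parts[0], default_port
    return host_parts[0], int(host_parts[1])

print(split_growl_host('growl.local'))        # ('growl.local', 23053)
print(split_growl_host('growl.local:12345'))  # ('growl.local', 12345)
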
def _send_synologyNotifier(self, title, message):
    synodsmnotify_cmd = ['/usr/syno/bin/synodsmnotify', '@administrators', title, message]
    log.info(u'Executing command {0}', synodsmnotify_cmd)
    log.debug(u'Absolute path to command: {0}', os.path.abspath(synodsmnotify_cmd[0]))
    try:
        p = subprocess.Popen(synodsmnotify_cmd, stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT, cwd=app.PROG_DIR)
        out, _ = p.communicate()
        log.debug(u'Script result: {0}', out)
    except OSError as e:
        log.info(u'Unable to run synodsmnotify: {0}', ex(e))

def write_ep_file(self, ep_obj):
    """
    Generates and writes ep_obj's metadata under the given path with the
    given filename root. Uses the episode's name with the extension in
    _ep_nfo_extension.

    ep_obj: Episode object for which to create the metadata

    file_name_path: The file name to use for this metadata. Note that the extension
            will be automatically added based on _ep_nfo_extension. This should
            include an absolute path.
    """
    data = self._ep_data(ep_obj)

    if not data:
        return False

    nfo_file_path = self.get_episode_file_path(ep_obj)
    nfo_file_dir = os.path.dirname(nfo_file_path)

    if not (nfo_file_path and nfo_file_dir):
        log.debug(u'Unable to write episode nfo file because episode location is missing.')
        return False

    try:
        if not os.path.isdir(nfo_file_dir):
            log.debug(u'Metadata directory missing, creating it at {location}',
                      {'location': nfo_file_dir})
            os.makedirs(nfo_file_dir)
            helpers.chmod_as_parent(nfo_file_dir)

        log.debug(u'Writing episode nfo file to {location}', {'location': nfo_file_path})

        with io.open(nfo_file_path, 'wb') as nfo_file:
            # Calling encode directly, b/c often descriptions have wonky characters.
            nfo_file.write(data.encode('utf-8'))

        helpers.chmod_as_parent(nfo_file_path)
    except EnvironmentError as e:
        log.error(u'Unable to write file to {path} - are you sure the folder is writable? {error}',
                  {'path': nfo_file_path, 'error': ex(e)})
        return False

    return True

def is_rar_supported():
    """Check rar unpacking support."""
    try:
        rarfile.custom_check([rarfile.UNRAR_TOOL], True)
    except rarfile.RarExecError:
        logger.log('UNRAR tool not available.', logger.WARNING)
        return False
    except Exception as msg:
        logger.log('Rar Not Supported: {error}'.format(error=ex(msg)), logger.ERROR)
        return False
    return True

def makeObject(self, cmd_arg, cur_path):
    if app.USE_SYNOINDEX:
        synoindex_cmd = ['/usr/syno/bin/synoindex', cmd_arg, os.path.abspath(cur_path)]
        log.debug(u'Executing command {0}', synoindex_cmd)
        log.debug(u'Absolute path to command: {0}', os.path.abspath(synoindex_cmd[0]))
        try:
            p = subprocess.Popen(synoindex_cmd, stdout=subprocess.PIPE,
                                 stderr=subprocess.STDOUT, cwd=app.PROG_DIR)
            out, _ = p.communicate()
            log.debug(u'Script result: {0}', out)
        except OSError as e:
            log.error(u'Unable to run synoindex: {0}', ex(e))

def save_nzb(nzb_name, nzb_string):
    """
    Save NZB to disk.

    :param nzb_name: Filename/path to write to
    :param nzb_string: Content to write in file
    """
    try:
        with open(nzb_name + '.nzb', 'w') as nzb_fh:
            nzb_fh.write(nzb_string)
    except EnvironmentError as error:
        log.error(u'Unable to save NZB: {0}', ex(error))

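# A runnable usage sketch for save_nzb-style writing: the '.nzb' suffix is
# appended to the target name before writing the payload. Paths are temporary
# and the content is a placeholder rather than a real NZB document.
import os
import tempfile

nzb_name = os.path.join(tempfile.mkdtemp(), 'Some.Show.S01E01')
with open(nzb_name + '.nzb', 'w') as nzb_fh:
    nzb_fh.write('<?xml version="1.0" encoding="UTF-8"?><nzb/>')
print(os.path.exists(nzb_name + '.nzb'))  # True
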
def save_nzb(nzb_name, nzb_string):
    """
    Save NZB to disk.

    :param nzb_name: Filename/path to write to
    :param nzb_string: Content to write in file
    """
    try:
        with open(nzb_name + '.nzb', 'w') as nzb_fh:
            nzb_fh.write(nzb_string)
    except EnvironmentError as error:
        logger.log(u'Unable to save NZB: ' + ex(error), logger.ERROR)  # pylint: disable=no-member

def _sendRegistration(self, host=None, password=None):
    opts = {}

    if host is None:
        hostParts = app.GROWL_HOST.split(':')
    else:
        hostParts = host.split(':')

    if len(hostParts) != 2 or hostParts[1] == '':
        port = 23053
    else:
        port = int(hostParts[1])

    opts['host'] = hostParts[0]
    opts['port'] = port

    if password is None:
        opts['password'] = app.GROWL_PASSWORD
    else:
        opts['password'] = password

    opts['app'] = 'Medusa'
    opts['debug'] = False

    # Send Registration
    register = gntp.GNTPRegister()
    register.add_header('Application-Name', opts['app'])
    register.add_header('Application-Icon', app.LOGO_URL)

    register.add_notification('Test', True)
    register.add_notification(common.notifyStrings[common.NOTIFY_SNATCH], True)
    register.add_notification(common.notifyStrings[common.NOTIFY_DOWNLOAD], True)
    register.add_notification(common.notifyStrings[common.NOTIFY_GIT_UPDATE], True)

    if opts['password']:
        register.set_password(opts['password'])

    try:
        return self._send(opts['host'], opts['port'], register.encode(), opts['debug'])
    except Exception as error:
        log.warning(
            u'GROWL: Unable to send growl to {host}:{port} - {msg!r}',
            {'msg': ex(error), 'host': opts['host'], 'port': opts['port']}
        )
        return False

def update_library(self, show=None):
    """Update the Emby Media Server host via HTTP API.

    Returns:
        True for no issue or False if there was an error
    """
    if app.USE_EMBY:
        if not app.EMBY_HOST:
            log.debug(u'EMBY: No host specified, check your settings')
            return False

        if show:
            if show.indexer == 1:
                provider = 'tvdb'
            elif show.indexer == 2:
                log.warning(u'EMBY: TVRage Provider no longer valid')
                return False
            else:
                log.warning(u'EMBY: Provider unknown')
                return False
            query = '?%sid=%s' % (provider, show.indexerid)
        else:
            query = ''

        url = 'http://%s/emby/Library/Series/Updated%s' % (app.EMBY_HOST, query)
        values = {}
        # Encode the (empty) body so Request also works on Python 3, where it requires bytes
        data = urlencode(values).encode('utf-8')
        try:
            req = Request(url, data)
            req.add_header('X-MediaBrowser-Token', app.EMBY_APIKEY)
            response = urlopen(req)
            result = response.read().decode('utf-8')
            response.close()

            log.debug(u'EMBY: HTTP response: {0}', result.replace('\n', ''))
            return True
        except (URLError, IOError) as error:
            log.warning(u'EMBY: Warning: Unable to contact Emby at {url}: {error}',
                        {'url': url, 'error': ex(error)})
            return False

def write_show_file(self, show_obj):
    """
    Generates and writes show_obj's metadata under the given path to the
    filename given by get_show_file_path()

    show_obj: Series object for which to create the metadata

    path: An absolute or relative path where we should put the file. Note that
            the file name will be the default show_file_name.

    Note that this method expects that _show_data will return an ElementTree
    object. If your _show_data returns data in another format you'll need to
    override this method.
    """
    data = self._show_data(show_obj)

    if not data:
        return False

    nfo_file_path = self.get_show_file_path(show_obj)
    nfo_file_dir = os.path.dirname(nfo_file_path)

    try:
        if not os.path.isdir(nfo_file_dir):
            log.debug(
                'Metadata directory did not exist, creating it at {location}',
                {'location': nfo_file_dir}
            )
            os.makedirs(nfo_file_dir)
            helpers.chmod_as_parent(nfo_file_dir)

        log.debug(
            'Writing show nfo file to {location}',
            {'location': nfo_file_path}
        )

        with io.open(nfo_file_path, 'wb') as nfo_file:
            data.write(nfo_file, encoding='utf-8', xml_declaration=True)

        helpers.chmod_as_parent(nfo_file_path)
    except IOError as error:
        log.error(
            'Unable to write file to {location} - are you sure the folder is writable? {error}',
            {'location': nfo_file_path, 'error': ex(error)}
        )
        return False

    return True

def _processUpgrade(connection, upgradeClass):
    instance = upgradeClass(connection)
    logger.log(u'Checking ' + prettyName(upgradeClass.__name__) + ' database upgrade', logger.DEBUG)
    if not instance.test():
        logger.log(u'Database upgrade required: ' + prettyName(upgradeClass.__name__), logger.DEBUG)
        try:
            instance.execute()
        except Exception as e:
            logger.log('Error in ' + str(upgradeClass.__name__) + ': ' + ex(e), logger.ERROR)
            raise
        logger.log(upgradeClass.__name__ + ' upgrade completed', logger.DEBUG)
    else:
        logger.log(upgradeClass.__name__ + ' upgrade not required', logger.DEBUG)

    for upgradeSubClass in upgradeClass.__subclasses__():
        _processUpgrade(connection, upgradeSubClass)

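# A standalone sketch of the recursive schema-upgrade walk above: each upgrade
# is declared as a subclass of its predecessor, and __subclasses__() drives
# the traversal. The upgrade classes below are stand-ins for the real ones.
class InitialSchema(object):
    def apply(self):
        print('applying', type(self).__name__)

class AddIndexerColumn(InitialSchema):
    pass

class AddQualityColumn(AddIndexerColumn):
    pass

def process_upgrade(upgrade_class):
    upgrade_class().apply()
    for sub in upgrade_class.__subclasses__():
        process_upgrade(sub)

process_upgrade(InitialSchema)  # InitialSchema, AddIndexerColumn, AddQualityColumn
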
def update_show_indexer_metadata(self, show_obj):
    if self.show_metadata and show_obj and self._has_show_metadata(show_obj):
        log.debug(
            u'Metadata provider {name} updating series indexer info metadata file for {series}',
            {u'name': self.name, u'series': show_obj.name}
        )

        nfo_file_path = self.get_show_file_path(show_obj)

        try:
            with io.open(nfo_file_path, u'rb') as xmlFileObj:
                showXML = etree.ElementTree(file=xmlFileObj)

            indexerid = showXML.find(u'id')

            root = showXML.getroot()
            if indexerid is not None:
                indexerid.text = str(show_obj.indexerid)
            else:
                etree.SubElement(root, u'id').text = str(show_obj.indexerid)

            # Make it purdy
            helpers.indent_xml(root)

            showXML.write(nfo_file_path, encoding=u'UTF-8')
            helpers.chmod_as_parent(nfo_file_path)

            return True
        except etree.ParseError as error:
            log.warning(
                u'Received an invalid XML for {series}, try again later. Error: {error}',
                {u'series': show_obj.name, u'error': error}
            )
        except IOError as e:
            log.error(
                u'Unable to write file to {location} - are you sure the folder is writeable? {error}',
                {u'location': nfo_file_path, u'error': ex(e)}
            )

def isRarSupported():
    """
    Test unpacking support.

    Test Packing Support:
        - Simulating in-memory rar extraction on test.rar file
    """
    try:
        rar_path = os.path.join(app.PROG_DIR, app.LIB_FOLDER, 'unrar2', 'test.rar')
        testing = RarFile(rar_path).read_files('*test.txt')
        if testing[0][1] == b'This is only a test.':
            return 'supported'
        logger.log('Rar Not Supported: Cannot read the content of test file', logger.ERROR)
        return 'not supported'
    except Exception as msg:
        logger.log('Rar Not Supported: {error}'.format(error=ex(msg)), logger.ERROR)
        return 'not supported'

def _download_result(result):
    """
    Download a result to the appropriate black hole folder.

    :param result: SearchResult instance to download.
    :return: boolean, True on success
    """
    res_provider = result.provider
    if res_provider is None:
        log.error(u'Invalid provider name - this is a coding error, report it please')
        return False

    # NZBs with a URL can just be downloaded from the provider
    if result.result_type == u'nzb':
        new_result = res_provider.download_result(result)
    # if it's an nzb data result
    elif result.result_type == u'nzbdata':
        # get the final file path to the nzb
        file_name = os.path.join(app.NZB_DIR, result.name + u'.nzb')
        log.info(u'Saving NZB to {0}', file_name)

        new_result = True

        # save the data to disk
        try:
            with open(file_name, u'w') as file_out:
                file_out.write(result.extra_info[0])
            chmod_as_parent(file_name)
        except EnvironmentError as e:
            log.error(u'Error trying to save NZB to black hole: {0}', ex(e))
            new_result = False
    elif result.result_type == u'torrent':
        new_result = res_provider.download_result(result)
    else:
        log.error(u'Invalid provider type - this is a coding error, report it please')
        new_result = False

    return new_result

def _sendRegistration(self, host=None, password=None):
    opts = {}

    if host is None:
        hostParts = app.GROWL_HOST.split(':')
    else:
        hostParts = host.split(':')

    if len(hostParts) != 2 or hostParts[1] == '':
        port = 23053
    else:
        port = int(hostParts[1])

    opts['host'] = hostParts[0]
    opts['port'] = port

    if password is None:
        opts['password'] = app.GROWL_PASSWORD
    else:
        opts['password'] = password

    opts['app'] = 'Medusa'
    opts['debug'] = False

    # Send Registration
    register = gntp.core.GNTPRegister()
    register.add_header('Application-Name', opts['app'])
    register.add_header('Application-Icon', app.LOGO_URL)

    register.add_notification('Test', True)
    register.add_notification(common.notifyStrings[common.NOTIFY_SNATCH], True)
    register.add_notification(common.notifyStrings[common.NOTIFY_DOWNLOAD], True)
    register.add_notification(common.notifyStrings[common.NOTIFY_GIT_UPDATE], True)

    if opts['password']:
        register.set_password(opts['password'])

    try:
        return self._send(opts['host'], opts['port'], register.encode(), opts['debug'])
    except Exception as error:
        log.warning(
            u'GROWL: Unable to send growl to {host}:{port} - {msg!r}',
            {'msg': ex(error), 'host': opts['host'], 'port': opts['port']}
        )
        return False

def process_media(self, path, video_files, force=False, is_priority=None, ignore_subs=False):
    """
    Postprocess media files.

    :param path: Path to postprocess in
    :param video_files: Filenames to look for and postprocess
    :param force: Postprocess currently postprocessing file
    :param is_priority: Boolean, is this a priority download
    :param ignore_subs: True to ignore setting 'postpone if no subs'
    """
    self.postpone_processing = False

    for video in video_files:
        file_path = os.path.join(path, video)

        if not force and self.already_postprocessed(video):
            self.log('Skipping already processed file: {0}'.format(video), logger.DEBUG)
            continue

        try:
            processor = post_processor.PostProcessor(file_path, self.resource_name,
                                                     self.process_method, is_priority)

            if app.POSTPONE_IF_NO_SUBS:
                if not self._process_postponed(processor, file_path, video, ignore_subs):
                    continue

            self.result = processor.process()
            process_fail_message = ''
        except EpisodePostProcessingFailedException as error:
            processor = None
            self.result = False
            process_fail_message = ex(error)

        if processor:
            self._output.append(processor.output)

        if self.result:
            self.log('Processing succeeded for {0}'.format(file_path))
        else:
            self.log('Processing failed for {0}: {1}'.format(file_path, process_fail_message), logger.WARNING)
            self.missedfiles.append('{0}: Processing failed: {1}'.format(file_path, process_fail_message))
            self.succeeded = False

def refresh(indexer_id, series_id):
    """
    Try to refresh a show.

    :param indexer_id: The unique id of the indexer
    :param series_id: The unique id of the show to refresh
    :return: A tuple containing:
     - an error message if the show could not be refreshed, ``None`` otherwise
     - the show object that was refreshed, if it exists, ``None`` otherwise
    """
    error, series_obj = Show._validate_indexer_id(indexer_id, series_id)

    if error is not None:
        return error, series_obj

    try:
        app.show_queue_scheduler.action.refreshShow(series_obj)
    except CantRefreshShowException as exception:
        return ex(exception), series_obj

    return None, series_obj

def _connectivity_test():
    """Generate tests.

    :return: test to run
    """
    if not _provider.url:
        print('%s has no url set, skipping' % _provider.name)
        return

    try:
        requests.head(_provider.url, verify=certifi.where(), timeout=10)
    except requests.exceptions.SSLError as error:
        if 'certificate verify failed' in text_type(error):
            print('Cannot verify certificate for %s' % _provider.name)
        else:
            print('SSLError on %s: %s' % (_provider.name, ex(error)))
            raise
    except requests.exceptions.Timeout:
        print('Provider timed out')

def _notify_emby(self, message, host=None, emby_apikey=None):
    """
    Notify Emby host via HTTP API.

    :return: True for no issue or False if there was an error
    """
    # fill in omitted parameters
    if not host:
        host = app.EMBY_HOST
    if not emby_apikey:
        emby_apikey = app.EMBY_APIKEY

    url = 'http://{host}/emby/Notifications/Admin'.format(host=host)
    data = json.dumps({
        'Name': 'Medusa',
        'Description': message,
        'ImageUrl': app.LOGO_URL
    })
    try:
        resp = self.session.post(
            url=url,
            data=data,
            headers={
                'X-MediaBrowser-Token': emby_apikey,
                'Content-Type': 'application/json'
            }
        )
        resp.raise_for_status()

        if resp.content:
            # Use resp.text: resp.content is bytes and str.replace would fail on it
            log.debug('EMBY: HTTP response: {0}', resp.text.replace('\n', ''))

        log.info('EMBY: Successfully sent a test notification.')
        return True
    except (HTTPError, RequestException) as error:
        log.warning('EMBY: Warning: Unable to contact Emby at {url}: {error}',
                    {'url': url, 'error': ex(error)})
        return False

def delete_files(self, path, files, force=False):
    """
    Remove files from filesystem.

    :param path: path to process
    :param files: files we want to delete
    :param force: Boolean, force deletion, defaults to false
    """
    if not files:
        return

    if not self.result and force:
        self.log('Forcing deletion of files, even though last result was not successful.', logger.DEBUG)
    elif not self.result:
        return

    # Delete all files that are not needed
    for cur_file in files:
        cur_file_path = os.path.join(path, cur_file)

        if not os.path.isfile(cur_file_path):
            continue  # Prevents an error when a not-wanted file is also an associated file

        self.log('Deleting file: {0}'.format(cur_file), logger.DEBUG)

        # Check the read-only attribute first
        file_attribute = os.stat(cur_file_path)[0]
        if not file_attribute & stat.S_IWRITE:
            # File is read-only, so make it writeable
            self.log('Changing read-only flag for file: {0}'.format(cur_file), logger.DEBUG)
            try:
                os.chmod(cur_file_path, stat.S_IWRITE)
            except OSError as error:
                self.log('Cannot change permissions of {0}: {1}'.format(cur_file_path, ex(error)), logger.DEBUG)

        try:
            os.remove(cur_file_path)
        except OSError as error:
            self.log('Unable to delete file {0}: {1}'.format(cur_file, ex(error)), logger.DEBUG)

def delete(indexer_id, series_id, remove_files=False):
    """
    Try to delete a show.

    :param indexer_id: The unique id of the indexer, used to add the show.
    :param series_id: The unique id of the series.
    :param remove_files: ``True`` to remove the files associated with the show, ``False`` otherwise
    :return: A tuple containing:
     - an error message if the show could not be deleted, ``None`` otherwise
     - the show object that was deleted, if it exists, ``None`` otherwise
    """
    error, show = Show._validate_indexer_id(indexer_id, series_id)

    if error is not None:
        return error, show

    if show:
        try:
            app.show_queue_scheduler.action.removeShow(show, bool(remove_files))
        except CantRemoveShowException as exception:
            return ex(exception), show

    return None, show

def run(self):
    """Run the thread to process events."""
    try:
        while not self.stop.is_set():
            try:
                # get event type
                event_type = self.queue.get(True, 1)

                # perform callback if we got an event type
                self.callback(event_type)

                # event completed
                self.queue.task_done()
            except Empty:
                event_type = None

        # exiting thread
        self.stop.clear()
    except Exception as error:
        log.error(u'Exception generated in thread %s: %s', self.name, ex(error))
        log.debug(repr(traceback.format_exc()))

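# A self-contained sketch of the event loop above: block on queue.get with a
# short timeout so the stop Event is rechecked about once per second instead
# of blocking forever. The event name and callback are illustrative.
import threading
from queue import Queue, Empty

stop = threading.Event()
events = Queue()

def worker(callback):
    while not stop.is_set():
        try:
            event_type = events.get(True, 1)  # wake up at least once per second
        except Empty:
            continue
        callback(event_type)
        events.task_done()

t = threading.Thread(target=worker, args=(print,))
t.start()
events.put('SHUTDOWN_REQUESTED')  # illustrative event name
events.join()
stop.set()
t.join()
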
def _get_proper_results(self):  # pylint: disable=too-many-locals, too-many-branches, too-many-statements
    """Retrieve a list of recently aired episodes, and search for these episodes in the different providers."""
    propers = {}

    # For each provider get the list of propers
    original_thread_name = threading.currentThread().name
    providers = enabled_providers('backlog')

    search_date = datetime.datetime.today() - datetime.timedelta(days=app.PROPERS_SEARCH_DAYS)
    main_db_con = db.DBConnection()
    if not app.POSTPONE_IF_NO_SUBS:
        # Get the recently aired (last PROPERS_SEARCH_DAYS days) shows from DB
        recently_aired = main_db_con.select(
            'SELECT indexer, showid, season, episode, status, airdate'
            ' FROM tv_episodes'
            ' WHERE airdate >= ?'
            ' AND status = ?',
            [search_date.toordinal(), DOWNLOADED]
        )
    else:
        # Get recently subtitled episodes (last PROPERS_SEARCH_DAYS days) from DB
        # Episode status becomes DOWNLOADED only after subtitles are found
        last_subtitled = search_date.strftime(History.date_format)
        recently_aired = main_db_con.select('SELECT indexer_id AS indexer, showid, season, episode FROM history '
                                            'WHERE date >= ? AND action = ?', [last_subtitled, SUBTITLED])

    if not recently_aired:
        log.info('No recently aired new episodes, nothing to search for')
        return []

    # Loop through the providers, and search for releases
    for cur_provider in providers:
        threading.currentThread().name = '{thread} :: [{provider}]'.format(thread=original_thread_name,
                                                                           provider=cur_provider.name)

        log.info('Searching for any new PROPER releases from {provider}', {'provider': cur_provider.name})

        try:
            cur_propers = cur_provider.find_propers(recently_aired)
        except AuthException as e:
            log.debug('Authentication error: {error}', {'error': ex(e)})
            continue

        # If they haven't been added by a different provider, then add the proper to the list
        for proper in cur_propers:
            name = self._sanitize_name(proper.name)
            if name not in propers:
                log.debug('Found new possible proper result: {name}', {'name': proper.name})
                propers[name] = proper

    threading.currentThread().name = original_thread_name

    # Take the list of unique propers and sort it by date, newest first
    sorted_propers = sorted(list(itervalues(propers)), key=operator.attrgetter('date'), reverse=True)
    final_propers = []

    # Keep only items from the last PROPERS_SEARCH_DAYS days in processed propers
    latest_proper = datetime.datetime.now() - datetime.timedelta(days=app.PROPERS_SEARCH_DAYS)
    self.processed_propers = [p for p in self.processed_propers if p.get('date') >= latest_proper]

    # Get proper names from processed propers
    processed_propers_names = [proper.get('name') for proper in self.processed_propers if proper.get('name')]

    for cur_proper in sorted_propers:

        if not self.ignore_processed_propers and cur_proper.name in processed_propers_names:
            log.debug(u'Proper already processed. Skipping: {proper_name}', {'proper_name': cur_proper.name})
            continue

        try:
            cur_proper.parse_result = NameParser().parse(cur_proper.name)
        except (InvalidNameException, InvalidShowException) as error:
            log.debug('{error}', {'error': error})
            continue

        if not cur_proper.parse_result.proper_tags:
            log.info('Skipping non-proper: {name}', {'name': cur_proper.name})
            continue

        if not cur_proper.series.episodes.get(cur_proper.parse_result.season_number) or \
                any([ep for ep in cur_proper.parse_result.episode_numbers
                     if not cur_proper.series.episodes[cur_proper.parse_result.season_number].get(ep)]):
            log.info('Skipping proper for wrong season/episode: {name}', {'name': cur_proper.name})
            continue

        log.debug('Proper tags for {proper}: {tags}', {
            'proper': cur_proper.name,
            'tags': cur_proper.parse_result.proper_tags
        })

        if not cur_proper.parse_result.series_name:
            log.debug('Ignoring invalid show: {name}', {'name': cur_proper.name})
            if cur_proper.name not in processed_propers_names:
                self.processed_propers.append({'name': cur_proper.name, 'date': cur_proper.date})
            continue

        if not cur_proper.parse_result.episode_numbers:
            log.debug('Ignoring full season instead of episode: {name}', {'name': cur_proper.name})
            if cur_proper.name not in processed_propers_names:
                self.processed_propers.append({'name': cur_proper.name, 'date': cur_proper.date})
            continue

        log.debug('Successful match! Matched {original_name} to show {new_name}',
                  {'original_name': cur_proper.parse_result.original_name,
                   'new_name': cur_proper.parse_result.series.name
                   })

        # Map the indexerid in the db to the show's indexerid
        cur_proper.indexerid = cur_proper.parse_result.series.indexerid

        # Map the indexer in the db to the show's indexer
        cur_proper.indexer = cur_proper.parse_result.series.indexer

        # Map our Proper instance
        cur_proper.series = cur_proper.parse_result.series
        cur_proper.actual_season = cur_proper.parse_result.season_number \
            if cur_proper.parse_result.season_number is not None else 1
        cur_proper.actual_episodes = cur_proper.parse_result.episode_numbers
        cur_proper.release_group = cur_proper.parse_result.release_group
        cur_proper.version = cur_proper.parse_result.version
        cur_proper.quality = cur_proper.parse_result.quality
        cur_proper.content = None
        cur_proper.proper_tags = cur_proper.parse_result.proper_tags

        # Filter the release; in this case it's just a quality gate, as we only send one result
        wanted_results = filter_results(cur_proper)
        best_result = pick_result(wanted_results)

        if not best_result:
            log.info('Rejected proper: {name}', {'name': cur_proper.name})
            if cur_proper.name not in processed_propers_names:
                self.processed_propers.append({'name': cur_proper.name, 'date': cur_proper.date})
            continue

        # Only get an anime proper if it has a release group and version
        if best_result.series.is_anime:
            if not best_result.release_group and best_result.version == -1:
                log.info('Ignoring proper without release group and version: {name}',
                         {'name': best_result.name})
                if cur_proper.name not in processed_propers_names:
                    self.processed_propers.append({'name': cur_proper.name, 'date': cur_proper.date})
                continue

        # Check if we have the episode as DOWNLOADED
        main_db_con = db.DBConnection()
        sql_results = main_db_con.select('SELECT quality, release_name '
                                         'FROM tv_episodes WHERE indexer = ? '
                                         'AND showid = ? AND season = ? '
                                         'AND episode = ? AND status = ?',
                                         [best_result.indexer,
                                          best_result.series.indexerid,
                                          best_result.actual_season,
                                          best_result.actual_episodes[0],
                                          DOWNLOADED])
        if not sql_results:
            log.info("Ignoring proper because this episode doesn't have 'DOWNLOADED' status: {name}", {
                'name': best_result.name
            })
            continue

        # Only keep the proper if we have already downloaded an episode with the same quality
        old_quality = int(sql_results[0]['quality'])
        if old_quality != best_result.quality:
            log.info('Ignoring proper because quality is different: {name}', {'name': best_result.name})
            if cur_proper.name not in processed_propers_names:
                self.processed_propers.append({'name': cur_proper.name, 'date': cur_proper.date})
            continue

        # Only keep the proper if we have already downloaded an episode with the same codec
        release_name = sql_results[0]['release_name']
        if release_name:
            release_name_guess = NameParser()._parse_string(release_name)
            current_codec = release_name_guess.video_codec

            # Ignore proper if codec differs from downloaded release codec
            if all([current_codec, best_result.parse_result.video_codec,
                    best_result.parse_result.video_codec != current_codec]):
                log.info('Ignoring proper because codec is different: {name}', {'name': best_result.name})
                if best_result.name not in processed_propers_names:
                    self.processed_propers.append({'name': best_result.name, 'date': best_result.date})
                continue

            streaming_service = release_name_guess.guess.get(u'streaming_service')
            # Ignore proper if streaming service differs from downloaded release streaming service
            if best_result.parse_result.guess.get(u'streaming_service') != streaming_service:
                log.info('Ignoring proper because streaming service is different: {name}',
                         {'name': best_result.name})
                if best_result.name not in processed_propers_names:
                    self.processed_propers.append({'name': best_result.name, 'date': best_result.date})
                continue
        else:
            log.debug("Couldn't find a release name in the database. Skipping codec comparison for: {name}", {
                'name': best_result.name
            })

        # Check if we actually want this proper (if it's the right release group and a higher version)
        if best_result.series.is_anime:
            main_db_con = db.DBConnection()
            sql_results = main_db_con.select(
                'SELECT release_group, version '
                'FROM tv_episodes WHERE indexer = ? AND showid = ? '
                'AND season = ? AND episode = ?',
                [best_result.indexer, best_result.series.indexerid,
                 best_result.actual_season, best_result.actual_episodes[0]])

            old_version = int(sql_results[0]['version'])
            old_release_group = sql_results[0]['release_group']

            if -1 < old_version < best_result.version:
                log.info('Found new anime version {new} to replace existing version {old}: {name}',
                         {'old': old_version,
                          'new': best_result.version,
                          'name': best_result.name
                          })
            else:
                log.info('Ignoring proper with the same or lower version: {name}', {'name': best_result.name})
                if cur_proper.name not in processed_propers_names:
                    self.processed_propers.append({'name': cur_proper.name, 'date': cur_proper.date})
                continue

            if old_release_group != best_result.release_group:
                log.info('Ignoring proper from release group {new} instead of current group {old}',
                         {'new': best_result.release_group, 'old': old_release_group})
                if best_result.name not in processed_propers_names:
                    self.processed_propers.append({'name': best_result.name, 'date': best_result.date})
                continue

        # If the show is in our list and there hasn't been a proper already added for that
        # particular episode, then add it to our list of propers
        if best_result.indexerid != -1 and (
            best_result.indexerid, best_result.actual_season, best_result.actual_episodes
        ) not in list(map(operator.attrgetter('indexerid', 'actual_season', 'actual_episodes'), final_propers)):
            log.info('Found a desired proper: {name}', {'name': best_result.name})
            final_propers.append(best_result)
            if best_result.name not in processed_propers_names:
                self.processed_propers.append({'name': best_result.name, 'date': best_result.date})

    return final_propers
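# Every rejection branch above repeats the same "record as processed" bookkeeping.
# A minimal sketch of a helper that could factor that out; the name
# `_mark_processed` is hypothetical and not part of the original code:
def _mark_processed(self, proper):
    """Record a proper as processed, skipping names already recorded."""
    known_names = {p.get('name') for p in self.processed_propers}
    if proper.name not in known_names:
        self.processed_propers.append({'name': proper.name, 'date': proper.date})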
def test_ex_ret_concat_args_strings(self):
    """Test that ex() joins multiple string args with ' : '."""
    self.assertEqual(ex(Exception('lots', 'of', 'strings')), 'lots : of : strings')
def search_for_needed_episodes(scheduler_start_time, force=False):
    """Search providers for needed episodes.

    :param scheduler_start_time: timestamp of the start of the search scheduler
    :param force: run the search even if no episodes are needed
    :return: list of found episodes
    """
    show_list = app.showList
    from_date = datetime.date.fromordinal(1)
    episodes = []

    for cur_show in show_list:
        if cur_show.paused:
            log.debug(
                u'Not checking for needed episodes of {0} because the show is paused',
                cur_show.name,
            )
            continue
        episodes.extend(wanted_episodes(cur_show, from_date))

    if not episodes and not force:
        # Nothing is wanted, so exit early and avoid whatever arbitrarily
        # complex work a provider cache update entails (e.g. reading RSS feeds)
        return []

    providers = enabled_providers(u'daily')
    if not providers:
        log.warning(
            u'No NZB/Torrent providers found or enabled in the application config for daily searches.'
            u' Please check your settings'
        )
        return []

    original_thread_name = threading.currentThread().name
    log.info(u'Using daily search providers')

    for cur_provider in providers:
        threading.currentThread().name = u'{thread} :: [{provider}]'.format(
            thread=original_thread_name, provider=cur_provider.name
        )
        cur_provider.cache.update_cache(scheduler_start_time)

    single_results = {}
    multi_results = []
    for cur_provider in providers:
        threading.currentThread().name = u'{thread} :: [{provider}]'.format(
            thread=original_thread_name, provider=cur_provider.name
        )
        try:
            found_results = cur_provider.cache.find_needed_episodes(episodes)
        except AuthException as error:
            log.error(u'Authentication error: {0}', ex(error))
            continue

        # Pick a single result for each episode, respecting existing results
        for episode_no, results in iteritems(found_results):
            if results[0].series.paused:
                log.debug(u'Skipping {0} because the show is paused.', results[0].series.name)
                continue

            # If all results were rejected, move on to the next episode
            wanted_results = filter_results(results)
            if not wanted_results:
                log.debug(u'All found results for {0} were rejected.', results[0].series.name)
                continue

            best_result = pick_result(wanted_results)
            # Skip the result if search delay is enabled for the provider
            if delay_search(best_result):
                continue

            if episode_no in (SEASON_RESULT, MULTI_EP_RESULT):
                multi_results.append(best_result)
            else:
                # If the episode is already in the list (from another provider) and
                # the newly found quality is no better, skip it
                if episode_no in single_results:
                    allowed_qualities, preferred_qualities = results[0].series.current_qualities
                    if not Quality.is_higher_quality(single_results[episode_no].quality,
                                                     best_result.quality, allowed_qualities,
                                                     preferred_qualities):
                        continue

                single_results[episode_no] = best_result

    threading.currentThread().name = original_thread_name

    return combine_results(multi_results, list(itervalues(single_results)))
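# A minimal usage sketch for the daily search entry point above. The scheduler
# wiring and the `.name` attribute on results are assumptions here; only
# `search_for_needed_episodes` itself comes from the code above:
import time

found = search_for_needed_episodes(scheduler_start_time=int(time.time()))
for search_result in found:
    log.info(u'Daily search found a result: {0}', search_result.name)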
def _sendNMJ(self, host, database, mount=None):
    """
    Send a NMJ update command to the specified machine.

    :param host: The hostname/IP to send the request to (no port)
    :param database: The database to send the request to
    :param mount: The mount URL to use (optional)
    :return: True if the request succeeded, False otherwise
    """
    # If a mount URL is provided, then attempt to open a handle to that URL
    if mount:
        try:
            req = Request(mount)
            log.debug(u'Try to mount network drive via url: {0}', mount)
            urlopen(req)
        except IOError as error:
            if hasattr(error, 'reason'):
                log.warning(u'NMJ: Could not contact Popcorn Hour on host {0}: {1}', host, error.reason)
            elif hasattr(error, 'code'):
                log.warning(u'NMJ: Problem with Popcorn Hour on host {0}: {1}', host, error.code)
            return False
        except Exception as error:
            log.error(u'NMJ: Unknown exception: {0}', ex(error))
            return False

    # Build up the request URL and parameters
    update_url = 'http://%(host)s:8008/metadata_database?%(params)s'
    params = {
        'arg0': 'scanner_start',
        'arg1': database,
        'arg2': 'background',
        'arg3': ''
    }
    params = urlencode(params)
    update_url = update_url % {'host': host, 'params': params}

    # Send the request to the server
    try:
        req = Request(update_url)
        log.debug(u'Sending NMJ scan update command via url: {0}', update_url)
        handle = urlopen(req)
        response = handle.read()
    except IOError as error:
        if hasattr(error, 'reason'):
            log.warning(u'NMJ: Could not contact Popcorn Hour on host {0}: {1}', host, error.reason)
        elif hasattr(error, 'code'):
            log.warning(u'NMJ: Problem with Popcorn Hour on host {0}: {1}', host, error.code)
        return False
    except Exception as error:
        log.error(u'NMJ: Unknown exception: {0}', ex(error))
        return False

    # Try to parse the resulting XML
    try:
        et = etree.fromstring(response)
        result = et.findtext('returnValue')
    except SyntaxError as error:
        log.error(u'Unable to parse XML returned from the Popcorn Hour: {0}', error)
        return False

    # The response should carry a numeric returnValue; treat a missing value
    # or any value greater than zero as an error
    if result is None:
        log.error(u'Popcorn Hour response did not contain a returnValue')
        return False
    if int(result) > 0:
        log.error(u'Popcorn Hour returned an error code: {0!r}', result)
        return False
    else:
        log.info(u'NMJ started background scan')
        return True
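# For illustration: roughly the scanner_start URL the method above builds, with
# example host and database values (both hypothetical). This sketch uses the
# standard library directly; the module above relies on its own compat imports:
from urllib.parse import urlencode

example_params = urlencode({'arg0': 'scanner_start',
                            'arg1': '/share/Video/nmj_database/media.db',
                            'arg2': 'background',
                            'arg3': ''})
example_url = 'http://%(host)s:8008/metadata_database?%(params)s' % {
    'host': '192.168.1.100', 'params': example_params}
# example_url is approximately:
# http://192.168.1.100:8008/metadata_database?arg0=scanner_start&arg1=%2Fshare%2F...&arg2=background&arg3=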
def unrar(self, path, rar_files, force=False):
    """
    Extract RAR files.

    :param path: Path to look for files in
    :param rar_files: Names of RAR files
    :param force: process files even if they were already post-processed
    :return: List of unpacked file names
    """
    unpacked_files = []

    if app.UNPACK and rar_files:
        self.log('Packed files detected: {0}'.format(rar_files), logger.DEBUG)

        for archive in rar_files:
            self.log('Unpacking archive: {0}'.format(archive), logger.DEBUG)

            failure = None
            try:
                rar_handle = RarFile(os.path.join(path, archive))

                # Skip extraction if any file in the archive has previously been extracted
                skip_extraction = False
                for file_in_archive in [os.path.basename(each.filename)
                                        for each in rar_handle.infolist() if not each.isdir]:
                    if not force and self.already_postprocessed(file_in_archive):
                        self.log('Archive file already post-processed, extraction skipped: '
                                 '{0}'.format(file_in_archive), logger.DEBUG)
                        skip_extraction = True
                        break

                    if app.POSTPONE_IF_NO_SUBS and os.path.isfile(os.path.join(path, file_in_archive)):
                        self.log('Archive file already extracted, extraction skipped: '
                                 '{0}'.format(file_in_archive), logger.DEBUG)
                        skip_extraction = True
                        break

                if not skip_extraction:
                    rar_handle.extract(path=path, withSubpath=False, overwrite=False)

                    for each in rar_handle.infolist():
                        if not each.isdir:
                            basename = os.path.basename(each.filename)
                            unpacked_files.append(basename)

                del rar_handle

            except ArchiveHeaderBroken:
                failure = ('Archive Header Broken', 'Unpacking failed because the archive header is broken')
            except IncorrectRARPassword:
                failure = ('Incorrect RAR Password', 'Unpacking failed because of an incorrect RAR password')
            except FileOpenError:
                failure = ('File Open Error, check the parent folder and destination file permissions.',
                           'Unpacking failed with a File Open Error (file permissions?)')
            except InvalidRARArchiveUsage:
                failure = ('Invalid RAR Archive Usage', 'Unpacking failed with invalid RAR archive usage')
            except InvalidRARArchive:
                failure = ('Invalid RAR Archive', 'Unpacking failed with an invalid RAR archive error')
            except Exception as error:
                failure = (ex(error), 'Unpacking failed for an unknown reason')

            if failure is not None:
                self.log('Failed unpacking archive {0}: {1}'.format(archive, failure[0]), logger.WARNING)
                self.missedfiles.append('{0}: Unpacking failed: {1}'.format(archive, failure[1]))
                self.result = False
                continue

        self.log('Extracted content: {0}'.format(unpacked_files), logger.DEBUG)

    return unpacked_files
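# A minimal usage sketch, assuming a `processor` instance exposing the method
# above (the enclosing class is not shown here, and the paths and archive name
# are example values):
unpacked = processor.unrar('/downloads/Some.Show.S01E01', ['some.show.s01e01.rar'], force=True)
for name in unpacked:
    processor.log('Unpacked file ready for post-processing: {0}'.format(name), logger.DEBUG)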
def test_ex_ret_stringed_args(self):
    """Test that ex() stringifies non-string args into an 'error ...' message."""
    self.assertEqual(ex(Exception(303)), 'error 303')
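# A minimal sketch of an `ex()`-style helper consistent with the two assertions
# above. The real implementation lives elsewhere in the codebase; this is only
# an illustration of the behaviour the tests pin down:
def _ex_sketch(exception):
    """Return a best-effort message string for `exception`."""
    string_args = [arg for arg in exception.args if isinstance(arg, str) and arg]
    if string_args:
        # String arguments are joined with ' : '
        return ' : '.join(string_args)
    # Non-string arguments are stringified into a generic error message
    return 'error ' + ' : '.join(str(arg) for arg in exception.args)

assert _ex_sketch(Exception('lots', 'of', 'strings')) == 'lots : of : strings'
assert _ex_sketch(Exception(303)) == 'error 303'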