def _enforce_telegram_plugin_ver():
    """Validate that the python-telegram-bot package is importable and new enough.

    Raises:
        plugin.PluginWarning: if the package is missing, exposes no
            ``__version__`` attribute, or is older than ``_MIN_TELEGRAM_VER``.
    """
    if telegram is None:
        raise plugin.PluginWarning('missing python-telegram-bot pkg')
    # a bogus or ancient install may not even expose a version attribute
    if not hasattr(telegram, '__version__'):
        raise plugin.PluginWarning('invalid or old python-telegram-bot pkg')
    # compare two LooseVersion objects rather than LooseVersion vs raw string
    if LooseVersion(telegram.__version__) < LooseVersion(_MIN_TELEGRAM_VER):
        raise plugin.PluginWarning('old python-telegram-bot ({0})'.format(
            telegram.__version__))
def on_task_output(self, task, config):
    """Validate each accepted entry's `location`, collect namesake sibling
    files (same base name, configured extensions, optionally in subdirs) and
    dispatch the entry to the subclass's handle_entry()."""
    config = self.prepare_config(config)
    if config is None:
        return
    for entry in task.accepted:
        if 'location' not in entry:
            self.log.verbose(
                'Cannot handle %s because it does not have the field location.',
                entry['title'])
            continue
        src = entry['location']
        src_isdir = os.path.isdir(src)
        try:
            # check location
            if not os.path.exists(src):
                raise plugin.PluginWarning(
                    'location `%s` does not exists (anymore).' % src)
            if src_isdir:
                if not config.get('allow_dir'):
                    raise plugin.PluginWarning(
                        'location `%s` is a directory.' % src)
            elif not os.path.isfile(src):
                raise plugin.PluginWarning('location `%s` is not a file.' % src)

            # search for namesakes
            siblings = {}  # dict of (path=ext) pairs
            if not src_isdir and 'along' in config:
                parent = os.path.dirname(src)
                filename_no_ext, filename_ext = os.path.splitext(
                    os.path.basename(src))
                # look next to the source file first
                for ext in config['along']['extensions']:
                    siblings.update(
                        get_siblings(ext, filename_no_ext, filename_ext, parent))
                # then look inside each configured subdirectory (matched
                # case-insensitively against the parent's listing)
                listing = os.listdir(parent)
                listing_lower = list(map(str.lower, listing))
                for subdir in config['along'].get('subdirs', []):
                    try:
                        index = listing_lower.index(subdir)
                    except ValueError:
                        continue
                    subdir_path = os.path.join(parent, listing[index])
                    if not os.path.isdir(subdir_path):
                        continue
                    for ext in config['along']['extensions']:
                        siblings.update(
                            get_siblings(ext, filename_no_ext, filename_ext,
                                         subdir_path))

            # execute action in subclasses
            self.handle_entry(task, config, entry, siblings)
        except OSError as err:
            entry.fail(str(err))
            continue
def on_task_output(self, task, config):
    """Validate each accepted entry's `location`, gather sibling files from
    the parent directory and glob-matched subdirectories, and dispatch to the
    subclass's handle_entry()."""
    if config is True:
        config = {}
    elif config is False:
        return
    for entry in task.accepted:
        if 'location' not in entry:
            self.log.verbose(
                'Cannot handle %s because it does not have the field location.'
                % entry['title'])
            continue
        src = entry['location']
        src_isdir = os.path.isdir(src)
        try:
            # location sanity checks
            if not os.path.exists(src):
                raise plugin.PluginWarning(
                    'location `%s` does not exists (anymore).' % src)
            if src_isdir:
                if not config.get('allow_dir'):
                    raise plugin.PluginWarning(
                        'location `%s` is a directory.' % src)
            elif not os.path.isfile(src):
                raise plugin.PluginWarning('location `%s` is not a file.' % src)

            # search for namesakes
            siblings = {}  # dict of (path=ext) pairs
            if not src_isdir and 'along' in config:
                parent = os.path.dirname(src)
                filename_no_ext = os.path.splitext(os.path.basename(src))[0]
                search_dirs = [parent] + config['along'].get('subdirs', [])
                for subdir in search_dirs:
                    if subdir == parent:
                        candidates = [subdir]
                    else:
                        # use glob to get a list of matching dirs
                        candidates = glob.glob(
                            os.path.join(parent, os.path.normpath(subdir)))
                    # iterate over every dir returned by glob looking for matching ext
                    for candidate in candidates:
                        if not os.path.isdir(candidate):
                            continue
                        for ext in config['along']['files']:
                            siblings.update(
                                get_siblings(ext, src, filename_no_ext,
                                             candidate))

            # execute action in subclasses
            self.handle_entry(task, config, entry, siblings)
        except OSError as err:
            entry.fail(str(err))
            continue
def search(self, task, entry, config):
    """Query isohunt's RSS search for each search string and return a set of
    result entries carrying size/seed/leech data."""
    # urllib.quote will crash if the unicode string has non ascii characters, so encode in utf-8 beforehand
    optionlist = [
        'misc', 'movies', 'audio', 'tv', 'games', 'apps', 'pics', 'anime',
        'comics', 'books', 'music video', 'unclassified', 'all'
    ]
    entries = set()
    search_strings = [
        normalize_unicode(s)
        for s in entry.get('search_strings', [entry['title']])
    ]
    for search_string in search_strings:
        url = 'http://isohunt.com/js/rss/%s?iht=%s&noSL' % (
            urllib.quote(search_string.encode('utf-8')),
            optionlist.index(config))
        log.debug('requesting: %s' % url)
        rss = feedparser.parse(url)
        status = rss.get('status', False)
        if status != 200:
            raise plugin.PluginWarning(
                'Search result not 200 (OK), received %s' % status)
        if rss.get('bozo_exception', False):
            raise plugin.PluginWarning('Got bozo_exception (bad feed)')
        for item in rss.entries:
            entry = Entry()
            entry['title'] = item.title
            entry['url'] = item.link
            match = re.search(r'Size: ([\d]+).*Seeds: (\d+).*Leechers: (\d+)',
                              item.description, re.IGNORECASE)
            if not match:
                log.debug('regexp did not find seeds / peer data')
                continue
            log.debug('regexp found size(%s), Seeds(%s) and Leeches(%s)' %
                      (match.group(1), match.group(2), match.group(3)))
            entry['content_size'] = int(match.group(1))
            entry['torrent_seeds'] = int(match.group(2))
            entry['torrent_leeches'] = int(match.group(3))
            entry['search_sort'] = torrent_availability(
                entry['torrent_seeds'], entry['torrent_leeches'])
            entries.add(entry)
    return entries
def search(self, entry, config=None):
    """Search torrentz (trying the .eu domain, then .me) and return a set of
    result entries with size, seed/peer counts and info hash."""
    config = self.process_config(config)
    feed = REPUTATIONS[config['reputation']]
    entries = set()
    for search_string in entry.get('search_string', [entry['title']]):
        query = normalize_unicode(search_string + config.get('extra_terms', ''))
        for domain in ['eu', 'me']:
            # urllib.quote will crash if the unicode string has non ascii characters, so encode in utf-8 beforehand
            url = 'http://torrentz.%s/%s?q=%s' % (
                domain, feed, urllib.quote(query.encode('utf-8')))
            log.debug('requesting: %s' % url)
            try:
                response = requests.get(url)
                break
            except requests.RequestException as err:
                log.warning('torrentz.%s failed. Error: %s' % (domain, err))
        else:
            # every domain failed
            raise plugin.PluginWarning('Error getting torrentz search results')

        rss = feedparser.parse(response.content)
        if rss.get('bozo_exception', False):
            raise plugin.PluginWarning('Got bozo_exception (bad feed)')

        for item in rss.entries:
            match = re.search(
                r'Size: ([\d]+) Mb Seeds: ([,\d]+) Peers: ([,\d]+) Hash: ([a-f0-9]+)',
                item.description, re.IGNORECASE)
            if not match:
                log.debug('regexp did not find seeds / peer data')
                continue
            entry = Entry()
            entry['title'] = item.title
            entry['url'] = item.link
            entry['content_size'] = int(match.group(1))
            entry['torrent_seeds'] = int(match.group(2).replace(',', ''))
            entry['torrent_leeches'] = int(match.group(3).replace(',', ''))
            entry['torrent_info_hash'] = match.group(4).upper()
            entry['search_sort'] = torrent_availability(
                entry['torrent_seeds'], entry['torrent_leeches'])
            entries.add(entry)
    log.debug('Search got %d results' % len(entries))
    return entries
def search(self, entry, config=None):
    """Search torrentz.eu via urllib2 and return a set of result entries with
    size, seed/peer counts and info hash."""
    config = self.process_config(config)
    feed = REPUTATIONS[config['reputation']]
    entries = set()
    for search_string in entry.get('search_string', [entry['title']]):
        query = normalize_unicode(search_string + config.get('extra_terms', ''))
        # urllib.quote will crash if the unicode string has non ascii characters, so encode in utf-8 beforehand
        url = 'http://torrentz.eu/%s?q=%s' % (
            feed, urllib.quote(query.encode('utf-8')))
        log.debug('requesting: %s' % url)
        try:
            handle = urllib2.urlopen(url)
        except urllib2.URLError as err:
            raise plugin.PluginWarning('Error requesting URL: %s' % err)

        rss = feedparser.parse(handle)
        status = rss.get('status', False)
        if status != 200:
            raise plugin.PluginWarning(
                'Search result not 200 (OK), received %s %s' %
                (status, handle.msg))
        if rss.get('bozo_exception', False):
            raise plugin.PluginWarning('Got bozo_exception (bad feed)')

        for item in rss.entries:
            match = re.search(
                r'Size: ([\d]+) Mb Seeds: ([,\d]+) Peers: ([,\d]+) Hash: ([a-f0-9]+)',
                item.description, re.IGNORECASE)
            if not match:
                log.debug('regexp did not find seeds / peer data')
                continue
            entry = Entry()
            entry['title'] = item.title
            entry['url'] = item.link
            entry['content_size'] = int(match.group(1))
            entry['torrent_seeds'] = int(match.group(2).replace(',', ''))
            entry['torrent_leeches'] = int(match.group(3).replace(',', ''))
            entry['torrent_info_hash'] = match.group(4).upper()
            entry['search_sort'] = torrent_availability(
                entry['torrent_seeds'], entry['torrent_leeches'])
            entries.add(entry)
    log.debug('Search got %d results' % len(entries))
    return entries
def on_task_filter(self, task, config):
    """Reject accepted entries whose filename already exists somewhere under
    the configured paths (case-insensitive on Windows)."""
    if not task.accepted:
        log.debug('No accepted entries, not scanning for existing.')
        return
    log.verbose('Scanning path(s) for existing files.')
    config = self.prepare_config(config)
    for path in config:
        # unicode path causes crashes on some paths
        path = str(os.path.expanduser(path))
        if not os.path.exists(path):
            raise plugin.PluginWarning('Path %s does not exist' % path, log)
        # scan through
        for root, dirs, files in os.walk(path, followlinks=True):
            # convert filelists into utf-8 to avoid unicode problems
            dirs = [x.decode('utf-8', 'ignore') for x in dirs]
            files = [x.decode('utf-8', 'ignore') for x in files]
            # windows file system is not case sensitive
            if platform.system() == 'Windows':
                dirs = [s.lower() for s in dirs]
                files = [s.lower() for s in files]
            for entry in task.accepted:
                # priority is: filename, location (filename only), title
                name = check = os.path.split(
                    entry.get('filename',
                              entry.get('location', entry['title'])))[1]
                if platform.system() == 'Windows':
                    check = check.lower()
                if check in dirs or check in files:
                    log.debug('Found %s in %s' % (name, root))
                    entry.reject(os.path.join(root, name))
def on_task_output(self, task, config):
    """Mark all accepted episodes as acquired on MyEpisodes"""
    if not task.accepted:
        # Nothing accepted, don't do anything
        return
    username = config['username']
    password = config['password']

    # build an opener that keeps the login session cookie
    cookiejar = http.cookiejar.CookieJar()
    opener = request.build_opener(request.HTTPCookieProcessor(cookiejar))
    baseurl = request.Request('http://www.myepisodes.com/login.php?')
    loginparams = parse.urlencode({
        'username': username,
        'password': password,
        'action': 'Login',
    })
    try:
        logincon = opener.open(baseurl, loginparams)
        loginsrc = logincon.read()
    except URLError as e:
        log.error('Error logging in to myepisodes: %s' % e)
        return

    # on success the page echoes the username back
    if str(username) not in loginsrc:
        raise plugin.PluginWarning(
            ('Login to myepisodes.com failed, please check '
             'your account data or see if the site is down.'), log)

    for entry in task.accepted:
        try:
            self.mark_episode(task, entry, opener)
        except plugin.PluginWarning as w:
            log.warning(str(w))
def on_task_exit(self, task, config):
    """Send telegram message(s) at exit"""
    if telegram is None:
        raise plugin.PluginWarning('missing telegram python pkg')
    accepted_entries = list(task.accepted)
    if not accepted_entries:
        return
    token, usernames, fullnames, groups = self._parse_config(config)
    self.log.debug(
        'token={0} usernames={1} fullnames={2} groups={3}'.format(
            token, usernames, fullnames, groups))
    bot = telegram.Bot(token)
    session = task.session
    chat_ids = self._get_chat_ids_n_update_db(bot, session, usernames,
                                              fullnames, groups)
    if not chat_ids:
        return
    # one message per accepted entry, delivered to every resolved chat
    for entry in accepted_entries:
        title = entry.get('title')
        for chat_id in (chat.id for chat in chat_ids):
            bot.sendMessage(chat_id=chat_id, text=str(title))
def on_task_exit(self, task, config):
    """Scan subtitle files next to each accepted entry's location and repair
    mis-encoded Italian accented characters in place (keeping a .bak copy)."""
    extensions = ['.it.srt', '.ita.srt']
    if isinstance(config, list):
        extensions = [('.' + s).replace('..', '.') for s in config]
    elif isinstance(config, bool) and not config:
        return
    for entry in task.accepted:
        if not ('location' in entry and os.path.exists(entry['location'])):
            continue
        base = os.path.splitext(entry['location'])[0]
        for ext in extensions:
            sub = base + ext
            if not os.path.exists(sub):
                continue
            try:
                with open(sub, 'r') as f:
                    content = f.read()
                encoding = chardet.detect(content)['encoding']
                log.debug('encoding is %s for file %s' % (encoding, sub))
                if encoding == 'utf-8' and '\xc3' in content:
                    log.verbose('this file contains wrong characters!')
                    content = content.replace('ŕ', 'à').replace('č', 'è').replace('ě', 'ì').replace('ň', 'ò').replace('ů', 'ù').replace('Č', 'È')
                    # keep a backup before rewriting the subtitle in place
                    bak = sub + '.bak'
                    if os.path.exists(bak):
                        raise plugin.PluginWarning('backup already exists')
                    shutil.copy(sub, bak)
                    with open(sub, 'w') as f:
                        f.write(content)
                    log.info('Subtitles file fixed: ' + sub)
            except Exception as err:
                log.error('Error on file %s: %s' % (sub, err))
def _login(self, config):
    """Authenticate with the myepisodes service and return a requests session

    Return: requests session

    Raises:
        PluginWarning if login fails
        PluginError if http communication fails
    """
    url = "https://www.myepisodes.com/login.php"
    session = requests.Session()
    payload = {
        'username': config['username'],
        'password': config['password'],
        'action': 'Login',
    }
    try:
        response = session.post(url, data=payload)
        # a failed login bounces us back to a URL containing "login"
        if 'login' in response.url:
            raise plugin.PluginWarning(
                (
                    'Login to myepisodes.com failed, please see if the site is down and verify '
                    'your credentials.'
                ),
                log,
            )
    except requests.RequestException as e:
        raise plugin.PluginError('Error logging in to myepisodes: %s' % e)
    return session
def on_task_filter(self, task, config):
    """Disabled imdb_rated filter: raises immediately.

    The legacy filtering logic is kept below but is unreachable.
    """
    raise plugin.PluginWarning(
        'This plugin no longer works with the imdb, replacement will be implemented soon'
    )
    # NOTE(review): everything from here down is dead code retained from the
    # original implementation.
    if isinstance(config, basestring):
        config = {'url': task.config['imdb_rated']}
    self.update_rated(task, config)
    for entry in task.entries:
        # if no imdb_url perform lookup
        if 'imdb_url' not in entry:
            try:
                get_plugin_by_name('imdb_lookup').instance.lookup(entry)
            except PluginError:
                pass  # ignore imdb lookup failures
        # ignore entries without imdb_url
        if 'imdb_url' not in entry:
            continue
        is_rated = task.session.query(ImdbRated).\
            filter(ImdbRated.url == config['url']).\
            filter(ImdbRated.imdb_url == entry['imdb_url']).first() is not None
        if config.get('reverse', False):
            # reversed, reject unrated
            if not is_rated:
                entry.reject('imdb rated reverse')
        else:
            # normal mode, reject rated
            if is_rated:
                entry.reject('imdb rated')
def on_task_filter(self, task, config):
    """Reject accepted entries whose filename already exists in the configured
    folders. Builds one name->path index first, then checks each entry."""
    if not task.accepted:
        log.debug('No accepted entries, not scanning for existing.')
        return
    log.verbose('Scanning path(s) for existing files.')
    config = self.prepare_config(config)
    is_windows = platform.system() == 'Windows'
    filenames = {}
    for folder in config:
        folder = Path(folder).expanduser()
        if not folder.exists():
            raise plugin.PluginWarning('Path %s does not exist' % folder, log)
        for candidate in folder.walk(errors='ignore'):
            key = candidate.name
            # windows file system is not case sensitive
            if is_windows:
                key = key.lower()
            filenames[key] = candidate
    for entry in task.accepted:
        # priority is: filename, location (filename only), title
        name = Path(entry.get('filename',
                              entry.get('location', entry['title']))).name
        if is_windows:
            name = name.lower()
        if name in filenames:
            log.debug('Found %s in %s' % (name, filenames[name]))
            entry.reject('exists in %s' % filenames[name])
def search(self, entry, config):
    """Search kickass.to's RSS interface and return a set of result entries."""
    search_strings = [
        normalize_unicode(s).lower()
        for s in entry.get('search_strings', [entry['title']])
    ]
    entries = set()
    for search_string in search_strings:
        if config.get('verified'):
            search_string += ' verified:1'
        url = 'http://kickass.to/search/%s/?rss=1' % urllib.quote(
            search_string.encode('utf-8'))
        if config.get('category', 'all') != 'all':
            url += '&category=%s' % config['category']
        log.debug('requesting: %s' % url)
        rss = feedparser.parse(url)
        status = rss.get('status', False)
        if status != 200:
            raise plugin.PluginWarning(
                'Search result not 200 (OK), received %s' % status)
        if rss.get('bozo_exception', False):
            raise plugin.PluginWarning('Got bozo_exception (bad feed)')
        for item in rss.entries:
            entry = Entry()
            entry['title'] = item.title
            if not item.get('enclosures'):
                log.warning(
                    'Could not get url for entry from KAT. Maybe plugin needs updated?'
                )
                continue
            entry['url'] = item.enclosures[0]['url']
            entry['torrent_seeds'] = int(item.torrent_seeds)
            entry['torrent_leeches'] = int(item.torrent_peers)
            entry['search_sort'] = torrent_availability(
                entry['torrent_seeds'], entry['torrent_leeches'])
            entry['content_size'] = int(item.torrent_contentlength) / 1024 / 1024
            entry['torrent_info_hash'] = item.torrent_infohash
            entries.add(entry)
    return entries
def _check_token(self):
    """Probe the Telegram API (getMe) to verify the configured bot token."""
    try:
        self._bot.getMe()
    except UnicodeDecodeError as e:
        # the library surfaces malformed tokens as UnicodeDecodeError
        self.log.trace('bot.getMe() raised: {!r}'.format(e))
        raise plugin.PluginWarning('invalid bot token')
    except (NetworkError, TelegramError) as e:
        self.log.error(
            'Could not connect Telegram servers at this time, please try again later: %s',
            e.message)
def on_task_input(self, task, config):
    """Produce one entry per show found on the user's myepisodes.com
    manage page (optionally including ignored shows)."""
    if not task.requests.cookies:
        # no session cookie yet - authenticate first
        username = config['username']
        password = config['password']
        log.debug("Logging in to %s ..." % URL)
        params = {
            'username': username,
            'password': password,
            'action': 'Login'
        }
        loginsrc = task.requests.post(URL + 'login.php?action=login',
                                      data=params).content
        # a successful login echoes the username back in the page
        if str(username) not in loginsrc:
            raise plugin.PluginWarning(
                ('Login to myepisodes.com failed, please check '
                 'your account data or see if the site is down.'), log)

    page = task.requests.get(URL + "myshows/manage/").content
    try:
        soup = get_soup(page)
    except Exception as e:
        raise plugin.PluginError(
            "Unable to parse myepisodes.com page: %s" % (e, ))

    entries = []

    def show_list(select_id):
        # options of the named <select> element hold the shows
        return soup.find('select', {'id': select_id}).findAll('option')

    options = show_list('shows')
    if config['include_ignored']:
        options = chain(options, show_list('ignored_shows'))
    for option in options:
        name = option.text
        if config.get('strip_dates'):
            # Remove year from end of name if present
            name = re.sub(r'\s+\(\d{4}\)$', '', name)
        showid = option.get('value')
        url = '%sviews.php?type=epsbyshow&showid=%s' % (URL, showid)
        entry = Entry()
        entry['title'] = name
        entry['url'] = url
        entry['series_name'] = name
        entry['myepisodes_id'] = showid
        if entry.isvalid():
            entries.append(entry)
        else:
            log.debug('Invalid entry created? %s' % entry)

    if not entries:
        log.warning(
            "No shows found on myepisodes.com list. Maybe you need to add some first?"
        )
    return entries
def _handle_path(self, entries, ftp, baseurl, path='', mlst_supported=False,
                 files_only=False, recursive=False, encoding=None):
    """Walk an FTP path and append an Entry for each item found.

    Returns False when `path` turned out to be a single file, True otherwise.
    """
    try:
        dirs = ftp.nlst(path)
    except ftplib.error_perm as e:
        raise plugin.PluginWarning(str(e))
    if not dirs:
        log.verbose('Directory %s is empty', path)
    if len(dirs) == 1 and path == dirs[0]:
        # It's probably a file
        return False
    for name in dirs:
        if encoding:
            name = name.decode(encoding)
        mlst = {}
        if mlst_supported:
            mlst_output = ftp.sendcmd('MLST ' + path + '/' + name)
            # the second line of the MLST reply carries the facts
            clean_mlst_output = [
                line.strip().lower() for line in mlst_output.splitlines()
            ][1]
            mlst = self.parse_mlst(clean_mlst_output)
        if recursive and (not mlst_supported or mlst.get('type') == 'dir'):
            is_directory = self._handle_path(entries, ftp, baseurl,
                                             path + '/' + name,
                                             mlst_supported, files_only,
                                             recursive, encoding)
            if not is_directory and not mlst_supported:
                mlst['type'] = 'file'
        if not files_only or mlst.get('type') == 'file':
            url = baseurl + name
            title = os.path.basename(name)
            log.info('[%s] "%s"' % (
                mlst.get('type') or "unknown",
                path + '/' + name,
            ))
            entry = Entry(title, url)
            # prefer the size reported by MLST; fall back to a SIZE command
            if 'size' not in mlst:
                entry['content-size'] = ftp.size(path + '/' + name) / (1024 * 1024)
            else:
                entry['content-size'] = float(mlst.get('size')) / (1024 * 1024)
            entries.append(entry)
    return True
def on_task_output(self, task, config):
    """Validate each accepted entry's `location`, collect sibling files that
    share its base name with one of the configured extensions, and dispatch
    to the subclass's handle_entry()."""
    if config is True:
        config = {}
    elif config is False:
        return
    sibling_exts = []
    if 'along' in config:
        sibling_exts = [('.' + s).replace('..', '.').lower()
                        for s in config['along']]
    for entry in task.accepted:
        if 'location' not in entry:
            self.log.verbose(
                'Cannot handle %s because it does not have the field location.'
                % entry['title'])
            continue
        src = entry['location']
        src_isdir = os.path.isdir(src)
        try:
            # check location
            if not os.path.exists(src):
                raise plugin.PluginWarning(
                    'location `%s` does not exists (anymore).' % src)
            if src_isdir:
                if not config.get('allow_dir'):
                    raise plugin.PluginWarning(
                        'location `%s` is a directory.' % src)
            elif not os.path.isfile(src):
                raise plugin.PluginWarning('location `%s` is not a file.' % src)

            # search for namesakes
            siblings = []
            if not src_isdir and 'along' in config:
                src_file, src_ext = os.path.splitext(src)
                for ext in sibling_exts:
                    if ext != src_ext.lower() and os.path.exists(src_file + ext):
                        siblings.append(src_file + ext)

            # execute action in subclasses
            self.handle_entry(task, config, entry, siblings)
        except Exception as err:
            entry.fail(str(err))
            continue
def _validate_entry(self, entry): """ Checks an entry for all of the fields needed to comunicate with myepidoes Return: boolean """ if 'series_season' not in entry \ or 'series_episode' not in entry \ or 'series_name' not in entry: raise plugin.PluginWarning( 'Can\'t mark entry `%s` in myepisodes without series_season, series_episode and series_name ' 'fields' % entry['title'], log)
def search(self, entry, config):
    """Search nyaa.eu's RSS feed and return a set of result entries."""
    if not isinstance(config, dict):
        config = {'category': config}
    config.setdefault('category', 'anime')
    config.setdefault('filter', 'all')
    entries = set()
    for search_string in entry.get('search_strings', [entry['title']]):
        name = normalize_unicode(search_string)
        url = 'http://www.nyaa.eu/?page=rss&cats=%s&filter=%s&term=%s' % (
            CATEGORIES[config['category']], FILTERS.index(config['filter']),
            urllib.quote(name.encode('utf-8')))
        log.debug('requesting: %s' % url)
        rss = feedparser.parse(url)
        status = rss.get('status', False)
        if status != 200:
            raise plugin.PluginWarning(
                'Search result not 200 (OK), received %s' % status)
        if rss.get('bozo_exception', False):
            raise plugin.PluginWarning('Got bozo_exception (bad feed)')
        for item in rss.entries:
            entry = Entry()
            entry['title'] = item.title
            entry['url'] = item.link
            # TODO: parse some shit
            # entry['torrent_seeds'] = int(item.seeds)
            # entry['torrent_leeches'] = int(item.leechs)
            # entry['search_sort'] = torrent_availability(entry['torrent_seeds'], entry['torrent_leeches'])
            # entry['content_size'] = int(item.size) / 1024 / 1024
            entries.add(entry)
    return entries
def on_task_download(self, task, config):
    """Download each accepted entry's FTP URL (a file or a whole directory
    tree) into the rendered ftp_tmp_path."""
    config = self.prepare_config(config, task)
    for entry in task.accepted:
        ftp_url = urlparse(entry.get('url'))
        ftp_url = ftp_url._replace(path=unquote(ftp_url.path))
        current_path = os.path.dirname(ftp_url.path)
        try:
            ftp = self.ftp_connect(config, ftp_url, current_path)
        except ftplib.all_errors as e:
            entry.fail("Unable to connect to server : %s" % (e))
            break

        to_path = config['ftp_tmp_path']
        try:
            to_path = entry.render(to_path)
        except RenderError as err:
            raise plugin.PluginError(
                "Path value replacement `%s` failed: %s" %
                (to_path, err.args[0]))
        # Clean invalid characters with pathscrub plugin
        to_path = pathscrub(to_path)
        if not os.path.exists(to_path):
            log.debug("Creating base path: %s" % to_path)
            os.makedirs(to_path)
        if not os.path.isdir(to_path):
            raise plugin.PluginWarning(
                "Destination `%s` is not a directory." % to_path)

        file_name = os.path.basename(ftp_url.path)
        try:
            # Directory
            ftp = self.check_connection(ftp, config, ftp_url, current_path)
            ftp.cwd(file_name)
            self.ftp_walk(ftp, os.path.join(to_path, file_name), config,
                          ftp_url, ftp_url.path)
            ftp = self.check_connection(ftp, config, ftp_url, current_path)
            ftp.cwd('..')
            if config['delete_origin']:
                ftp.rmd(file_name)
        except ftplib.error_perm:
            # File
            self.ftp_down(ftp, file_name, to_path, config, ftp_url,
                          current_path)
        ftp.close()
def on_task_input(self, task, config):
    """List the configured FTP directories and emit one entry per item found."""
    config = self.prepare_config(config)
    connection_config = config['config']
    # pick the right client class for plain vs TLS connections
    if connection_config['use-ssl']:
        ftp = ftplib.FTP_TLS()
    else:
        ftp = ftplib.FTP()
    # ftp.set_debuglevel(2)
    log.debug('Trying connecting to: %s', (connection_config['host']))
    try:
        ftp.connect(connection_config['host'], connection_config['port'])
        ftp.login(connection_config['username'],
                  connection_config['password'])
    except ftplib.all_errors as e:
        raise plugin.PluginError(e)
    log.debug('Connected.')
    # binary transfer mode, passive connections
    ftp.sendcmd('TYPE I')
    ftp.set_pasv(True)
    entries = []
    for path in config['dirs']:
        baseurl = "ftp://%s:%s@%s:%s/" % (
            connection_config['username'], connection_config['password'],
            connection_config['host'], connection_config['port'])
        try:
            listing = ftp.nlst(path)
        except ftplib.error_perm as e:
            raise plugin.PluginWarning(str(e))
        if not listing:
            log.verbose('Directory %s is empty', path)
        for item in listing:
            url = baseurl + item
            title = os.path.basename(item)
            log.info('Accepting entry %s ' % title)
            entries.append(Entry(title, url))
    return entries
def on_task_filter(self, task, config):
    """Reject accepted entries whose title matches an existing file or
    directory under the configured paths."""
    if not task.accepted:
        log.debug('No accepted entries, not scanning for existing.')
        return
    log.verbose('Scanning path(s) for existing files.')
    config = self.prepare_config(config)
    for path in config:
        # unicode path causes crashes on some paths
        path = str(os.path.expanduser(path))
        if not os.path.exists(path):
            raise plugin.PluginWarning('Path %s does not exist' % path, log)
        # scan through
        for root, dirs, files in os.walk(path, followlinks=True):
            # convert filelists into utf-8 to avoid unicode problems
            dirs = [x.decode('utf-8', 'ignore') for x in dirs]
            files = [x.decode('utf-8', 'ignore') for x in files]
            for entry in task.accepted:
                name = entry['title']
                if name in dirs or name in files:
                    log.debug('Found %s in %s' % (name, root))
                    entry.reject(os.path.join(root, name))
def _lookup_myepisodes_id(self, entry):
    """Attempt to find the myepisodes id for the series.

    Tries, in order: the id already on the entry, the local database,
    then a myepisodes.com website search.

    Return: myepisode id

    Raises:
        plugin.PluginWarning: if no source can determine the id.
    """
    lookups = (
        lambda: entry.get('myepisodes_id'),         # already on the entry?
        lambda: self._retrieve_id_from_database(entry),  # previously recorded?
        lambda: self._retrieve_id_from_website(entry),   # last resort: search the site
    )
    for lookup in lookups:
        myepisodes_id = lookup()
        if myepisodes_id:
            return myepisodes_id
    raise plugin.PluginWarning(
        'Unable to determine the myepisodes id for: `%s`' % entry['title'], log)
def output(self, task, entry, config):
    """Move the entry's temp file into its final destination.

    Raises:
        PluginError if the operation fails.
    """
    if 'file' not in entry and not task.options.test:
        log.debug('file missing, entry: %s', entry)
        raise plugin.PluginError(
            'Entry `%s` has no temp file associated with' % entry['title'])
    try:
        # use path from entry if has one, otherwise use from download definition parameter
        path = entry.get('path', config.get('path'))
        if not isinstance(path, str):
            raise plugin.PluginError('Invalid `path` in entry `%s`' %
                                     entry['title'])

        # override path from command line parameter
        if task.options.dl_path:
            path = task.options.dl_path

        # expand variables in path
        try:
            path = os.path.expanduser(entry.render(path))
        except RenderError as e:
            entry.fail(
                'Could not set path. Error during string replacement: %s' % e)
            return

        # Clean illegal characters from path name
        path = pathscrub(path)

        # If we are in test mode, report and return
        if task.options.test:
            log.info('Would write `%s` to `%s`', entry['title'], path)
            # Set a fake location, so the exec plugin can do string replacement during --test #1015
            entry['location'] = os.path.join(path, 'TEST_MODE_NO_OUTPUT')
            return

        # make path
        if not os.path.isdir(path):
            log.debug('Creating directory %s', path)
            try:
                os.makedirs(path)
            except:
                raise plugin.PluginError('Cannot create path %s' % path, log)

        # check that temp file is present
        if not os.path.exists(entry['file']):
            log.debug('entry: %s', entry)
            raise plugin.PluginWarning(
                'Downloaded temp file `%s` doesn\'t exist!?' % entry['file'])

        # decide the final filename: config template, existing value, or title
        if config.get('filename'):
            try:
                entry['filename'] = entry.render(config['filename'])
                log.debug('set filename from config %s' % entry['filename'])
            except RenderError as e:
                entry.fail(
                    'Could not set filename. Error during string replacement: %s' % e)
                return
        # if we still don't have a filename, try making one from title (last resort)
        elif not entry.get('filename'):
            entry['filename'] = entry['title']
            log.debug('set filename from title %s', entry['filename'])
            if 'mime-type' not in entry:
                log.warning(
                    'Unable to figure proper filename for %s. Using title.',
                    entry['title'])
            else:
                guess = mimetypes.guess_extension(entry['mime-type'])
                if not guess:
                    log.warning('Unable to guess extension with mime-type %s',
                                guess)
                else:
                    self.filename_ext_from_mime(entry)

        name = entry.get('filename', entry['title'])
        # Remove illegal characters from filename #325, #353
        name = pathscrub(name)
        # Remove directory separators from filename #208
        name = name.replace('/', ' ')
        if sys.platform.startswith('win'):
            name = name.replace('\\', ' ')
        # remove duplicate spaces
        name = ' '.join(name.split())

        # combine to full path + filename
        destfile = os.path.join(path, name)
        log.debug('destfile: %s', destfile)

        if os.path.exists(destfile):
            import filecmp
            if filecmp.cmp(entry['file'], destfile):
                log.debug("Identical destination file '%s' already exists",
                          destfile)
            elif config.get('overwrite'):
                log.debug("Overwriting already existing file %s", destfile)
            else:
                log.info(
                    'File `%s` already exists and is not identical, download failed.',
                    destfile)
                entry.fail(
                    'File `%s` already exists and is not identical.' % destfile)
                return
        else:
            # move temp file
            log.debug('moving %s to %s', entry['file'], destfile)
            try:
                shutil.move(entry['file'], destfile)
            except (IOError, OSError) as err:
                # ignore permission errors, see ticket #555
                import errno
                if not os.path.exists(destfile):
                    raise plugin.PluginError('Unable to write %s: %s' %
                                             (destfile, err))
                if err.errno != errno.EPERM and err.errno != errno.EACCES:
                    raise

        # store final destination as output key
        entry['location'] = destfile
    finally:
        self.cleanup_temp_file(entry)
def search(self, entry, config):
    """Search kickass.to's RSS interface, re-querying with a seeders sort when
    the first (newest-first) page came back full, and return a set of entries."""
    search_strings = [
        normalize_unicode(s).lower()
        for s in entry.get('search_strings', [entry['title']])
    ]
    entries = set()
    for search_string in search_strings:
        search_string_url_fragment = search_string
        if config.get('verified'):
            search_string_url_fragment += ' verified:1'
        url = 'http://kickass.to/search/%s/?rss=1' % urllib.quote(
            search_string_url_fragment.encode('utf-8'))
        if config.get('category', 'all') != 'all':
            url += '&category=%s' % config['category']
        sorters = [{'field': 'time_add', 'sorder': 'desc'},
                   {'field': 'seeders', 'sorder': 'desc'}]
        for sort in sorters:
            # NOTE(review): the sort parameters accumulate onto the same url
            # across iterations; preserved from the original implementation.
            url += '&field=%(field)s&sorder=%(sorder)s' % sort
            log.debug('requesting: %s' % url)
            rss = feedparser.parse(url)
            status = rss.get('status', False)
            if status == 404:
                # Kat returns status code 404 when no results found for some reason...
                log.debug('No results found for search query: %s' %
                          search_string)
                continue
            elif status != 200:
                raise plugin.PluginWarning(
                    'Search result not 200 (OK), received %s' % status)
            if rss.get('bozo_exception', False):
                raise plugin.PluginWarning('Got bozo_exception (bad feed)')
            for item in rss.entries:
                entry = Entry()
                entry['title'] = item.title
                if not item.get('enclosures'):
                    log.warning(
                        'Could not get url for entry from KAT. Maybe plugin needs updated?'
                    )
                    continue
                entry['url'] = item.enclosures[0]['url']
                entry['torrent_seeds'] = int(item.torrent_seeds)
                entry['torrent_leeches'] = int(item.torrent_peers)
                entry['search_sort'] = torrent_availability(
                    entry['torrent_seeds'], entry['torrent_leeches'])
                entry['content_size'] = int(
                    item.torrent_contentlength) / 1024 / 1024
                entry['torrent_info_hash'] = item.torrent_infohash
                entries.add(entry)
            # fewer than a full page of results: no need to re-sort and re-query
            if len(rss.entries) < 25:
                break
    return entries
def handle_entry(self, task, config, entry, siblings):
    """Transfer `entry['location']` (move or copy, per ``self.move``) to its
    resolved destination, bringing the companion files in ``siblings`` along.

    :param task: current task; ``task.options.test`` enables dry-run mode
    :param config: plugin options (keys read here: to, rename,
        unpack_safety, keep_extension)
    :param entry: accepted entry; its `location` field is the source path
    :param siblings: dict mapping sibling file path -> extension suffix
        (subtitles etc.), gathered by the caller
    :raises plugin.PluginError: when jinja rendering of path/filename fails
    :raises plugin.PluginWarning: same source and destination, destination
        not a directory, or the unpack-safety wait times out
    """
    src = entry['location']
    src_isdir = os.path.isdir(src)
    src_path, src_name = os.path.split(src)
    # get the proper path and name in order of: entry, config, above split
    # NOTE(review): self.destination_field is set by the subclass — confirm
    dst_path = entry.get(self.destination_field, config.get('to', src_path))
    if config.get('rename'):
        dst_name = config['rename']
    elif entry.get('filename') and entry['filename'] != src_name:
        # entry specifies different filename than what was split from the path
        # since some inputs fill in filename it must be different in order to be used
        dst_name = entry['filename']
    else:
        dst_name = src_name
    # both path and filename may contain jinja templates; render them now
    try:
        dst_path = entry.render(dst_path)
    except RenderError as err:
        raise plugin.PluginError('Path value replacement `%s` failed: %s' %
                                 (dst_path, err.args[0]))
    try:
        dst_name = entry.render(dst_name)
    except RenderError as err:
        raise plugin.PluginError(
            'Filename value replacement `%s` failed: %s' %
            (dst_name, err.args[0]))
    # Clean invalid characters with pathscrub plugin
    dst_path = pathscrub(os.path.expanduser(dst_path))
    dst_name = pathscrub(dst_name, filename=True)
    # Join path and filename
    dst = os.path.join(dst_path, dst_name)
    if dst == entry['location']:
        raise plugin.PluginWarning('source and destination are the same.')
    if not os.path.exists(dst_path):
        if task.options.test:
            self.log.info('Would create `%s`', dst_path)
        else:
            self.log.info('Creating destination directory `%s`', dst_path)
            os.makedirs(dst_path)
    if not os.path.isdir(dst_path) and not task.options.test:
        raise plugin.PluginWarning('destination `%s` is not a directory.'
                                   % dst_path)
    # unpack_safety: if the source file is still growing it is probably
    # being unpacked; poll its size once a second until it stops changing,
    # giving up after 30 minutes
    if config.get('unpack_safety', entry.get('unpack_safety', True)):
        count = 0
        while True:
            if count > 60 * 30:
                raise plugin.PluginWarning(
                    'The task has been waiting unpacking for 30 minutes')
            size = os.path.getsize(src)
            time.sleep(1)
            new_size = os.path.getsize(src)
            if size != new_size:
                if not count % 10:
                    # log only every 10th poll to avoid spamming
                    self.log.verbose(
                        'File `%s` is possibly being unpacked, waiting ...',
                        src_name)
            else:
                break
            count += 1
    src_file, src_ext = os.path.splitext(src)
    dst_file, dst_ext = os.path.splitext(dst)
    # Check dst contains src_ext
    if config.get('keep_extension', entry.get('keep_extension', True)):
        if not src_isdir and dst_ext != src_ext:
            self.log.verbose('Adding extension `%s` to dst `%s`', src_ext,
                             dst)
            dst += src_ext
            # this is used for sibling files. dst_ext turns out not to be an extension!
            dst_file += dst_ext
    funct_name = 'move' if self.move else 'copy'
    funct_done = 'moved' if self.move else 'copied'
    if task.options.test:
        # dry run: only report what would happen
        self.log.info('Would %s `%s` to `%s`', funct_name, src, dst)
        for s, ext in siblings.items():
            # we cannot rely on splitext for extensions here (subtitles may have the language code)
            d = dst_file + ext
            self.log.info('Would also %s `%s` to `%s`', funct_name, s, d)
    else:
        # IO errors will have the entry mark failed in the base class
        if self.move:
            shutil.move(src, dst)
        elif src_isdir:
            shutil.copytree(src, dst)
        else:
            shutil.copy(src, dst)
        self.log.info('`%s` has been %s to `%s`', src, funct_done, dst)
        # further errors will not have any effect (the entry has been successfully moved or copied out)
        for s, ext in siblings.items():
            # we cannot rely on splitext for extensions here (subtitles may have the language code)
            d = dst_file + ext
            try:
                if self.move:
                    shutil.move(s, d)
                else:
                    shutil.copy(s, d)
                self.log.info('`%s` has been %s to `%s` as well.', s,
                              funct_done, d)
            except Exception as err:
                # best-effort: sibling failures are logged, not fatal
                self.log.warning(str(err))
    entry['old_location'] = entry['location']
    entry['location'] = dst
    if self.move and not src_isdir:
        self.clean_source(task, config, entry)
def on_task_output(self, task, config):
    """Create a soft or hard link for every accepted entry's `location`."""
    if not config:
        return
    config = self.prepare_config(config)
    existing = config['existing']
    link_type = config['link_type']
    for entry in task.accepted:
        if 'location' not in entry:
            entry.fail('Does not have location field for symlinking')
            continue
        src = entry['location']
        src_dir, src_name = os.path.split(src)
        # destination path/name priority: entry field, then config, then
        # what was split off the source path
        dest_dir = entry.get('link_to', config.get('to', src_dir))
        if config.get('rename'):
            dest_name = config['rename']
        elif entry.get('filename') and entry['filename'] != src_name:
            # some inputs fill in `filename`; honour it only when it
            # differs from the name already derived from the path
            dest_name = entry['filename']
        else:
            dest_name = src_name
        # both parts may contain jinja templates
        try:
            dest_dir = entry.render(dest_dir)
        except RenderError as err:
            raise plugin.PluginError(
                'Path value replacement `%s` failed: %s' %
                (dest_dir, err.args[0]))
        try:
            dest_name = entry.render(dest_name)
        except RenderError as err:
            raise plugin.PluginError(
                'Filename value replacement `%s` failed: %s' %
                (dest_name, err.args[0]))
        # strip characters that are invalid on the target filesystem
        dest_dir = pathscrub(os.path.expanduser(dest_dir))
        dest_name = pathscrub(dest_name, filename=True)
        dest = os.path.join(dest_dir, dest_name)
        if dest == entry['location']:
            raise plugin.PluginWarning(
                'source and destination are the same.')
        # Hardlinks for dirs will not be failed here
        if os.path.exists(dest) and (link_type == 'soft'
                                     or os.path.isfile(src)):
            msg = 'Symlink destination %s already exists' % dest
            if existing == 'ignore':
                logger.verbose(msg)
            else:
                entry.fail(msg)
            continue
        logger.verbose('{}link `{}` to `{}`', link_type, src, dest)
        try:
            if link_type == 'soft':
                os.symlink(src, dest)
            elif os.path.isdir(src):
                self.hard_link_dir(src, dest, existing)
            else:
                parent = os.path.dirname(dest)
                if not os.path.exists(parent):
                    os.makedirs(parent)
                os.link(src, dest)
        except OSError as e:
            entry.fail('Failed to create %slink, %s' % (link_type, e))
def sqlite2cookie(self, filename):
    """Build a MozillaCookieJar from a Firefox 3+ ``cookies.sqlite`` file.

    Rows are rewritten into Netscape cookie-file format in memory and fed
    through MozillaCookieJar's parser.

    :param filename: path to the sqlite cookie database
    :returns: http.cookiejar.MozillaCookieJar with the loaded cookies
    :raises plugin.PluginWarning: when neither sqlite3 nor pysqlite2 is
        importable
    :raises plugin.PluginError: when the database cannot be opened or is
        not a valid Firefox 3 cookies file
    """
    from io import StringIO
    try:
        from pysqlite2 import dbapi2 as sqlite
    except ImportError:
        try:
            from sqlite3 import dbapi2 as sqlite  # try the 2.5+ stdlib
        except ImportError:
            raise plugin.PluginWarning(
                'Unable to use sqlite3 or pysqlite2', log)
    log.debug('connecting: %s' % filename)
    try:
        con = sqlite.connect(filename)
    except sqlite.Error:
        # fix: was a bare `except:` which also swallowed
        # KeyboardInterrupt/SystemExit; only database errors belong here
        raise plugin.PluginError('Unable to open cookies sqlite database')
    # fix: close the connection on every exit path (it used to leak when
    # the query or row iteration raised)
    try:
        cur = con.cursor()
        try:
            cur.execute(
                'select host, path, isSecure, expiry, name, value from moz_cookies'
            )
        except sqlite.Error:
            raise plugin.PluginError(
                '%s does not appear to be a valid Firefox 3 cookies file' %
                filename, log)
        ftstr = ['FALSE', 'TRUE']
        s = StringIO()
        s.write("""\
# Netscape HTTP Cookie File
# http://www.netscape.com/newsref/std/cookie_spec.html
# This is a generated file! Do not edit.

""")
        count = 0
        failed = 0
        log.debug('fetching all cookies')

        def notabs(val):
            # literal tabs would corrupt the tab-separated cookie file (#582)
            # NOTE(review): relies on module-level `basestring`
            # (py2 / past.builtins) — confirm the import exists
            if isinstance(val, basestring):
                return val.replace('\t', '')
            return val

        while True:
            try:
                item = next(cur)
                # remove \t from item (#582)
                item = [notabs(field) for field in item]
                try:
                    s.write('%s\t%s\t%s\t%s\t%s\t%s\t%s\n' %
                            (item[0], ftstr[item[0].startswith('.')],
                             item[1], ftstr[item[2]], item[3], item[4],
                             item[5]))
                    log.trace('Adding cookie for %s. key: %s value: %s' %
                              (item[0], item[4], item[5]))
                    count += 1
                except IOError:
                    # dump the offending row in hex for debugging

                    def to_hex(x):
                        return ''.join(
                            [hex(ord(c))[2:].zfill(2) for c in x])

                    i = 0
                    for val in item:
                        if isinstance(val, basestring):
                            log.debug('item[%s]: %s' % (i, to_hex(val)))
                        else:
                            log.debug('item[%s]: %s' % (i, val))
                        i += 1
                    failed += 1
            except UnicodeDecodeError:
                # for some god awful reason the sqlite module can throw UnicodeDecodeError ...
                log.debug('got UnicodeDecodeError from sqlite, ignored')
                failed += 1
            except StopIteration:
                break
    finally:
        con.close()
    log.debug('Added %s cookies to jar. %s failed (non-ascii)' %
              (count, failed))
    s.seek(0)
    cookie_jar = http.cookiejar.MozillaCookieJar()
    cookie_jar._really_load(s, '', True, True)
    return cookie_jar
def on_task_exit(self, task, config):
    """Store finished / downloaded entries at exit.

    Persists accepted entries as RSSEntry rows keyed on the output file,
    then regenerates the RSS feed file from the stored history, applying
    the configured item-count and age limits.
    """
    if not rss2gen:
        raise plugin.PluginWarning(
            'plugin make_rss requires PyRSS2Gen library.')
    config = self.prepare_config(config)
    # when history is disabled, remove everything from backlog on every run (a bit hackish, rarely useful)
    if not config['history']:
        log.debug('disabling history')
        for item in task.session.query(RSSEntry).filter(
                RSSEntry.file == config['file']).all():
            task.session.delete(item)
    # save entries into db for RSS generation
    for entry in task.accepted:
        rss = RSSEntry()
        # title template may fail to render; fall back to the raw title
        try:
            rss.title = entry.render(config['title'])
        except RenderError as e:
            log.error(
                'Error rendering jinja title for `%s` falling back to entry title: %s'
                % (entry['title'], e))
            rss.title = entry['title']
        # first configured link field present on the entry wins
        for field in config['link']:
            if field in entry:
                rss.link = entry[field]
                break
        try:
            template = get_template(config['template'], 'rss')
        except ValueError as e:
            raise plugin.PluginError('Invalid template specified: %s' % e)
        try:
            rss.description = render_from_entry(template, entry)
        except RenderError as e:
            log.error(
                'Error while rendering entry %s, falling back to plain title: %s'
                % (entry, e))
            rss.description = entry['title'] + ' - (Render Error)'
        rss.file = config['file']
        # TODO: check if this exists and suggest disabling history if it does since it shouldn't happen normally ...
        log.debug('Saving %s into rss database' % entry['title'])
        task.session.add(rss)
    # NOTE(review): unreachable — the same condition already raised at the
    # top of the method; probably a leftover from an older version
    if not rss2gen:
        return
    # don't generate rss when learning
    if task.options.learn:
        return
    db_items = task.session.query(RSSEntry).filter(RSSEntry.file == config['file']).\
        order_by(RSSEntry.published.desc()).all()
    # make items
    rss_items = []
    for db_item in db_items:
        add = True
        # NOTE(review): `>` lets the feed grow to items+1 entries before
        # capping — looks like an off-by-one; confirm intent before changing
        if config['items'] != -1:
            if len(rss_items) > config['items']:
                add = False
        if config['days'] != -1:
            if datetime.datetime.today() - datetime.timedelta(
                    days=config['days']) > db_item.published:
                add = False
        if add:
            # add into generated feed
            # guid is derived from content so it stays stable across runs
            hasher = hashlib.sha1()
            hasher.update(db_item.title.encode('utf8'))
            hasher.update(db_item.description.encode('utf8'))
            hasher.update(db_item.link.encode('utf8'))
            guid = base64.urlsafe_b64encode(hasher.digest())
            guid = PyRSS2Gen.Guid(guid, isPermaLink=False)
            gen = {
                'title': db_item.title,
                'description': db_item.description,
                'link': db_item.link,
                'pubDate': db_item.published,
                'guid': guid
            }
            log.trace('Adding %s into rss %s' %
                      (gen['title'], config['file']))
            rss_items.append(PyRSS2Gen.RSSItem(**gen))
        else:
            # no longer needed
            task.session.delete(db_item)
    # make rss
    rss = PyRSS2Gen.RSS2(title='FlexGet',
                         link=config.get('rsslink', 'http://flexget.com'),
                         description='FlexGet generated RSS feed',
                         lastBuildDate=datetime.datetime.utcnow(),
                         items=rss_items)
    # don't run with --test
    if task.options.test:
        log.info('Would write rss file with %d entries.', len(rss_items))
        return
    # write rss
    fn = os.path.expanduser(config['file'])
    with open(fn, 'w') as file:
        try:
            log.verbose('Writing output rss to %s' % fn)
            rss.write_xml(file, encoding=config['encoding'])
        except LookupError:
            log.critical('Unknown encoding %s' % config['encoding'])
            return
        except IOError:
            # TODO: plugins cannot raise PluginWarnings in terminate event ..
            log.critical('Unable to write %s' % fn)
            return