def on_task_output(self, task, config):
    """Push accepted entries to a Firebase realtime database.

    Builds an item dict from the configured fields, optionally derives a
    grouping key from the entry title, and PUTs/POSTs the JSON document to
    the configured storage URL.
    """
    # Hoist the loop-invariant pattern; raw string avoids invalid-escape warnings.
    digits_re = re.compile(r"\d+")
    for entry in task.accepted:
        item = {}
        for key in config['fields']:
            if isinstance(key, dict):
                # Single-entry mapping {field_name: template}; dict views are not
                # subscriptable on py3, so use items() iteration instead of keys()[0].
                field, template = next(iter(key.items()))
                item[field] = render_from_entry(template, entry)
            elif entry.get(key):
                item[key] = entry[key]
        try:
            path = [] if config['path'] == "" else [render_from_entry(config['path'], entry)]
            if config['grouping']:
                # Heuristically reduce the title to a series/group name.
                group = entry['title'].replace(".", " ").replace("_", " ")
                group = group.split(" - ")[0]
                group = group.split(",")[0]
                group = group.split("#")[0]
                group = group.split(":")[0]
                group = group.split(" (")[0]
                group = digits_re.split(group)[0]
                group = group.strip()
                item['group'] = group
                path.append(group)
            if config['key']:
                path.append(render_from_entry(config['key'], entry))
                method = "put"
            else:
                method = "post"
            path = urllib.quote("/".join(path))
            url = config['storage'] + "/" + path + ".json"
            if config['auth']:
                url += "?auth=" + config['auth']
            log.verbose("Requesting Firebase: " + method.upper() + " " + url)
            log.verbose(item)
            response = requests.request(method, url, data=jsonpickle.encode(item, unpicklable=False))
            log.verbose('Firebase response: ' + response.text)
            if not response.status_code == 200:
                entry.fail('http error')
        except Exception as e:
            entry.fail("exception: %s" % e)
def on_task_output(self, task, config):
    """Download subtitle zip archives for accepted entries and extract them.

    For each accepted entry, fetch every candidate URL, unzip the payload,
    write it to the rendered destination path and record the output file in
    ``entry['output']``.
    """
    for entry in task.accepted:
        log.info('entry: %s' % entry)
        # An entry may carry several candidate URLs; fall back to the single one.
        if entry.get('urls'):
            urls = entry.get('urls')
        else:
            urls = [entry['url']]
        for url in urls:
            with closing(self.opener.open(url)) as page:
                try:
                    content = page.read()
                    z = self._zip(content)
                    # The server advertises the archive name via Content-Disposition.
                    filename = z.headers.dict['content-disposition'].split('=')[1]
                    path = config.get('path')
                    path = render_from_entry(path, entry)
                    filename = os.path.join(path, filename)
                    filename = os.path.expanduser(filename)
                    soup = BeautifulSoup(content)
                    with open(filename, 'wb') as f:
                        f.write(z.read())
                    entry['output'] = filename
                    # Extra metadata scraped from the page markup.
                    self._fill_fields(entry, soup)
                except ValueError:
                    # BUG FIX: was a bare print(); use the module logger like the
                    # rest of the plugin so the message reaches the flexget log.
                    log.error("Missing subtitle link in page: %s" % page.geturl())
def on_task_output(self, task, config):
    """Generate a Kodi-style tvshow.nfo next to each entry's location.

    Renders the nfo template with the title, genres and tags collected from
    the entry's anidb_* fields.
    """
    log.info('Starting fadbs_series_nfo')
    filename = os.path.expanduser('tvshow.nfo.template')
    for entry in task.entries:
        log.debug('Starting nfo generation for %s', entry['title'])
        entry['fadbs_nfo'] = {}
        entry_titles = entry.get('anidb_titles')
        if not entry_titles:
            log.warning('We were not given any titles, skipping...')
            continue
        entry['fadbs_nfo'].update(title=self.__main_title(config, entry_titles))
        entry_tags = entry.get('anidb_tags')
        entry['fadbs_nfo']['genres'] = []
        entry['fadbs_nfo']['tags'] = []
        if entry_tags:
            # __genres returns (genres, tags), split by the configured weight.
            # Reuse entry_tags instead of fetching 'anidb_tags' a second time.
            fadbs_nfo = self.__genres(entry_tags.items(), config['genre_weight'])
            entry['fadbs_nfo'].update(genres=fadbs_nfo[0])
            entry['fadbs_nfo'].update(tags=fadbs_nfo[1])
        template_ = template.render_from_entry(template.get_template(filename), entry)
        nfo_path = os.path.join(entry['location'], 'tvshow.nfo')
        # 'with' closes the file on exit; the explicit close() was redundant.
        with open(nfo_path, 'wb') as nfo:
            nfo.write(template_.encode('utf-8'))
def on_task_exit(self, task):
    """Store finished / downloaded entries at exit"""
    if not rss2gen:
        raise PluginWarning('plugin make_rss requires PyRSS2Gen library.')
    config = self.get_config(task)
    # never touch the database during a --test run
    if task.manager.options.test:
        return
    # with history off, purge everything previously stored for this file
    if not config['history']:
        log.debug('disabling history')
        old_items = task.session.query(RSSEntry).filter(RSSEntry.file == config['file']).all()
        for old in old_items:
            task.session.delete(old)
    # persist the accepted entries for later RSS generation
    for entry in task.accepted:
        rss = RSSEntry()
        rss.title = entry.render(config['title'])
        # first configured field present on the entry becomes the item link
        for field in config['link']:
            if field in entry:
                rss.link = entry[field]
                break
        rss.description = render_from_entry(get_template(config['template'], 'rss'), entry)
        rss.file = config['file']
        # TODO: check if this exists and suggest disabling history if it does since it shouldn't happen normally ...
        log.debug('Saving %s into rss database' % entry['title'])
        task.session.add(rss)
def on_task_exit(self, task):
    """Store finished / downloaded entries at exit"""
    if not rss2gen:
        raise PluginWarning('plugin make_rss requires PyRSS2Gen library.')
    config = self.get_config(task)
    # with history off, clear out anything stored for this file on every run
    if not config['history']:
        log.debug('disabling history')
        stale = task.session.query(RSSEntry).filter(RSSEntry.file == config['file']).all()
        for row in stale:
            task.session.delete(row)
    # persist accepted entries for later RSS generation
    for entry in task.accepted:
        rss = RSSEntry()
        rss.title = entry.render(config['title'])
        # first configured field present on the entry becomes the item link
        for field in config['link']:
            if field in entry:
                rss.link = entry[field]
                break
        try:
            template = get_template(config['template'], 'rss')
        except ValueError as e:
            raise PluginError('Invalid template specified: %s' % e)
        try:
            rss.description = render_from_entry(template, entry)
        except RenderError as e:
            # fall back to a plain-title description rather than failing the task
            log.error('Error while rendering entry %s, falling back to plain title: %s' % (entry, e))
            rss.description = entry['title'] + ' - (Render Error)'
        rss.file = config['file']
        # TODO: check if this exists and suggest disabling history if it does since it shouldn't happen normally ...
        log.debug('Saving %s into rss database' % entry['title'])
        task.session.add(rss)
def handle_entry(cls, entry: Entry, sftp: SftpClient, config: dict):
    """Upload an entry's local file via SFTP, optionally deleting the source."""
    destination: str = config['to']
    source: str = entry['location']
    delete_origin: bool = config['delete_origin']
    # render the destination path template against the entry, if one was given
    if destination:
        try:
            destination = render_from_entry(destination, entry)
        except RenderError as e:
            logger.error('Could not render path: {}', destination)
            entry.fail(str(e))  # type: ignore
            return
    try:
        sftp.upload(source, destination)
    except SftpError as e:
        entry.fail(str(e))  # type: ignore
        return
    # best-effort cleanup of the local copy after a successful upload
    if delete_origin and Path(source).is_file():
        try:
            Path(source).unlink()
        except Exception as e:
            logger.warning('Failed to delete file {} ({})', source, e)  # type: ignore
def on_task_exit(self, task):
    """Store finished / downloaded entries at exit"""
    if not rss2gen:
        raise PluginWarning('plugin make_rss requires PyRSS2Gen library.')
    config = self.get_config(task)
    # --test runs must not write to the database
    if task.manager.options.test:
        return
    # history disabled: drop every stored row for this feed file each run
    if not config['history']:
        log.debug('disabling history')
        query = task.session.query(RSSEntry).filter(RSSEntry.file == config['file'])
        for row in query.all():
            task.session.delete(row)
    # store accepted entries so the RSS file can be generated later
    for entry in task.accepted:
        rss = RSSEntry()
        rss.title = entry.render(config['title'])
        # the first matching configured field wins as the item link
        for field in config['link']:
            if field in entry:
                rss.link = entry[field]
                break
        rss.description = render_from_entry(get_template(config['template'], 'rss'), entry)
        rss.file = config['file']
        # TODO: check if this exists and suggest disabling history if it does since it shouldn't happen normally ...
        log.debug('Saving %s into rss database' % entry['title'])
        task.session.add(rss)
def execute(self, task, phase_name, config):
    """Run configured shell commands for a task phase.

    Renders a per-entry command for each operation bucket (entries /
    accepted / rejected / failed), then runs the phase-level command if one
    is set. Entries are failed on render, encoding or exit-code errors when
    ``fail_entries`` is enabled.
    """
    config = self.prepare_config(config)
    if phase_name not in config:
        log.debug('phase %s not configured' % phase_name)
        return
    name_map = {'for_entries': task.entries, 'for_accepted': task.accepted,
                'for_rejected': task.rejected, 'for_failed': task.failed}
    allow_background = config.get('allow_background')
    # BUG FIX: iteritems() is py2-only; items() works on both py2 and py3
    # (the rest of this block already uses py3-era APIs like task.options).
    for operation, entries in name_map.items():
        if operation not in config[phase_name]:
            continue
        log.debug('running phase_name: %s operation: %s entries: %s' % (phase_name, operation, len(entries)))
        for entry in entries:
            cmd = config[phase_name][operation]
            entrydict = EscapingDict(entry) if config.get('auto_escape') else entry
            # Do string replacement from entry, but make sure quotes get escaped
            try:
                cmd = render_from_entry(cmd, entrydict)
            except RenderError as e:
                log.error('Could not set exec command for %s: %s' % (entry['title'], e))
                # fail the entry if configured to do so
                if config.get('fail_entries'):
                    entry.fail('Entry `%s` does not have required fields for string replacement.' % entry['title'])
                continue
            log.debug('phase_name: %s operation: %s cmd: %s' % (phase_name, operation, cmd))
            if task.options.test:
                log.info('Would execute: %s' % cmd)
            else:
                # Make sure the command can be encoded into appropriate encoding, don't actually encode yet,
                # so logging continues to work.
                try:
                    cmd.encode(config['encoding'])
                except UnicodeEncodeError:
                    log.error('Unable to encode cmd `%s` to %s' % (cmd, config['encoding']))
                    if config.get('fail_entries'):
                        entry.fail('cmd `%s` could not be encoded to %s.' % (cmd, config['encoding']))
                    continue
                # Run the command, fail entries with non-zero return code if configured to
                if self.execute_cmd(cmd, allow_background, config['encoding']) != 0 and config.get('fail_entries'):
                    entry.fail('exec return code was non-zero')
    # phase keyword in this
    if 'phase' in config[phase_name]:
        cmd = config[phase_name]['phase']
        try:
            cmd = render_from_task(cmd, task)
        except RenderError as e:
            log.error('Error rendering `%s`: %s' % (cmd, e))
        else:
            log.debug('phase cmd: %s' % cmd)
            if task.options.test:
                log.info('Would execute: %s' % cmd)
            else:
                self.execute_cmd(cmd, allow_background, config['encoding'])
def render(self, template):
    """Render a template string against this entry's fields.

    Raises RenderError if there is a problem.

    :param template: A template string that uses jinja2 or python string replacement format.
    :return: The result of the rendering.
    """
    rendered = render_from_entry(template, self)
    return rendered
def get_output_path(to, entry):
    """Determine which path to output to.

    :param to: Optional path template; rendered against the entry when given.
    :param entry: Entry used for rendering, or for its 'location' fallback.
    :raises PluginError: If the template cannot be rendered.
    """
    try:
        if to:
            return render_from_entry(to, entry)
        else:
            return os.path.dirname(entry.get('location'))
    except RenderError:
        # BUG FIX: PluginError('...: %s', to) never interpolated `to` into the
        # message (the second argument is the logger); %-format it here instead.
        raise PluginError('Could not render path: %s' % to)
def get_output_path(to, entry):
    """Determine which path to output to.

    Renders ``to`` against the entry when provided, otherwise falls back to
    the directory of the entry's ``location`` field.
    """
    try:
        if not to:
            return os.path.dirname(entry.get('location'))
        return render_from_entry(to, entry)
    except RenderError:
        raise plugin.PluginError('Could not render path: %s' % to)
def handle_entry(self, entry, sftp, config, url_prefix):
    """Upload the entry's local file over SFTP.

    Renders the destination path, creates remote directories as needed,
    uploads the file and optionally deletes the local origin.
    """
    location = entry['location']
    filename = localpath.basename(location)
    to = config['to']
    if to:
        try:
            to = render_from_entry(to, entry)
        except RenderError as e:
            log.error('Could not render path: %s', to)
            entry.fail(e)
            return
    destination = remotepath.join(to, filename)
    destination_url = urljoin(url_prefix, destination)
    if not os.path.exists(location):
        log.warning('File no longer exists: %s', location)
        return
    if not sftp.lexists(to):
        try:
            sftp.makedirs(to)
        except Exception as e:
            log.error('Failed to create remote directory %s (%s)' % (to, e))
            entry.fail(e)
            return
    if not sftp.isdir(to):
        log.error('Not a directory: %s' % to)
        # BUG FIX: entry.fail was referenced but never called
        entry.fail('Not a directory: %s' % to)
        return
    try:
        sftp.put(localpath=location, remotepath=destination)
        log.verbose('Successfully uploaded %s to %s' % (location, destination_url))
    except OSError:
        log.warning('File no longer exists: %s', location)
        return
    except IOError as e:
        # BUG FIX: '%s (%s)' % to raised TypeError; supply both arguments
        log.error('Remote directory does not exist: %s (%s)' % (to, e))
        # BUG FIX: entry.fail was referenced but never called
        entry.fail('Remote directory does not exist: %s' % to)
        return
    except Exception as e:
        log.error('Failed to upload %s (%s)' % (location, e))
        # BUG FIX: entry.fail was referenced but never called
        entry.fail('Failed to upload %s (%s)' % (location, e))
        return
    if config['delete_origin']:
        try:
            os.remove(location)
        except Exception as e:
            # BUG FIX: log message previously had no arguments supplied
            log.error('Failed to delete file %s (%s)' % (location, e))
def download_entry(self, entry, config, sftp):
    """
    Downloads the file(s) described in entry
    """
    remote = unquote(urlparse(entry['url']).path) or '.'
    delete_origin = config['delete_origin']
    recursive = config['recursive']
    to = config['to']
    # render the local destination path template against the entry
    if to:
        try:
            to = render_from_entry(to, entry)
        except RenderError as e:
            log.error('Could not render path: %s' % to)
            entry.fail(e)
            return
    if not sftp.lexists(remote):
        log.error('Remote path does not exist: %s' % remote)
        return
    if sftp.isfile(remote):
        # single file: chdir to its parent directory and fetch it
        source_file = remotepath.basename(remote)
        source_dir = remotepath.dirname(remote)
        try:
            sftp.cwd(source_dir)
            self.download_file(source_file, to, sftp, delete_origin)
        except Exception as e:
            message = 'Failed to download file %s (%s)' % (remote, e)
            log.error(message)
            entry.fail(message)
    elif sftp.isdir(remote):
        # directory: walk the remote tree and fetch each file
        base_path = remotepath.normpath(remotepath.join(remote, '..'))
        dir_name = remotepath.basename(remote)
        handle_file = partial(self.download_file, dest=to, sftp=sftp, delete_origin=delete_origin)
        try:
            sftp.cwd(base_path)
            sftp.walktree(dir_name, handle_file, self.handle_dir, self.handle_unknown, recursive)
        except Exception as e:
            message = 'Failed to download directory %s (%s)' % (remote, e)
            log.error(message)
            entry.fail(message)
            return
        if delete_origin:
            self.remove_dir(sftp, remote)
    else:
        log.warning('Skipping unknown file %s' % remote)
def download_entry(self, entry, config, sftp):
    """
    Downloads the file(s) described in entry
    """
    path = unquote(urlparse(entry['url']).path) or '.'
    delete_origin = config['delete_origin']
    recursive = config['recursive']
    to = config['to']
    if to:
        try:
            to = render_from_entry(to, entry)
        except RenderError as render_error:
            log.error('Could not render path: %s' % to)
            entry.fail(render_error)
            return
    if not sftp.lexists(path):
        log.error('Remote path does not exist: %s' % path)
        return
    if sftp.isfile(path):
        # a single remote file: download it from within its parent directory
        try:
            sftp.cwd(remotepath.dirname(path))
            self.download_file(remotepath.basename(path), to, sftp, delete_origin)
        except Exception as e:
            error = 'Failed to download file %s (%s)' % (path, e)
            log.error(error)
            entry.fail(error)
    elif sftp.isdir(path):
        # a remote directory: walk it and download every file found
        base_path = remotepath.normpath(remotepath.join(path, '..'))
        dir_name = remotepath.basename(path)
        handle_file = partial(
            self.download_file, dest=to, sftp=sftp, delete_origin=delete_origin
        )
        try:
            sftp.cwd(base_path)
            sftp.walktree(
                dir_name, handle_file, self.handle_dir, self.handle_unknown, recursive
            )
        except Exception as e:
            error = 'Failed to download directory %s (%s)' % (path, e)
            log.error(error)
            entry.fail(error)
            return
        if delete_origin:
            self.remove_dir(sftp, path)
    else:
        log.warning('Skipping unknown file %s' % path)
def _parameterize(element, entry): if isinstance(element, dict): return dict((k, _parameterize(v, entry)) for k, v in element.items()) if isinstance(element, list): return [_parameterize(v, entry) for v in element] if isinstance(element, str) and ('{{' in element or '{%' in element): try: return render_from_entry(element, entry, native=True) except (RenderError, TypeError) as e: raise plugin.PluginError('Error parameterizing `%s`: %s' % (element, e), logger=log) return element
def handle_entry(self, entry, sftp, config, url_prefix):
    """Upload the entry's local file to the remote SFTP destination.

    Renders the target path, ensures the remote directory exists, uploads
    the file and optionally removes the local copy afterwards.
    """
    location = entry['location']
    filename = localpath.basename(location)
    to = config['to']
    if to:
        try:
            to = render_from_entry(to, entry)
        except RenderError as e:
            log.error('Could not render path: %s', to)
            entry.fail(e)
            return
    destination = remotepath.join(to, filename)
    destination_url = urljoin(url_prefix, destination)
    if not os.path.exists(location):
        log.warn('File no longer exists: %s', location)
        return
    if not sftp.lexists(to):
        try:
            sftp.makedirs(to)
        except Exception as e:
            log.error('Failed to create remote directory %s (%s)' % (to, e))
            entry.fail(e)
            return
    if not sftp.isdir(to):
        log.error('Not a directory: %s' % to)
        # BUG FIX: entry.fail was referenced but never called
        entry.fail('Not a directory: %s' % to)
        return
    try:
        sftp.put(localpath=location, remotepath=destination)
        log.verbose('Successfully uploaded %s to %s' % (location, destination_url))
    except OSError:
        log.warn('File no longer exists: %s', location)
        return
    except IOError as e:
        # BUG FIX: '%s (%s)' % to raised TypeError; supply both arguments
        log.error('Remote directory does not exist: %s (%s)' % (to, e))
        # BUG FIX: entry.fail was referenced but never called
        entry.fail('Remote directory does not exist: %s' % to)
        return
    except Exception as e:
        log.error('Failed to upload %s (%s)' % (location, e))
        # BUG FIX: entry.fail was referenced but never called
        entry.fail('Failed to upload %s (%s)' % (location, e))
        return
    if config['delete_origin']:
        try:
            os.remove(location)
        except Exception as e:
            # BUG FIX: log message previously had no arguments supplied
            log.error('Failed to delete file %s (%s)' % (location, e))
def replace_from_entry(field, entry, field_name, logger, default=''):
    """This is a helper function to do string replacement/jinja2 formatting from an entry dict.

    It catches exceptions from the string replacement and prints errors to the given log.
    field_name is the description to use when printing the error.
    Returns the result of the replacement, or default if there is an error.
    """
    # Do jinja2 template replacement (py2.6+/py3-compatible except syntax)
    try:
        result = render_from_entry(field, entry)
    except UndefinedError as e:
        logger("Could not set %s for %s: does not contain the field '%s' for jinja replacement."
               % (field_name, entry['title'], e))
        return default
    # BUG FIX: the successful result was previously never returned
    # (the function fell off the end and returned None).
    return result
def on_feed_output(self, feed):
    """Send a Prowl notification for every accepted entry."""
    for entry in feed.accepted:
        if feed.manager.options.test:
            log.info("Would send prowl message about: %s", entry["title"])
            continue
        # gather parameters; entry fields override the configured defaults
        config = self.get_config(feed)
        apikey = entry.get("apikey", config["apikey"])
        application = entry.get("application", config["application"])
        event = entry.get("event", config["event"])
        priority = entry.get("priority", config["priority"])
        description = config.get("description", entry["title"])
        # the description may be a jinja template; render it against the entry
        try:
            description = render_from_entry(description, entry)
        except UndefinedError as e:
            description = entry["title"]
            log.error("Error rendering jinja description: %s" % e)
        # POST the notification to the Prowl public API
        h = HTTPSConnection("prowl.weks.net")
        data = {
            "priority": priority,
            "application": application,
            "apikey": apikey,
            "event": event,
            "description": description,
        }
        h.request("POST", "/publicapi/add", headers=headers, body=urlencode(data))
        response = h.getresponse()
        request_status = response.status
        # error codes and messages from http://prowl.weks.net/api.php
        if request_status == 200:
            log.debug("Prowl message sent")
        elif request_status == 400:
            log.error("Bad request, the parameters you provided did not validate")
        elif request_status == 401:
            log.error("Not authorized, the API key given is not valid, and does not correspond to a user.")
        elif request_status == 406:
            log.error("Not acceptable, your IP address has exceeded the API limit.")
        elif request_status == 500:
            log.error("Internal server error, something failed to execute properly on the Prowl side.")
        else:
            log.error("Unknown error when sending Prowl message")
def render(self, template):
    """Render a jinja2 / python-format template string using this entry's fields.

    :param string template: Template in jinja2 or python string replacement format.
    :return: The rendered string.
    :rtype: string
    :raises RenderError: If rendering fails.
    """
    # only plain/unicode strings are renderable
    if not isinstance(template, basestring):
        raise ValueError('Trying to render non string template, got %s' % repr(template))
    log.trace('rendering: %s' % template)
    result = render_from_entry(template, self)
    return result
def render(self, template):
    """Render a template string against this entry's fields.

    :param string template: A template in jinja2 or python string replacement format.
    :return: The rendering result.
    :rtype: string
    :raises RenderError: If there is a problem.
    """
    if isinstance(template, basestring):
        log.trace('rendering: %s' % template)
        return render_from_entry(template, self)
    # anything that is not a (byte/unicode) string cannot be rendered
    raise ValueError('Trying to render non string template, got %s' % repr(template))
def render(self, template, native=False):
    """Render a template against this entry's fields.

    :param template: A template string or FlexGetTemplate that uses jinja2 or python
        string replacement format.
    :param native: If True, and the rendering result can be all native python types,
        not just strings.
    :return: The result of the rendering.
    :rtype: string
    :raises RenderError: If there is a problem.
    """
    acceptable_types = (str, FlexGetTemplate)
    if not isinstance(template, acceptable_types):
        raise ValueError(
            'Trying to render non string template or unrecognized template format, got %s'
            % repr(template))
    log.trace('rendering: %s', template)
    return render_from_entry(template, self, native=native)
def execute(self, task, phase_name, config):
    """Run the shell commands configured for the given task phase."""
    config = self.prepare_config(config)
    if phase_name not in config:
        log.debug('phase %s not configured' % phase_name)
        return
    name_map = {'for_entries': task.entries, 'for_accepted': task.accepted,
                'for_rejected': task.rejected, 'for_failed': task.failed}
    allow_background = config.get('allow_background')
    for operation, entries in name_map.iteritems():
        if operation not in config[phase_name]:
            continue
        log.debug('running phase_name: %s operation: %s entries: %s' % (phase_name, operation, len(entries)))
        for entry in entries:
            cmd = config[phase_name][operation]
            # escape quotes coming from entry fields when auto_escape is on
            entrydict = EscapingDict(entry) if config.get('auto_escape') else entry
            try:
                cmd = render_from_entry(cmd, entrydict)
            except RenderError as e:
                log.error('Could not set exec command for %s: %s' % (entry['title'], e))
                # fail the entry if configured to do so
                if config.get('fail_entries'):
                    task.fail(entry, 'Entry `%s` does not have required fields for string replacement.' % entry['title'])
                continue
            log.debug('phase_name: %s operation: %s cmd: %s' % (phase_name, operation, cmd))
            if task.manager.options.test:
                log.info('Would execute: %s' % cmd)
                continue
            # Verify the command can be encoded to the configured encoding; don't
            # actually encode yet so logging continues to work.
            try:
                cmd.encode(config['encoding'])
            except UnicodeEncodeError:
                log.error('Unable to encode cmd `%s` to %s' % (cmd, config['encoding']))
                if config.get('fail_entries'):
                    task.fail(entry, 'cmd `%s` could not be encoded to %s.' % (cmd, config['encoding']))
                continue
            # Run the command, fail entries with non-zero return code if configured to
            if self.execute_cmd(cmd, allow_background, config['encoding']) != 0 and config.get('fail_entries'):
                task.fail(entry, 'exec return code was non-zero')
def on_task_output(self, task, config):
    """Create soft or hard links for accepted entries under config['to']."""
    if not config:
        return
    config = self.prepare_config(config)
    existing = config['existing']
    for entry in task.accepted:
        if 'location' not in entry:
            entry.fail('Does not have location field for symlinking')
            continue
        lnkfrom = entry['location']
        name = os.path.basename(lnkfrom)
        lnkto = os.path.join(config['to'], name)
        try:
            lnkto = render_from_entry(lnkto, entry)
        except RenderError as error:
            log.error('Could not render path: %s', lnkto)
            entry.fail(str(error))
            # BUG FIX: was `return`, which silently skipped all remaining
            # accepted entries; every other failure path here uses continue.
            continue
        # Hardlinks for dirs will not be failed here
        if os.path.exists(lnkto) and (config['link_type'] == 'soft' or os.path.isfile(lnkfrom)):
            msg = 'Symlink destination %s already exists' % lnkto
            if existing == 'ignore':
                log.verbose(msg)
            else:
                entry.fail(msg)
            continue
        log.verbose('%slink `%s` to `%s`', config['link_type'], lnkfrom, lnkto)
        try:
            if config['link_type'] == 'soft':
                os.symlink(lnkfrom, lnkto)
            else:
                if os.path.isdir(lnkfrom):
                    self.hard_link_dir(lnkfrom, lnkto, existing)
                else:
                    dirname = os.path.dirname(lnkto)
                    if not os.path.exists(dirname):
                        os.makedirs(dirname)
                    os.link(lnkfrom, lnkto)
        except OSError as e:
            entry.fail('Failed to create %slink, %s' % (config['link_type'], e))
def on_task_exit(self, task):
    """Store finished / downloaded entries at exit"""
    if not rss2gen:
        raise PluginWarning('plugin make_rss requires PyRSS2Gen library.')
    config = self.get_config(task)
    # --test must leave the database untouched
    if task.manager.options.test:
        return
    # when history is disabled, remove everything from backlog on every run (a bit hackish, rarely usefull)
    if not config['history']:
        log.debug('disabling history')
        previous = task.session.query(RSSEntry).filter(RSSEntry.file == config['file']).all()
        for stale_entry in previous:
            task.session.delete(stale_entry)
    # save entries into db for RSS generation
    for entry in task.accepted:
        rss = RSSEntry()
        rss.title = entry.render(config['title'])
        # use the first configured field present on the entry as the item link
        for field in config['link']:
            if field in entry:
                rss.link = entry[field]
                break
        # the description is produced by the configured rss template
        rss.description = render_from_entry(get_template(config['template'], 'rss'), entry)
        rss.file = config['file']
        # TODO: check if this exists and suggest disabling history if it does since it shouldn't happen normally ...
        log.debug('Saving %s into rss database' % entry['title'])
        task.session.add(rss)
def on_task_output(self, task, config):
    """Create soft or hard links for accepted entries under the configured path."""
    if not config:
        return
    config = self.prepare_config(config)
    existing = config['existing']
    for entry in task.accepted:
        if 'location' not in entry:
            entry.fail('Does not have location field for symlinking')
            continue
        lnkfrom = entry['location']
        name = os.path.basename(lnkfrom)
        lnkto = os.path.join(config['to'], name)
        try:
            lnkto = render_from_entry(lnkto, entry)
        except RenderError as error:
            log.error('Could not render path: %s', lnkto)
            entry.fail(str(error))
            # BUG FIX: `return` here aborted processing of all remaining
            # accepted entries; every other failure path uses continue.
            continue
        # Hardlinks for dirs will not be failed here
        if os.path.exists(lnkto) and (
            config['link_type'] == 'soft' or os.path.isfile(lnkfrom)
        ):
            msg = 'Symlink destination %s already exists' % lnkto
            if existing == 'ignore':
                log.verbose(msg)
            else:
                entry.fail(msg)
            continue
        log.verbose('%slink `%s` to `%s`', config['link_type'], lnkfrom, lnkto)
        try:
            if config['link_type'] == 'soft':
                os.symlink(lnkfrom, lnkto)
            else:
                if os.path.isdir(lnkfrom):
                    self.hard_link_dir(lnkfrom, lnkto, existing)
                else:
                    dirname = os.path.dirname(lnkto)
                    if not os.path.exists(dirname):
                        os.makedirs(dirname)
                    os.link(lnkfrom, lnkto)
        except OSError as e:
            entry.fail('Failed to create %slink, %s' % (config['link_type'], e))
def download_entry(cls, entry: Entry, config: dict, sftp: SftpClient) -> None:
    """
    Downloads the file(s) described in entry
    """
    path: str = unquote(urlparse(entry['url']).path) or '.'
    delete_origin: bool = config['delete_origin']
    recursive: bool = config['recursive']
    to: str = config['to']
    try:
        to = render_from_entry(to, entry)
    except RenderError as e:
        logger.error('Could not render path: {}', to)
        entry.fail(str(e))  # type: ignore
        return
    try:
        sftp.download(path, to, recursive, delete_origin)
    except SftpError as e:
        # CONSISTENCY FIX: pass a string to entry.fail, as the render
        # branch above (and the upload counterpart) do
        entry.fail(str(e))  # type: ignore
def execute(self, feed, phase_name):
    """Run the shell commands configured for the given feed phase."""
    config = self.get_config(feed)
    if phase_name not in config:
        log.debug('phase %s not configured' % phase_name)
        return
    name_map = {'for_entries': feed.entries, 'for_accepted': feed.accepted,
                'for_rejected': feed.rejected, 'for_failed': feed.failed}
    allow_background = config.get('allow_background')
    for operation, entries in name_map.iteritems():
        if operation not in config[phase_name]:
            continue
        log.debug('running phase_name: %s operation: %s entries: %s' % (phase_name, operation, len(entries)))
        for entry in entries:
            cmd = config[phase_name][operation]
            # escape quotes from entry fields when auto_escape is enabled
            entrydict = EscapingDict(entry) if config.get('auto_escape') else entry
            try:
                cmd = render_from_entry(cmd, entrydict)
            except RenderError as e:
                log.error('Could not set exec command for %s: %s' % (entry['title'], e))
                # fail the entry if configured to do so
                if config.get('fail_entries'):
                    feed.fail(entry, 'Entry `%s` does not have required fields for string replacement.' % entry['title'])
                continue
            log.debug('phase_name: %s operation: %s cmd: %s' % (phase_name, operation, cmd))
            if feed.manager.options.test:
                log.info('Would execute: %s' % cmd)
                continue
            # Run the command, fail entries with non-zero return code if configured to
            if self.execute_cmd(cmd, allow_background) != 0 and config.get('fail_entries'):
                feed.fail(entry, "exec return code was non-zero")
def on_task_exit(self, task):
    """Store finished / downloaded entries at exit"""
    if not rss2gen:
        raise PluginWarning("plugin make_rss requires PyRSS2Gen library.")
    config = self.get_config(task)
    # don't run with --test
    if task.manager.options.test:
        return
    # when history is disabled, remove everything from backlog on every run (a bit hackish, rarely usefull)
    if not config["history"]:
        log.debug("disabling history")
        for item in task.session.query(RSSEntry).filter(RSSEntry.file == config["file"]).all():
            task.session.delete(item)
    # save entries into db for RSS generation
    for entry in task.accepted:
        rss = RSSEntry()
        rss.title = entry.render(config["title"])
        for field in config["link"]:
            if field in entry:
                rss.link = entry[field]
                break
        try:
            rss.description = render_from_entry(get_template(config["template"], "rss"), entry)
        except Exception as e:
            # BUG FIX: bare `except:` also swallowed SystemExit/KeyboardInterrupt
            # and discarded the cause; catch Exception and log the error detail.
            log.error("Error while rendering entry %s, falling back to plain title: %s", entry, e)
            rss.description = entry["title"] + " - (Render Error)"
        rss.file = config["file"]
        # TODO: check if this exists and suggest disabling history if it does since it shouldn't happen normally ...
        log.debug("Saving %s into rss database" % entry["title"])
        task.session.add(rss)
def handle_entry(self, entry, config):
    """
    Extract matching files into the directory specified

    Optionally delete the original archive if config.delete_archive is True
    """
    match = re.compile(config['regexp'], re.IGNORECASE).match
    archive_path = entry.get('location')
    if not archive_path:
        log.warning('Entry does not appear to represent a local file.')
        return
    archive_dir = os.path.dirname(archive_path)
    if not os.path.exists(archive_path):
        log.warning('File no longer exists: %s', archive_path)
        return
    arch = open_archive_entry(entry)
    if not arch:
        return
    to = config['to']
    if to:
        try:
            to = render_from_entry(to, entry)
        except RenderError as error:
            log.error('Could not render path: %s', to)
            entry.fail(error)
            return
    else:
        to = archive_dir
    for info in arch.infolist():
        destination = get_destination_path(info.filename, to, config['keep_dirs'])
        dest_dir = os.path.dirname(destination)
        arch_file = os.path.basename(info.filename)
        if is_dir(info):
            log.debug('Appears to be a directory: %s', info.filename)
            continue
        if not match(arch_file):
            log.debug('File did not match regexp: %s', arch_file)
            continue
        log.debug('Found matching file: %s', info.filename)
        log.debug('Creating path: %s', dest_dir)
        makepath(dest_dir)
        if os.path.exists(destination):
            log.verbose('File already exists: %s', destination)
            continue
        error_message = ''
        log.debug('Attempting to extract: %s to %s', arch_file, destination)
        try:
            arch.extract_file(info, destination)
        except archive.FSError as error:
            error_message = 'OS error while creating file: %s (%s)' % (destination, error)
        except archive.ArchiveError as error:
            error_message = 'Failed to extract file: %s in %s (%s)' % (info.filename, archive_path, error)
        if error_message:
            log.error(error_message)
            # BUG FIX: was entry.fail(entry); fail with the error message instead
            entry.fail(error_message)
            if os.path.exists(destination):
                log.debug('Cleaning up partially extracted file: %s', destination)
                os.remove(destination)
            return
    if config['delete_archive']:
        arch.delete()
    else:
        arch.close()
def handle_entry(self, entry, config):
    """
    Extract matching files into the directory specified

    Optionally delete the original archive if config.delete_archive is True
    """
    match = re.compile(config['regexp'], re.IGNORECASE).match
    archive_path = entry.get('location')
    if not archive_path:
        log.warn('Entry does not appear to represent a local file, decompress plugin only supports local files')
        return
    archive_dir = os.path.dirname(archive_path)
    archive_file = os.path.basename(archive_path)
    if not os.path.exists(archive_path):
        log.warn('File no longer exists: %s', archive_path)
        return
    archive = self.open_archive(entry)
    if not archive:
        return
    to = config['to']
    if to:
        try:
            # destination may be a jinja template rendered from the entry
            to = render_from_entry(to, entry)
        except RenderError as e:
            log.error('Could not render path: %s', to)
            entry.fail(e)
            return
    else:
        # default: extract next to the archive itself
        to = archive_dir
    for info in archive.infolist():
        path = info.filename
        filename = os.path.basename(path)
        if self.is_dir(info):
            log.debug('Appears to be a directory: %s', path)
            continue
        if not match(path):
            log.debug('File did not match regexp: %s', path)
            continue
        log.debug('Found matching file: %s', path)
        if config['keep_dirs']:
            path_suffix = path
        else:
            path_suffix = filename
        destination = os.path.join(to, path_suffix)
        dest_dir = os.path.dirname(destination)
        if not os.path.exists(dest_dir):
            log.debug('Creating path: %s', dest_dir)
            os.makedirs(dest_dir)
        if not os.path.exists(destination):
            success = False
            error_message = ''
            source = None
            log.debug('Attempting to extract: %s to %s', path, destination)
            try:
                # python 2.6 doesn't seem to like "with" in conjuntion with ZipFile.open
                source = archive.open(path)
                with open(destination, 'wb') as target:
                    shutil.copyfileobj(source, target)
                log.verbose('Extracted: %s', path)
                success = True
            except (IOError, os.error) as error:
                error_message = 'OS error while creating file: %s (%s)' % (destination, error)
            except (zipfile.BadZipfile, rarfile.Error) as error:
                error_message = 'Failed to extract file: %s in %s (%s)' % (path, archive_path, error)
            finally:
                if source and not source.closed:
                    source.close()
            if not success:
                log.error(error_message)
                entry.fail(error_message)
                if os.path.exists(destination):
                    log.debug('Cleaning up partially extracted file: %s', destination)
                    # BUG FIX: the partial file was logged as "cleaned up" but
                    # never actually deleted (other variants of this plugin do)
                    os.remove(destination)
                return
        else:
            log.verbose('File already exists: %s', destination)
    if config['delete_archive']:
        # rar archives may span multiple volume files; zip has a single file
        if hasattr(archive, 'volumelist'):
            volumes = archive.volumelist()
        else:
            volumes = [archive_path]
        archive.close()
        for volume in volumes:
            log.debug('Deleting volume: %s', volume)
            os.remove(volume)
        log.verbose('Deleted archive: %s', archive_file)
    else:
        archive.close()
def handle_entry(self, entry, config):
    """
    Extract matching files into the directory specified

    Optionally delete the original archive if config.delete_archive is True
    """
    match = re.compile(config['regexp'], re.IGNORECASE).match
    archive_path = entry['location']
    archive_dir = os.path.dirname(archive_path)
    archive_file = os.path.basename(archive_path)
    if not os.path.exists(archive_path):
        log.warn('File no longer exists: %s' % archive_path)
        return
    archive = self.open_archive(entry)
    if not archive:
        return
    to = config['to']
    if to:
        try:
            # destination may be a jinja template rendered from the entry
            to = render_from_entry(to, entry)
        except RenderError as e:
            log.error('Could not render path: %s' % to)
            entry.fail(e)
            return
    else:
        # default: extract next to the archive itself
        to = archive_dir
    for info in archive.infolist():
        path = info.filename
        filename = os.path.basename(path)
        if self.is_dir(info):
            log.debug('Appears to be a directory: %s' % path)
            continue
        if not match(path):
            log.debug('File did not match regexp: %s' % path)
            continue
        log.debug('Found matching file: %s' % path)
        if config['keep_dirs']:
            path_suffix = path
        else:
            path_suffix = filename
        destination = os.path.join(to, path_suffix)
        dest_dir = os.path.dirname(destination)
        if not os.path.exists(dest_dir):
            log.debug('Creating path: %s' % dest_dir)
            os.makedirs(dest_dir)
        if not os.path.exists(destination):
            log.debug('Attempting to extract: %s to %s' % (path, destination))
            try:
                # BUG FIX: previously used the Python-2-only `file()` builtin
                # (NameError on Python 3) and leaked both handles; open() plus
                # try/finally guarantees they are closed even on failure.
                source = archive.open(path)
                try:
                    with open(destination, 'wb') as target:
                        shutil.copyfileobj(source, target)
                finally:
                    source.close()
                log.verbose('Extracted: %s' % path)
            except Exception as e:
                error_message = 'Failed to extract file: %s in %s (%s)' % \
                                (path, archive_path, e)
                log.error(error_message)
                entry.fail(error_message)
                if os.path.exists(destination):
                    log.debug('Cleaning up partially extracted file: %s' % destination)
                    os.remove(destination)
                return
        else:
            log.verbose('File already exists: %s' % destination)
    if config['delete_archive']:
        # rar archives may span multiple volume files; zip has a single file
        if isinstance(archive, zipfile.ZipFile):
            volumes = [archive_path]
        else:
            volumes = archive.volumelist()
        archive.close()
        for volume in volumes:
            log.debug('Deleting volume: %s' % volume)
            os.remove(volume)
        log.verbose('Deleted archive: %s' % archive_file)
    else:
        archive.close()
def handle_entry(self, entry, config):
    """
    Extract matching files into the directory specified

    Optionally delete the original archive if config.delete_archive is True
    """
    match = re.compile(config['regexp'], re.IGNORECASE).match
    archive_path = entry.get('location')
    if not archive_path:
        log.warning('Entry does not appear to represent a local file.')
        return
    archive_dir = os.path.dirname(archive_path)
    if not os.path.exists(archive_path):
        log.warning('File no longer exists: %s', archive_path)
        return
    arch = open_archive_entry(entry)
    if not arch:
        return
    to = config['to']
    if to:
        try:
            # destination may be a jinja template rendered from the entry
            to = render_from_entry(to, entry)
        except RenderError as error:
            log.error('Could not render path: %s', to)
            entry.fail(error)
            return
    else:
        # default: extract next to the archive itself
        to = archive_dir
    for info in arch.infolist():
        destination = get_destination_path(info.filename, to, config['keep_dirs'])
        dest_dir = os.path.dirname(destination)
        arch_file = os.path.basename(info.filename)
        if is_dir(info):
            log.debug('Appears to be a directory: %s', info.filename)
            continue
        if not match(arch_file):
            log.debug('File did not match regexp: %s', arch_file)
            continue
        log.debug('Found matching file: %s', info.filename)
        log.debug('Creating path: %s', dest_dir)
        makepath(dest_dir)
        if os.path.exists(destination):
            log.verbose('File already exists: %s', destination)
            continue
        error_message = ''
        log.debug('Attempting to extract: %s to %s', arch_file, destination)
        try:
            arch.extract_file(info, destination)
        except archive.FSError as error:
            error_message = 'OS error while creating file: %s (%s)' % (destination, error)
        except archive.ArchiveError as error:
            error_message = 'Failed to extract file: %s in %s (%s)' % (info.filename,
                                                                       archive_path, error)
        if error_message:
            log.error(error_message)
            # BUG FIX: previously called entry.fail(entry), passing the entry
            # object itself instead of the failure reason
            entry.fail(error_message)
            if os.path.exists(destination):
                log.debug('Cleaning up partially extracted file: %s', destination)
                os.remove(destination)
            return
    if config['delete_archive']:
        arch.delete()
    else:
        arch.close()
def execute(self, task, phase_name, config):
    """Run the commands configured for this phase.

    Per-entry commands (for_entries / for_accepted / for_rejected / for_failed)
    are rendered from each entry; the plain `phase` command list is rendered
    from the task. In --test mode commands are only logged, never executed.
    Entries are failed on render / encoding / exit-code problems only when
    `fail_entries` is configured.
    """
    config = self.prepare_config(config)
    if phase_name not in config:
        log.debug("phase %s not configured" % phase_name)
        return
    # map each config keyword to the entry container it operates on
    name_map = {
        "for_entries": task.entries,
        "for_accepted": task.accepted,
        "for_rejected": task.rejected,
        "for_failed": task.failed,
    }
    allow_background = config.get("allow_background")
    for operation, entries in name_map.items():
        if operation not in config[phase_name]:
            continue
        log.debug("running phase_name: %s operation: %s entries: %s" % (phase_name, operation, len(entries)))
        for entry in entries:
            for cmd in config[phase_name][operation]:
                entrydict = EscapingEntry(entry) if config.get("auto_escape") else entry
                # Do string replacement from entry, but make sure quotes get escaped
                try:
                    cmd = render_from_entry(cmd, entrydict)
                except RenderError as e:
                    log.error("Could not set exec command for %s: %s" % (entry["title"], e))
                    # fail the entry if configured to do so
                    if config.get("fail_entries"):
                        entry.fail(
                            "Entry `%s` does not have required fields for string replacement."
                            % entry["title"]
                        )
                    # skip this command either way; rendering failed
                    continue
                log.debug("phase_name: %s operation: %s cmd: %s" % (phase_name, operation, cmd))
                if task.options.test:
                    log.info("Would execute: %s" % cmd)
                else:
                    # Make sure the command can be encoded into appropriate encoding, don't actually encode yet,
                    # so logging continues to work.
                    try:
                        cmd.encode(config["encoding"])
                    except UnicodeEncodeError:
                        log.error("Unable to encode cmd `%s` to %s" % (cmd, config["encoding"]))
                        if config.get("fail_entries"):
                            entry.fail("cmd `%s` could not be encoded to %s."
                                       % (cmd, config["encoding"]))
                        continue
                    # Run the command, fail entries with non-zero return code if configured to
                    if self.execute_cmd(cmd, allow_background, config["encoding"]) != 0 and config.get(
                        "fail_entries"
                    ):
                        entry.fail("exec return code was non-zero")
    # phase keyword in this
    if "phase" in config[phase_name]:
        for cmd in config[phase_name]["phase"]:
            try:
                cmd = render_from_task(cmd, task)
            except RenderError as e:
                log.error("Error rendering `%s`: %s" % (cmd, e))
            else:
                log.debug("phase cmd: %s" % cmd)
                if task.options.test:
                    log.info("Would execute: %s" % cmd)
                else:
                    self.execute_cmd(cmd, allow_background, config["encoding"])
def execute(self, task, phase_name, config):
    """Run the commands configured for this phase.

    Per-entry commands (for_entries / for_accepted / for_rejected /
    for_undecided / for_failed) are rendered from each entry; the plain
    `phase` command list is rendered from the task. In --test mode commands
    are only logged, never executed. Entries are failed on render / encoding /
    exit-code problems only when `fail_entries` is configured.
    """
    config = self.prepare_config(config)
    if phase_name not in config:
        logger.debug('phase {} not configured', phase_name)
        return
    # map each config keyword to the entry container it operates on
    name_map = {
        'for_entries': task.entries,
        'for_accepted': task.accepted,
        'for_rejected': task.rejected,
        'for_undecided': task.undecided,
        'for_failed': task.failed,
    }
    allow_background = config.get('allow_background')
    for operation, entries in name_map.items():
        if operation not in config[phase_name]:
            continue
        logger.debug(
            'running phase_name: {} operation: {} entries: {}',
            phase_name,
            operation,
            len(entries),
        )
        for entry in entries:
            for cmd in config[phase_name][operation]:
                entrydict = EscapingEntry(entry) if config.get(
                    'auto_escape') else entry
                # Do string replacement from entry, but make sure quotes get escaped
                try:
                    cmd = render_from_entry(cmd, entrydict)
                except RenderError as e:
                    logger.error('Could not set exec command for {}: {}', entry['title'], e)
                    # fail the entry if configured to do so
                    if config.get('fail_entries'):
                        entry.fail(
                            'Entry `%s` does not have required fields for string replacement.'
                            % entry['title'])
                    # skip this command either way; rendering failed
                    continue
                logger.debug('phase_name: {} operation: {} cmd: {}', phase_name, operation, cmd)
                if task.options.test:
                    logger.info('Would execute: {}', cmd)
                else:
                    # Make sure the command can be encoded into appropriate encoding, don't actually encode yet,
                    # so logging continues to work.
                    try:
                        cmd.encode(config['encoding'])
                    except UnicodeEncodeError:
                        logger.error('Unable to encode cmd `{}` to {}', cmd, config['encoding'])
                        if config.get('fail_entries'):
                            entry.fail(
                                'cmd `%s` could not be encoded to %s.'
                                % (cmd, config['encoding']))
                        continue
                    # Run the command, fail entries with non-zero return code if configured to
                    if self.execute_cmd(
                        cmd, allow_background, config['encoding']
                    ) != 0 and config.get('fail_entries'):
                        entry.fail('exec return code was non-zero')
    # phase keyword in this
    if 'phase' in config[phase_name]:
        for cmd in config[phase_name]['phase']:
            try:
                cmd = render_from_task(cmd, task)
            except RenderError as e:
                logger.error('Error rendering `{}`: {}', cmd, e)
            else:
                logger.debug('phase cmd: {}', cmd)
                if task.options.test:
                    logger.info('Would execute: {}', cmd)
                else:
                    self.execute_cmd(cmd, allow_background, config['encoding'])
def on_task_exit(self, task, config):
    """Store finished / downloaded entries at exit and (re)generate the RSS file."""
    if not rss2gen:
        raise plugin.PluginWarning('plugin make_rss requires PyRSS2Gen library.')
    config = self.prepare_config(config)
    # when history is disabled, remove everything from backlog on every run (a bit hackish, rarely useful)
    if not config['history']:
        log.debug('disabling history')
        for item in task.session.query(RSSEntry).filter(RSSEntry.file == config['file']).all():
            task.session.delete(item)
    # save entries into db for RSS generation
    for entry in task.accepted:
        rss = RSSEntry()
        try:
            rss.title = entry.render(config['title'])
        except RenderError as e:
            # BUG FIX: a bad title template used to raise out of this phase
            # and abort processing of every remaining accepted entry
            log.error('Error rendering jinja title for `%s` falling back to entry title: %s' %
                      (entry['title'], e))
            rss.title = entry['title']
        # first configured link field present on the entry wins
        for field in config['link']:
            if field in entry:
                rss.link = entry[field]
                break
        try:
            template = get_template(config['template'], 'rss')
        except ValueError as e:
            raise plugin.PluginError('Invalid template specified: %s' % e)
        try:
            rss.description = render_from_entry(template, entry)
        except RenderError as e:
            log.error('Error while rendering entry %s, falling back to plain title: %s' % (entry, e))
            rss.description = entry['title'] + ' - (Render Error)'
        rss.file = config['file']
        # TODO: check if this exists and suggest disabling history if it does since it shouldn't happen normally ...
        log.debug('Saving %s into rss database' % entry['title'])
        task.session.add(rss)
    if not rss2gen:
        return
    # don't generate rss when learning
    if task.options.learn:
        return
    db_items = task.session.query(RSSEntry).filter(RSSEntry.file == config['file']).\
        order_by(RSSEntry.published.desc()).all()
    # make items
    rss_items = []
    for db_item in db_items:
        add = True
        if config['items'] != -1:
            if len(rss_items) > config['items']:
                add = False
        if config['days'] != -1:
            if datetime.datetime.today() - datetime.timedelta(days=config['days']) > db_item.published:
                add = False
        if add:
            # add into generated feed; guid is a stable hash of the item content
            hasher = hashlib.sha1()
            hasher.update(db_item.title.encode('utf8'))
            hasher.update(db_item.description.encode('utf8'))
            hasher.update(db_item.link.encode('utf8'))
            guid = base64.urlsafe_b64encode(hasher.digest())
            gen = {'title': db_item.title,
                   'description': db_item.description,
                   'link': db_item.link,
                   'pubDate': db_item.published,
                   'guid': guid}
            log.trace('Adding %s into rss %s' % (gen['title'], config['file']))
            rss_items.append(PyRSS2Gen.RSSItem(**gen))
        else:
            # no longer needed
            task.session.delete(db_item)
    # make rss
    rss = PyRSS2Gen.RSS2(title='FlexGet',
                         link=config.get('rsslink', 'http://flexget.com'),
                         description='FlexGet generated RSS feed',
                         lastBuildDate=datetime.datetime.utcnow(),
                         items=rss_items)
    # don't run with --test
    if task.options.test:
        log.info('Would write rss file with %d entries.', len(rss_items))
        return
    # write rss
    fn = os.path.expanduser(config['file'])
    with open(fn, 'w') as file:
        try:
            log.verbose('Writing output rss to %s' % fn)
            rss.write_xml(file, encoding=config['encoding'])
        except LookupError:
            log.critical('Unknown encoding %s' % config['encoding'])
            return
        except IOError:
            # TODO: plugins cannot raise PluginWarnings in terminate event ..
            log.critical('Unable to write %s' % fn)
            return
def on_task_exit(self, task, config):
    """Store finished / downloaded entries at exit"""
    if not rss2gen:
        raise plugin.PluginWarning(
            'plugin make_rss requires PyRSS2Gen library.')
    config = self.prepare_config(config)

    # History disabled: wipe every stored row for this feed file each run
    # (a bit hackish, rarely useful).
    if not config['history']:
        log.debug('disabling history')
        stale = task.session.query(RSSEntry).filter(
            RSSEntry.file == config['file']).all()
        for row in stale:
            task.session.delete(row)

    # Persist each accepted entry so the feed can be regenerated later.
    for entry in task.accepted:
        row = RSSEntry()
        try:
            row.title = entry.render(config['title'])
        except RenderError as e:
            log.error(
                'Error rendering jinja title for `%s` falling back to entry title: %s'
                % (entry['title'], e))
            row.title = entry['title']
        # First configured link field present on the entry wins.
        for field in config['link']:
            if field in entry:
                row.link = entry[field]
                break
        try:
            template = get_template(config['template'], 'rss')
        except ValueError as e:
            raise plugin.PluginError('Invalid template specified: %s' % e)
        try:
            row.description = render_from_entry(template, entry)
        except RenderError as e:
            log.error(
                'Error while rendering entry %s, falling back to plain title: %s'
                % (entry, e))
            row.description = entry['title'] + ' - (Render Error)'
        row.file = config['file']
        # TODO: check if this exists and suggest disabling history if it does since it shouldn't happen normally ...
        log.debug('Saving %s into rss database' % entry['title'])
        task.session.add(row)

    if not rss2gen:
        return
    # Never regenerate the feed while learning.
    if task.options.learn:
        return

    db_items = (task.session.query(RSSEntry)
                .filter(RSSEntry.file == config['file'])
                .order_by(RSSEntry.published.desc())
                .all())

    # Build feed items, pruning rows that fall outside the item/age limits.
    rss_items = []
    for db_item in db_items:
        keep = True
        if config['items'] != -1 and len(rss_items) > config['items']:
            keep = False
        if config['days'] != -1:
            cutoff = datetime.datetime.today() - datetime.timedelta(days=config['days'])
            if cutoff > db_item.published:
                keep = False
        if not keep:
            # Pruned rows are removed from the backlog entirely.
            task.session.delete(db_item)
            continue
        # guid is a stable content hash so readers de-duplicate correctly.
        digest = hashlib.sha1()
        digest.update(db_item.title.encode('utf8'))
        digest.update(db_item.description.encode('utf8'))
        digest.update(db_item.link.encode('utf8'))
        guid = PyRSS2Gen.Guid(
            base64.urlsafe_b64encode(digest.digest()), isPermaLink=False)
        log.trace('Adding %s into rss %s' % (db_item.title, config['file']))
        rss_items.append(PyRSS2Gen.RSSItem(
            title=db_item.title,
            description=db_item.description,
            link=db_item.link,
            pubDate=db_item.published,
            guid=guid,
        ))

    feed = PyRSS2Gen.RSS2(
        title='FlexGet',
        link=config.get('rsslink', 'http://flexget.com'),
        description='FlexGet generated RSS feed',
        lastBuildDate=datetime.datetime.utcnow(),
        items=rss_items,
    )

    # --test: report what would happen but touch nothing on disk.
    if task.options.test:
        log.info('Would write rss file with %d entries.', len(rss_items))
        return

    fn = os.path.expanduser(config['file'])
    with open(fn, 'w') as outfile:
        try:
            log.verbose('Writing output rss to %s' % fn)
            feed.write_xml(outfile, encoding=config['encoding'])
        except LookupError:
            log.critical('Unknown encoding %s' % config['encoding'])
            return
        except IOError:
            # TODO: plugins cannot raise PluginWarnings in terminate event ..
            log.critical('Unable to write %s' % fn)
            return
def handle_entry(self, entry, config):
    """
    Extract matching files into the directory specified

    Optionally delete the original archive if config.delete_archive is True
    """
    match = re.compile(config['regexp'], re.IGNORECASE).match
    archive_path = entry.get('location')
    if not archive_path:
        log.warning(
            'Entry does not appear to represent a local file, decompress plugin only supports local files'
        )
        return
    archive_dir = os.path.dirname(archive_path)
    archive_file = os.path.basename(archive_path)
    if not os.path.exists(archive_path):
        log.warning('File no longer exists: %s', archive_path)
        return
    archive = self.open_archive(entry)
    if not archive:
        return
    to = config['to']
    if to:
        try:
            # destination may be a jinja template rendered from the entry
            to = render_from_entry(to, entry)
        except RenderError as e:
            log.error('Could not render path: %s', to)
            entry.fail(e)
            return
    else:
        # default: extract next to the archive itself
        to = archive_dir
    for info in archive.infolist():
        path = info.filename
        filename = os.path.basename(path)
        if self.is_dir(info):
            log.debug('Appears to be a directory: %s', path)
            continue
        if not match(path):
            log.debug('File did not match regexp: %s', path)
            continue
        log.debug('Found matching file: %s', path)
        if config['keep_dirs']:
            path_suffix = path
        else:
            path_suffix = filename
        destination = os.path.join(to, path_suffix)
        dest_dir = os.path.dirname(destination)
        if not os.path.exists(dest_dir):
            log.debug('Creating path: %s', dest_dir)
            os.makedirs(dest_dir)
        if not os.path.exists(destination):
            success = False
            error_message = ''
            source = None
            log.debug('Attempting to extract: %s to %s', path, destination)
            try:
                # python 2.6 doesn't seem to like "with" in conjuntion with ZipFile.open
                source = archive.open(path)
                with open(destination, 'wb') as target:
                    shutil.copyfileobj(source, target)
                log.verbose('Extracted: %s', path)
                success = True
            except (IOError, os.error) as error:
                error_message = 'OS error while creating file: %s (%s)' % (
                    destination, error)
            except (zipfile.BadZipfile, rarfile.Error) as error:
                error_message = 'Failed to extract file: %s in %s (%s)' % (
                    path, archive_path, error)
            finally:
                if source and not source.closed:
                    source.close()
            if not success:
                log.error(error_message)
                entry.fail(error_message)
                if os.path.exists(destination):
                    log.debug('Cleaning up partially extracted file: %s', destination)
                    # BUG FIX: the partial file was logged as "cleaned up" but
                    # never actually deleted (other variants of this plugin do)
                    os.remove(destination)
                return
        else:
            log.verbose('File already exists: %s', destination)
    if config['delete_archive']:
        # rar archives may span multiple volume files; zip has a single file
        if hasattr(archive, 'volumelist'):
            volumes = archive.volumelist()
        else:
            volumes = [archive_path]
        archive.close()
        for volume in volumes:
            log.debug('Deleting volume: %s', volume)
            os.remove(volume)
        log.verbose('Deleted archive: %s', archive_file)
    else:
        archive.close()