def on_task_start(self, task, config):
    """Raise a DependencyError unless Python 2.7+, babelfish and subliminal are available."""
    # sys.version_info is a tuple and compares directly against a tuple literal;
    # converting it to a list first was unnecessary.
    if sys.version_info < (2, 7):
        raise plugin.DependencyError('subliminal', 'Python 2.7', 'Subliminal plugin requires python 2.7.')
    try:
        import babelfish  # noqa
    except ImportError as e:
        log.debug('Error importing Babelfish: %s', e)
        raise plugin.DependencyError(
            'subliminal', 'babelfish', 'Babelfish module required. ImportError: %s', e)
    try:
        import subliminal  # noqa
    except ImportError as e:
        log.debug('Error importing Subliminal: %s', e)
        raise plugin.DependencyError(
            'subliminal', 'subliminal', 'Subliminal module required. ImportError: %s', e)
def on_task_start(self, task, config):
    """Raise a DependencyError if our dependencies aren't available"""
    try:
        from deluge.ui.client import client  # noqa
    except ImportError as e:
        log.debug('Error importing deluge: %s' % e)
        raise plugin.DependencyError(
            'deluge', 'deluge',
            'Deluge >=1.2 module and it\'s dependencies required. ImportError: %s' % e, log)
    try:
        from twisted.internet import reactor  # noqa
    except ImportError as e:
        # Narrowed from a bare `except:`, which would also have swallowed
        # SystemExit/KeyboardInterrupt; only an import failure means a missing dependency.
        log.debug('Error importing twisted.internet: %s' % e)
        raise plugin.DependencyError('deluge', 'twisted.internet', 'Twisted.internet package required', log)
def on_task_start(self, task, config):
    """Raise a DependencyError if our dependencies aren't available"""
    # This is overridden by OutputDeluge to add deluge 1.1 support
    try:
        from deluge.ui.client import client  # noqa
    except ImportError as e:
        log.debug('Error importing deluge: %s' % e)
        raise plugin.DependencyError(
            'output_deluge', 'deluge',
            'Deluge module and it\'s dependencies required. ImportError: %s' % e, log)
    try:
        from twisted.internet import reactor  # noqa
    except ImportError as e:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt aren't swallowed.
        log.debug('Error importing twisted.internet: %s' % e)
        raise plugin.DependencyError('output_deluge', 'twisted.internet', 'Twisted.internet package required', log)
    log.debug('Using deluge 1.2 api')
def on_task_start(self, task, config):
    """Raise a DependencyError if sleekxmpp or dnspython are not installed."""
    try:
        import sleekxmpp  # noqa
    except ImportError as e:
        log.debug('Error importing SleekXMPP: %s' % e)
        raise plugin.DependencyError(
            'notify_xmpp', 'sleekxmpp', 'SleekXMPP module required. ImportError: %s' % e)
    try:
        # The dnspython distribution installs its package under the name `dns`;
        # `import dnspython` always fails even when the library IS installed,
        # so this check used to reject every run.
        import dns  # noqa
    except ImportError as e:
        log.debug('Error importing dnspython: %s' % e)
        raise plugin.DependencyError(
            'notify_xmpp', 'dnspython', 'dnspython module required. ImportError: %s' % e)
def on_task_start(self, task, config):
    """Verify that the babelfish and subliminal libraries can be imported."""
    # Both dependencies are probed the same way, so drive the checks from a table.
    for module_name, pretty_name in (('babelfish', 'Babelfish'), ('subliminal', 'Subliminal')):
        try:
            __import__(module_name)
        except ImportError as err:
            log.debug('Error importing %s: %s' % (pretty_name, err))
            raise plugin.DependencyError(
                'subliminal', module_name,
                '%s module required. ImportError: %s' % (pretty_name, err))
def on_task_start(self, task, config):
    """Abort the task early when the python-evtx library is not installed."""
    try:
        import Evtx  # noqa
    except ImportError as err:
        log.debug('Error importing Evtx: %s' % err)
        raise plugin.DependencyError(
            'winevents', 'python-evtx', 'Evtx module required. ImportError: %s' % err)
def on_task_start(self, task, config):
    """Check subliminal availability, then configure its cache region and log levels."""
    try:
        import subliminal
    except ImportError as err:
        log.debug('Error importing Subliminal: %s' % err)
        raise plugin.DependencyError(
            'subliminal', 'subliminal', 'Subliminal module required. ImportError: %s' % err)

    from subliminal.cli import MutexLock
    from dogpile.cache.exception import RegionAlreadyConfigured

    cache_file = os.path.join(tempfile.gettempdir(), 'cachefile.dbm')
    try:
        subliminal.region.configure(
            'dogpile.cache.dbm',
            arguments={'filename': cache_file, 'lock_factory': MutexLock},
        )
    except RegionAlreadyConfigured:
        # The dogpile region is process-global; a second task start must not reconfigure it.
        pass

    # Quiet the chatty third-party loggers.
    logging.getLogger("subliminal").setLevel(logging.CRITICAL)
    logging.getLogger("enzyme").setLevel(logging.WARNING)
def on_task_start(self, task, config):
    """Swap the task's requests session for a cloudscraper-backed one when enabled."""
    try:
        import cloudscraper
    except ImportError as err:
        log.debug('Error importing cloudscraper: %s' % err)
        raise plugin.DependencyError(
            'cfscraper', 'cloudscraper', 'cloudscraper module required. ImportError: %s' % err)

    class CFScrapeWrapper(Session, cloudscraper.CloudScraper):
        """
        This class allows the FlexGet session to inherit from CloudScraper instead
        of the requests.Session directly.
        """

    if config is True:
        # OrderedDict preserves the exact order in which these headers are sent.
        header_pairs = [
            ('User-Agent', task.requests.headers['User-Agent']),
            ('Accept', 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8'),
            ('Accept-Language', 'en-US,en;q=0.5'),
            ('Accept-Encoding', 'gzip, deflate'),
            ('Connection', 'close'),
            ('Upgrade-Insecure-Requests', '1'),
        ]
        task.requests.headers = OrderedDict(header_pairs)
        task.requests = CFScrapeWrapper.create_scraper(task.requests)
def search(self, task, entry, config):
    """Search magnetdl.com for each of the entry's search strings.

    Returns a list of result entries; on a 404 the partial results gathered
    so far are returned immediately.
    """
    if not config:
        return
    try:
        import cloudscraper
    except ImportError as e:
        logger.debug('Error importing cloudscraper: {}', e)
        raise plugin.DependencyError(
            issued_by='cfscraper',
            missing='cloudscraper',
            # Fixed user-facing message typo (was 'CLOudscraper').
            message='cloudscraper module required. ImportError: %s' % e,
        )
    scraper = cloudscraper.create_scraper()
    entries = []
    for search_string in entry.get('search_strings', [entry['title']]):
        logger.debug('Searching `{}`', search_string)
        url = 'https://www.magnetdl.com/b/{}/'.format(quote(search_string.lower()))
        try:
            # Renamed loop variable: the original shadowed the `entry` parameter.
            for result in self.parse_page(scraper, url):
                entries.append(result)
        except Page404Error:
            logger.warning('Url {} returned 404', url)
            return entries
        # Small random delay between searches to be polite to the server.
        time.sleep(random.randint(1, 5))
    return entries
def on_task_start(self, task, config):
    """Fail fast if the boto3 SDK (required for SNS notifications) cannot be imported."""
    try:
        import boto3  # noqa
    except ImportError as err:
        log.debug("Error importing boto3: %s", err)
        raise plugin.DependencyError("sns", "boto3", "Boto3 module required. ImportError: %s" % err)
def on_task_start(self, task, config):
    """Replace the task's requests session with a CloudScraper-backed one when enabled.

    Raises plugin.DependencyError when the cloudscraper package is missing.
    """
    try:
        import cloudscraper
    except ImportError as e:
        logger.debug('Error importing cloudscraper: {}', e)
        raise plugin.DependencyError(
            'cfscraper', 'cloudscraper', 'cloudscraper module required. ImportError: %s' % e)

    class CFScrapeWrapper(Session, cloudscraper.CloudScraper):
        """
        This class allows the FlexGet session to inherit from CloudScraper instead of the
        requests.Session directly.
        """

        def Challenge_Response(self, resp, **kwargs):
            """Make sure limiters are disabled when doing a cloudflare challenge."""
            if not self.is_reCaptcha_Challenge(resp):
                # If this is a recaptcha challenge, the request gets sent straight to requests, not our subclass,
                # so it can't have any extra arguments that requests doesn't expect.
                kwargs['disable_limiters'] = True
            return super().Challenge_Response(resp, **kwargs)

    if config is True:
        # NOTE(review): OrderedDict presumably matters for Cloudflare's header-order
        # fingerprinting — confirm before changing to a plain dict.
        task.requests.headers = OrderedDict([
            ('User-Agent', task.requests.headers['User-Agent']),
            ('Accept', 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8'),
            ('Accept-Language', 'en-US,en;q=0.5'),
            ('Accept-Encoding', 'gzip, deflate'),
            ('Connection', 'close'),
            ('Upgrade-Insecure-Requests', '1'),
        ])
        # solveDepth=5 bounds how many consecutive challenges cloudscraper will attempt.
        task.requests = CFScrapeWrapper.create_scraper(task.requests, solveDepth=5)
def on_task_start(self, task, config):
    """Abort the task when the periscope subtitle library cannot be imported."""
    try:
        import periscope  # noqa
    except ImportError as err:
        log.debug('Error importing Periscope: %s' % err)
        raise plugin.DependencyError(
            'periscope', 'periscope', 'Periscope module required. ImportError: %s' % err)
def setup_client(self, config):
    """Build a DelugeRPCClient from config, resolving localhost auth when needed."""
    try:
        from deluge_client import DelugeRPCClient
    except ImportError as err:
        log.debug('Error importing deluge-client: %s' % err)
        raise plugin.DependencyError(
            'deluge',
            'deluge-client',
            'deluge-client >=1.5 is required. `pip install deluge-client` to install.',
            log,
        )
    config = self.prepare_config(config)

    is_local_host = config['host'] in ['localhost', '127.0.0.1']
    if is_local_host and not config.get('username'):
        # If an username is not specified, we have to do a lookup for the localclient username/password
        auth = self.get_localhost_auth(config.get('config_path'))
        if not (auth and auth[0]):
            raise plugin.PluginError(
                'Unable to get local authentication info for Deluge. You may need to '
                'specify an username and password from your Deluge auth file.'
            )
        config['username'], config['password'] = auth

    return DelugeRPCClient(
        config['host'],
        config['port'],
        config['username'],
        config['password'],
        decode_utf8=True,
    )
def on_task_start(self, task, config):
    """Ensure libtorrent is importable, unless the plugin is explicitly disabled."""
    if config is False:
        # Plugin disabled — nothing to verify.
        return
    try:
        import libtorrent  # noqa
    except ImportError:
        raise plugin.DependencyError('convert_magnet', 'libtorrent', 'libtorrent package required', log)
def __init__(
    self,
    host: str,
    port: int,
    username: str,
    password: Optional[str] = None,
    private_key: Optional[str] = None,
    private_key_pass: Optional[str] = None,
    connection_tries: int = 3,
):
    """Open an SFTP connection and prepare the handler builder.

    :param host: remote host name or address
    :param port: remote SSH/SFTP port
    :param username: account to authenticate as
    :param password: password auth, if used
    :param private_key: path/identifier of a private key, if key auth is used
    :param private_key_pass: passphrase for the private key, if any
    :param connection_tries: how many connection attempts _connect may make
    :raises plugin.DependencyError: when the pysftp module is not installed
    """
    # Guard first: everything below is useless without pysftp.
    if not pysftp:
        raise plugin.DependencyError(
            issued_by='sftp_client',
            missing='pysftp',
            message='sftp client requires the pysftp Python module.',
        )
    # Plain connection parameters, stored before any derived state is computed.
    self.host: str = host
    self.port: int = port
    self.username: str = username
    self.password: Optional[str] = password
    self.private_key: Optional[str] = private_key
    self.private_key_pass: Optional[str] = private_key_pass
    # Derived state: _get_prefix/_connect read the attributes assigned above,
    # so the order of these statements matters.
    self.prefix: str = self._get_prefix()
    self._sftp: 'pysftp.Connection' = self._connect(connection_tries)
    self._handler_builder: HandlerBuilder = HandlerBuilder(
        self._sftp, self.prefix, self.private_key, self.private_key_pass
    )
def on_task_start(self, task, config):
    """Verify the bundled FriendFeed API 2.0 module is importable."""
    try:
        import flexget.plugins.local.friendfeed2  # noqa
    except ImportError as err:
        log.debug('Error importing FriendFeed API 2.0: %s' % err)
        raise plugin.DependencyError(
            'friendfeed', 'friendfeed',
            'FriendFeed API 2.0 module required. ImportError: %s' % err)
def on_task_start(self, task, config):
    """Fail fast when the GObject Notify binding is unavailable."""
    try:
        from gi.repository import Notify  # noqa
    except ImportError as err:
        log.debug('Error importing Notify: %s' % err)
        raise plugin.DependencyError(
            'notify_osd', 'gi.repository', 'Notify module required. ImportError: %s' % err)
def scraper():
    """Return a fresh cfscrape scraper, or raise DependencyError if cfscrape is missing."""
    try:
        import cfscrape
    except ImportError as err:
        log.debug('Error importing cfscrape: %s', err)
        raise plugin.DependencyError(
            'cfscraper', 'cfscrape', 'cfscrape module required. ImportError: %s' % err)
    # Import succeeded — hand back a new scraper instance.
    return cfscrape.create_scraper()
def dependency_check():
    """
    Check if pysftp module is present
    """
    if pysftp:
        return
    raise plugin.DependencyError(
        issued_by='sftp',
        missing='pysftp',
        message='sftp plugin requires the pysftp Python module.',
    )
def parse(self, config):
    """
    Parse filter condition(s) from config.

    :raises plugin.DependencyError: when pyrocore >= 0.4 is not installed
    """
    try:
        from pyrocore.util import matching
    except ImportError as exc:
        # `except ImportError, exc:` is Python-2-only syntax and a SyntaxError on
        # Python 3; the `as` form works on 2.6+ and 3.x alike.
        raise plugin.DependencyError(
            "You need to (easy_)install 'pyrocore>=0.4' to use the %s plugin (%s)"
            % (self.name, exc))
def on_task_start(self, task, config):
    """Raise a DependencyError if our dependencies aren't available"""
    try:
        from filmweb.filmweb import Filmweb as FilmwebAPI  # noqa
    except ImportError as err:
        log.debug('Error importing pyfilmweb: %s' % err)
        raise plugin.DependencyError(
            'filmweb_watchlist', 'pyfilmweb',
            'pyfilmweb==0.1.1.1 module required. ImportError: %s' % err, log)
def on_task_start(self, task, config):
    """Wrap the task's requests session with cfscrape when the plugin is enabled."""
    try:
        import cfscrape
    except ImportError as err:
        log.debug('Error importing cfscrape: %s' % err)
        raise plugin.DependencyError(
            'cfscraper', 'cfscrape', 'cfscrape module required. ImportError: %s' % err)
    if config is True:
        task.requests = cfscrape.create_scraper(task.requests)
def __init__(self):
    """Import psutil and keep a module reference on the instance.

    Raises plugin.DependencyError when psutil is unavailable.
    """
    try:
        import psutil
    except ImportError as err:
        log.debug('Error importing psutil: %s' % err)
        raise plugin.DependencyError(
            'limiter',
            'psutil',
            'psutil is required. `pip install psutil` to install.',
            log,
        )
    # Stash the module so later calls don't re-import.
    self.psutil = psutil
def on_task_start(self, task, config):
    """Swap the task's requests session class for a cfscrape-aware one when enabled."""
    try:
        import cfscrape
    except ImportError as err:
        log.debug('Error importing cfscrape: %s' % err)
        raise plugin.DependencyError(
            'cfscraper', 'cfscrape', 'cfscrape module required. ImportError: %s' % err)

    class CFScrapeWrapper(Session, cfscrape.CloudflareScraper):
        """
        This class allows the FlexGet session to inherit from CFScraper instead of the
        requests.Session directly.
        """

    if config is True:
        task.requests = CFScrapeWrapper.create_scraper(task.requests)
def on_task_modify(self, task, config):
    """
    The downloaded file is accessible in modify phase
    """
    try:
        from pynzb import nzb_parser
    except ImportError:
        # TODO: remove builtin status so this won't get repeated on every task execution
        # TODO: this will get loaded even without any need for nzb
        raise plugin.DependencyError(issued_by='nzb_size', missing='lib pynzb')

    for entry in task.accepted:
        # Treat the entry as an nzb when its mime-type says so or the filename ends in .nzb.
        if (
            entry.get('mime-type') in ['text/nzb', 'application/x-nzb']
            or entry.get('filename')
            and entry['filename'].endswith('.nzb')
        ):
            if 'file' not in entry:
                log.warning(
                    '`%s` does not have a `file` that could be used to get size information'
                    % entry['title']
                )
                continue
            filename = entry['file']
            log.debug('reading %s' % filename)
            # Context manager closes the handle promptly; the original
            # `open(filename).read()` leaked it until garbage collection.
            with open(filename) as nzb_file:
                xmldata = nzb_file.read()
            try:
                nzbfiles = nzb_parser.parse(xmldata)
            except Exception:
                log.debug('%s is not a valid nzb' % entry['title'])
                continue
            # Total advertised byte size across every segment of every file.
            size = sum(
                segment.bytes for nzbfile in nzbfiles for segment in nzbfile.segments
            )
            size_mb = size / 1024 / 1024
            log.debug('%s content size: %s MB' % (entry['title'], size_mb))
            entry['content_size'] = size_mb
        else:
            log.trace('%s does not seem to be nzb' % entry['title'])
def notify(self, sns_topic_arn, title, message, url, aws_region, aws_access_key_id=None,
           aws_secret_access_key=None, profile_name=None, **kwargs):
    """
    Send an Amazon SNS notification

    :param str sns_topic_arn: SNS Topic ARN
    :param str title: Notification title
    :param str message: Notification message
    :param str aws_region: AWS region
    :param str aws_access_key_id: AWS access key ID. Will be taken from AWS_ACCESS_KEY_ID environment if not provided.
    :param str aws_secret_access_key: AWS secret access key ID. Will be taken from AWS_SECRET_ACCESS_KEY environment
        if not provided.
    :param str profile_name: If provided, use this profile name instead of the default.
    """
    try:
        import boto3  # noqa
    except ImportError as e:
        log.debug("Error importing boto3: %s", e)
        raise plugin.DependencyError(
            "sns", "boto3", "Boto3 module required. ImportError: %s" % e)

    session = boto3.Session(aws_access_key_id=aws_access_key_id,
                            aws_secret_access_key=aws_secret_access_key,
                            profile_name=profile_name, region_name=aws_region)
    sns = session.resource('sns')
    # Bug fix: the Topic resource must be addressed by its ARN. The original
    # passed `title`, so `sns_topic_arn` was never used and every publish
    # targeted a nonexistent topic.
    topic = sns.Topic(sns_topic_arn)
    # NOTE(review): `message` is accepted but not included in the payload —
    # only title and url are published. Confirm whether that is intentional.
    sns_message = json.dumps({'entry': {
        'title': title,
        'url': url,
    }})
    try:
        topic.publish(Message=sns_message)
    except Exception as e:
        raise PluginWarning("Error publishing %s: " % e.args[0])
def on_task_input(self, task, config):
    """Yield entries scraped from magnetdl.com category pages.

    Pages are scanned newest-first; the newest magnet seen is persisted so the
    next run can stop when it reaches content it already emitted.
    """
    try:
        import cloudscraper
    except ImportError as e:
        logger.debug('Error importing cloudscraper: {}', e)
        raise plugin.DependencyError(
            issued_by='cfscraper',
            missing='cloudscraper',
            # Fixed user-facing message typo (was 'CLOudscraper').
            message='cloudscraper module required. ImportError: %s' % e,
        )
    scraper = cloudscraper.create_scraper()
    category = config['category']
    persistence = SimplePersistence(plugin='magnetdl')
    # Magnet URL where the previous run started; marks the stop point for this run.
    last_magnet = persistence.get(category, None)
    logger.debug('last_magnet: {}', last_magnet)
    first_magnet = None
    stop = False
    for page in range(0, config['pages']):
        logger.verbose('Retrieving {} page {}', category, page + 1)
        url = self._url(category, page)
        logger.debug('Url: {}', url)
        try:
            for entry in self.parse_page(scraper, url):
                if first_magnet is None:
                    # Remember the newest magnet immediately so a crash later
                    # doesn't lose the bookmark.
                    first_magnet = entry['url']
                    logger.debug('Set first_magnet to {}', first_magnet)
                    persistence[category] = first_magnet
                if last_magnet == entry['url']:
                    logger.debug('Found page where we have left, stopping')
                    stop = True
                yield entry
        except Page404Error:
            logger.warning('Page {} returned 404, stopping', page)
            return
        if stop:
            return
        # Small random delay between pages to be polite to the server.
        time.sleep(random.randint(1, 5))
import logging from collections import MutableSet from flexget import plugin from flexget.entry import Entry from flexget.event import event from flexget.plugin import PluginError from flexget.utils.requests import RequestException from flexget.utils.tools import split_title_year try: # NOTE: Importing other plugins is discouraged! from flexget.components.thetvdb import api_tvdb as plugin_api_tvdb except ImportError: raise plugin.DependencyError(issued_by=__name__, missing='api_tvdb') log = logging.getLogger('thetvdb_list') class TheTVDBSet(MutableSet): schema = { 'type': 'object', 'properties': { 'username': {'type': 'string'}, 'account_id': {'type': 'string'}, 'api_key': {'type': 'string'}, 'strip_dates': {'type': 'boolean'}, 'language': {'type': 'string'}, }, 'required': ['username', 'account_id', 'api_key'], 'additionalProperties': False,
from __future__ import unicode_literals, division, absolute_import import logging from flexget import plugin from flexget.event import event from flexget.entry import Entry from flexget.manager import Session from flexget.utils.imdb import make_url as make_imdb_url try: from flexget.plugins.filter.movie_queue import queue_get except ImportError: raise plugin.DependencyError(issued_by='emit_movie_queue', missing='movie_queue') log = logging.getLogger('emit_movie_queue') class EmitMovieQueue(object): """Use your movie queue as an input by emitting the content of it""" schema = { 'oneOf': [{ 'type': 'boolean' }, { 'type': 'object', 'properties': { 'year': { 'type': 'boolean' }, 'quality': {
from __future__ import unicode_literals, division, absolute_import from builtins import * # noqa pylint: disable=unused-import, redefined-builtin from flexget import options from flexget import plugin from flexget.event import event from flexget.manager import Session from flexget.terminal import TerminalTable, TerminalTableError, table_parser, console try: # NOTE: Importing other plugins is discouraged! from flexget.components.rejected import remember_rejected as plugin_remember_rejected except ImportError: raise plugin.DependencyError(issued_by=__name__, missing='remember_rejected') def do_cli(manager, options): if options.rejected_action == 'list': list_rejected(options) elif options.rejected_action == 'clear': clear_rejected(manager) def list_rejected(options): with Session() as session: results = session.query(plugin_remember_rejected.RememberEntry).all() header = ['#', 'Title', 'Task', 'Rejected by', 'Reason'] table_data = [header] for entry in results: table_data.append(