Example #1
def runCouchPotato(options,
                   base_path,
                   args,
                   data_dir=None,
                   log_dir=None,
                   Env=None,
                   desktop=None):

    try:
        locale.setlocale(locale.LC_ALL, "")
        encoding = locale.getpreferredencoding()
    except (locale.Error, IOError):
        encoding = None

    # for OSes that are poorly configured I'll just force UTF-8
    if not encoding or encoding in ('ANSI_X3.4-1968', 'US-ASCII', 'ASCII'):
        encoding = 'UTF-8'

    # Do db stuff
    db_path = os.path.join(data_dir, 'couchpotato.db')

    # Backup before start and cleanup old databases
    new_backup = os.path.join(data_dir, 'db_backup', str(int(time.time())))

    # Create path and copy
    if not os.path.isdir(new_backup): os.makedirs(new_backup)
    src_files = [
        options.config_file, db_path, db_path + '-shm', db_path + '-wal'
    ]
    for src_file in src_files:
        if os.path.isfile(src_file):
            shutil.copy2(src_file,
                         os.path.join(new_backup, os.path.basename(src_file)))

    # Remove older backups; keep them for 3 days, or at least the last 3
    backups = []
    for directory in os.listdir(os.path.dirname(new_backup)):
        backup = os.path.join(os.path.dirname(new_backup), directory)
        if os.path.isdir(backup):
            backups.append(backup)

    total_backups = len(backups)
    for backup in backups:
        if total_backups > 3:
            if tryInt(os.path.basename(backup)) < time.time() - 259200:
                for src_file in src_files:
                    b_file = os.path.join(backup, os.path.basename(src_file))
                    if os.path.isfile(b_file):
                        os.remove(b_file)
                os.rmdir(backup)
                total_backups -= 1

    # Register environment settings
    Env.set('encoding', encoding)
    Env.set('app_dir', base_path)
    Env.set('data_dir', data_dir)
    Env.set('log_path', os.path.join(log_dir, 'CouchPotato.log'))
    Env.set('db_path', 'sqlite:///' + db_path)
    Env.set('cache_dir', os.path.join(data_dir, 'cache'))
    Env.set('cache',
            FileSystemCache(os.path.join(Env.get('cache_dir'), 'python')))
    Env.set('console_log', options.console_log)
    Env.set('quiet', options.quiet)
    Env.set('desktop', desktop)
    Env.set('daemonized', options.daemon)
    Env.set('args', args)
    Env.set('options', options)

    # Determine debug
    debug = options.debug or Env.setting('debug', default=False, type='bool')
    Env.set('debug', debug)

    # Development
    development = Env.setting('development', default=False, type='bool')
    Env.set('dev', development)

    # Disable logging for some modules
    for logger_name in ['enzyme', 'guessit', 'subliminal', 'apscheduler']:
        logging.getLogger(logger_name).setLevel(logging.ERROR)

    for logger_name in ['gntp', 'migrate']:
        logging.getLogger(logger_name).setLevel(logging.WARNING)

    # Use reloader
    reloader = debug is True and development and not Env.get(
        'desktop') and not options.daemon

    # Logger
    logger = logging.getLogger()
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s',
                                  '%m-%d %H:%M:%S')
    level = logging.DEBUG if debug else logging.INFO
    logger.setLevel(level)
    logging.addLevelName(19, 'INFO')

    # To screen
    if (debug or
            options.console_log) and not options.quiet and not options.daemon:
        hdlr = logging.StreamHandler(sys.stderr)
        hdlr.setFormatter(formatter)
        logger.addHandler(hdlr)

    # To file
    hdlr2 = handlers.RotatingFileHandler(Env.get('log_path'), 'a', 500000, 10)
    hdlr2.setFormatter(formatter)
    logger.addHandler(hdlr2)

    # Start logging & enable colors
    import color_logs
    from couchpotato.core.logger import CPLog
    log = CPLog(__name__)
    log.debug('Started with options %s', options)

    def customwarn(message, category, filename, lineno, file=None, line=None):
        log.warning('%s %s %s line:%s', (category, message, filename, lineno))

    warnings.showwarning = customwarn

    # Check if database exists
    db = Env.get('db_path')
    db_exists = os.path.isfile(db_path)

    # Load configs & plugins
    loader = Env.get('loader')
    loader.preload(root=base_path)
    loader.run()

    # Load migrations
    if db_exists:

        from migrate.versioning.api import version_control, db_version, version, upgrade
        repo = os.path.join(base_path, 'couchpotato', 'core', 'migration')

        latest_db_version = version(repo)
        try:
            current_db_version = db_version(db, repo)
        except:
            version_control(db, repo, version=latest_db_version)
            current_db_version = db_version(db, repo)

        if current_db_version < latest_db_version and not development:
            log.info('Doing database upgrade. From %d to %d',
                     (current_db_version, latest_db_version))
            upgrade(db, repo)

    # Configure Database
    from couchpotato.core.settings.model import setup
    setup()

    # Fill database with needed stuff
    if not db_exists:
        fireEvent('app.initialize', in_order=True)

    # Create app
    from couchpotato import app
    api_key = Env.setting('api_key')
    url_base = '/' + Env.setting('url_base').lstrip('/') if Env.setting(
        'url_base') else ''

    # Basic config
    app.secret_key = api_key
    host = Env.setting('host', default='0.0.0.0')
    # app.debug = development
    config = {
        'use_reloader': reloader,
        'port': tryInt(Env.setting('port', default=5000)),
        'host': host if host and len(host) > 0 else '0.0.0.0',
        'ssl_cert': Env.setting('ssl_cert', default=None),
        'ssl_key': Env.setting('ssl_key', default=None),
    }

    # Static path
    app.static_folder = os.path.join(base_path, 'couchpotato', 'static')
    web.add_url_rule('api/%s/static/<path:filename>' % api_key,
                     endpoint='static',
                     view_func=app.send_static_file)

    # Register modules
    app.register_blueprint(web, url_prefix='%s/' % url_base)
    app.register_blueprint(api, url_prefix='%s/api/%s/' % (url_base, api_key))

    # Some logging and fire load event
    try:
        log.info('Starting server on port %(port)s', config)
    except:
        pass
    fireEventAsync('app.load')

    # Go go go!
    from tornado.ioloop import IOLoop
    web_container = WSGIContainer(app)
    web_container._log = _log
    loop = IOLoop.instance()

    application = Application(
        [
            (r'%s/api/%s/nonblock/(.*)/' %
             (url_base, api_key), NonBlockHandler),
            (r'.*', FallbackHandler, dict(fallback=web_container)),
        ],
        log_function=lambda x: None,
        debug=config['use_reloader'],
        gzip=True,
    )

    if config['ssl_cert'] and config['ssl_key']:
        server = HTTPServer(application,
                            no_keep_alive=True,
                            ssl_options={
                                "certfile": config['ssl_cert'],
                                "keyfile": config['ssl_key'],
                            })
    else:
        server = HTTPServer(application, no_keep_alive=True)

    try_restart = True
    restart_tries = 5

    while try_restart:
        try:
            server.listen(config['port'], config['host'])
            loop.start()
        except Exception, e:
            try:
                nr, msg = e
                if nr == 48:
                    log.info(
                        'Already in use, will try %s more time(s) after a few seconds',
                        restart_tries)
                    time.sleep(1)
                    restart_tries -= 1

                    if restart_tries > 0:
                        continue
                    else:
                        return
            except:
                pass

            raise

        try_restart = False
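
The pruning rule in Example #1 ("keep backups 3 days or at least 3") is easy to miss inside the larger function. Below is a minimal standalone sketch of the same idea; prune_backups is a hypothetical helper that only returns the paths it would delete, leaving the per-file os.remove and the os.rmdir to the caller.

import os
import time


def prune_backups(backup_root, keep_days=3, keep_min=3, now=None):
    # Hypothetical standalone version of the rule above: drop timestamp-named
    # backup directories older than keep_days, but always keep at least keep_min.
    now = now or time.time()
    backups = sorted(
        name for name in os.listdir(backup_root)
        if name.isdigit() and os.path.isdir(os.path.join(backup_root, name))
    )
    old = [name for name in backups if int(name) < now - keep_days * 86400]
    removable = max(0, len(backups) - keep_min)
    return [os.path.join(backup_root, name) for name in old[:removable]]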
Example #2
import re
import traceback

from bs4 import BeautifulSoup

from couchpotato.core.helpers.variable import try_int, get_identifier
from couchpotato.core.logger import CPLog
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider

log = CPLog(__name__)


class Base(TorrentProvider):

    urls = {
        'test': 'https://awesome-hd.me/',
        'detail': 'https://awesome-hd.me/torrents.php?torrentid=%s',
        'search': 'https://awesome-hd.me/searchapi.php?action=imdbsearch&passkey=%s&imdb=%s&internal=%s',
        'download': 'https://awesome-hd.me/torrents.php?action=download&id=%s&authkey=%s&torrent_pass=%s',
    }
    http_time_between_calls = 1
    login_fail_msg = 'Please check that you provided a valid API Key, username, and action.'

    def _search(self, movie, quality, results):

        data = self.getHTMLData(self.urls['search'] %
                                (self.conf('passkey'), get_identifier(movie),
Example #3
class Deluge(Downloader):

    protocol = ['torrent', 'torrent_magnet']
    log = CPLog(__name__)
    drpc = None

    def connect(self):
        # Load host from config and split out port.
        host = cleanHost(self.conf('host'), protocol=False).split(':')
        if not isInt(host[1]):
            log.error(
                'Config properties are not filled in correctly, port is missing.'
            )
            return False

        if not self.drpc:
            self.drpc = DelugeRPC(host[0],
                                  port=host[1],
                                  username=self.conf('username'),
                                  password=self.conf('password'))

        return self.drpc

    def download(self, data=None, media=None, filedata=None):
        if not media: media = {}
        if not data: data = {}

        log.info('Sending "%s" (%s) to Deluge.',
                 (data.get('name'), data.get('protocol')))

        if not self.connect():
            return False

        if not filedata and data.get('protocol') == 'torrent':
            log.error('Failed sending torrent, no data')
            return False

        # Set parameters for Deluge
        options = {
            'add_paused': self.conf('paused', default=0),
            'label': self.conf('label')
        }

        if self.conf('directory'):
            if os.path.isdir(self.conf('directory')):
                options['download_location'] = self.conf('directory')
            else:
                log.error(
                    'Download directory from Deluge settings: %s doesn\'t exist',
                    self.conf('directory'))

        if self.conf('completed_directory'):
            if os.path.isdir(self.conf('completed_directory')):
                options['move_completed'] = 1
                options['move_completed_path'] = self.conf(
                    'completed_directory')
            else:
                log.error(
                    'Download directory from Deluge settings: %s doesn\'t exist',
                    self.conf('directory'))

        if data.get('seed_ratio'):
            options['stop_at_ratio'] = 1
            options['stop_ratio'] = tryFloat(data.get('seed_ratio'))


        # Deluge only has seed time as a global option. Might be added
        # in a future API release.
        # if data.get('seed_time'):

        # Send request to Deluge
        if data.get('protocol') == 'torrent_magnet':
            remote_torrent = self.drpc.add_torrent_magnet(
                data.get('url'), options)
        else:
            filename = self.createFileName(data, filedata, media)
            remote_torrent = self.drpc.add_torrent_file(
                filename, filedata, options)

        if not remote_torrent:
            log.error('Failed sending torrent to Deluge')
            return False

        log.info('Torrent sent to Deluge successfully.')
        return self.downloadReturnId(remote_torrent)

    def getAllDownloadStatus(self, ids):

        log.debug('Checking Deluge download status.')

        if not self.connect():
            return []

        release_downloads = ReleaseDownloadList(self)

        queue = self.drpc.get_alltorrents(ids)

        if not queue:
            log.debug('Nothing in queue or error')
            return []

        for torrent_id in queue:
            torrent = queue[torrent_id]

            if not 'hash' in torrent:
                # When given a list of ids, Deluge will return an empty item for a non-existent torrent.
                continue

            log.debug(
                'name=%s / id=%s / save_path=%s / move_on_completed=%s / move_completed_path=%s / hash=%s / progress=%s / state=%s / eta=%s / ratio=%s / stop_ratio=%s / is_seed=%s / is_finished=%s / paused=%s',
                (torrent['name'], torrent['hash'], torrent['save_path'],
                 torrent['move_on_completed'], torrent['move_completed_path'],
                 torrent['hash'], torrent['progress'], torrent['state'],
                 torrent['eta'], torrent['ratio'], torrent['stop_ratio'],
                 torrent['is_seed'], torrent['is_finished'],
                 torrent['paused']))

            # Deluge has no easy way to work out if a torrent is stalled or failing.
            #status = 'failed'
            status = 'busy'
            if torrent['is_seed'] and tryFloat(torrent['ratio']) < tryFloat(
                    torrent['stop_ratio']):
                # We have torrent['seeding_time'] to work out what the seeding time is, but we do not
                # have access to the downloader seed_time, as with deluge we have no way to pass it
                # when the torrent is added. So Deluge will only look at the ratio.
                # See above comment in download().
                status = 'seeding'
            elif torrent['is_seed'] and torrent['is_finished'] and torrent[
                    'paused'] and torrent['state'] == 'Paused':
                status = 'completed'

            download_dir = sp(torrent['save_path'])
            if torrent['move_on_completed']:
                download_dir = torrent['move_completed_path']

            torrent_files = []
            for file_item in torrent['files']:
                torrent_files.append(
                    sp(os.path.join(download_dir, file_item['path'])))

            release_downloads.append({
                'id': torrent['hash'],
                'name': torrent['name'],
                'status': status,
                'original_status': torrent['state'],
                'seed_ratio': torrent['ratio'],
                'timeleft': str(timedelta(seconds=torrent['eta'])),
                'folder': sp(download_dir if len(torrent_files) == 1
                             else os.path.join(download_dir, torrent['name'])),
                'files': '|'.join(torrent_files),
            })

        return release_downloads

    def pause(self, release_download, pause=True):
        if pause:
            return self.drpc.pause_torrent([release_download['id']])
        else:
            return self.drpc.resume_torrent([release_download['id']])

    def removeFailed(self, release_download):
        log.info('%s failed downloading, deleting...',
                 release_download['name'])
        return self.drpc.remove_torrent(release_download['id'], True)

    def processComplete(self, release_download, delete_files=False):
        log.debug(
            'Requesting Deluge to remove the torrent %s%s.',
            (release_download['name'],
             ' and cleanup the downloaded files' if delete_files else ''))
        return self.drpc.remove_torrent(release_download['id'],
                                        remove_local_data=delete_files)
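
The 'folder' value built in Example #3 depends on move_on_completed and on whether the torrent contains one file or many. A condensed, hypothetical helper with the same logic (the sp() path normalisation used in the original is omitted):

import os


def resolve_deluge_folder(torrent, torrent_files):
    # Hypothetical condensation of the folder resolution in getAllDownloadStatus():
    # prefer move_completed_path when the torrent is moved on completion; for a
    # multi-file torrent the release folder is <download_dir>/<torrent name>.
    download_dir = torrent['save_path']
    if torrent['move_on_completed']:
        download_dir = torrent['move_completed_path']
    if len(torrent_files) == 1:
        return download_dir
    return os.path.join(download_dir, torrent['name'])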
Example #4
def runCouchPotato(options,
                   base_path,
                   args,
                   data_dir=None,
                   log_dir=None,
                   Env=None,
                   desktop=None):

    try:
        locale.setlocale(locale.LC_ALL, "")
        encoding = locale.getpreferredencoding()
    except (locale.Error, IOError):
        encoding = None

    # for OSes that are poorly configured I'll just force UTF-8
    if not encoding or encoding in ('ANSI_X3.4-1968', 'US-ASCII', 'ASCII'):
        encoding = 'UTF-8'

    Env.set('encoding', encoding)

    # Do db stuff
    db_path = toUnicode(os.path.join(data_dir, 'couchpotato_v2.db'))

    # Backup before start and cleanup old databases
    new_backup = toUnicode(
        os.path.join(data_dir, 'db_backup', str(int(time.time()))))

    # Create path and copy
    if not os.path.isdir(new_backup): os.makedirs(new_backup)
    src_files = [
        options.config_file, db_path, db_path + '-shm', db_path + '-wal'
    ]
    for src_file in src_files:
        if os.path.isfile(src_file):
            dst_file = toUnicode(
                os.path.join(new_backup, os.path.basename(src_file)))
            shutil.copyfile(src_file, dst_file)

            # Try to copy stats separately
            try:
                shutil.copystat(src_file, dst_file)
            except:
                pass

    # Remove older backups; keep them for 3 days, or at least the last 3
    backups = []
    for directory in os.listdir(os.path.dirname(new_backup)):
        backup = toUnicode(os.path.join(os.path.dirname(new_backup),
                                        directory))
        if os.path.isdir(backup):
            backups.append(backup)

    total_backups = len(backups)
    for backup in backups:
        if total_backups > 3:
            if tryInt(os.path.basename(backup)) < time.time() - 259200:
                for the_file in os.listdir(backup):
                    file_path = os.path.join(backup, the_file)
                    try:
                        if os.path.isfile(file_path):
                            os.remove(file_path)
                    except:
                        raise

                os.rmdir(backup)
                total_backups -= 1

    # Register environment settings
    Env.set('app_dir', toUnicode(base_path))
    Env.set('data_dir', toUnicode(data_dir))
    Env.set('log_path', toUnicode(os.path.join(log_dir, 'CouchPotato.log')))
    Env.set('db_path', toUnicode('sqlite:///' + db_path))
    Env.set('cache_dir', toUnicode(os.path.join(data_dir, 'cache')))
    Env.set(
        'cache',
        FileSystemCache(toUnicode(os.path.join(Env.get('cache_dir'),
                                               'python'))))
    Env.set('console_log', options.console_log)
    Env.set('quiet', options.quiet)
    Env.set('desktop', desktop)
    Env.set('daemonized', options.daemon)
    Env.set('args', args)
    Env.set('options', options)

    # Determine debug
    debug = options.debug or Env.setting('debug', default=False, type='bool')
    Env.set('debug', debug)

    # Development
    development = Env.setting('development', default=False, type='bool')
    Env.set('dev', development)

    # Disable logging for some modules
    for logger_name in ['enzyme', 'guessit', 'subliminal', 'apscheduler']:
        logging.getLogger(logger_name).setLevel(logging.ERROR)

    for logger_name in ['gntp', 'migrate']:
        logging.getLogger(logger_name).setLevel(logging.WARNING)

    # Use reloader
    reloader = debug is True and development and not Env.get(
        'desktop') and not options.daemon and options.noreloader is True

    # Logger
    logger = logging.getLogger()
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s',
                                  '%m-%d %H:%M:%S')
    level = logging.DEBUG if debug else logging.INFO
    logger.setLevel(level)
    logging.addLevelName(19, 'INFO')

    # To screen
    if (debug or
            options.console_log) and not options.quiet and not options.daemon:
        hdlr = logging.StreamHandler(sys.stderr)
        hdlr.setFormatter(formatter)
        logger.addHandler(hdlr)

    # To file
    hdlr2 = handlers.RotatingFileHandler(Env.get('log_path'), 'a', 500000, 10)
    hdlr2.setFormatter(formatter)
    logger.addHandler(hdlr2)

    # Start logging & enable colors
    import color_logs
    from couchpotato.core.logger import CPLog
    log = CPLog(__name__)
    log.debug('Started with options %s', options)

    def customwarn(message, category, filename, lineno, file=None, line=None):
        log.warning('%s %s %s line:%s', (category, message, filename, lineno))

    warnings.showwarning = customwarn

    # Check if database exists
    db = Env.get('db_path')
    db_exists = os.path.isfile(toUnicode(db_path))

    # Load migrations
    if db_exists:

        from migrate.versioning.api import version_control, db_version, version, upgrade
        repo = os.path.join(base_path, 'couchpotato', 'core', 'migration')

        latest_db_version = version(repo)
        try:
            current_db_version = db_version(db, repo)
        except:
            version_control(db, repo, version=latest_db_version)
            current_db_version = db_version(db, repo)

        if current_db_version < latest_db_version:
            if development:
                log.error(
                    'There is a database migration ready, but you are running development mode, so it won\'t be used. If you see this, you are stupid. Please disable development mode.'
                )
            else:
                log.info('Doing database upgrade. From %d to %d',
                         (current_db_version, latest_db_version))
                upgrade(db, repo)

    # Configure Database
    from couchpotato.core.settings.model import setup
    setup()

    # Create app
    from couchpotato import WebHandler
    web_base = ('/' + Env.setting('url_base').lstrip('/') +
                '/') if Env.setting('url_base') else '/'
    Env.set('web_base', web_base)

    api_key = Env.setting('api_key')
    api_base = r'%sapi/%s/' % (web_base, api_key)
    Env.set('api_base', api_base)

    # Basic config
    host = Env.setting('host', default='0.0.0.0')
    # app.debug = development
    config = {
        'use_reloader': reloader,
        'port': tryInt(Env.setting('port', default=5050)),
        'host': host if host and len(host) > 0 else '0.0.0.0',
        'ssl_cert': Env.setting('ssl_cert', default=None),
        'ssl_key': Env.setting('ssl_key', default=None),
    }

    # Load the app
    application = Application(
        [],
        log_function=lambda x: None,
        debug=config['use_reloader'],
        gzip=True,
        cookie_secret=api_key,
        login_url='%slogin/' % web_base,
    )
    Env.set('app', application)

    # Request handlers
    application.add_handlers(
        ".*$",
        [
            (r'%snonblock/(.*)(/?)' % api_base, NonBlockHandler),

            # API handlers
            (r'%s(.*)(/?)' % api_base, ApiHandler),  # Main API handler
            (r'%sgetkey(/?)' % web_base, KeyHandler),  # Get API key
            (r'%s' % api_base, RedirectHandler, {
                "url": web_base + 'docs/'
            }),  # API docs

            # Login handlers
            (r'%slogin(/?)' % web_base, LoginHandler),
            (r'%slogout(/?)' % web_base, LogoutHandler),

            # Catch all webhandlers
            (r'%s(.*)(/?)' % web_base, WebHandler),
            (r'(.*)', WebHandler),
        ])

    # Static paths
    static_path = '%sstatic/' % web_base
    for dir_name in ['fonts', 'images', 'scripts', 'style']:
        application.add_handlers(
            ".*$",
            [('%s%s/(.*)' % (static_path, dir_name), StaticFileHandler, {
                'path':
                toUnicode(
                    os.path.join(base_path, 'couchpotato', 'static', dir_name))
            })])
    Env.set('static_path', static_path)

    # Load configs & plugins
    loader = Env.get('loader')
    loader.preload(root=toUnicode(base_path))
    loader.run()

    # Fill database with needed stuff
    if not db_exists:
        fireEvent('app.initialize', in_order=True)

    # Go go go!
    from tornado.ioloop import IOLoop
    loop = IOLoop.current()

    # Some logging and fire load event
    try:
        log.info('Starting server on port %(port)s', config)
    except:
        pass
    fireEventAsync('app.load')

    if config['ssl_cert'] and config['ssl_key']:
        server = HTTPServer(application,
                            no_keep_alive=True,
                            ssl_options={
                                "certfile": config['ssl_cert'],
                                "keyfile": config['ssl_key'],
                            })
    else:
        server = HTTPServer(application, no_keep_alive=True)

    try_restart = True
    restart_tries = 5

    while try_restart:
        try:
            server.listen(config['port'], config['host'])
            loop.start()
        except Exception, e:
            log.error('Failed starting: %s', traceback.format_exc())
            try:
                nr, msg = e
                if nr == 48:
                    log.info(
                        'Port (%s) needed for CouchPotato is already in use, will try %s more time(s) after a few seconds',
                        (config.get('port'), restart_tries))
                    time.sleep(1)
                    restart_tries -= 1

                    if restart_tries > 0:
                        continue
                    else:
                        return
            except:
                pass

            raise

        try_restart = False
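
Example #4 derives web_base and api_base from the url_base setting and the API key before registering the Tornado handlers. A hypothetical helper that mirrors that construction, with the values it would produce:

def build_bases(url_base, api_key):
    # Hypothetical mirror of the web_base/api_base construction in Example #4.
    web_base = ('/' + url_base.lstrip('/') + '/') if url_base else '/'
    api_base = '%sapi/%s/' % (web_base, api_key)
    return web_base, api_base


# build_bases('couchpotato', 'abc123') -> ('/couchpotato/', '/couchpotato/api/abc123/')
# build_bases('', 'abc123')            -> ('/', '/api/abc123/')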
Example #5
class Transmission(DownloaderBase):

    protocol = ['torrent', 'torrent_magnet']
    log = CPLog(__name__)
    trpc = None

    def connect(self):
        # Load host from config and split out port.
        host = cleanHost(self.conf('host'), protocol=False).split(':')
        if not isInt(host[1]):
            log.error(
                'Config properties are not filled in correctly, port is missing.'
            )
            return False

        self.trpc = TransmissionRPC(host[0],
                                    port=host[1],
                                    rpc_url=self.conf('rpc_url').strip('/ '),
                                    username=self.conf('username'),
                                    password=self.conf('password'))
        return self.trpc

    def download(self, data=None, media=None, filedata=None):
        if not media: media = {}
        if not data: data = {}

        log.info('Sending "%s" (%s) to Transmission.',
                 (data.get('name'), data.get('protocol')))

        if not self.connect():
            return False

        if not filedata and data.get('protocol') == 'torrent':
            log.error('Failed sending torrent, no data')
            return False

        # Set parameters for adding torrent
        params = {'paused': self.conf('paused', default=False)}

        if self.conf('directory'):
            if os.path.isdir(self.conf('directory')):
                params['download-dir'] = self.conf('directory')
            else:
                log.error(
                    'Download directory from Transmission settings: %s doesn\'t exist',
                    self.conf('directory'))

        # Change parameters of torrent
        torrent_params = {}
        if data.get('seed_ratio'):
            torrent_params['seedRatioLimit'] = tryFloat(data.get('seed_ratio'))
            torrent_params['seedRatioMode'] = 1

        if data.get('seed_time'):
            torrent_params['seedIdleLimit'] = tryInt(
                data.get('seed_time')) * 60
            torrent_params['seedIdleMode'] = 1

        # Send request to Transmission
        if data.get('protocol') == 'torrent_magnet':
            remote_torrent = self.trpc.add_torrent_uri(data.get('url'),
                                                       arguments=params)
            torrent_params['trackerAdd'] = self.torrent_trackers
        else:
            remote_torrent = self.trpc.add_torrent_file(b64encode(filedata),
                                                        arguments=params)

        if not remote_torrent:
            log.error('Failed sending torrent to Transmission')
            return False

        # Change settings of added torrents
        if torrent_params:
            self.trpc.set_torrent(
                remote_torrent['torrent-added']['hashString'], torrent_params)

        log.info('Torrent sent to Transmission successfully.')
        return self.downloadReturnId(
            remote_torrent['torrent-added']['hashString'])

    def test(self):
        if self.connect() and self.trpc.get_session():
            return True
        return False

    def getAllDownloadStatus(self, ids):

        log.debug('Checking Transmission download status.')

        if not self.connect():
            return []

        release_downloads = ReleaseDownloadList(self)

        return_params = {
            'fields': [
                'id', 'name', 'hashString', 'percentDone', 'status', 'eta',
                'isStalled', 'isFinished', 'downloadDir', 'uploadRatio',
                'secondsSeeding', 'seedIdleLimit', 'files'
            ]
        }

        session = self.trpc.get_session()
        queue = self.trpc.get_alltorrents(return_params)
        if not (queue and queue.get('torrents')):
            log.debug('Nothing in queue or error')
            return []

        for torrent in queue['torrents']:
            if torrent['hashString'] in ids:
                log.debug(
                    'name=%s / id=%s / downloadDir=%s / hashString=%s / percentDone=%s / status=%s / isStalled=%s / eta=%s / uploadRatio=%s / isFinished=%s / incomplete-dir-enabled=%s / incomplete-dir=%s',
                    (torrent['name'], torrent['id'], torrent['downloadDir'],
                     torrent['hashString'], torrent['percentDone'],
                     torrent['status'], torrent.get('isStalled', 'N/A'),
                     torrent['eta'], torrent['uploadRatio'],
                     torrent['isFinished'], session['incomplete-dir-enabled'],
                     session['incomplete-dir']))

                status = 'busy'
                if torrent.get('isStalled') and not torrent[
                        'percentDone'] == 1 and self.conf('stalled_as_failed'):
                    status = 'failed'
                elif torrent['status'] == 0 and torrent['percentDone'] == 1:
                    status = 'completed'
                elif torrent['status'] in [5, 6]:
                    status = 'seeding'

                if session['incomplete-dir-enabled'] and status == 'busy':
                    torrent_folder = session['incomplete-dir']
                else:
                    torrent_folder = torrent['downloadDir']

                torrent_files = []
                for file_item in torrent['files']:
                    torrent_files.append(
                        sp(os.path.join(torrent_folder, file_item['name'])))

                release_downloads.append({
                    'id': torrent['hashString'],
                    'name': torrent['name'],
                    'status': status,
                    'original_status': torrent['status'],
                    'seed_ratio': torrent['uploadRatio'],
                    'timeleft': str(timedelta(seconds=torrent['eta'])),
                    'folder': sp(torrent_folder if len(torrent_files) == 1
                                 else os.path.join(torrent_folder, torrent['name'])),
                    'files': torrent_files
                })

        return release_downloads

    def pause(self, release_download, pause=True):
        if pause:
            return self.trpc.stop_torrent(release_download['id'])
        else:
            return self.trpc.start_torrent(release_download['id'])

    def removeFailed(self, release_download):
        log.info('%s failed downloading, deleting...',
                 release_download['name'])
        return self.trpc.remove_torrent(release_download['id'], True)

    def processComplete(self, release_download, delete_files=False):
        log.debug(
            'Requesting Transmission to remove the torrent %s%s.',
            (release_download['name'],
             ' and cleanup the downloaded files' if delete_files else ''))
        return self.trpc.remove_torrent(release_download['id'], delete_files)
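
The status mapping in getAllDownloadStatus() above combines the numeric Transmission RPC status field (0 is stopped, 5 and 6 are the seed-wait/seeding states per the Transmission RPC spec) with percentDone and isStalled. A hypothetical condensed version of that mapping:

def map_transmission_status(torrent, stalled_as_failed=False):
    # Hypothetical condensation of the Transmission status mapping above.
    # Transmission RPC status values: 0 = stopped, 5 = queued to seed, 6 = seeding.
    status = 'busy'
    if (torrent.get('isStalled') and torrent.get('percentDone') != 1
            and stalled_as_failed):
        status = 'failed'
    elif torrent.get('status') == 0 and torrent.get('percentDone') == 1:
        status = 'completed'
    elif torrent.get('status') in (5, 6):
        status = 'seeding'
    return status


# map_transmission_status({'status': 0, 'percentDone': 1}) -> 'completed'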
Example #6
def runCouchPotato(options,
                   base_path,
                   args,
                   data_dir=None,
                   log_dir=None,
                   Env=None,
                   desktop=None):

    try:
        locale.setlocale(locale.LC_ALL, "")
        encoding = locale.getpreferredencoding()
    except (locale.Error, IOError):
        encoding = None

    # for OSes that are poorly configured I'll just force UTF-8
    if not encoding or encoding in ('ANSI_X3.4-1968', 'US-ASCII', 'ASCII'):
        encoding = 'UTF-8'

    Env.set('encoding', encoding)

    # Do db stuff
    db_path = sp(os.path.join(data_dir, 'database'))
    old_db_path = os.path.join(data_dir, 'couchpotato.db')

    # Remove database folder if both exists
    if os.path.isdir(db_path) and os.path.isfile(old_db_path):
        db = SuperThreadSafeDatabase(db_path)
        db.open()
        db.destroy()

    # Check if database exists
    db = SuperThreadSafeDatabase(db_path)
    db_exists = db.exists()
    if db_exists:

        # Backup before start and cleanup old backups
        backup_path = sp(os.path.join(data_dir, 'db_backup'))
        backup_count = 5
        existing_backups = []
        if not os.path.isdir(backup_path): os.makedirs(backup_path)

        for root, dirs, files in os.walk(backup_path):
            # Only consider files that are direct children of backup_path
            if root == backup_path:
                for backup_file in sorted(files):
                    ints = re.findall('\d+', backup_file)

                    # Delete non zip files
                    if len(ints) != 1:
                        try:
                            os.remove(os.path.join(root, backup_file))
                        except:
                            pass
                    else:
                        existing_backups.append((int(ints[0]), backup_file))
            else:
                # Delete stray directories.
                shutil.rmtree(root)

        # Remove all but the last 5
        for eb in existing_backups[:-backup_count]:
            os.remove(os.path.join(backup_path, eb[1]))

        # Create new backup
        new_backup = sp(
            os.path.join(backup_path, '%s.tar.gz' % int(time.time())))
        zipf = tarfile.open(new_backup, 'w:gz')
        for root, dirs, files in os.walk(db_path):
            for zfilename in files:
                zipf.add(os.path.join(root, zfilename),
                         arcname='database/%s' %
                         os.path.join(root[len(db_path) + 1:], zfilename))
        zipf.close()

        # Open last
        db.open()

    else:
        db.create()

    # Force creation of cachedir
    log_dir = sp(log_dir)
    cache_dir = sp(os.path.join(data_dir, 'cache'))
    python_cache = sp(os.path.join(cache_dir, 'python'))

    if not os.path.exists(cache_dir):
        os.mkdir(cache_dir)
    if not os.path.exists(python_cache):
        os.mkdir(python_cache)

    session = requests.Session()
    session.max_redirects = 5

    # Register environment settings
    Env.set('app_dir', sp(base_path))
    Env.set('data_dir', sp(data_dir))
    Env.set('log_path', sp(os.path.join(log_dir, 'CouchPotato.log')))
    Env.set('db', db)
    Env.set('http_opener', session)
    Env.set('cache_dir', cache_dir)
    Env.set('cache', FileSystemCache(python_cache))
    Env.set('console_log', options.console_log)
    Env.set('quiet', options.quiet)
    Env.set('desktop', desktop)
    Env.set('daemonized', options.daemon)
    Env.set('args', args)
    Env.set('options', options)

    # Determine debug
    debug = options.debug or Env.setting('debug', default=False, type='bool')
    Env.set('debug', debug)

    # Development
    development = Env.setting('development', default=False, type='bool')
    Env.set('dev', development)

    # Disable logging for some modules
    for logger_name in [
            'enzyme', 'guessit', 'subliminal', 'apscheduler', 'tornado',
            'requests'
    ]:
        logging.getLogger(logger_name).setLevel(logging.ERROR)

    for logger_name in ['gntp']:
        logging.getLogger(logger_name).setLevel(logging.WARNING)

    # Disable SSL warning
    disable_warnings()

    # Use reloader
    reloader = debug is True and development and not Env.get(
        'desktop') and not options.daemon

    # Logger
    logger = logging.getLogger()
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s',
                                  '%m-%d %H:%M:%S')
    level = logging.DEBUG if debug else logging.INFO
    logger.setLevel(level)
    logging.addLevelName(19, 'INFO')

    # To screen
    if (debug or
            options.console_log) and not options.quiet and not options.daemon:
        hdlr = logging.StreamHandler(sys.stderr)
        hdlr.setFormatter(formatter)
        logger.addHandler(hdlr)

    # To file
    hdlr2 = handlers.RotatingFileHandler(Env.get('log_path'),
                                         'a',
                                         500000,
                                         10,
                                         encoding=Env.get('encoding'))
    hdlr2.setFormatter(formatter)
    logger.addHandler(hdlr2)

    # Start logging & enable colors
    # noinspection PyUnresolvedReferences
    import color_logs
    from couchpotato.core.logger import CPLog
    log = CPLog(__name__)
    log.debug('Started with options %s', options)

    # Check soft-chroot dir exists:
    try:
        # Load Soft-Chroot
        soft_chroot = Env.get('softchroot')
        soft_chroot_dir = Env.setting('soft_chroot',
                                      section='core',
                                      default=None,
                                      type='unicode')
        soft_chroot.initialize(soft_chroot_dir)
    except SoftChrootInitError as exc:
        log.error(exc)
        return
    except:
        log.error('Unable to check whether SOFT-CHROOT is defined')
        return

    # Check available space
    try:
        total_space, available_space = getFreeSpace(data_dir)
        if available_space < 100:
            log.error(
                'Shutting down as CP needs some space to work. You\'ll get corrupted data otherwise. Only %sMB left',
                available_space)
            return
    except:
        log.error('Failed getting diskspace: %s', traceback.format_exc())

    def customwarn(message, category, filename, lineno, file=None, line=None):
        log.warning('%s %s %s line:%s', (category, message, filename, lineno))

    warnings.showwarning = customwarn

    # Create app
    from couchpotato import WebHandler
    web_base = ('/' + Env.setting('url_base').lstrip('/') +
                '/') if Env.setting('url_base') else '/'
    Env.set('web_base', web_base)

    api_key = Env.setting('api_key')
    if not api_key:
        api_key = uuid4().hex
        Env.setting('api_key', value=api_key)

    api_base = r'%sapi/%s/' % (web_base, api_key)
    Env.set('api_base', api_base)

    # Basic config
    host = Env.setting('host', default='0.0.0.0')
    host6 = Env.setting('host6', default='::')

    config = {
        'use_reloader': reloader,
        'port': tryInt(Env.setting('port', default=5050)),
        'host': host if host and len(host) > 0 else '0.0.0.0',
        'host6': host6 if host6 and len(host6) > 0 else '::',
        'ssl_cert': Env.setting('ssl_cert', default=None),
        'ssl_key': Env.setting('ssl_key', default=None),
    }

    # Load the app
    application = Application(
        [],
        log_function=lambda x: None,
        debug=config['use_reloader'],
        gzip=True,
        cookie_secret=api_key,
        login_url='%slogin/' % web_base,
    )
    Env.set('app', application)

    # Request handlers
    application.add_handlers(
        ".*$",
        [
            (r'%snonblock/(.*)(/?)' % api_base, NonBlockHandler),

            # API handlers
            (r'%s(.*)(/?)' % api_base, ApiHandler),  # Main API handler
            (r'%sgetkey(/?)' % web_base, KeyHandler),  # Get API key
            (r'%s' % api_base, RedirectHandler, {
                "url": web_base + 'docs/'
            }),  # API docs

            # Login handlers
            (r'%slogin(/?)' % web_base, LoginHandler),
            (r'%slogout(/?)' % web_base, LogoutHandler),

            # Catch all webhandlers
            (r'%s(.*)(/?)' % web_base, WebHandler),
            (r'(.*)', WebHandler),
        ])

    # Static paths
    static_path = '%sstatic/' % web_base
    for dir_name in ['fonts', 'images', 'scripts', 'style']:
        application.add_handlers(
            ".*$",
            [('%s%s/(.*)' % (static_path, dir_name), StaticFileHandler, {
                'path':
                sp(os.path.join(base_path, 'couchpotato', 'static', dir_name))
            })])
    Env.set('static_path', static_path)

    # Load configs & plugins
    loader = Env.get('loader')
    loader.preload(root=sp(base_path))
    loader.run()

    # Fill database with needed stuff
    fireEvent('database.setup')
    if not db_exists:
        fireEvent('app.initialize', in_order=True)
    fireEvent('app.migrate')

    # Go go go!
    from tornado.ioloop import IOLoop
    from tornado.autoreload import add_reload_hook
    loop = IOLoop.current()

    # Reload hook
    def reload_hook():
        fireEvent('app.shutdown')

    add_reload_hook(reload_hook)

    # Some logging and fire load event
    try:
        log.info('Starting server on port %(port)s', config)
    except:
        pass
    fireEventAsync('app.load')

    ssl_options = None
    if config['ssl_cert'] and config['ssl_key']:
        ssl_options = {
            'certfile': config['ssl_cert'],
            'keyfile': config['ssl_key'],
        }

    server = HTTPServer(application,
                        no_keep_alive=True,
                        ssl_options=ssl_options)

    try_restart = True
    restart_tries = 5

    while try_restart:
        try:
            server.listen(config['port'], config['host'])

            if Env.setting('ipv6', default=False):
                try:
                    server.listen(config['port'], config['host6'])
                except:
                    log.info2('Tried to bind to IPV6 but failed')

            loop.start()
            server.close_all_connections()
            server.stop()
            loop.close(all_fds=True)
        except Exception as e:
            log.error('Failed starting: %s', traceback.format_exc())
            try:
                nr, msg = e
                if nr == 48:
                    log.info(
                        'Port (%s) needed for CouchPotato is already in use, will try %s more time(s) after a few seconds',
                        (config.get('port'), restart_tries))
                    time.sleep(1)
                    restart_tries -= 1

                    if restart_tries > 0:
                        continue
                    else:
                        return
            except ValueError:
                return
            except:
                pass

            raise

        try_restart = False
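
Example #6 replaces the per-file SQLite copy from the earlier versions with a tar.gz archive of the whole database folder, keeping only the newest five. A minimal sketch of that rotation as a hypothetical helper; it archives the directory with a single tar.add call and prunes after writing the new archive, rather than walking files and pruning first as the original does.

import os
import tarfile
import time


def backup_database(db_path, backup_path, keep=5):
    # Hypothetical sketch of the backup rotation in Example #6: write
    # <timestamp>.tar.gz and keep only the newest `keep` archives.
    if not os.path.isdir(backup_path):
        os.makedirs(backup_path)

    archive = os.path.join(backup_path, '%s.tar.gz' % int(time.time()))
    with tarfile.open(archive, 'w:gz') as tar:
        tar.add(db_path, arcname='database')

    archives = sorted(f for f in os.listdir(backup_path) if f.endswith('.tar.gz'))
    for old in archives[:-keep]:
        os.remove(os.path.join(backup_path, old))
    return archive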
Example #7
class Deluge(DownloaderBase):

    protocol = ['torrent', 'torrent_magnet']
    log = CPLog(__name__)
    drpc = None

    def connect(self, reconnect=False):
        """ Connect to the delugeRPC, re-use connection when already available
        :param reconnect: force reconnect
        :return: DelugeRPC instance
        """

        # Load host from config and split out port.
        host = cleanHost(self.conf('host'), protocol=False).split(':')

        # Force host assignment
        if len(host) == 1:
            host.append(80)

        if not isInt(host[1]):
            log.error(
                'Config properties are not filled in correctly, port is missing.'
            )
            return False

        if not self.drpc or reconnect:
            self.drpc = DelugeRPC(host[0],
                                  port=host[1],
                                  username=self.conf('username'),
                                  password=self.conf('password'))

        return self.drpc

    def download(self, data=None, media=None, filedata=None):
        """ Send a torrent/nzb file to the downloader

        :param data: dict returned from provider
            Contains the release information
        :param media: media dict with information
            Used for creating the filename when possible
        :param filedata: downloaded torrent/nzb filedata
            The file gets downloaded in the searcher and sent to this function.
            This is done so failure checking happens before the downloader is used,
            so the downloader doesn't need to worry about it.
        :return: boolean
            On failure returns False, but the downloader should log its own errors
        """

        if not media: media = {}
        if not data: data = {}

        log.info('Sending "%s" (%s) to Deluge.',
                 (data.get('name'), data.get('protocol')))

        if not self.connect():
            return False

        if not filedata and data.get('protocol') == 'torrent':
            log.error('Failed sending torrent, no data')
            return False

        # Set parameters for Deluge
        options = {
            'add_paused': self.conf('paused', default=0),
            'label': self.conf('label')
        }

        if self.conf('directory'):
            #if os.path.isdir(self.conf('directory')):
            options['download_location'] = self.conf('directory')
        #else:
        #    log.error('Download directory from Deluge settings: %s doesn\'t exist', self.conf('directory'))

        if self.conf('completed_directory'):
            #if os.path.isdir(self.conf('completed_directory')):
            options['move_completed'] = 1
            options['move_completed_path'] = self.conf('completed_directory')
        #else:
        #    log.error('Download directory from Deluge settings: %s doesn\'t exist', self.conf('directory'))

        if data.get('seed_ratio'):
            options['stop_at_ratio'] = 1
            options['stop_ratio'] = tryFloat(data.get('seed_ratio'))


        # Deluge only has seed time as a global option. Might be added
        # in a future API release.
        # if data.get('seed_time'):

        # Send request to Deluge
        if data.get('protocol') == 'torrent_magnet':
            remote_torrent = self.drpc.add_torrent_magnet(
                data.get('url'), options)
        else:
            filename = self.createFileName(data, filedata, media)
            remote_torrent = self.drpc.add_torrent_file(
                filename, filedata, options)

        if not remote_torrent:
            log.error('Failed sending torrent to Deluge')
            return False

        log.info('Torrent sent to Deluge successfully.')
        return self.downloadReturnId(remote_torrent)

    def test(self):
        """ Check if connection works
        :return: bool
        """
        if self.connect(True) and self.drpc.test():
            return True
        return False

    def getAllDownloadStatus(self, ids):
        """ Get status of all active downloads

        :param ids: list of (mixed) downloader ids
            Used to match the releases for this downloader as there could be
            other downloaders active that it should ignore
        :return: list of releases
        """

        log.debug('Checking Deluge download status.')

        if not self.connect():
            return []

        release_downloads = ReleaseDownloadList(self)

        queue = self.drpc.get_alltorrents(ids)

        if not queue:
            log.debug('Nothing in queue or error')
            return []

        for torrent_id in queue:
            torrent = queue[torrent_id]

            if not 'hash' in torrent:
                # When given a list of ids, Deluge will return an empty item for a non-existent torrent.
                continue

            log.debug(
                'name=%s / id=%s / save_path=%s / move_on_completed=%s / move_completed_path=%s / hash=%s / progress=%s / state=%s / eta=%s / ratio=%s / stop_ratio=%s / is_seed=%s / is_finished=%s / paused=%s',
                (torrent['name'], torrent['hash'], torrent['save_path'],
                 torrent['move_on_completed'], torrent['move_completed_path'],
                 torrent['hash'], torrent['progress'], torrent['state'],
                 torrent['eta'], torrent['ratio'], torrent['stop_ratio'],
                 torrent['is_seed'], torrent['is_finished'],
                 torrent['paused']))

            # Deluge has no easy way to work out if a torrent is stalled or failing.
            #status = 'failed'
            status = 'busy'
            # If a user opts to seed a torrent forever (usually associated with
            # private tracker usage), stop_ratio will be 0 or -1 (depending on
            # the Deluge version). In this scenario the status of the torrent
            # would never change from BUSY to SEEDING. The last check takes
            # care of this case.
            if torrent['is_seed'] and (
                (tryFloat(torrent['ratio']) < tryFloat(torrent['stop_ratio']))
                    or (tryFloat(torrent['stop_ratio']) < 0)):
                # We have torrent['seeding_time'] to work out what the seeding time is, but we do not
                # have access to the downloader seed_time, as with deluge we have no way to pass it
                # when the torrent is added. So Deluge will only look at the ratio.
                # See above comment in download().
                status = 'seeding'
            elif torrent['is_seed'] and torrent['is_finished'] and torrent[
                    'paused'] and torrent['state'] == 'Paused':
                status = 'completed'

            download_dir = sp(torrent['save_path'])
            if torrent['move_on_completed']:
                download_dir = torrent['move_completed_path']

            torrent_files = []
            for file_item in torrent['files']:
                torrent_files.append(
                    sp(os.path.join(download_dir, file_item['path'])))

            release_downloads.append({
                'id': torrent['hash'],
                'name': torrent['name'],
                'status': status,
                'original_status': torrent['state'],
                'seed_ratio': torrent['ratio'],
                'timeleft': str(timedelta(seconds=torrent['eta'])),
                'folder': sp(download_dir if len(torrent_files) == 1
                             else os.path.join(download_dir, torrent['name'])),
                'files': torrent_files,
            })

        return release_downloads

    def pause(self, release_download, pause=True):
        if pause:
            return self.drpc.pause_torrent([release_download['id']])
        else:
            return self.drpc.resume_torrent([release_download['id']])

    def removeFailed(self, release_download):
        log.info('%s failed downloading, deleting...',
                 release_download['name'])
        return self.drpc.remove_torrent(release_download['id'], True)

    def processComplete(self, release_download, delete_files=False):
        log.debug(
            'Requesting Deluge to remove the torrent %s%s.',
            (release_download['name'],
             ' and cleanup the downloaded files' if delete_files else ''))
        return self.drpc.remove_torrent(release_download['id'],
                                        remove_local_data=delete_files)
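
Example #7 extends the ratio check from Example #3 so that a negative stop_ratio (seed forever) still reports 'seeding'. A hypothetical condensed version of that mapping:

def map_deluge_status(torrent):
    # Hypothetical condensation of the status mapping in Example #7, including
    # the seed-forever case where stop_ratio is negative.
    status = 'busy'
    ratio = float(torrent['ratio'])
    stop_ratio = float(torrent['stop_ratio'])
    if torrent['is_seed'] and (ratio < stop_ratio or stop_ratio < 0):
        status = 'seeding'
    elif (torrent['is_seed'] and torrent['is_finished']
          and torrent['paused'] and torrent['state'] == 'Paused'):
        status = 'completed'
    return status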
Example #8
File: main.py  Project: babbel4ever/PBI
class Transmission(Downloader):

    type = ['torrent', 'torrent_magnet']
    log = CPLog(__name__)

    def download(self, data, movie, manual=False, filedata=None):

        if self.isDisabled(manual) or not self.isCorrectType(data.get('type')):
            return

        log.debug('Sending "%s" (%s) to Transmission.',
                  (data.get('name'), data.get('type')))

        # Load host from config and split out port.
        host = self.conf('host').split(':')
        if not isInt(host[1]):
            log.error(
                'Config properties are not filled in correctly, port is missing.'
            )
            return False

        # Set parameters for Transmission
        folder_name = self.createFileName(data, filedata,
                                          movie)[:-len(data.get('type')) - 1]
        folder_path = os.path.join(self.conf('directory', default=''),
                                   folder_name).rstrip(os.path.sep)

        # Create the empty folder to download too
        self.makeDir(folder_path)

        params = {
            'paused': self.conf('paused', default=0),
            'download-dir': folder_path
        }

        torrent_params = {
            'seedRatioLimit': self.conf('ratio'),
            'seedRatioMode': (0 if self.conf('ratio') else 1)
        }

        if not filedata and data.get('type') == 'torrent':
            log.error('Failed sending torrent, no data')
            return False

        # Send request to Transmission
        try:
            trpc = TransmissionRPC(host[0],
                                   port=host[1],
                                   username=self.conf('username'),
                                   password=self.conf('password'))
            if data.get('type') == 'torrent_magnet':
                remote_torrent = trpc.add_torrent_uri(data.get('url'),
                                                      arguments=params)
                torrent_params['trackerAdd'] = self.torrent_trackers
            else:
                remote_torrent = trpc.add_torrent_file(b64encode(filedata),
                                                       arguments=params)

            # Change settings of added torrents
            trpc.set_torrent(remote_torrent['torrent-added']['hashString'],
                             torrent_params)

            return True
        except Exception, err:
            log.error('Failed to change settings for transfer: %s', err)
            return False
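
The folder_name slice in Example #8 removes the protocol extension (plus its dot) from the generated file name before the download directory is built. A hypothetical illustration of that slicing:

def strip_type_extension(file_name, release_type):
    # Hypothetical illustration of the slicing in Example #8:
    # drops '.' + release_type from the end of the generated file name.
    return file_name[:-len(release_type) - 1]


# strip_type_extension('Some.Movie.2012.torrent', 'torrent') -> 'Some.Movie.2012'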
Example #9
class YGG(TorrentProvider, MovieProvider):
    """
    Couchpotato plugin to search movie torrents on www.yggtorrent.com.

    .. seealso:: YarrProvider.login, Plugin.wait
    """

    url_scheme = 'https'
    domain_name = 'yggtorrent.is'
    limit = 50
    http_time_between_calls = 0
    log = CPLog(__name__)

    def __init__(self):
        """
        Default constructor
        """
        TorrentProvider.__init__(self)
        MovieProvider.__init__(self)
        path_www = YGG.url_scheme + '://' + YGG.domain_name
        self.urls = {
            'login': path_www + '/user/login',
            'login_check': path_www + '/user/account',
            'search': path_www + '/engine/search?{0}',
            'torrent': path_www + '/torrent',
            'url': path_www + '/engine/download_torrent?id={0}'
        }

    def getLoginParams(self):
        """
        Return YGG login parameters.

        .. seealso:: YarrProvider.getLoginParams
        """
        return {
            'id': self.conf('username'),
            'pass': self.conf('password')
        }

    def loginSuccess(self, output):
        """
        Check server's response on authentication.

        .. seealso:: YarrProvider.loginSuccess
        """
        return len(output) == 0

    def loginCheckSuccess(self, output):
        """
        Check if we are still connected.

        .. seealso:: YarrProvider.loginCheckSuccess
        """
        result = False
        soup = BeautifulSoup(output, 'html.parser')
        if soup.find(text=u'Déconnexion'):
            result = True
        return result

    def getMoreInfo(self, nzb):
        """
        Get details about a torrent.

        .. seealso:: MovieSearcher.correctRelease
        """
        data = self.getHTMLData(nzb['detail_url'])
        soup = BeautifulSoup(data, 'html.parser')
        description = soup.find(class_='description-header').find_next('div')
        if description:
            nzb['description'] = description.prettify()
        line = soup.find(text=u'Uploadé le').find_next('td')
        added = datetime.strptime(line.getText().split('(')[0].strip(),
                                  '%d/%m/%Y %H:%M')
        nzb['age'] = (datetime.now() - added).days
        self.log.debug(nzb['age'])

    def extraCheck(self, nzb):
        """
        Exclude the release when the movie's description contains more than
        one IMDb reference, to prevent downloading a movie bundle. CouchPotato
        is not able to extract a specific movie from an archive.

        .. seealso:: MovieSearcher.correctRelease
        """
        result = True
        ids = getImdb(nzb.get('description', ''), multiple=True)
        if len(ids) not in [0, 1]:
            YGG.log.info('Too many IMDb ids: {0}'.format(', '.join(ids)))
            result = False
        return result

    def parseText(self, node):
        """
        Retrieve the text content from an HTML node.
        """
        return node.getText().strip()

    def _searchOnTitle(self, title, media, quality, results, offset=0):
        """
        Do a search based on possible titles. This function doesn't check
        the quality because CouchPotato does that job when parsing results.
        Furthermore, the URL must stay generic to use CouchPotato's native
        caching feature.

        .. seealso:: YarrProvider.search
        """
        try:
            params = {
                'category': 2145,  # Film/Vidéo
                'description': '',
                'do': 'search',
                'file': '',
                'name': simplifyString(title),
                'sub_category': 'all',
                'uploader': ''
            }
            if offset > 0:
                params['page'] = offset * YGG.limit
            url = self.urls['search'].format(tryUrlencode(params))
            data = self.getHTMLData(url)
            soup = BeautifulSoup(data, 'html.parser')
            filter_ = '^{0}'.format(self.urls['torrent'])
            for link in soup.find_all(href=re.compile(filter_)):
                detail_url = link['href']
                if re.search(u'/filmvidéo/(film|animation|documentaire)/',
                             detail_url):
                    name = self.parseText(link)
                    id_ = tryInt(re.search('/(\d+)-[^/\s]+$', link['href']).
                                 group(1))
                    columns = link.parent.parent.find_all('td')
                    size = self.parseSize(self.parseText(columns[5]))
                    seeders = tryInt(self.parseText(columns[7]))
                    leechers = tryInt(self.parseText(columns[8]))
                    result = {
                        'id': id_,
                        'name': name,
                        'seeders': seeders,
                        'leechers': leechers,
                        'size': size,
                        'url': self.urls['url'].format(id_),
                        'detail_url': detail_url,
                        'verified': True,
                        'get_more_info': self.getMoreInfo,
                        'extra_check': self.extraCheck
                    }
                    results.append(result)
                    YGG.log.debug(result)
            # Get next page if we don't have all results
            pagination = soup.find('ul', class_='pagination')
            if pagination:
                for page in pagination.find_all('li'):
                    next_ = tryInt(self.parseText(page.find('a')))
                    if next_ > offset + 1:
                        self._searchOnTitle(title, media, quality, results,
                                            offset + 1)
                        break
        except:
            YGG.log.error('Failed searching release from {0}: {1}'.
                          format(self.getName(), traceback.format_exc()))
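
The search in _searchOnTitle fills the '{0}' placeholder of urls['search'] with a URL-encoded query built from the params dict. A hypothetical illustration using the standard library's urlencode in place of CouchPotato's tryUrlencode helper:

try:
    from urllib import urlencode            # Python 2
except ImportError:
    from urllib.parse import urlencode      # Python 3

params = {
    'category': 2145,  # Film/Video
    'description': '',
    'do': 'search',
    'file': '',
    'name': 'some movie title',  # simplifyString(title) in the plugin
    'sub_category': 'all',
    'uploader': '',
}
# Matches the 'search' template defined in YGG.__init__ above.
url = 'https://yggtorrent.is/engine/search?{0}'.format(urlencode(params))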