예제 #1
0
 def emit(self, record):
     """Persist the given logging record as a LogEntry row, always releasing the session."""
     db = Session()
     try:
         db.add(LogEntry(record))
         db.commit()
     finally:
         db.close()
예제 #2
0
파일: archive.py 프로젝트: DColl/Flexget
def tag_source(source_name, tag_names=None):
    """
    Tags all archived entries within a source with supplied tags

    :param string source_name: Source name
    :param list tag_names: List of tag names to add
    """

    # `not tag_names` already covers both None and an empty list;
    # the old `or tag_names is None` clause was redundant.
    if not tag_names:
        return

    session = Session()
    try:
        # check that source exists
        source = session.query(ArchiveSource).filter(ArchiveSource.name == source_name).first()
        if not source:
            log.critical('Source `%s` does not exists' % source_name)
            srcs = ', '.join([s.name for s in session.query(ArchiveSource).order_by(ArchiveSource.name)])
            if srcs:
                log.info('Known sources: %s' % srcs)
            return

        # construct tags list
        tags = [get_tag(tag_name, session) for tag_name in tag_names]

        # tag 'em
        log.verbose('Please wait while adding tags %s ...' % (', '.join(tag_names)))
        for a in session.query(ArchiveEntry).\
                filter(ArchiveEntry.sources.any(name=source_name)).yield_per(5):
            a.tags.extend(tags)
        # BUG FIX: commit moved out of `finally` — previously a failure
        # mid-loop would still commit partially-tagged entries.
        session.commit()
    finally:
        session.close()
예제 #3
0
파일: log.py 프로젝트: s-m-b/Flexget
def log_once(message, logger=logging.getLogger('log_once')):
    """
    Log message only once using given logger. Returns False if suppressed logging.
    When suppressed verbose level is still logged.
    """

    # Fingerprint the message so repeats can be recognized across calls
    digest = hashlib.md5()
    digest.update(message.encode('latin1', 'replace'))  # ticket:250
    md5sum = digest.hexdigest()

    session = Session()
    try:
        seen = session.query(LogMessage).filter_by(md5sum=md5sum).first()
        if seen:
            # Already logged before: downgrade to verbose and signal suppression
            logger.verbose(message)
            return False
        session.add(LogMessage(md5sum))
        session.commit()
    finally:
        session.close()

    logger.info(message)
    return True
예제 #4
0
def tag_source(source_name, tag_names=None):
    """
    Tags all archived entries within a source with supplied tags

    :param string source_name: Source name
    :param list tag_names: List of tag names to add
    """

    # Falsy covers None and [] alike; the extra `is None` test was redundant.
    if not tag_names:
        return

    session = Session()
    try:
        # check that source exists
        source = session.query(ArchiveSource).filter(ArchiveSource.name == source_name).first()
        if not source:
            log.critical('Source `%s` does not exists' % source_name)
            srcs = ', '.join([s.name for s in session.query(ArchiveSource).order_by(ArchiveSource.name)])
            if srcs:
                log.info('Known sources: %s' % srcs)
            return

        # construct tags list
        tags = [get_tag(tag_name, session) for tag_name in tag_names]

        # tag 'em
        log.verbose('Please wait while adding tags %s ...' % (', '.join(tag_names)))
        for a in session.query(ArchiveEntry).\
                filter(ArchiveEntry.sources.any(name=source_name)).yield_per(5):
            a.tags.extend(tags)
        # BUG FIX: commit only on success; committing in `finally` persisted
        # partial tagging when the loop raised.
        session.commit()
    finally:
        session.close()
예제 #5
0
    def notify(self, title, message, config):
        """Save *message* to the db and try to deliver every unsent entry.

        :param title: notification title (not used by the send path)
        :param message: text to deliver; blank/whitespace-only messages are ignored
        :param config: plugin configuration, parsed via self._parse_config
        :raises PluginError: when token retrieval or sending fails
        """
        if not message.strip():
            return
        self._parse_config(config)
        session = Session()
        try:
            self._save_message(message, session)
            session.commit()

            message_list = session.query(MessageEntry).filter(
                MessageEntry.sent == False).all()

            try:
                if access_token := self._get_access_token(session, self._corp_id,
                                                          self._corp_secret):
                    for message_entry in message_list:
                        self._send_msgs(message_entry, access_token)
                        # throttle: one message per second
                        time.sleep(1)
                        if message_entry.sent:
                            session.delete(message_entry)
                            session.commit()
                    if self.image:
                        self._send_images(access_token)
            except Exception as e:
                raise PluginError(str(e))
        finally:
            # BUG FIX: the session was never closed before (connection leak)
            session.close()
예제 #6
0
파일: database.py 프로젝트: Klaboe/Flexget
def vacuum():
    """Run sqlite VACUUM to compact the database file."""
    console("Running VACUUM on sqlite database, this could take a while.")
    session = Session()
    try:
        session.execute("VACUUM")
        session.commit()
    finally:
        # BUG FIX: release the session even if VACUUM fails (was leaked on error)
        session.close()
    console("VACUUM complete.")
예제 #7
0
    def on_process_start(self, feed):
        """Rewrite stored seen-movie imdb_url fields into canonical URL form.

        Runs only when the --repair-seen-movies option was given, and disables
        normal feed execution for the run.
        NOTE: Python 2 only (print statement and `unicode` builtin below).
        """
        if not feed.manager.options.repair_seen_movies:
            return

        feed.manager.disable_feeds()

        from progressbar import ProgressBar, Percentage, Bar, ETA
        from flexget.manager import Session
        from seen import SeenField
        from flexget.utils.imdb import extract_id

        session = Session()

        index = 0
        count = 0
        # total row count is only used to size the progress bar
        total = session.query(SeenField).filter(SeenField.field == u'imdb_url').count()

        widgets = ['Repairing: ', ETA(), ' ', Percentage(), ' ', Bar(left='[', right=']')]
        bar = ProgressBar(widgets=widgets, maxval=total).start()

        for seen in session.query(SeenField).filter(SeenField.field == u'imdb_url').all():
            index += 1
            if index % 5 == 0:
                # refresh the bar only every 5 rows to cut redraw overhead
                bar.update(index)
            # canonical URL rebuilt from the extracted imdb id
            value = u'http://www.imdb.com/title/%s/' % extract_id(seen.value)
            if value != seen.value:
                count += 1
                seen.value = value
                seen.field = unicode('imdb_url')

        bar.finish()
        session.commit()

        # NOTE(review): the session is never closed here — possible leak; confirm.
        print 'Fixed %s/%s URLs' % (count, total)
예제 #8
0
파일: backlog.py 프로젝트: Donavan/Flexget
    def add_backlog(self, task, entry, amount=''):
        """Add single entry to task backlog

        If :amount: is not specified, entry will only be injected on next execution."""
        snapshot = entry.snapshots.get('after_input')
        if not snapshot:
            if task.current_phase != 'input':
                # Not having a snapshot is normal during input phase, don't display a warning
                log.warning(
                    'No input snapshot available for `%s`, using current state'
                    % entry['title'])
            snapshot = entry
        session = Session()
        try:
            expire_time = datetime.now() + parse_timedelta(amount)
            backlog_entry = session.query(BacklogEntry).filter(BacklogEntry.title == entry['title']).\
                filter(BacklogEntry.task == task.name).first()
            if backlog_entry:
                # If there is already a backlog entry for this, update the expiry time if necessary.
                if backlog_entry.expire < expire_time:
                    log.debug('Updating expiry time for %s' % entry['title'])
                    backlog_entry.expire = expire_time
            else:
                log.debug('Saving %s' % entry['title'])
                backlog_entry = BacklogEntry()
                backlog_entry.title = entry['title']
                backlog_entry.entry = snapshot
                backlog_entry.task = task.name
                backlog_entry.expire = expire_time
                session.add(backlog_entry)
            session.commit()
        finally:
            # BUG FIX: the session was never closed before (connection leak)
            session.close()
예제 #9
0
파일: log.py 프로젝트: Nemesis7/Flexget
def log_once(message, logger=logging.getLogger('log_once')):
    """
    Log message only once using given logger. Returns False if suppressed logging.
    When suppressed verbose level is still logged.
    """

    # hash the message body to detect repeats (ticket:250)
    md5sum = hashlib.md5(message.encode('latin1', 'replace')).hexdigest()

    session = Session()
    try:
        already_logged = session.query(LogMessage).filter_by(md5sum=md5sum).first() is not None
        if already_logged:
            logger.verbose(message)
            return False

        session.add(LogMessage(md5sum))
        session.commit()
    finally:
        session.close()

    logger.info(message)
    return True
예제 #10
0
 def emit(self, record):
     """Store the given logging record in the database."""
     entry_session = Session()
     try:
         entry_session.add(LogEntry(record))
         entry_session.commit()
     finally:
         entry_session.close()
예제 #11
0
파일: seen.py 프로젝트: xfouloux/Flexget
def forget(value):
    """
    See module docstring
    :param string value: Can be task name, entry title or field value
    :return: count, field_count where count is number of entries removed and field_count number of fields
    """
    log.debug('forget called with %s' % value)
    session = Session()

    try:
        count, field_count = 0, 0

        # entries matched directly by title or owning task
        matched_entries = session.query(SeenEntry).filter(
            or_(SeenEntry.title == value, SeenEntry.task == value)).all()
        for entry in matched_entries:
            field_count += len(entry.fields)
            count += 1
            log.debug('forgetting %s' % entry)
            session.delete(entry)

        # entries matched indirectly through one of their field values
        for field in session.query(SeenField).filter(
                SeenField.value == value).all():
            entry = session.query(SeenEntry).filter(
                SeenEntry.id == field.seen_entry_id).first()
            field_count += len(entry.fields)
            count += 1
            log.debug('forgetting %s' % entry)
            session.delete(entry)
        return count, field_count
    finally:
        session.commit()
        session.close()
예제 #12
0
파일: backlog.py 프로젝트: Donavan/Flexget
    def add_backlog(self, task, entry, amount=''):
        """Add single entry to task backlog

        If :amount: is not specified, entry will only be injected on next execution."""
        snapshot = entry.snapshots.get('after_input')
        if not snapshot:
            if task.current_phase != 'input':
                # Not having a snapshot is normal during input phase, don't display a warning
                log.warning('No input snapshot available for `%s`, using current state' % entry['title'])
            snapshot = entry
        session = Session()
        try:
            expire_time = datetime.now() + parse_timedelta(amount)
            backlog_entry = session.query(BacklogEntry).filter(BacklogEntry.title == entry['title']).\
                filter(BacklogEntry.task == task.name).first()
            if backlog_entry:
                # If there is already a backlog entry for this, update the expiry time if necessary.
                if backlog_entry.expire < expire_time:
                    log.debug('Updating expiry time for %s' % entry['title'])
                    backlog_entry.expire = expire_time
            else:
                log.debug('Saving %s' % entry['title'])
                backlog_entry = BacklogEntry()
                backlog_entry.title = entry['title']
                backlog_entry.entry = snapshot
                backlog_entry.task = task.name
                backlog_entry.expire = expire_time
                session.add(backlog_entry)
            session.commit()
        finally:
            # BUG FIX: close the session instead of leaking the connection
            session.close()
예제 #13
0
 def get_file(self, only_cached=False):
     """Makes sure the poster is downloaded to the local cache (in userstatic folder) and
     returns the path split into a list of directory and file components"""
     from flexget.manager import manager
     base_dir = os.path.join(manager.config_base, 'userstatic')
     if self.file and os.path.isfile(os.path.join(base_dir, self.file)):
         return self.file.split(os.sep)
     elif only_cached:
         return
     # If we don't already have a local copy, download one.
     log.debug('Downloading poster %s' % self.url)
     dirname = os.path.join('tmdb', 'posters', str(self.movie_id))
     # Create folders if they don't exist
     fullpath = os.path.join(base_dir, dirname)
     if not os.path.isdir(fullpath):
         os.makedirs(fullpath)
     filename = os.path.join(dirname, posixpath.basename(self.url))
     # BUG FIX: write via a context manager so the handle is always closed
     # (was `thefile = file(...)` with no close; `file` is also Python 2 only)
     with open(os.path.join(base_dir, filename), 'wb') as thefile:
         thefile.write(urlopener(self.url, log).read())
     self.file = filename
     # If we are detached from a session, update the db
     if not Session.object_session(self):
         session = Session()
         try:
             poster = session.query(TMDBPoster).filter(
                 TMDBPoster.db_id == self.db_id).first()
             if poster:
                 poster.file = filename
                 session.commit()
         finally:
             # BUG FIX: close even if the query/commit raises
             session.close()
     return filename.split(os.sep)
예제 #14
0
파일: seen.py 프로젝트: StunMan/Flexget
def forget(value):
    """
    See module docstring
    :param string value: Can be task name, entry title or field value
    :return: count, field_count where count is number of entries removed and field_count number of fields
    """
    log.debug('forget called with %s' % value)
    session = Session()

    try:
        removed = 0
        fields_removed = 0

        for se in session.query(SeenEntry).filter(or_(SeenEntry.title == value, SeenEntry.task == value)).all():
            fields_removed += len(se.fields)
            removed += 1
            log.debug('forgetting %s' % se)
            session.delete(se)

        for sf in session.query(SeenField).filter(SeenField.value == value).all():
            se = session.query(SeenEntry).filter(SeenEntry.id == sf.seen_entry_id).first()
            fields_removed += len(se.fields)
            removed += 1
            log.debug('forgetting %s' % se)
            session.delete(se)

        return removed, fields_removed
    finally:
        session.commit()
        session.close()
예제 #15
0
    def add_failed(self, entry, reason=None, **kwargs):
        """Adds entry to internal failed list, displayed with --failed"""
        reason = reason or 'Unknown'
        failed = Session()
        try:
            # look for an existing failure record for this title + url
            existing = failed.query(FailedEntry).filter(FailedEntry.title == entry['title']).\
                filter(FailedEntry.url == entry['original_url']).first()
            if existing:
                existing.count += 1
                existing.tof = datetime.now()
                existing.reason = reason
                item = existing
            else:
                item = FailedEntry(entry['title'], entry['original_url'],
                                   reason)
            failed.merge(item)
            log.debug('Marking %s in failed list. Has failed %s times.' %
                      (item.title, item.count))

            # keep only the 25 most recent failures
            for stale in failed.query(FailedEntry).order_by(
                    FailedEntry.tof.desc())[25:]:
                failed.delete(stale)
            failed.commit()
        finally:
            failed.close()
예제 #16
0
 def get_file(self, only_cached=False):
     """Makes sure the poster is downloaded to the local cache (in userstatic folder) and
     returns the path split into a list of directory and file components"""
     from flexget.manager import manager
     base_dir = os.path.join(manager.config_base, 'userstatic')
     if self.file and os.path.isfile(os.path.join(base_dir, self.file)):
         return self.file.split(os.sep)
     elif only_cached:
         return
     # If we don't already have a local copy, download one.
     log.debug('Downloading poster %s' % self.url)
     dirname = os.path.join('tmdb', 'posters', str(self.movie_id))
     # Create folders if they don't exist
     fullpath = os.path.join(base_dir, dirname)
     if not os.path.isdir(fullpath):
         os.makedirs(fullpath)
     filename = os.path.join(dirname, posixpath.basename(self.url))
     # BUG FIX: use a context manager so the file handle is always closed
     # (was `thefile = file(...)` with no close; `file` is also Python 2 only)
     with open(os.path.join(base_dir, filename), 'wb') as thefile:
         thefile.write(requests.get(self.url).content)
     self.file = filename
     # If we are detached from a session, update the db
     if not Session.object_session(self):
         session = Session()
         try:
             poster = session.query(TMDBPoster).filter(TMDBPoster.db_id == self.db_id).first()
             if poster:
                 poster.file = filename
                 session.commit()
         finally:
             # BUG FIX: close even if the query/commit raises
             session.close()
     return filename.split(os.sep)
예제 #17
0
파일: log.py 프로젝트: cnzhuo/Flexget
def log_once(message, logger=logging.getLogger('log_once'), once_level=logging.INFO, suppressed_level=f_logger.VERBOSE):
    """
    Log message only once using given logger. Returns False if suppressed logging.
    When suppressed, `suppressed_level` level is still logged.
    """
    # Without an initialized manager there is no database to dedupe against
    from flexget.manager import manager
    if not manager:
        log.warning('DB not initialized. log_once will not work properly.')
        logger.log(once_level, message)
        return

    md5sum = hashlib.md5(message.encode('latin1', 'replace')).hexdigest()  # ticket:250

    session = Session()
    try:
        if session.query(LogMessage).filter_by(md5sum=md5sum).first() is not None:
            # seen before: emit at the suppressed level and report it
            logger.log(suppressed_level, message)
            return False
        session.add(LogMessage(md5sum))
        session.commit()
    finally:
        session.close()

    logger.log(once_level, message)
    return True
예제 #18
0
 def upgrade_wrapper(manager):
     # Runs a plugin's schema-upgrade function inside a fresh session and
     # persists the resulting version. NOTE(review): relies on `plugin` and
     # `func` from the enclosing closure (decorator scope not visible here).
     ver = get_version(plugin)
     session = Session()
     try:
         new_ver = func(ver, session)
         if new_ver > ver:
             # Upgrade succeeded: record the new schema version and commit
             log.info('Plugin `%s` schema upgraded successfully' %
                      plugin)
             set_version(plugin, new_ver)
             session.commit()
             manager.db_upgraded = True
         elif new_ver < ver:
             # Downgrades are never valid; abort the manager without draining
             log.critical(
                 'A lower schema version was returned (%s) from the %s upgrade function '
                 'than passed in (%s)' % (new_ver, plugin, ver))
             manager.shutdown(finish_queue=False)
     except UpgradeImpossible:
         # Plugin declared the data unmigratable: drop and recreate its tables
         log.info(
             'Plugin %s database is not upgradable. Flushing data and regenerating.'
             % plugin)
         reset_schema(plugin)
         session.commit()
     except Exception as e:
         # Any other failure: log with traceback and shut down
         log.exception('Failed to upgrade database for plugin %s: %s' %
                       (plugin, e))
         manager.shutdown(finish_queue=False)
     finally:
         session.close()
예제 #19
0
def set_version(plugin, version):
    """Record *version* as the stored schema version for *plugin*.

    Raises ValueError if the plugin has no versioned_base, if the version
    disagrees with the declared base version, or if it would go backwards.
    """
    if plugin not in plugin_schemas:
        raise ValueError(
            'Tried to set schema version for %s plugin with no versioned_base.'
            % plugin)
    base_version = plugin_schemas[plugin]['version']
    if version != base_version:
        raise ValueError('Tried to set %s plugin schema version to %d when '
                         'it should be %d as defined in versioned_base.' %
                         (plugin, version, base_version))
    session = Session()
    try:
        schema = session.query(PluginSchema).filter(
            PluginSchema.plugin == plugin).first()
        if schema is None:
            log.debug('Initializing plugin %s schema version to %i' %
                      (plugin, version))
            session.add(PluginSchema(plugin, version))
        elif version < schema.version:
            raise ValueError(
                'Tried to set plugin %s schema version to lower value' %
                plugin)
        elif version != schema.version:
            log.debug('Updating plugin %s schema version to %i' %
                      (plugin, version))
            schema.version = version
        session.commit()
    finally:
        session.close()
예제 #20
0
def age_series(**kwargs):
    """Shift the first_seen timestamp of every Release back in time.

    Keyword args are passed straight to datetime.timedelta (days=..., hours=...).
    """
    from flexget.plugins.filter.series import Release
    from flexget.manager import Session
    import datetime
    session = Session()
    try:
        session.query(Release).update(
            {'first_seen': datetime.datetime.now() - datetime.timedelta(**kwargs)})
        session.commit()
    finally:
        # BUG FIX: the session was never closed before (connection leak)
        session.close()
예제 #21
0
    def on_process_start(self, task):
        # CLI tool: resets stored tail-plugin file positions to zero.
        # NOTE: Python 2 only (print statements below).
        if not task.manager.options.tail_reset:
            return

        # Normal task execution is skipped while the reset runs
        task.manager.disable_tasks()

        from flexget.utils.simple_persistence import SimpleKeyValue
        from flexget.manager import Session

        session = Session()
        try:
            # Positions are keyed by the file path exactly as written in config
            poses = session.query(SimpleKeyValue).filter(
                SimpleKeyValue.key == task.manager.options.tail_reset).all()
            if not poses:
                print 'No position stored for file %s' % task.manager.options.tail_reset
                print 'Note that file must give in same format as in config, ie. ~/logs/log can not be given as /home/user/logs/log'
            for pos in poses:
                if pos.value == 0:
                    print 'Task %s tail position is already zero' % pos.task
                else:
                    print 'Task %s tail position (%s) reseted to zero' % (
                        pos.task, pos.value)
                    pos.value = 0
            session.commit()
        finally:
            session.close()
예제 #22
0
    def queue_add(self, title=None, imdb_id=None, quality='ANY', force=True):
        """Add an item to the queue with the specified quality

        :raises QueueError: if the movie is already queued
        """

        if not title or not imdb_id:
            # We don't have all the info we need to add movie, do a lookup for more info
            result = self.parse_what(imdb_id or title)
            title = result['title']
            imdb_id = result['imdb_id']
        quality = self.validate_quality(quality)

        session = Session()
        try:
            # check if the item is already queued
            item = session.query(QueuedMovie).filter(
                QueuedMovie.imdb_id == imdb_id).first()
            if not item:
                #TODO: fix
                item = QueuedMovie(imdb_id=imdb_id,
                                   quality=quality,
                                   immortal=force,
                                   title=title)
                session.add(item)
                session.commit()
                return {
                    'title': title,
                    'imdb_id': imdb_id,
                    'quality': quality,
                    'force': force
                }
            else:
                raise QueueError('ERROR: %s is already in the queue' % title)
        finally:
            # BUG FIX: the session previously leaked on the already-queued path
            session.close()
예제 #23
0
파일: log.py 프로젝트: goto100/Flexget
def log_once(message,
             logger=logging.getLogger('log_once'),
             once_level=logging.INFO,
             suppressed_level=f_logger.VERBOSE):
    """
    Log message only once using given logger. Returns False if suppressed logging.
    When suppressed, `suppressed_level` level is still logged.
    """
    # If there is no active manager, don't access the db
    from flexget.manager import manager
    if not manager:
        log.warning('DB not initialized. log_once will not work properly.')
        logger.log(once_level, message)
        return

    hasher = hashlib.md5()
    hasher.update(message.encode('latin1', 'replace'))  # ticket:250
    fingerprint = hasher.hexdigest()

    session = Session()
    try:
        duplicate = session.query(LogMessage).filter_by(md5sum=fingerprint).first()
        if duplicate:
            logger.log(suppressed_level, message)
            return False

        session.add(LogMessage(fingerprint))
        session.commit()
    finally:
        session.close()

    logger.log(once_level, message)
    return True
예제 #24
0
    def get_login_cookies(self, username, password):
        """Return t411 auth cookies for *username*, using the db as a cache.

        NOTE: Python 2 only (urllib/urllib2/cookielib).
        """
        url_auth = 'http://www.t411.me/users/login'
        db_session = Session()
        account = db_session.query(torrent411Account).filter(
            torrent411Account.username == username).first()
        if account:
            if account.expiry_time < datetime.now():
                db_session.delete(account)
                db_session.commit()
            # NOTE(review): an expired account is deleted above but its auth
            # data is still returned here — looks like a bug; confirm intent.
            log.debug("Cookies found in db!")
            return account.auth
        else:
            # No cached cookies: perform a real login against the site
            log.debug("Getting login cookies from : %s " % url_auth)
            params = urllib.urlencode({
                'login': username,
                'password': password,
                'remember': '1'
            })
            cj = cookielib.CookieJar()
            #           WE NEED A COOKIE HOOK HERE TO AVOID REDIRECT COOKIES
            opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
            #           NEED TO BE SAME USER_AGENT THAN DOWNLOAD LINK
            opener.addheaders = [('User-agent', self.USER_AGENT)]
            try:
                opener.open(url_auth, params)
            except Exception as e:
                raise UrlRewritingError("Connection Error for %s : %s" %
                                        (url_auth, e))

            # Harvest the three auth cookies the site sets on success
            authKey = None
            uid = None
            password = None

            for cookie in cj:
                if cookie.name == "authKey":
                    authKey = cookie.value
                if cookie.name == "uid":
                    uid = cookie.value
                if cookie.name == "pass":
                    password = cookie.value

            if authKey is not None and \
               uid is not None and \
               password is not None:
                # Cache the cookies in the db for one day
                authCookie = {
                    'uid': uid,
                    'password': password,
                    'authKey': authKey
                }
                db_session.add(
                    torrent411Account(username=username,
                                      auth=authCookie,
                                      expiry_time=datetime.now() +
                                      timedelta(days=1)))
                db_session.commit()
                return authCookie

        # Fallback: empty credentials when login did not yield all cookies
        return {"uid": "", "password": "", "authKey": ""}
예제 #25
0
def consolidate():
    """
    Converts previous archive data model to new one.

    Folds duplicate ArchiveEntry rows (same title+url) into a single entry
    with multiple sources, then deletes the duplicates. Safe to abort with
    CTRL-C: everything is committed in one transaction at the end.
    """

    session = Session()
    try:
        log.verbose('Checking archive size ...')
        count = session.query(ArchiveEntry).count()
        log.verbose('Found %i items to migrate, this can be aborted with CTRL-C safely.' % count)

        # consolidate old data
        from progressbar import ProgressBar, Percentage, Bar, ETA

        widgets = ['Process - ', ETA(), ' ', Percentage(), ' ', Bar(left='[', right=']')]
        bar = ProgressBar(widgets=widgets, maxval=count).start()

        # id's for duplicates
        duplicates = []

        for index, orig in enumerate(session.query(ArchiveEntry).yield_per(5)):
            bar.update(index)

            # item already processed
            if orig.id in duplicates:
                continue

            # item already migrated
            if orig.sources:
                log.info('Database looks like it has already been consolidated, '
                         'item %s has already sources ...' % orig.title)
                session.rollback()
                return

            # add legacy task to the sources list
            orig.sources.append(get_source(orig.task, session))
            # remove task, deprecated .. well, let's still keep it ..
            # orig.task = None

            # merge every other entry with the same title+url into this one
            for dupe in session.query(ArchiveEntry).\
                filter(ArchiveEntry.id != orig.id).\
                filter(ArchiveEntry.title == orig.title).\
                    filter(ArchiveEntry.url == orig.url).all():
                orig.sources.append(get_source(dupe.task, session))
                duplicates.append(dupe.id)

        if duplicates:
            log.info('Consolidated %i items, removing duplicates ...' % len(duplicates))
            for id in duplicates:
                session.query(ArchiveEntry).filter(ArchiveEntry.id == id).delete()
        # single commit keeps the migration all-or-nothing
        session.commit()
        log.info('Completed! This does NOT need to be ran again.')
    except KeyboardInterrupt:
        # CTRL-C: discard everything done so far
        session.rollback()
        log.critical('Aborted, no changes saved')
    finally:
        session.close()
예제 #26
0
def vacuum():
    """Compact the sqlite database with the VACUUM statement."""
    console('Running VACUUM on sqlite database, this could take a while.')
    db = Session()
    try:
        db.execute('VACUUM')
        db.commit()
    finally:
        db.close()
    console('VACUUM complete.')
예제 #27
0
def vacuum():
    """Run sqlite's VACUUM; can take a while on large databases."""
    console('Running VACUUM on sqlite database, this could take a while.')
    maintenance_session = Session()
    try:
        maintenance_session.execute('VACUUM')
        maintenance_session.commit()
    finally:
        # release the connection whether or not VACUUM succeeded
        maintenance_session.close()
    console('VACUUM complete.')
예제 #28
0
파일: archive.py 프로젝트: DColl/Flexget
def consolidate():
    """
    Converts previous archive data model to new one.

    Merges ArchiveEntry duplicates (same title+url) into one entry carrying
    multiple sources, then removes the leftovers. A single commit at the end
    makes CTRL-C abort safe.
    """

    session = Session()
    try:
        log.verbose('Checking archive size ...')
        count = session.query(ArchiveEntry).count()
        log.verbose('Found %i items to migrate, this can be aborted with CTRL-C safely.' % count)

        # consolidate old data
        from progressbar import ProgressBar, Percentage, Bar, ETA

        widgets = ['Process - ', ETA(), ' ', Percentage(), ' ', Bar(left='[', right=']')]
        bar = ProgressBar(widgets=widgets, maxval=count).start()

        # id's for duplicates
        duplicates = []

        for index, orig in enumerate(session.query(ArchiveEntry).yield_per(5)):
            bar.update(index)

            # item already processed
            if orig.id in duplicates:
                continue

            # item already migrated
            if orig.sources:
                log.info('Database looks like it has already been consolidated, '
                         'item %s has already sources ...' % orig.title)
                session.rollback()
                return

            # add legacy task to the sources list
            orig.sources.append(get_source(orig.task, session))
            # remove task, deprecated .. well, let's still keep it ..
            #orig.task = None

            # absorb every other entry sharing this title+url
            for dupe in session.query(ArchiveEntry).\
                filter(ArchiveEntry.id != orig.id).\
                filter(ArchiveEntry.title == orig.title).\
                    filter(ArchiveEntry.url == orig.url).all():
                orig.sources.append(get_source(dupe.task, session))
                duplicates.append(dupe.id)

        if duplicates:
            log.info('Consolidated %i items, removing duplicates ...' % len(duplicates))
            for id in duplicates:
                session.query(ArchiveEntry).filter(ArchiveEntry.id == id).delete()
        # everything above is flushed in one transaction
        session.commit()
        log.info('Completed! This does NOT need to be ran again.')
    except KeyboardInterrupt:
        # user abort: nothing is persisted
        session.rollback()
        log.critical('Aborted, no changes saved')
    finally:
        session.close()
예제 #29
0
    def get_login_cookies(self, username, password):
        """Return t411 auth cookies for *username*, caching them in the db for a day."""
        url_auth = 'http://www.t411.li/users/login'
        db_session = Session()
        account = db_session.query(torrent411Account).filter(
            torrent411Account.username == username).first()
        if account:
            if account.expiry_time < datetime.now():
                db_session.delete(account)
                db_session.commit()
            # NOTE(review): an expired account is deleted above but its auth
            # data is still returned here — looks unintended; confirm.
            log.debug("Cookies found in db!")
            return account.auth
        else:
            # No cached cookies: log in against the site for fresh ones
            log.debug("Getting login cookies from : %s " % url_auth)
            params = {'login': username, 'password': password, 'remember': '1'}
            cj = http.cookiejar.CookieJar()
            #           WE NEED A COOKIE HOOK HERE TO AVOID REDIRECT COOKIES
            opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(cj))
            #           NEED TO BE SAME USER_AGENT THAN DOWNLOAD LINK
            opener.addheaders = [('User-agent', self.USER_AGENT)]
            login_output = None
            try:
                login_output = opener.open(url_auth, urllib.parse.urlencode(params)).read()
            except Exception as e:
                raise UrlRewritingError("Connection Error for %s : %s" % (url_auth, e))

            if b'confirmer le captcha' in login_output:
                log.warning("Captcha requested for login.")
                login_output = self._solveCaptcha(login_output, url_auth, params, opener)

            if b'logout' in login_output:
                # Login succeeded: pick the three auth cookies out of the jar
                authKey = None
                uid = None
                password = None

                for cookie in cj:
                    if cookie.name == "authKey":
                        authKey = cookie.value
                    if cookie.name == "uid":
                        uid = cookie.value
                    if cookie.name == "pass":
                        password = cookie.value

                if authKey is not None and \
                        uid is not None and \
                        password is not None:
                    # Cache the cookie set for one day
                    authCookie = {'uid': uid,
                                  'password': password,
                                  'authKey': authKey
                                  }
                    db_session.add(torrent411Account(username=username,
                                                     auth=authCookie,
                                                     expiry_time=datetime.now() + timedelta(days=1)))
                    db_session.commit()
                    return authCookie
                # NOTE(review): if the cookie set is incomplete this falls
                # through to an implicit None return, unlike the older variant
                # that returned empty credentials — confirm callers handle it.
            else:
                log.error("Login failed (Torrent411). Check your login and password.")
                return {}
예제 #30
0
파일: series.py 프로젝트: kop1/flexget
def forget_series(name):
    """Remove a whole series :name: from database.

    :param string name: Name of the series to remove.
    :raises ValueError: If no series by that name exists.
    """
    session = Session()
    try:
        series = session.query(Series).filter(Series.name == name).first()
        if not series:
            raise ValueError('Unknown series %s' % name)
        session.delete(series)
        session.commit()
        log.debug('Removed series %s from database.' % name)
    finally:
        # the session was previously never closed (leaked on every call)
        session.close()
예제 #31
0
파일: series.py 프로젝트: kop1/flexget
def repair(manager):
    """Perform database repairing and upgrading at startup.

    Purges orphan Release rows (releases with no parent episode) once, then
    records the repair in the persistence store so it is not re-run.
    """
    if manager.persist.get('series_repaired', False):
        return
    session = Session()
    try:
        # For some reason at least I have some releases in database which don't belong to any episode.
        for release in session.query(Release).filter(Release.episode == None).all():
            log.info('Purging orphan release %s from database' % release.title)
            session.delete(release)
        session.commit()
    finally:
        # the session was previously never closed
        session.close()
    manager.persist['series_repaired'] = True
예제 #32
0
def purge():
    """Purge messages older than one year from the database."""
    old = datetime.now() - timedelta(days=365)
    session = Session()
    try:
        for message in session.query(LogMessage).filter(LogMessage.added < old):
            log.debug('purging: %s' % message)
            session.delete(message)
        # commit only on success; previously commit() lived in `finally`,
        # committing a partial delete even after an exception
        session.commit()
    finally:
        # always return the connection to the pool (was never closed before)
        session.close()
예제 #33
0
def clear_rejected(manager):
    """Delete every remembered-rejection row; flag the config as changed when any were removed."""
    session = Session()
    try:
        deleted = session.query(RememberEntry).delete()
        console("Cleared %i items." % deleted)
        session.commit()
        if deleted:
            # entries changed out from under the tasks; force reprocessing
            manager.config_changed()
    finally:
        session.close()
예제 #34
0
def clear_failed(manager):
    """Wipe the failed-entries table; flag the config as changed when rows were removed."""
    session = Session()
    try:
        removed = session.query(FailedEntry).delete()
        console('Cleared %i items.' % removed)
        session.commit()
        if removed:
            manager.config_changed()
    finally:
        session.close()
예제 #35
0
def clear_rejected(manager):
    """Remove all remembered rejections and report how many were cleared."""
    session = Session()
    try:
        num_removed = session.query(RememberEntry).delete()
        console('Cleared %i items.' % num_removed)
        session.commit()
        if num_removed:
            manager.config_changed()
    finally:
        session.close()
예제 #36
0
    def get_login_cookies(self, username, password):
        """Return t411 auth cookies, reusing a cached db entry while it is valid.

        :param username: t411 account name
        :param password: t411 account password
        :return: dict with 'uid', 'password' and 'authKey' (empty strings on failure)
        :raises UrlRewritingError: when the login request itself fails
        """
        url_auth = 'http://www.t411.me/users/login'
        db_session = Session()
        account = db_session.query(torrent411Account).filter(
            torrent411Account.username == username).first()
        if account:
            if account.expiry_time < datetime.now():
                # BUG FIX: expired cookies were previously deleted from the db
                # but still returned to the caller; now drop them and fall
                # through to a fresh login below.
                db_session.delete(account)
                db_session.commit()
            else:
                log.debug("Cookies found in db!")
                return account.auth

        log.debug("Getting login cookies from : %s " % url_auth)
        params = urllib.urlencode({'login': username,
                                   'password': password,
                                   'remember': '1'})
        cj = cookielib.CookieJar()
        # WE NEED A COOKIE HOOK HERE TO AVOID REDIRECT COOKIES
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
        # NEED TO BE SAME USER_AGENT THAN DOWNLOAD LINK
        opener.addheaders = [('User-agent', self.USER_AGENT)]
        try:
            opener.open(url_auth, params)
        except Exception as e:
            raise UrlRewritingError("Connection Error for %s : %s" % (url_auth, e))

        authKey = None
        uid = None
        # renamed from `password`: the original clobbered the password argument
        pass_cookie = None

        for cookie in cj:
            if cookie.name == "authKey":
                authKey = cookie.value
            if cookie.name == "uid":
                uid = cookie.value
            if cookie.name == "pass":
                pass_cookie = cookie.value

        if authKey is not None and \
           uid is not None and \
           pass_cookie is not None:
            authCookie = {'uid': uid,
                          'password': pass_cookie,
                          'authKey': authKey
                          }
            db_session.add(torrent411Account(username=username,
                                             auth=authCookie,
                                             expiry_time=datetime.now() + timedelta(days=1)))
            db_session.commit()
            return authCookie

        return {"uid": "",
                "password": "",
                "authKey": ""
                }
예제 #37
0
 def clear_failed(self):
     """Clears list of failed entries"""
     session = Session()
     try:
         rows = session.query(FailedEntry).all()
         for entry in rows:
             session.delete(entry)
         console('Cleared %i items.' % len(rows))
         session.commit()
     finally:
         session.close()
예제 #38
0
 def clear_failed(self):
     """Clears list of failed entries"""
     # Delete each failed entry individually and report the total removed.
     session = Session()
     try:
         failed = session.query(FailedEntry).all()
         removed = 0
         for item in failed:
             session.delete(item)
             removed += 1
         console('Cleared %i items.' % removed)
         session.commit()
     finally:
         session.close()
예제 #39
0
 def _set_db_last_run(self):
     """Persist this trigger's last_run timestamp into its DBTrigger row."""
     session = Session()
     try:
         trigger = session.query(DBTrigger).get(self.uid)
         if trigger is None:
             # first run for this uid: create the trigger record
             trigger = DBTrigger(self.uid)
             session.add(trigger)
         trigger.last_run = self.last_run
         session.commit()
     finally:
         session.close()
     log.debug('recorded last_run to the database')
예제 #40
0
파일: task.py 프로젝트: BrainDamage/Flexget
def config_changed(task):
    """Forces config_modified flag to come out true on next run. Used when the db changes, and all
    entries need to be reprocessed."""
    log.debug('Marking config as changed.')
    session = Session()
    try:
        stored = session.query(TaskConfigHash).filter(TaskConfigHash.task == task).first()
        if stored is not None:
            # a blank hash can never match the real config hash
            stored.hash = ''
        session.commit()
    finally:
        session.close()
예제 #41
0
파일: task.py 프로젝트: fcharlier/Flexget
def config_changed(task):
    """Forces config_modified flag to come out true on next run. Used when the db changes, and all
    entries need to be reprocessed."""
    log.debug('Marking config as changed.')
    session = Session()
    try:
        record = session.query(TaskConfigHash).filter(TaskConfigHash.task == task).first()
        if record:
            record.hash = ''  # guarantee a mismatch on the next comparison
        session.commit()
    finally:
        session.close()
예제 #42
0
 def _set_db_last_run(self):
     """Write self.last_run into the DBTrigger row keyed by self.uid."""
     session = Session()
     try:
         row = session.query(DBTrigger).get(self.uid)
         if not row:
             row = DBTrigger(self.uid)
             session.add(row)
         row.last_run = self.last_run
         session.commit()
     finally:
         session.close()
     log.debug("recorded last_run to the database")
예제 #43
0
 def queue_edit(self, imdb_id, quality):
     """Change the required quality for a movie in the queue

     :param imdb_id: imdb id of the queued movie
     :param quality: new quality requirement (validated before use)
     :return: title of the updated movie
     :raises QueueError: if the movie is not in the queue
     """
     self.validate_quality(quality)
     session = Session()
     try:
         # check if the item is queued
         item = session.query(QueuedMovie).filter(
             QueuedMovie.imdb_id == imdb_id).first()
         if not item:
             raise QueueError('%s is not in the queue' % imdb_id)
         item.quality = quality
         session.commit()
         return item.title
     finally:
         # previously the session was never closed (leaked on every call)
         session.close()
예제 #44
0
파일: test_delay.py 프로젝트: umeku/Flexget
 def test_delay(self, execute_task):
     """Entries are held back by delay, then injected exactly once after it expires."""
     task = execute_task('test')
     assert not task.entries, 'No entries should have passed delay'
     # Age the entry in the db
     session = Session()
     for delayed in session.query(DelayedEntry).all():
         delayed.expire = delayed.expire - timedelta(hours=1)
     session.commit()
     task = execute_task('test')
     assert task.entries, 'Entry should have passed delay and been inserted'
     # Make sure entry is only injected once
     task = execute_task('test')
     assert not task.entries, 'Entry should only be insert'
예제 #45
0
파일: seen.py 프로젝트: kop1/flexget
    def on_process_start(self, feed):
        """Handle the --seen CLI option: record its value as seen for all feeds."""
        # Only act when --seen was supplied on the command line.
        if not feed.manager.options.seen:
            return

        feed.manager.disable_feeds()

        session = Session()
        entry = SeenEntry(u'--seen', unicode(feed.name))
        entry.fields.append(SeenField(u'--seen', unicode(feed.manager.options.seen)))
        session.add(entry)
        session.commit()

        log.info('Added %s as seen. This will affect all feeds.' % feed.manager.options.seen)
예제 #46
0
    def clear_failed(self):
        """
        Clears list of failed entries

        :return: The number of entries cleared.
        """
        session = Session()
        try:
            removed = session.query(FailedEntry).delete()
            console('Cleared %i items.' % removed)
            session.commit()
            return removed
        finally:
            session.close()
예제 #47
0
def seen_add(options):
    """Mark a name (or IMDB url, reduced to its id) as seen for all tasks.

    :param options: CLI options object carrying ``add_value``
    """
    seen_name = options.add_value
    if is_imdb_url(seen_name):
        imdb_id = extract_id(seen_name)
        if imdb_id:
            # store the canonical imdb id instead of the full url
            seen_name = imdb_id

    session = Session()
    try:
        se = SeenEntry(seen_name, 'cli_seen')
        sf = SeenField('cli_seen', seen_name)
        se.fields.append(sf)
        session.add(se)
        session.commit()
    finally:
        # the session was previously leaked; always close it
        session.close()
    console('Added %s as seen. This will affect all tasks.' % seen_name)
예제 #48
0
    def queue_del(self, imdb_id):
        """Delete the given item from the queue

        :param imdb_id: imdb id of the queued movie
        :return: title of the removed movie
        :raises QueueError: if the movie is not in the queue
        """
        session = Session()
        try:
            # check if the item is queued
            item = session.query(QueuedMovie).filter(
                QueuedMovie.imdb_id == imdb_id).first()
            if not item:
                raise QueueError('%s is not in the queue' % imdb_id)
            title = item.title
            session.delete(item)
            session.commit()
            return title
        finally:
            # previously leaked the session on both the success and error path
            session.close()
예제 #49
0
파일: seen.py 프로젝트: Doppia/Flexget
def seen_add(options):
    """Record ``options.add_value`` as seen for all tasks.

    IMDB urls are reduced to their bare imdb id before being stored.
    """
    seen_name = options.add_value
    if is_imdb_url(seen_name):
        imdb_id = extract_id(seen_name)
        if imdb_id:
            seen_name = imdb_id

    session = Session()
    try:
        entry = SeenEntry(seen_name, 'cli_seen')
        entry.fields.append(SeenField('cli_seen', seen_name))
        session.add(entry)
        session.commit()
    finally:
        # the session was previously never closed
        session.close()
    console('Added %s as seen. This will affect all tasks.' % seen_name)
예제 #50
0
    def clear_failed(self):
        """
        Clears list of failed entries

        :return: The number of entries cleared.
        """
        session = Session()
        try:
            num_cleared = session.query(FailedEntry).delete()
            console('Cleared %i items.' % num_cleared)
            session.commit()
            return num_cleared
        finally:
            session.close()
예제 #51
0
파일: schema.py 프로젝트: achuprin/Flexget
 def upgrade(manager):
     """Run `func` to upgrade `plugin`'s schema, disabling feeds on failure.

     Commits only when the returned schema version increased; a decreased
     version is treated as a programming error.
     """
     ver = get_version(plugin)
     session = Session()
     try:
         new_ver = func(ver, session)
         if new_ver > ver:
             set_version(plugin, new_ver)
             session.commit()
         elif new_ver < ver:
             log.critical('A lower schema version was returned (%s) from the %s upgrade function '
                          'than passed in (%s)' % (new_ver, plugin, ver))
             manager.disable_feeds()
     except Exception as e:
         # was `except Exception, e` -- Python 2-only syntax
         log.exception('Failed to upgrade database for plugin %s: %s' % (plugin, e))
         manager.disable_feeds()
     finally:
         # the session was previously never closed
         session.close()
예제 #52
0
def migrate_imdb_queue(manager):
    """If imdb_queue table is found, migrate the data to movie_queue"""
    session = Session()
    try:
        if not table_exists('imdb_queue', session):
            return
        log.info('Migrating imdb_queue items to movie_queue')
        old_table = table_schema('imdb_queue', session)
        for row in session.execute(old_table.select()):
            try:
                queue_add(imdb_id=row['imdb_id'], quality=row['quality'], session=session)
            except QueueError:
                log.error('Unable to migrate %s from imdb_queue to movie_queue' % row['title'])
        old_table.drop()
        session.commit()
    finally:
        session.close()
예제 #53
0
파일: series.py 프로젝트: kop1/flexget
def forget_series_episode(name, identifier):
    """Remove all episodes by :identifier: from series :name: from database.

    :raises ValueError: when the series or the episode is unknown
    """
    session = Session()
    try:
        series = session.query(Series).filter(Series.name == name).first()
        if not series:
            raise ValueError('Unknown series %s' % name)
        episode = session.query(Episode).filter(Episode.identifier == identifier).\
            filter(Episode.series_id == series.id).first()
        if not episode:
            raise ValueError('Unknown identifier %s for series %s' % (identifier, name.capitalize()))
        series.identified_by = ''  # reset identified_by flag so that it will be recalculated
        session.delete(episode)
        session.commit()
        log.debug('Episode %s from series %s removed from database.' % (identifier, name))
    finally:
        # the session was previously never closed (leaked on every call)
        session.close()
예제 #54
0
def migrate_imdb_queue(manager):
    """If imdb_queue table is found, migrate the data to movie_queue"""
    session = Session()
    try:
        if table_exists('imdb_queue', session):
            log.info('Migrating imdb_queue items to movie_queue')
            legacy = table_schema('imdb_queue', session)
            for record in session.execute(legacy.select()):
                try:
                    queue_add(imdb_id=record['imdb_id'], quality=record['quality'], session=session)
                except QueueError:
                    log.error('Unable to migrate %s from imdb_queue to movie_queue' % record['title'])
            legacy.drop()
            session.commit()
    finally:
        session.close()
예제 #55
0
파일: schema.py 프로젝트: Donavan/Flexget
 def upgrade_wrapper(manager):
     """Run `func` to upgrade `plugin`'s schema, disabling tasks on failure.

     Commits and records the upgrade when the schema version increased; a
     decreased version is treated as a programming error.
     """
     ver = get_version(plugin)
     session = Session()
     try:
         new_ver = func(ver, session)
         if new_ver > ver:
             log.info('Plugin `%s` schema upgraded successfully' % plugin)
             set_version(plugin, new_ver)
             session.commit()
             manager.db_upgraded = True
         elif new_ver < ver:
             log.critical('A lower schema version was returned (%s) from the %s upgrade function '
                          'than passed in (%s)' % (new_ver, plugin, ver))
             manager.disable_tasks()
     except Exception as e:
         # was `except Exception, e` -- Python 2-only syntax
         log.exception('Failed to upgrade database for plugin %s: %s' % (plugin, e))
         manager.disable_tasks()
     finally:
         # the session was previously never closed
         session.close()
예제 #56
0
파일: seen.py 프로젝트: drbashar315/Flexget
    def on_process_start(self, task):
        """Handle the --seen CLI option: record its value as seen for all tasks.

        IMDB urls are reduced to their bare imdb id before being stored.
        """
        if not task.manager.options.seen:
            return

        task.manager.disable_tasks()

        seen_name = task.manager.options.seen
        if is_imdb_url(seen_name):
            imdb_id = extract_id(seen_name)
            if imdb_id:
                # store the canonical imdb id instead of the full url
                seen_name = imdb_id

        session = Session()
        try:
            se = SeenEntry(u'--seen', unicode(task.name))
            sf = SeenField(u'--seen', seen_name)
            se.fields.append(sf)
            session.add(se)
            session.commit()
        finally:
            # the session was previously never closed
            session.close()

        log.info('Added %s as seen. This will affect all tasks.' % seen_name)
예제 #57
0
def begin(manager, options):
    """CLI handler: set the first episode from which a series is accepted."""
    series_name = options.series_name
    ep_id = options.episode_id
    session = Session()
    try:
        existing = session.query(Series).filter(Series.name == series_name).first()
        if not existing:
            # unknown series: create a fresh row before setting its begin point
            console('Series not yet in database, adding `%s`' % series_name)
            existing = Series()
            existing.name = series_name
            session.add(existing)
        try:
            set_series_begin(existing, ep_id)
        except ValueError as err:
            console(err)
        else:
            console('Episodes for `%s` will be accepted starting with `%s`' % (existing.name, ep_id))
            session.commit()
    finally:
        session.close()
    manager.config_changed()
예제 #58
0
    def test_mark_expired(self):
        """Expired cache entries are refreshed from tvdb on the next run."""
        def run_and_check():
            # Run the task and check tvdb data was populated.
            self.execute_task('test_mark_expired')
            entry = self.task.find_entry(title='House.S02E02.hdtv')
            assert entry['tvdb_ep_name'] == 'Autopsy'

        # First run populates data from tvdb; second run should hit the cache.
        run_and_check()
        run_and_check()
        # Manually mark the data as expired, to test cache update
        session = Session()
        episode = lookup_episode(name='House',
                                 seasonnum=2,
                                 episodenum=2,
                                 session=session)
        episode.expired = True
        episode.series.expired = True
        session.commit()
        session.close()
        run_and_check()
예제 #59
0
    def notify(self, title, message, config):
        session = Session()

        access_token = self._real_init(session, config)

        failure_message = self._get_failure_message(session, config)

        all_messages = failure_message + message

        if access_token:
            try:
                self._send_msgs(all_messages, access_token)
            except Exception as e:
                entry = MessageEntry(
                    content=all_messages,
                    failure_time=datetime.now()
                )
                session.add(entry)
                session.commit()
                raise PluginError(str(e))
            if self.image:
                self._send_images(access_token)