def forget(value):
    """
    See module docstring

    :param string value: Can be task name, entry title or field value
    :return: count, field_count where count is number of entries removed and field_count number of fields
    """
    log.debug('forget called with %s' % value)
    session = Session()
    try:
        count = 0
        field_count = 0
        for se in session.query(SeenEntry).filter(
                or_(SeenEntry.title == value, SeenEntry.task == value)).all():
            field_count += len(se.fields)
            count += 1
            log.debug('forgetting %s' % se)
            session.delete(se)
        for sf in session.query(SeenField).filter(
                SeenField.value == value).all():
            se = session.query(SeenEntry).filter(
                SeenEntry.id == sf.seen_entry_id).first()
            field_count += len(se.fields)
            count += 1
            log.debug('forgetting %s' % se)
            session.delete(se)
        return count, field_count
    finally:
        session.commit()
        session.close()

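# Hedged usage sketch (not part of the original module): how a caller might
# consume forget()'s two return values; the title string is a made-up example.
count, field_count = forget('Some.Entry.Title')
log.info('Removed %s entries (%s fields) matching the given value.' % (count, field_count))
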
def add_failed(self, entry, reason=None, **kwargs):
    """Adds entry to internal failed list, displayed with --failed"""
    reason = reason or 'Unknown'
    failed = Session()
    try:
        # query item's existence
        item = failed.query(FailedEntry).filter(FailedEntry.title == entry['title']).\
            filter(FailedEntry.url == entry['original_url']).first()
        if not item:
            item = FailedEntry(entry['title'], entry['original_url'], reason)
        else:
            item.count += 1
            item.tof = datetime.now()
            item.reason = reason
        failed.merge(item)
        log.debug('Marking %s in failed list. Has failed %s times.' % (item.title, item.count))
        # limit item number to 25
        for row in failed.query(FailedEntry).order_by(FailedEntry.tof.desc())[25:]:
            failed.delete(row)
        failed.commit()
    finally:
        failed.close()

def notify(self, title, message, config):
    if not message.strip():
        return
    self._parse_config(config)
    session = Session()
    self._save_message(message, session)
    session.commit()
    # gather all messages that have not been delivered yet
    message_list = session.query(MessageEntry).filter(MessageEntry.sent == False).all()
    try:
        if access_token := self._get_access_token(session, self._corp_id, self._corp_secret):
            for message_entry in message_list:
                self._send_msgs(message_entry, access_token)
                time.sleep(1)
                # drop a message from the queue once it has been sent successfully
                if message_entry.sent:
                    session.delete(message_entry)
                    session.commit()
            if self.image:
                self._send_images(access_token)
    except Exception as e:
        raise PluginError(str(e))

def get_login_cookies(self, username, password):
    url_auth = 'http://www.t411.me/users/login'
    db_session = Session()
    account = db_session.query(torrent411Account).filter(
        torrent411Account.username == username).first()
    if account:
        if account.expiry_time < datetime.now():
            db_session.delete(account)
            db_session.commit()
        log.debug("Cookies found in db!")
        return account.auth
    else:
        log.debug("Getting login cookies from : %s " % url_auth)
        params = urllib.urlencode({'login': username,
                                   'password': password,
                                   'remember': '1'})
        cj = cookielib.CookieJar()
        # WE NEED A COOKIE HOOK HERE TO AVOID REDIRECT COOKIES
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
        # NEED TO BE SAME USER_AGENT THAN DOWNLOAD LINK
        opener.addheaders = [('User-agent', self.USER_AGENT)]
        try:
            opener.open(url_auth, params)
        except Exception as e:
            raise UrlRewritingError("Connection Error for %s : %s" % (url_auth, e))

        authKey = None
        uid = None
        password = None
        for cookie in cj:
            if cookie.name == "authKey":
                authKey = cookie.value
            if cookie.name == "uid":
                uid = cookie.value
            if cookie.name == "pass":
                password = cookie.value

        if authKey is not None and \
           uid is not None and \
           password is not None:
            authCookie = {'uid': uid,
                          'password': password,
                          'authKey': authKey}
            db_session.add(torrent411Account(username=username,
                                             auth=authCookie,
                                             expiry_time=datetime.now() + timedelta(days=1)))
            db_session.commit()
            return authCookie
        return {"uid": "", "password": "", "authKey": ""}

def get_login_cookies(self, username, password):
    url_auth = 'http://www.t411.li/users/login'
    db_session = Session()
    account = db_session.query(torrent411Account).filter(
        torrent411Account.username == username).first()
    if account:
        if account.expiry_time < datetime.now():
            db_session.delete(account)
            db_session.commit()
        log.debug("Cookies found in db!")
        return account.auth
    else:
        log.debug("Getting login cookies from : %s " % url_auth)
        params = {'login': username, 'password': password, 'remember': '1'}
        cj = http.cookiejar.CookieJar()
        # WE NEED A COOKIE HOOK HERE TO AVOID REDIRECT COOKIES
        opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(cj))
        # NEED TO BE SAME USER_AGENT THAN DOWNLOAD LINK
        opener.addheaders = [('User-agent', self.USER_AGENT)]
        login_output = None
        try:
            # POST data must be bytes under Python 3, hence the encode()
            login_output = opener.open(url_auth, urllib.parse.urlencode(params).encode('utf-8')).read()
        except Exception as e:
            raise UrlRewritingError("Connection Error for %s : %s" % (url_auth, e))
        if b'confirmer le captcha' in login_output:
            log.warning("Captcha requested for login.")
            login_output = self._solveCaptcha(login_output, url_auth, params, opener)

        if b'logout' in login_output:
            authKey = None
            uid = None
            password = None
            for cookie in cj:
                if cookie.name == "authKey":
                    authKey = cookie.value
                if cookie.name == "uid":
                    uid = cookie.value
                if cookie.name == "pass":
                    password = cookie.value

            if authKey is not None and \
               uid is not None and \
               password is not None:
                authCookie = {'uid': uid,
                              'password': password,
                              'authKey': authKey}
                db_session.add(torrent411Account(username=username,
                                                 auth=authCookie,
                                                 expiry_time=datetime.now() + timedelta(days=1)))
                db_session.commit()
                return authCookie
        else:
            log.error("Login failed (Torrent411). Check your login and password.")
            return {}

def purge():
    """Purge old messages from database"""
    old = datetime.now() - timedelta(days=365)
    session = Session()
    try:
        for message in session.query(LogMessage).filter(LogMessage.added < old):
            log.debug('purging: %s' % message)
            session.delete(message)
    finally:
        session.commit()

def repair(manager):
    """Perform database repairing and upgrading at startup."""
    if not manager.persist.get('series_repaired', False):
        session = Session()
        # For some reason at least I have some releases in database which don't belong to any episode.
        for release in session.query(Release).filter(Release.episode == None).all():
            log.info('Purging orphan release %s from database' % release.title)
            session.delete(release)
        session.commit()
        manager.persist['series_repaired'] = True

def forget_series(name):
    """Remove a whole series :name: from database."""
    session = Session()
    series = session.query(Series).filter(Series.name == name).first()
    if series:
        session.delete(series)
        session.commit()
        log.debug('Removed series %s from database.' % name)
    else:
        raise ValueError('Unknown series %s' % name)

def clear_failed(self):
    """Clears list of failed entries"""
    session = Session()
    try:
        results = session.query(FailedEntry).all()
        for row in results:
            session.delete(row)
        console('Cleared %i items.' % len(results))
        session.commit()
    finally:
        session.close()

def queue_del(self, imdb_id):
    """Delete the given item from the queue"""
    session = Session()
    # check if the item is queued
    item = session.query(QueuedMovie).filter(QueuedMovie.imdb_id == imdb_id).first()
    if item:
        title = item.title
        session.delete(item)
        session.commit()
        return title
    else:
        raise QueueError('%s is not in the queue' % imdb_id)

def forget_series_episode(name, identifier):
    """Remove all episodes by :identifier: from series :name: from database."""
    session = Session()
    series = session.query(Series).filter(Series.name == name).first()
    if series:
        episode = session.query(Episode).filter(Episode.identifier == identifier).\
            filter(Episode.series_id == series.id).first()
        if episode:
            series.identified_by = ''  # reset identified_by flag so that it will be recalculated
            session.delete(episode)
            session.commit()
            log.debug('Episode %s from series %s removed from database.' % (identifier, name))
        else:
            raise ValueError('Unknown identifier %s for series %s' % (identifier, name.capitalize()))
    else:
        raise ValueError('Unknown series %s' % name)

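# Hedged usage sketch (not part of the original code): forget_series_episode()
# above raises ValueError for unknown series or identifiers, so a hypothetical
# caller might wrap it like this; the name, identifier and log-based reporting
# are made-up examples.
try:
    forget_series_episode('Example Show', 'S01E01')
except ValueError as e:
    log.error(str(e))
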
def add_failed(self, entry):
    """Adds entry to internal failed list, displayed with --failed"""
    failed = Session()
    try:
        # query item's existence
        item = failed.query(FailedEntry).filter(FailedEntry.title == entry['title']).\
            filter(FailedEntry.url == entry['url']).first()
        if not item:
            item = FailedEntry(entry['title'], entry['url'])
        else:
            item.count += 1
            item.tof = datetime.now()
        failed.merge(item)
        # limit item number to 25
        for row in failed.query(FailedEntry).order_by(FailedEntry.tof.desc())[25:]:
            failed.delete(row)
        failed.commit()
    finally:
        failed.close()

def on_process_start(self, feed, config):
    """Purge remembered entries if the config has changed and write new hash"""
    # No session on process start, make our own
    session = Session()
    # Delete expired items
    session.query(RememberEntry).filter(RememberEntry.expires < datetime.now()).delete()
    # Generate hash for current config
    config_hash = hashlib.md5(str(feed.config.items())).hexdigest()
    # See if the feed has the same hash as last run
    old_feed = session.query(RememberFeed).filter(RememberFeed.name == feed.name).first()
    if old_feed and (old_feed.hash != config_hash or feed.manager.options.forget_rejected):
        if feed.manager.options.forget_rejected:
            log.info('Forgetting previous rejections.')
        else:
            log.verbose('Config has changed since last run, purging remembered entries.')
        session.delete(old_feed)
        old_feed = None
    if not old_feed:
        # Create this feed in the db if not present
        session.add(RememberFeed(name=feed.name, hash=config_hash))
    session.commit()

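# Hedged standalone illustration (not original code) of the config-hash idea
# used above: hash the stringified config and compare it against the stored
# hash on the next run; the sample config is made up, and the encode() is a
# Python 3 adaptation of the snippet's Python 2 usage.
import hashlib

sample_config = {'remember_rejected': True}
config_hash = hashlib.md5(str(sample_config.items()).encode('utf-8')).hexdigest()
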
def forget(value):
    log.debug('forget called with %s' % value)
    session = Session()
    try:
        count = 0
        field_count = 0
        for se in session.query(SeenEntry).filter(or_(SeenEntry.title == value, SeenEntry.feed == value)).all():
            field_count += len(se.fields)
            count += 1
            log.debug('forgetting %s' % se)
            session.delete(se)
        for sf in session.query(SeenField).filter(SeenField.value == value).all():
            se = session.query(SeenEntry).filter(SeenEntry.id == sf.seen_entry_id).first()
            field_count += len(se.fields)
            count += 1
            log.debug('forgetting %s' % se)
            session.delete(se)
        return count, field_count
    finally:
        session.commit()
        session.close()

def on_process_end(self, task):
    """Write RSS file at application terminate."""
    if not rss2gen:
        return
    # don't generate rss when learning
    if task.manager.options.learn:
        return

    config = self.get_config(task)
    if config['file'] in self.written:
        log.trace('skipping already written file %s' % config['file'])
        return

    # in terminate phase there is no open session in task, so open new one
    from flexget.manager import Session
    session = Session()
    db_items = session.query(RSSEntry).filter(RSSEntry.file == config['file']).\
        order_by(RSSEntry.published.desc()).all()

    # make items
    rss_items = []
    for db_item in db_items:
        add = True
        if config['items'] != -1:
            if len(rss_items) > config['items']:
                add = False
        if config['days'] != -1:
            if datetime.datetime.today() - datetime.timedelta(days=config['days']) > db_item.published:
                add = False
        if add:
            # add into generated feed
            gen = {'title': db_item.title,
                   'description': db_item.description,
                   'link': db_item.link,
                   'pubDate': db_item.published}
            log.trace('Adding %s into rss %s' % (gen['title'], config['file']))
            rss_items.append(PyRSS2Gen.RSSItem(**gen))
        else:
            # no longer needed
            session.delete(db_item)

    session.commit()
    session.close()

    # make rss
    rss = PyRSS2Gen.RSS2(title='FlexGet',
                         link=config.get('rsslink', 'http://flexget.com'),
                         description='FlexGet generated RSS feed',
                         lastBuildDate=datetime.datetime.utcnow(),
                         items=rss_items)

    # write rss
    fn = os.path.expanduser(config['file'])
    try:
        log.verbose('Writing output rss to %s' % fn)
        rss.write_xml(open(fn, 'w'), encoding=config['encoding'])
    except LookupError:
        log.critical('Unknown encoding %s' % config['encoding'])
        return
    except IOError:
        # TODO: plugins cannot raise PluginWarnings in terminate event ..
        log.critical('Unable to write %s' % fn)
        return
    self.written[config['file']] = True

def migrate2(self):
    session = Session()

    try:
        from progressbar import ProgressBar, Percentage, Bar, ETA
    except:
        print 'Critical: progressbar library not found, try running `bin/easy_install progressbar` ?'
        return

    class Seen(Base):
        __tablename__ = 'seen'

        id = Column(Integer, primary_key=True)
        field = Column(String)
        value = Column(String, index=True)
        task = Column('feed', String)
        added = Column(DateTime)

        def __init__(self, field, value, task):
            self.field = field
            self.value = value
            self.task = task
            self.added = datetime.now()

        def __str__(self):
            return '<Seen(%s=%s)>' % (self.field, self.value)

    print ''

    # REPAIR / REMOVE DUPLICATES
    index = 0
    removed = 0
    total = session.query(Seen).count() + 1

    widgets = ['Repairing - ', ETA(), ' ', Percentage(), ' ', Bar(left='[', right=']')]
    bar = ProgressBar(widgets=widgets, maxval=total).start()

    for seen in session.query(Seen).all():
        index += 1
        if index % 10 == 0:
            bar.update(index)
        amount = 0
        for dupe in session.query(Seen).filter(Seen.value == seen.value):
            amount += 1
            if amount > 1:
                removed += 1
                session.delete(dupe)
    bar.finish()

    # MIGRATE
    total = session.query(Seen).count() + 1
    widgets = ['Upgrading - ', ETA(), ' ', Percentage(), ' ', Bar(left='[', right=']')]
    bar = ProgressBar(widgets=widgets, maxval=total).start()
    index = 0
    for seen in session.query(Seen).all():
        index += 1
        if not index % 10:
            bar.update(index)
        se = SeenEntry(u'N/A', seen.task, u'migrated')
        se.added = seen.added
        se.fields.append(SeenField(seen.field, seen.value))
        session.add(se)
    bar.finish()

    session.execute('drop table seen;')
    session.commit()

def get_login_cookies(self, username, password):
    url_auth = 'http://www.t411.in/users/login'
    db_session = Session()
    account = db_session.query(torrent411Account).filter(
        torrent411Account.username == username).first()
    if account:
        if account.expiry_time < datetime.now():
            db_session.delete(account)
            db_session.commit()
        log.debug("Cookies found in db!")
        return account.auth
    else:
        log.debug("Getting login cookies from : %s " % url_auth)
        params = {'login': username, 'password': password, 'remember': '1'}
        cj = cookielib.CookieJar()
        # WE NEED A COOKIE HOOK HERE TO AVOID REDIRECT COOKIES
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
        # NEED TO BE SAME USER_AGENT THAN DOWNLOAD LINK
        opener.addheaders = [('User-agent', self.USER_AGENT)]
        login_output = None
        try:
            login_output = opener.open(url_auth, urllib.urlencode(params)).read()
        except Exception as e:
            raise UrlRewritingError("Connection Error for %s : %s" % (url_auth, e))
        if b'confirmer le captcha' in login_output:
            log.warn("Captcha requested for login.")
            login_output = self._solveCaptcha(login_output, url_auth, params, opener)

        if b'logout' in login_output:
            authKey = None
            uid = None
            password = None
            for cookie in cj:
                if cookie.name == "authKey":
                    authKey = cookie.value
                if cookie.name == "uid":
                    uid = cookie.value
                if cookie.name == "pass":
                    password = cookie.value

            if authKey is not None and \
               uid is not None and \
               password is not None:
                authCookie = {'uid': uid,
                              'password': password,
                              'authKey': authKey}
                db_session.add(torrent411Account(username=username,
                                                 auth=authCookie,
                                                 expiry_time=datetime.now() + timedelta(days=1)))
                db_session.commit()
                return authCookie
        else:
            log.error("Login failed (Torrent411). Check your login and password.")
            return {}

def on_process_end(self, task):
    """Write RSS file at application terminate."""
    if not rss2gen:
        return
    # don't generate rss when learning
    if task.manager.options.learn:
        return

    config = self.get_config(task)
    if config['file'] in self.written:
        log.trace('skipping already written file %s' % config['file'])
        return

    # in terminate phase there is no open session in task, so open new one
    from flexget.manager import Session
    session = Session()
    db_items = session.query(RSSEntry).filter(RSSEntry.file == config['file']).\
        order_by(RSSEntry.published.desc()).all()

    # make items
    rss_items = []
    for db_item in db_items:
        add = True
        if config['items'] != -1:
            if len(rss_items) > config['items']:
                add = False
        if config['days'] != -1:
            if datetime.datetime.today() - datetime.timedelta(days=config['days']) > db_item.published:
                add = False
        if add:
            # add into generated feed
            gen = {}
            gen['title'] = db_item.title
            gen['description'] = db_item.description
            gen['link'] = db_item.link
            gen['pubDate'] = db_item.published
            log.trace('Adding %s into rss %s' % (gen['title'], config['file']))
            rss_items.append(PyRSS2Gen.RSSItem(**gen))
        else:
            # no longer needed
            session.delete(db_item)

    session.commit()
    session.close()

    # make rss
    rss = PyRSS2Gen.RSS2(title='FlexGet',
                         link=config.get('rsslink', 'http://flexget.com'),
                         description='FlexGet generated RSS feed',
                         lastBuildDate=datetime.datetime.utcnow(),
                         items=rss_items)

    # write rss
    fn = os.path.expanduser(config['file'])
    try:
        rss.write_xml(open(fn, 'w'), encoding=config['encoding'])
    except LookupError:
        log.critical('Unknown encoding %s' % config['encoding'])
        return
    except IOError:
        # TODO: plugins cannot raise PluginWarnings in terminate event ..
        log.critical('Unable to write %s' % fn)
        return
    self.written[config['file']] = True

def on_process_end(self, task):
    """Write RSS file at application terminate."""
    if not rss2gen:
        return
    # don't generate rss when learning
    if task.manager.options.learn:
        return

    config = self.get_config(task)
    if config['file'] in self.written:
        log.trace('skipping already written file %s' % config['file'])
        return

    # in terminate phase there is no open session in task, so open new one
    from flexget.manager import Session
    session = Session()
    db_items = session.query(RSSEntry).filter(RSSEntry.file == config['file']).\
        order_by(RSSEntry.published.desc()).all()

    # make items
    rss_items = []
    for db_item in db_items:
        add = True
        if config['items'] != -1:
            if len(rss_items) > config['items']:
                add = False
        if config['days'] != -1:
            if datetime.datetime.today() - datetime.timedelta(days=config['days']) > db_item.published:
                add = False
        if add:
            # add into generated feed
            hasher = hashlib.sha1()
            hasher.update(db_item.title.encode('utf-8'))
            hasher.update(db_item.description.encode('utf-8'))
            hasher.update(db_item.link)
            guid = base64.urlsafe_b64encode(hasher.digest())
            gen = {'title': db_item.title,
                   'description': db_item.description,
                   'link': db_item.link,
                   'pubDate': db_item.published,
                   'guid': guid}
            log.trace('Adding %s into rss %s' % (gen['title'], config['file']))
            rss_items.append(PyRSS2Gen.RSSItem(**gen))
        else:
            # no longer needed
            session.delete(db_item)

    session.commit()
    session.close()

    # make rss
    rss = PyRSS2Gen.RSS2(title='FlexGet',
                         link=config.get('rsslink', 'http://flexget.com'),
                         description='FlexGet generated RSS feed',
                         lastBuildDate=datetime.datetime.utcnow(),
                         items=rss_items)

    # don't run with --test
    if task.manager.options.test:
        log.info('Would write rss file with %d entries.', len(rss_items))
        return

    # write rss
    fn = os.path.expanduser(config['file'])
    with open(fn, 'w') as file:
        try:
            log.verbose('Writing output rss to %s' % fn)
            rss.write_xml(file, encoding=config['encoding'])
        except LookupError:
            log.critical('Unknown encoding %s' % config['encoding'])
            return
        except IOError:
            # TODO: plugins cannot raise PluginWarnings in terminate event ..
            log.critical('Unable to write %s' % fn)
            return
    self.written[config['file']] = True

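# Standalone illustration (an assumption, not original code) of the guid scheme
# used above: sha1 over title + description + link, then url-safe base64 of the
# digest; the sample values are made up.
import base64
import hashlib

hasher = hashlib.sha1()
hasher.update('Some.Show.S01E01'.encode('utf-8'))
hasher.update('An example description'.encode('utf-8'))
hasher.update(b'http://example.com/item')
guid = base64.urlsafe_b64encode(hasher.digest())
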
def on_process_end(self, task):
    """Write RSS file at application terminate."""
    if not rss2gen:
        return
    # don't generate rss when learning
    if task.manager.options.learn:
        return

    config = self.get_config(task)
    if config["file"] in self.written:
        log.trace("skipping already written file %s" % config["file"])
        return

    # in terminate phase there is no open session in task, so open new one
    from flexget.manager import Session

    session = Session()
    db_items = (
        session.query(RSSEntry).filter(RSSEntry.file == config["file"]).order_by(RSSEntry.published.desc()).all()
    )

    # make items
    rss_items = []
    for db_item in db_items:
        add = True
        if config["items"] != -1:
            if len(rss_items) > config["items"]:
                add = False
        if config["days"] != -1:
            if datetime.datetime.today() - datetime.timedelta(days=config["days"]) > db_item.published:
                add = False
        if add:
            # add into generated feed
            gen = {
                "title": db_item.title,
                "description": db_item.description,
                "link": db_item.link,
                "pubDate": db_item.published,
            }
            log.trace("Adding %s into rss %s" % (gen["title"], config["file"]))
            rss_items.append(PyRSS2Gen.RSSItem(**gen))
        else:
            # no longer needed
            session.delete(db_item)

    session.commit()
    session.close()

    # make rss
    rss = PyRSS2Gen.RSS2(
        title="FlexGet",
        link=config.get("rsslink", "http://flexget.com"),
        description="FlexGet generated RSS feed",
        lastBuildDate=datetime.datetime.utcnow(),
        items=rss_items,
    )

    # write rss
    fn = os.path.expanduser(config["file"])
    with open(fn, "w") as file:
        try:
            log.verbose("Writing output rss to %s" % fn)
            rss.write_xml(file, encoding=config["encoding"])
        except LookupError:
            log.critical("Unknown encoding %s" % config["encoding"])
            return
        except IOError:
            # TODO: plugins cannot raise PluginWarnings in terminate event ..
            log.critical("Unable to write %s" % fn)
            return
    self.written[config["file"]] = True