def get_intervals():
    """Collect context for the schedule-interval page.

    Returns the default interval (falling back to DEFAULT_INTERVAL when no
    ``__DEFAULT__`` row is stored), every per-task schedule row, and the set
    of tasks that have no schedule row of their own.
    """
    schedule_items = db_session.query(Schedule).filter(Schedule.task != u'__DEFAULT__').all()
    default_row = db_session.query(Schedule).filter(Schedule.task == u'__DEFAULT__').first()
    default_interval = default_row.interval if default_row else DEFAULT_INTERVAL
    unscheduled = set(get_all_tasks()) - set(get_scheduled_tasks())
    return {'default_interval': default_interval,
            'schedule_items': schedule_items,
            'tasks': unscheduled}
def update_menus():
    """Build context for the task and execution dropdown menus.

    Returns distinct non-empty task names (ascending) and the ten most
    recent execution identifiers, each paired with a human-readable
    local-time string.
    """
    import time

    # A named def instead of a lambda bound to a name (PEP 8 / E731).
    def format_time(secs):
        # Execution ids look like epoch seconds — TODO confirm at the writer.
        return time.strftime('%Y-%m-%d %H:%M', time.localtime(float(secs)))

    # .all() replaces the pointless full slice `[:]` on the query.
    menu_tasks = [row[0] for row in db_session.query(LogEntry.task)
                  .filter(LogEntry.task != u'')
                  .distinct().order_by(asc(LogEntry.task)).all()]
    menu_execs = [(row[0], format_time(row[0])) for row in db_session.query(LogEntry.execution)
                  .filter(LogEntry.execution != '')
                  .distinct().order_by(desc('execution'))[:10]]
    return {'menu_tasks': menu_tasks, 'menu_execs': menu_execs}
def get_intervals():
    """Assemble context for the interval configuration page.

    Supplies the default interval (DEFAULT_INTERVAL when there is no
    ``__DEFAULT__`` row), all per-feed schedule rows, and the feeds that
    currently lack a schedule entry.
    """
    schedule_items = db_session.query(Schedule).filter(Schedule.feed != u"__DEFAULT__").all()
    default_row = db_session.query(Schedule).filter(Schedule.feed == u"__DEFAULT__").first()
    default_interval = default_row.interval if default_row else DEFAULT_INTERVAL
    return {
        "default_interval": default_interval,
        "schedule_items": schedule_items,
        "feeds": set(get_all_feeds()) - set(get_scheduled_feeds()),
    }
def update_menus():
    """Build context for the feed and execution dropdown menus.

    Returns distinct non-empty feed names (ascending) and the ten most
    recent execution identifiers, each paired with a human-readable
    local-time string.
    """
    import time

    # A named def instead of a lambda bound to a name (PEP 8 / E731).
    def format_time(secs):
        # Execution ids look like epoch seconds — TODO confirm at the writer.
        return time.strftime("%Y-%m-%d %H:%M", time.localtime(float(secs)))

    # .all() replaces the pointless full slice `[:]` on the query.
    menu_feeds = [
        row[0]
        for row in db_session.query(LogEntry.feed)
        .filter(LogEntry.feed != "")
        .distinct()
        .order_by(asc("feed"))
        .all()
    ]
    menu_execs = [
        (row[0], format_time(row[0]))
        for row in db_session.query(LogEntry.execution)
        .filter(LogEntry.execution != "")
        .distinct()
        .order_by(desc("execution"))[:10]
    ]
    return {"menu_feeds": menu_feeds, "menu_execs": menu_execs}
def stop_empty_timers():
    """Stops timers that don't have any more tasks using them."""
    current_intervals = set(sch.interval for sch in db_session.query(Schedule).all())
    # Snapshot the keys first: stop_timer() presumably removes entries from
    # `timers`, and mutating a dict while iterating its live keys view
    # raises RuntimeError on Python 3.
    for interval in list(timers.keys()):
        if interval not in current_intervals:
            stop_timer(interval)
def forget_episode(rel_id):
    """Executes a --series-forget statement for an episode.

    On a confirmed POST, forgets the episode and redirects to the series
    index; otherwise renders the confirmation page.
    """
    release = db_session.query(Release).get(rel_id)
    context = {
        'release': release,
        'command': '--series-forget "%s" %s' % (release.episode.series.name,
                                                release.episode.identifier)
    }
    if request.method == 'POST':
        if request.form.get('really', False):
            try:
                forget_series_episode(release.episode.series.name,
                                      release.episode.identifier)
                flash('Forgot %s %s.' % (release.episode.series.name,
                                         release.episode.identifier), 'delete')
            except ValueError as e:
                # BaseException.message was removed in Python 3; e.message
                # would itself raise AttributeError here. Use str(e).
                flash(str(e), 'error')
        return redirect(url_for('index'))
    return render_template('series/forget.html', **context)
def add_schedule(task):
    """Ensure *task* has a schedule row (created at DEFAULT_INTERVAL when
    missing, with its timer started), then redirect to the blueprint index."""
    existing = db_session.query(Schedule).filter(Schedule.task == task).first()
    if existing is None:
        db_session.add(Schedule(task, DEFAULT_INTERVAL))
        db_session.commit()
        start_timer(DEFAULT_INTERVAL)
    return redirect(url_for('.index'))
def add_schedule(feed):
    """Ensure *feed* has a schedule row (created at DEFAULT_INTERVAL when
    missing, with its timer started), then redirect to the index."""
    existing = db_session.query(Schedule).filter(Schedule.feed == feed).first()
    if existing is None:
        db_session.add(Schedule(feed, DEFAULT_INTERVAL))
        db_session.commit()
        start_timer(DEFAULT_INTERVAL)
    return redirect(url_for("index"))
def add_schedule(task):
    """Create a default-interval schedule for *task* when none exists and
    start its timer, then redirect back to the index view."""
    schedule = db_session.query(Schedule).filter(Schedule.task == task).first()
    if not schedule:
        schedule = Schedule(task, DEFAULT_INTERVAL)
        db_session.add(schedule)
        db_session.commit()
        # Idempotent if a timer for this interval is already running —
        # presumably; verify against start_timer's implementation.
        start_timer(DEFAULT_INTERVAL)
    return redirect(url_for('index'))
def set_feed_interval(feed, interval):
    """Create or update the schedule row for *feed* with *interval*.

    Commits the change and stops any timers left with no feeds using them.
    """
    feed_interval = db_session.query(Schedule).filter(Schedule.feed == feed).first()
    if feed_interval:
        # Lazy %-args: message is only formatted when DEBUG is enabled.
        log.debug("Updating %s interval", feed)
        feed_interval.interval = interval
    else:
        log.debug("Creating new %s interval", feed)
        db_session.add(Schedule(feed, interval))
    db_session.commit()
    stop_empty_timers()
def index():
    """Show the ten newest releases on the series index page.

    For a not-yet-downloaded release whose episode has other releases,
    attach the previously downloaded sibling as ``.previous`` so the
    template can display what it would replace.
    """
    releases = db_session.query(Release).order_by(desc(Release.id)).limit(10).all()
    for release in releases:
        # NOTE(review): `== False` may deliberately exclude None — confirm
        # before simplifying to `not release.downloaded`.
        if release.downloaded == False and len(release.episode.releases) > 1:
            for prev_rel in release.episode.releases:
                if prev_rel.downloaded:
                    release.previous = prev_rel
    return render_template('series/series.html', **{'releases': releases})
def set_task_interval(task, interval):
    """Create or update the schedule row for *task* with *interval*.

    Commits the change and stops any timers left with no tasks using them.
    """
    task_interval = db_session.query(Schedule).filter(Schedule.task == task).first()
    if task_interval:
        # Lazy %-args: message is only formatted when DEBUG is enabled.
        log.debug('Updating %s interval', task)
        task_interval.interval = interval
    else:
        log.debug('Creating new %s interval', task)
        db_session.add(Schedule(task, interval))
    db_session.commit()
    stop_empty_timers()
def index():
    """Render the outputs index page.

    Also annotates the ten newest releases: an undownloaded release whose
    episode has other releases gets its downloaded sibling as ``.previous``.
    """
    recent = db_session.query(Release).order_by(desc(Release.id)).limit(10).all()
    for release in recent:
        # NOTE(review): `== False` may deliberately exclude None — confirm
        # before simplifying to `not release.downloaded`.
        if release.downloaded == False and len(release.episode.releases) > 1:
            for prev_rel in release.episode.releases:
                if prev_rel.downloaded:
                    release.previous = prev_rel
    return render_template('outputs/index.html', outputs=[{'name': 'transmission'}])
def index():
    """Render the outputs index with the single hard-coded transmission output.

    As a side effect, marks each undownloaded recent release with its
    already-downloaded sibling release (``.previous``).
    """
    releases = db_session.query(Release).order_by(desc(Release.id)).limit(10).all()
    for release in releases:
        has_siblings = len(release.episode.releases) > 1
        # NOTE(review): `== False` may deliberately exclude None — confirm
        # before simplifying to `not release.downloaded`.
        if release.downloaded == False and has_siblings:
            for prev_rel in release.episode.releases:
                if prev_rel.downloaded:
                    release.previous = prev_rel
    context = {'outputs': [{'name': 'transmission'}]}
    return render_template('outputs/index.html', **context)
def enable_authentication():
    """Install HTTP authentication unless disabled via the no_auth option.

    Loads stored credentials (creating 'flexget'/'flexget' defaults when
    none exist), applies command-line username/password overrides, commits,
    and registers the per-request authentication check.
    """
    if manager.options.no_auth:
        return
    global credentials
    credentials = db_session.query(AuthCredentials).first()
    if credentials is None:
        credentials = AuthCredentials('flexget', 'flexget')
        db_session.add(credentials)
    if manager.options.username:
        credentials.username = manager.options.username
    if manager.options.password:
        credentials.password = manager.options.password
    db_session.commit()
    app.before_request(check_authenticated)
def enable_authentication():
    """Install HTTP authentication unless disabled via webui.no_auth.

    Loads stored credentials (creating 'flexget'/'flexget' defaults when
    none exist), applies webui option overrides, commits, and registers
    the per-request authentication check.
    """
    if manager.options.webui.no_auth:
        return
    global credentials
    credentials = db_session.query(AuthCredentials).first()
    if not credentials:
        credentials = AuthCredentials('flexget', 'flexget')
        db_session.add(credentials)
    if manager.options.webui.username:
        # BUG FIX: the guard checks the webui sub-namespace but the old code
        # assigned manager.options.username (top-level). Read the same
        # option the guard tests, as the password branch below does.
        credentials.username = manager.options.webui.username
    if manager.options.webui.password:
        credentials.password = manager.options.webui.password
    db_session.commit()
    app.before_request(check_authenticated)
def forget_episode(rel_id):
    """Executes a --series-forget statement for an episode.

    Redirects back to the series index on POST; renders the confirmation
    page on GET.
    """
    release = db_session.query(Release).get(rel_id)
    context = {'release': release,
               'command': '--series-forget "%s" %s' % (
                   release.episode.series.name, release.episode.identifier)}
    if request.method == 'POST':
        if request.form.get('really', False):
            try:
                forget_series_episode(release.episode.series.name,
                                      release.episode.identifier)
                flash('Forgot %s %s.' % (
                    release.episode.series.name, release.episode.identifier), 'delete')
            # FIX: `except ValueError, e` is Python-2-only syntax (a syntax
            # error on Python 3); `as` works on both.
            except ValueError as e:
                # BaseException.message no longer exists on Python 3.
                flash(str(e), 'error')
        return redirect(url_for('index'))
    # FIX: GET previously fell off the end returning None (a 500 in Flask);
    # render the confirmation template like the sibling implementation.
    return render_template('series/forget.html', **context)
def execute(interval):
    """Adds a run to the executor"""
    # Tasks explicitly scheduled at this interval, minus the default marker.
    rows = db_session.query(Schedule).filter(Schedule.interval == interval).all()
    tasks = set(sch.task for sch in rows)
    tasks.discard(u'__DEFAULT__')
    # Tasks without a schedule of their own run on the default interval.
    tasks |= set(get_all_tasks()) - set(get_scheduled_tasks())
    if not tasks:
        # No tasks scheduled to run at this interval, stop the timer
        stop_timer(interval)
        return
    log.info('Executing tasks: %s' % ", ".join(tasks))
    fire_event('scheduler.execute')
    executor.execute(tasks=tasks)
def execute(interval):
    """Adds a run to the executor"""
    # Collect feeds explicitly bound to this interval.
    schedules = db_session.query(Schedule).filter(Schedule.interval == interval).all()
    feeds = set([sch.feed for sch in schedules])
    if u"__DEFAULT__" in feeds:
        feeds.remove(u"__DEFAULT__")
    # Feeds with no schedule row of their own ride along as well.
    unscheduled = set(get_all_feeds()) - set(get_scheduled_feeds())
    feeds.update(unscheduled)
    if not feeds:
        # Nothing left on this interval — shut its timer down.
        stop_timer(interval)
        return
    log.info("Executing feeds: %s" % ", ".join(feeds))
    fire_event("scheduler.execute")
    executor.execute(feeds=feeds)
def get_logdata():
    """Serve jqGrid-style paged JSON for the log viewer.

    Query args: log_type ('webui'/'core'), task, exec, page, rows (page
    size), sidx (sort column) and sord ('asc'/'desc').
    """
    log_type = request.args.get('log_type')
    task = request.args.get('task')
    execution = request.args.get('exec')
    page = int(request.args.get('page'))
    limit = int(request.args.get('rows', 0))
    sidx = request.args.get('sidx')
    sord = request.args.get('sord')
    sord = desc if sord == 'desc' else asc
    # Generate the filtered query
    query = db_session.query(LogEntry)
    if log_type == 'webui':
        query = query.filter(
            or_(LogEntry.logger.in_(['webui', 'werkzeug', 'event']),
                LogEntry.logger.like('%ui.%')))
    elif log_type == 'core':
        query = query.filter(
            and_(~LogEntry.logger.in_(['webui', 'werkzeug', 'event']),
                 ~LogEntry.logger.like('%ui.%')))
    if task:
        query = query.filter(LogEntry.task == task)
    if execution:
        query = query.filter(LogEntry.execution == execution)
    count = query.count()
    # FIX: ceiling division with // so total_pages stays an int on Python 3
    # (true division made `start` a float, breaking the slice below), and
    # guard limit == 0 ('rows' defaults to 0) against ZeroDivisionError.
    total_pages = -(-count // limit) if limit else 0
    if page > total_pages:
        page = total_pages
    if page < 1:
        # FIX: keep the slice start non-negative when there are no results.
        page = 1
    start = limit * (page - 1)

    json = {'total': total_pages, 'page': page, 'records': count, 'rows': []}
    result = query.order_by(sord(sidx))[start:start + limit]
    for entry in result:
        json['rows'].append({
            'id': entry.id,
            'created': entry.created.strftime('%Y-%m-%d %H:%M'),
            'levelno': logging.getLevelName(entry.levelno),
            'logger': entry.logger,
            'task': entry.task,
            'message': entry.message
        })
    return jsonify(json)
def get_logdata():
    """Serve jqGrid-style paged JSON for the log viewer.

    Query args: log_type ('webui'/'core'), feed, exec, page, rows (page
    size), sidx (sort column) and sord ('asc'/'desc').
    """
    log_type = request.args.get("log_type")
    feed = request.args.get("feed")
    execution = request.args.get("exec")
    page = int(request.args.get("page"))
    limit = int(request.args.get("rows", 0))
    sidx = request.args.get("sidx")
    sord = request.args.get("sord")
    sord = desc if sord == "desc" else asc
    # Generate the filtered query
    query = db_session.query(LogEntry)
    if log_type == "webui":
        query = query.filter(or_(LogEntry.logger.in_(["webui", "werkzeug", "event"]), LogEntry.logger.like("ui%")))
    elif log_type == "core":
        query = query.filter(and_(~LogEntry.logger.in_(["webui", "werkzeug", "event"]), ~LogEntry.logger.like("ui%")))
    if feed:
        query = query.filter(LogEntry.feed == feed)
    if execution:
        query = query.filter(LogEntry.execution == execution)
    count = query.count()
    # FIX: ceiling division with // so total_pages stays an int on Python 3
    # (true division made `start` a float, breaking the slice below), and
    # guard limit == 0 ('rows' defaults to 0) against ZeroDivisionError.
    total_pages = -(-count // limit) if limit else 0
    if page > total_pages:
        page = total_pages
    if page < 1:
        # FIX: keep the slice start non-negative when there are no results.
        page = 1
    start = limit * (page - 1)

    json = {"total": total_pages, "page": page, "records": count, "rows": []}
    result = query.order_by(sord(sidx))[start : start + limit]
    for entry in result:
        json["rows"].append(
            {
                "id": entry.id,
                "created": entry.created.strftime("%Y-%m-%d %H:%M"),
                "levelno": logging.getLevelName(entry.levelno),
                "logger": entry.logger,
                "feed": entry.feed,
                "message": entry.message,
            }
        )
    return jsonify(json)
def get_logdata():
    """Serve jqGrid-style paged JSON for the log viewer.

    Query args: log_type ('webui'/'core'), task, exec, page, rows (page
    size), sidx (sort column) and sord ('asc'/'desc').
    """
    log_type = request.args.get('log_type')
    task = request.args.get('task')
    execution = request.args.get('exec')
    page = int(request.args.get('page'))
    limit = int(request.args.get('rows', 0))
    sidx = request.args.get('sidx')
    sord = request.args.get('sord')
    sord = desc if sord == 'desc' else asc
    # Generate the filtered query
    query = db_session.query(LogEntry)
    if log_type == 'webui':
        query = query.filter(or_(LogEntry.logger.in_(['webui', 'werkzeug', 'event']),
                                 LogEntry.logger.like('%ui.%')))
    elif log_type == 'core':
        query = query.filter(and_(~LogEntry.logger.in_(['webui', 'werkzeug', 'event']),
                                  ~LogEntry.logger.like('%ui.%')))
    if task:
        query = query.filter(LogEntry.task == task)
    if execution:
        query = query.filter(LogEntry.execution == execution)
    count = query.count()
    # FIX: ceiling division with // so total_pages stays an int on Python 3
    # (true division made `start` a float, breaking the slice below), and
    # guard limit == 0 ('rows' defaults to 0) against ZeroDivisionError.
    total_pages = -(-count // limit) if limit else 0
    if page > total_pages:
        page = total_pages
    if page < 1:
        # FIX: keep the slice start non-negative when there are no results.
        page = 1
    start = limit * (page - 1)

    json = {'total': total_pages, 'page': page, 'records': count, 'rows': []}
    result = query.order_by(sord(sidx))[start:start + limit]
    for entry in result:
        json['rows'].append({'id': entry.id,
                             'created': entry.created.strftime('%Y-%m-%d %H:%M'),
                             'levelno': logging.getLevelName(entry.levelno),
                             'logger': entry.logger,
                             'task': entry.task,
                             'message': entry.message})
    return jsonify(json)
def forget_episode(rel_id):
    """Executes a --series-forget statement for an episode.

    On a confirmed POST, forgets the episode and redirects to the series
    index; otherwise renders the confirmation page.
    """
    release = db_session.query(Release).get(rel_id)
    context = {
        "release": release,
        "command": '--series-forget "%s" %s' % (release.episode.series.name, release.episode.identifier),
    }
    if request.method == "POST":
        if request.form.get("really", False):
            try:
                forget_series_episode(release.episode.series.name, release.episode.identifier)
                flash("Forgot %s %s." % (release.episode.series.name, release.episode.identifier), "delete")
            except ValueError as e:
                # BaseException.message was removed in Python 3; e.message
                # would itself raise AttributeError here. Use str(e).
                flash(str(e), "error")
        return redirect(url_for("index"))
    return render_template("series/forget.html", **context)
def mark_not_downloaded(rel_id):
    """Clear a release's downloaded flag, then bounce back to /series."""
    release = db_session.query(Release).get(rel_id)
    release.downloaded = False
    db_session.commit()
    return redirect('/series')
def episodes(name):
    """List the episodes of series *name*, newest identifier first."""
    episodes = (db_session.query(Episode)
                .join(Episode.series)
                .filter(Series.name == name)
                .order_by(desc(Episode.identifier))
                .all())
    return render_template('outputs/index.html', episodes=episodes, name=name)
def series_list():
    """Add series list to all pages under series"""
    all_series = db_session.query(Series).order_by(asc(Series.name)).all()
    return {'report': all_series}
def get_scheduled_tasks():
    """Return the task name of every stored schedule row."""
    return [row.task for row in db_session.query(Schedule).all()]
def get_scheduled_feeds():
    """Return the feed name of every stored schedule row."""
    rows = db_session.query(Schedule).all()
    return [schedule.feed for schedule in rows]
def delete_schedule(task):
    """Remove *task*'s schedule row, reap now-idle timers, and redirect
    to the blueprint index."""
    db_session.query(Schedule).filter(Schedule.task == task).delete()
    db_session.commit()
    stop_empty_timers()
    return redirect(url_for('.index'))
def get_task_interval(task):
    """Return the stored interval for *task*, or None when it has no schedule."""
    row = db_session.query(Schedule).filter(Schedule.task == task).first()
    return row.interval if row else None
def episodes(name):
    """Render the series page for *name* with its episodes, newest first."""
    found = (db_session.query(Episode).join(Series)
             .filter(Series.name == name)
             .order_by(desc(Episode.identifier))
             .all())
    context = {'episodes': found, 'name': name}
    return render_template('series/series.html', **context)
def get_feed_interval(feed):
    """Return the stored interval for *feed*, or None when it has no schedule."""
    row = db_session.query(Schedule).filter(Schedule.feed == feed).first()
    return row.interval if row else None
def on_webui_start():
    """Start a repeating timer for every distinct configured interval."""
    # Set comprehension instead of set([list comp]) (flake8-comprehensions
    # C403): same result without building a throwaway list.
    for interval in {item.interval for item in db_session.query(Schedule).all()}:
        start_timer(interval)
def stop_empty_timers():
    """Stops timers that don't have any more feeds using them."""
    current_intervals = set([i.interval for i in db_session.query(Schedule).all()])
    # Snapshot the keys first: stop_timer() presumably removes entries from
    # `timers`, and mutating a dict while iterating its live keys view
    # raises RuntimeError on Python 3.
    for interval in list(timers.keys()):
        if interval not in current_intervals:
            stop_timer(interval)
def index():
    """Show the 50 most recent history items, newest first."""
    items = db_session.query(History).order_by(desc(History.time)).limit(50).all()
    return render_template('history/history.html', items=items)
def index():
    """Render the history page with the latest 50 entries."""
    recent = (db_session.query(History)
              .order_by(desc(History.time))
              .limit(50)
              .all())
    return render_template('history/history.html', **{'items': recent})
def series_list():
    """Add series list to all pages under series"""
    by_name = asc(Series.name)
    return {'report': db_session.query(Series).order_by(by_name).all()}
def delete_schedule(task):
    """Drop the schedule row for *task*, stop timers no longer needed,
    and return to the index view."""
    db_session.query(Schedule).filter(Schedule.task == task).delete()
    db_session.commit()
    stop_empty_timers()
    return redirect(url_for('index'))
def count():
    """Return the number of archive entries, as a string."""
    log.debug('getting count for archive')
    total = db_session.query(ArchiveEntry).count()
    return str(total)
def delete_schedule(feed):
    """Drop the schedule row for *feed*, stop timers no longer needed,
    and return to the index view."""
    db_session.query(Schedule).filter(Schedule.feed == feed).delete()
    db_session.commit()
    stop_empty_timers()
    return redirect(url_for("index"))