def api_admin_option_set(key, val):
    """Set admin option `key` to `val`.

    :param key: option name; must be one of the module-level `KEYS`
    :param val: new value, passed through to OptionsController
    :return: the string "key set" on success
    :raises Exception: when `key` is not in `KEYS`
    """
    # NOTE: `global KEYS` removed — a global declaration is only needed for
    # assignment; plain reads resolve to the module global automatically.
    if key not in KEYS:
        raise Exception("Unknown key '%s'" % key)
    OptionsController.set(key=key, val=val)
    return "key set"
def post(self):
    """REST endpoint: set an option key/value pair.

    Parses the request args, drops unset (None) values, validates the key
    against the module-level `keys` whitelist, and stores the value.

    :return: JSON {"message": "key set"} on success, 404 for unknown keys
    """
    args = self.reqparse.parse_args()
    args = {k: v for k, v in args.items() if v is not None}
    # `args.get(...)` instead of `args[...]`: a request missing 'key'
    # entirely now gets the intended 404 rather than a KeyError/500.
    # (`global keys` removed — reads need no global declaration.)
    if args.get('key') not in keys:
        return abort(404, message='Unknown key \'%s\'' % args.get('key'))
    OptionsController.set(key=args['key'], val=args['val'])
    return flask.jsonify(**{'message': 'key set'})
def get(self, key):
    """REST endpoint: fetch the value of option `key`.

    :param key: option name to look up
    :return: JSON {key: value} when found, otherwise a 404 response
    """
    # Guard clause: unknown keys bail out with 404 immediately.
    controller = OptionsController.get(key)
    if not controller:
        return abort(404, message='Unknown key \'%s\'' % key)
    return flask.jsonify(**{key: controller.val})
def loop():
    """Single scheduler pass: collect pending tasks, run (or queue) them,
    then hand any resulting resources to the crawler.

    Side effects: commits/flushes the DB session after each nmap scan and
    records the run timestamp under the "scheduler_last_ran" option.
    """
    tasks = Worker.collect_tasks()
    scheduled_resources = []
    for task in tasks:
        if task.group.mq:
            # fire via AMQP
            # NOTE(review): both AMQP branches are stubs — tasks for
            # mq-enabled groups are currently dropped silently.
            if isinstance(task, NmapRule):
                pass
            elif isinstance(task, Resource):
                # v = AmqpConnectionController()
                pass
        else:
            # fire directly, without AMQP
            if isinstance(task, NmapRule):
                # blocking code — the scan runs synchronously in this loop
                nmap = NmapScan(twisted=False)
                now = datetime.now()
                scan_results = nmap.scan(task.rule)
                scan_time = (datetime.now() - now).total_seconds()
                task.date_scanned = datetime.now()
                # persist raw scan output and elapsed seconds on the task
                task.output["data"] = {
                    "output": scan_results,
                    "time": scan_time
                }
                db.session.commit()
                db.session.flush()
                # expand scan results into crawlable resources
                added_resources = nmap.nmap_to_resource(task, scan_results)
                for added_resource in added_resources:
                    scheduled_resources.append(added_resource)
            elif isinstance(task, Resource):
                scheduled_resources.append(task)
    if scheduled_resources:
        Crawler.spawn(scheduled_resources)
    log_msg("Scheduler loop finished", level=1, category="scheduler")
    # record the completion time so the admin UI can show "last ran X ago"
    OptionsController.set("scheduler_last_ran", {"date": datetime.now().isoformat()})
def api_admin_option_get(key):
    """Fetch an option.

    :param key: option name to look up
    :return: the stored option value
    :raises Exception: when `key` is unknown
    """
    # NOTE: `global KEYS` removed — it was declared but never used here.
    controller = OptionsController.get(key)
    if controller:
        return controller.val
    # BUGFIX: the original `return Exception(...)` handed the caller an
    # un-raised exception object; raise it instead, matching the sibling
    # setter `api_admin_option_set`.
    raise Exception("Unknown key '%s'" % key)
def api_admin_scheduler_last_ran():
    """Report scheduler health for the admin UI.

    :return: None when the scheduler has never run; otherwise a dict with
        "last_ran" (human-readable age, best-effort) and "active_crawlers"
        (crawlers with a heartbeat in the last 12 seconds)
    """
    rtn = {}
    val = OptionsController.get("scheduler_last_ran")
    if not val:
        return None
    try:
        dt = dateutil.parser.parse(val.val["date"])
        rtn["last_ran"] = TimeMagic().ago_dt(dt)
    except Exception:
        # Best-effort: a missing/malformed date simply omits "last_ran".
        # (Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are no longer swallowed.)
        pass
    after = datetime.now() - timedelta(seconds=12)
    # count() issues SELECT COUNT(*) instead of fetching every row
    # just to take len() of the result list
    rtn["active_crawlers"] = db.session.query(Crawler).filter(
        Crawler.heartbeat >= after).count()
    return rtn
def admin_metadata():
    """Admin metadata page.

    POST: import an uploaded metadata DB (errors are logged, not surfaced),
    then redirect back. GET: render movie count and meta version stats.
    """
    if request.method == 'POST':
        from findex_gui.bin.utils import log_msg
        from findex_gui.controllers.meta.controller import MetaController
        try:
            MetaController.load_new_db()
        except Exception as ex:
            log_msg(str(ex), category="meta_import", level=3)
        return redirect(request.url)

    count_row = db.session.execute(
        "SELECT COUNT(*) FROM meta_movies;").fetchone()
    movie_count = count_row[0] if count_row else count_row

    version = OptionsController.get("meta")
    version = version.val if version else version

    return themes.render("main/metadata", theme="_admin",
                         meta_movies_count=movie_count,
                         meta_version=version)
def get_active(self):
    """Return the currently active theme (proxies OptionsController)."""
    active_theme = OptionsController.theme_get_active()
    return active_theme
def load_new_db():
    """Import an uploaded `findex_meta_*.zip` metadata database.

    Handles the POST file upload: validates the upload, unzips it into
    `<cwd>/meta/`, stores the version info, replaces the `MetaMovies`
    table from `meta_movies.txt`, and relinks large video files to the
    freshly imported movie metadata via raw UPDATE statements.

    :return: True on success
    :raises Exception: via `_err` on any validation/import failure
    """
    # handle POST file upload
    def _err(msg=None):
        # Log (when a message is given) and abort the import.
        if msg:
            log_msg(str(msg), category="meta_import", level=3)
            raise Exception(msg)
        raise Exception("error")
    if 'file' not in request.files:
        _err()
    file = request.files["file"]
    if file.filename == "" or not file.filename.startswith("findex_meta_"):
        _err("bad filename")
    if not file:
        _err("bad file")
    if file.mimetype != "application/zip":
        _err("bad mimetype")
    filename = secure_filename(file.filename)
    dirpath = "%s/meta/" % cwd()
    destination = os.path.join(dirpath, filename)
    file.save(destination)
    # NOTE(review): shell pipeline built by string interpolation; filename
    # is sanitized by secure_filename but dirpath is not — prefer
    # subprocess.run([...], shell=False) / zipfile here.
    os.popen("cd %s && unzip -o %s && rm %s" % (dirpath, filename, filename)).read()
    info = {}
    try:
        f = open("%sinfo.txt" % dirpath, "r")
        info = json.loads(f.read())
        f.close()
    except Exception as ex:
        # NOTE(review): double %-formatting chain is fragile — it breaks if
        # str(ex) itself contains a '%' conversion.
        _err("could not open %s: %s" % ("%sinfo.txt", str(ex)) % dirpath)
    if "version" in info:
        OptionsController.set("meta", info)
    if os.path.isfile("%smeta_movies.txt" % dirpath):
        # wipe the existing table before bulk re-import
        db.session.query(MetaMovies).delete(synchronize_session=False)
        db.session.commit()
        db.session.flush()
        f = open("%smeta_movies.txt" % dirpath, "r")
        movies = f.readlines()
        f.close()
        # one JSON object per line
        movies = [json.loads(movie) for movie in movies]
        # cleanup
        os.popen("rm %smeta_movies.txt" % dirpath).read()
        # fill table `MetaMovies`
        objects = []
        for movie in movies:
            m = MetaMovies(title=movie["title"],
                           year=movie["year"],
                           rating=movie["rating"],
                           plot=movie["plot"],
                           director=movie["director"],
                           genres=movie["genres"],
                           actors=movie["actors"],
                           meta=movie.get("meta", {}))
            objects.append(m)
        db.session.bulk_save_objects(objects)
        db.session.commit()
        db.session.flush()
        # lookup index: "<lowercased title>:<year>" -> MetaMovies row
        meta_movies = {
            "%s:%d" % (k.title.lower(), k.year): k
            for k in ZdbQuery(MetaMovies, session=db.session).all()
        }
        # 'relink' existing files to new metadata
        # (file_format == 2 and >= 128 MiB — presumably "video files";
        # TODO confirm against the Files model)
        q = ZdbQuery(Files, session=db.session)
        q = q.filter(Files.file_format == 2)
        q = q.filter(Files.meta_info != None)
        q = q.filter(Files.file_size >= 134217728)
        updates = []
        for result in q.all():
            if "ptn" not in result.meta_info:
                continue
            ptn = result.meta_info["ptn"]
            if "year" in ptn and "title" in ptn:
                uid = "%s:%d" % (ptn["title"].lower(), ptn["year"])
                if uid in meta_movies:
                    # values are internal integer ids, not user input
                    updates.append(
                        "UPDATE files SET meta_movie_id=%d WHERE files.id=%d;"
                        % (meta_movies[uid].id, result.id))
        if updates:
            try:
                db.session.execute(text("\n".join(updates))).fetchall()
            except Exception as ex:
                # best-effort relink; failures are ignored
                pass
            db.session.commit()
            db.session.flush()
    return True