# DataSet, JobQueue, db, log and the error() helper are provided elsewhere
# in the application (module-level imports in the original source).
def restart_dataset(key):
    """
    Run a dataset's query again

    Deletes all underlying datasets, marks the dataset as unfinished, and
    queues a job for it.

    :param str key:  Dataset key
    :return:
    """
    try:
        dataset = DataSet(key=key, db=db)
    except TypeError:
        return error(404, message="Dataset not found.")

    # only the dataset's owner or an admin may restart it
    if current_user.get_id() != dataset.parameters.get("user", "") and not current_user.is_admin:
        return error(403, message="Not allowed.")

    if not dataset.is_finished():
        return render_template("error.html", message="This dataset is not finished yet - you cannot re-run it.")

    if "type" not in dataset.parameters:
        return render_template("error.html",
                               message="This is an older dataset that unfortunately lacks the information necessary to properly restart it.")

    # delete existing child datasets; they will be regenerated when the
    # query is run again
    for child in dataset.children:
        child.delete()

    # mark the dataset as unfinished and queue a new job of its original type
    dataset.unfinish()
    queue = JobQueue(logger=log, database=db)
    queue.add_job(jobtype=dataset.parameters["type"], remote_id=dataset.key)

    flash("Dataset queued for re-running.")
    return redirect("/results/" + dataset.key + "/")
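This view presumably runs behind Flask's routing and login machinery. A minimal sketch of how it might be wired up, assuming a standard Flask app object and flask_login; the URL rule and wrapper name are assumptions for illustration, not taken from the source:

    from flask import Flask
    from flask_login import login_required

    app = Flask(__name__)

    # Hypothetical registration: the route simply delegates to the
    # restart_dataset() view defined above.
    @app.route("/results/<string:key>/restart/")
    @login_required
    def restart_dataset_view(key):
        return restart_dataset(key)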
import argparse
import sys
import time

from pathlib import Path

# Logger, Database and JobQueue come from the application's own backend;
# their exact module paths are not shown in the original source.

cli = argparse.ArgumentParser()
# The -i/--input and -d/--datasource arguments are inferred from their use
# below; the original snippet starts at the --board argument.
cli.add_argument("-i", "--input", type=str, required=True, help="Folder to read JSON files from")
cli.add_argument("-d", "--datasource", type=str, required=True, help="Datasource ID")
cli.add_argument("-b", "--board", type=str, required=True, help="Board name")
args = cli.parse_args()

if not Path(args.input).exists() or not Path(args.input).is_dir():
    print("%s is not a valid folder name." % args.input)
    sys.exit(1)

# renamed from "input" to avoid shadowing the built-in
input_folder = Path(args.input).resolve()
jsons = input_folder.glob("*.json")

print("Initialising queue...")
logger = Logger()
queue = JobQueue(logger=logger, database=Database(logger=logger, appname="queue-folder"))

print("Adding files to queue...")
files = 0
deadline = time.time()
for file in jsons:
    files += 1
    file = str(file)
    # stagger claim_after so workers pick up one file every 0.1 seconds
    queue.add_job(args.datasource + "-thread", remote_id=file, details={
        "board": args.board,
        "file": file
    }, claim_after=int(deadline))
    deadline += 0.1

print("Queued %i files." % files)
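A hypothetical invocation, assuming the script is saved as queue_folder.py and that the -i/--input and -d/--datasource flags reconstructed above match the original script:

    # queue every JSON file in /data/json-dumps as a "4chan-thread" job
    python queue_folder.py -i /data/json-dumps -d 4chan -b pol

Because claim_after is staggered by 0.1 seconds per file, workers pick up the queued jobs at a rate of roughly ten per second rather than all at once.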