def api(*func_args, **func_kwgs):
    """Validate the request's query-string args against `schema`, then call the wrapped view.

    On a SchemaError, answer with an ApiErrorBuild payload (code 511) instead.
    """
    raw_args = request.args.to_dict()
    try:
        validated = schema.validate(raw_args)
        return jsonify(func(**validated))
    except SchemaError as exc:
        # Report "<ExceptionClass>:<message>" to the client.
        error_text = '%s:%s' % (exc.__class__.__name__, exc)
        return jsonify(ApiErrorBuild(error_text, 511))
def api(*func_args, **func_kwgs):
    """Wrapper view: validate query args with `schema`, then delegate to `func`.

    A SchemaError turns into an ApiErrorBuild response with code 511.
    """
    query_params = request.args.to_dict()
    try:
        checked = schema.validate(query_params)
        return jsonify(func(**checked))
    except SchemaError as err:
        return jsonify(
            ApiErrorBuild('%s:%s' % (err.__class__.__name__, err), 511))
def api_job_result(job_id):
    """Return the stored result of a single job, or {} when the job is unknown."""
    jobs = connections.mongodb_jobs.mrq_jobs
    doc = jobs.find_one({"_id": ObjectId(job_id)}, projection=["result"])
    if doc:
        return jsonify({"result": doc.get("result")})
    return jsonify({})
def api_job_traceback(job_id):
    """Return a job's traceback fields, or a placeholder when the job is missing."""
    jobs = connections.mongodb_jobs.mrq_jobs
    doc = jobs.find_one({"_id": ObjectId(job_id)},
                        projection=["traceback_history", "traceback"])
    if doc:
        return jsonify(doc)
    return jsonify({"traceback": "No exception raised"})
def api_job_action():
    """Queue a JobAction task built from the POSTed form parameters.

    Returns the queued job's id as {"job_id": ...}.
    """
    # Fix: dict.iteritems() does not exist on Python 3; .items() works everywhere
    # and matches the other handlers in this file.
    params = {k: v for k, v in request.form.items()}
    status = params.get("status")
    # A dash-separated status value means a list of statuses.
    if status and "-" in status:
        params["status"] = status.split("-")
    return jsonify({
        "job_id": queue_job("mrq.basetasks.utils.JobAction",
                            params,
                            queue=get_current_config()["dashboard_queue"])
    })
def api_taskpaths():
    """Aggregate job counts per task path, sorted per the datatables request."""
    pipeline = [
        # Pre-sort works around https://jira.mongodb.org/browse/SERVER-11447
        {"$sort": {"path": 1}},
        {"$group": {"_id": "$path", "jobs": {"$sum": 1}}},
    ]
    stats = list(connections.mongodb_jobs.mrq_jobs.aggregate(pipeline))
    sSortField, sSortDirection = _get_sort_args(request, 'jobs', 'desc')
    stats.sort(key=lambda row: row.get(sSortField, ''),
               reverse=(sSortDirection == 'desc'))
    data = {
        "aaData": stats,
        "iTotalDisplayRecords": len(stats),
        "sEcho": request.args["sEcho"],
    }
    return jsonify(data)
def api_task_exceptions():
    """Count failed/maxretries jobs grouped by (path, exception type, status)."""
    pipeline = [
        {"$match": {"status": {"$in": ["failed", "maxretries"]}}},
        {"$group": {
            "_id": {
                "path": "$path",
                "exceptiontype": "$exceptiontype",
                "status": "$status"
            },
            "jobs": {"$sum": 1}
        }},
    ]
    stats = sorted(connections.mongodb_jobs.mrq_jobs.aggregate(pipeline),
                   key=lambda row: -row["jobs"])
    start = int(request.args.get("iDisplayStart", 0))
    stop = start + int(request.args.get("iDisplayLength", 20))
    data = {
        "aaData": stats[start:stop],
        "iTotalDisplayRecords": len(stats),
        "sEcho": request.args["sEcho"],
    }
    return jsonify(data)
def api_task_exceptions():
    """Count failed jobs grouped by (path, exception type), paginated & sorted."""
    pipeline = [
        {"$match": {"status": "failed"}},
        {"$group": {
            "_id": {"path": "$path", "exceptiontype": "$exceptiontype"},
            "jobs": {"$sum": 1}
        }},
    ]
    stats = list(connections.mongodb_jobs.mrq_jobs.aggregate(pipeline))
    sSortField, sSortDirection = _get_sort_args(request, 'jobs', 'desc')
    stats.sort(key=lambda row: row.get(sSortField, ''),
               reverse=(sSortDirection == 'desc'))
    start = int(request.args.get("iDisplayStart", 0))
    stop = start + int(request.args.get("iDisplayLength", 20))
    data = {
        "aaData": stats[start:stop],
        "iTotalDisplayRecords": len(stats),
        "sEcho": request.args["sEcho"],
    }
    return jsonify(data)
def api_jobstatuses():
    """Count jobs per status, sorted by status name."""
    pipeline = [
        # Pre-sort works around https://jira.mongodb.org/browse/SERVER-11447
        {"$sort": {"status": 1}},
        {"$group": {"_id": "$status", "jobs": {"$sum": 1}}},
    ]
    stats = sorted(connections.mongodb_jobs.mrq_jobs.aggregate(pipeline),
                   key=lambda row: row["_id"])
    data = {
        "aaData": stats,
        "iTotalDisplayRecords": len(stats),
        "sEcho": request.args["sEcho"],
    }
    return jsonify(data)
def api_job_action():
    """Queue a JobAction task with the POSTed form fields as parameters."""
    params = dict(iteritems(request.form))
    status = params.get("status")
    # A dash-separated status value means a list of statuses.
    if status and "-" in status:
        params["status"] = status.split("-")
    job_id = queue_job("mrq.basetasks.utils.JobAction",
                       params,
                       queue=get_current_config()["dashboard_queue"])
    return jsonify({"job_id": job_id})
def api_job_traceback(job_id):
    """Return a job's traceback, or a placeholder when unknown / never failed."""
    doc = connections.mongodb_jobs.mrq_jobs.find_one(
        {"_id": ObjectId(job_id)}, projection=["traceback"]) or {}
    return jsonify({"traceback": doc.get("traceback", "No exception raised")})
def api_queue_job(task):
    """Queue `task` with parameters taken from the query string.

    The task's parameters depend on each other, so ApiSchemaWrapper cannot
    be used here; fixTaskParams validates/normalizes them instead and
    returns (params, error). On error, the error payload is returned as-is.
    """
    queue = request.args.get('queue', '').strip()
    args = request.args.to_dict()
    params, err = fixTaskParams(MRQ_TASK_DICT, task, args)
    # Fix: the original built a throwaway ApiErrorBuild() that was
    # unconditionally overwritten on the next line (dead store).
    if err is not None:
        return jsonify(err)
    return jsonify({"job_id": queue_job(task, params, queue=queue or None)})
def get_workergroups():
    """List all worker groups keyed by their stringified _id."""
    rows = connections.mongodb_jobs.mrq_workergroups.find(sort=[("_id", 1)])
    groups = {}
    for row in rows:
        groups[str(row.pop("_id"))] = row
    return jsonify({"workergroups": groups})
def api_job_traceback(job_id):
    """Return the job's stored traceback, defaulting to a friendly message."""
    jobs = connections.mongodb_jobs.mrq_jobs
    job_data = jobs.find_one({"_id": ObjectId(job_id)},
                             projection=["traceback"])
    if not job_data:
        job_data = {}
    traceback_text = job_data.get("traceback", "No exception raised")
    return jsonify({"traceback": traceback_text})
def api_taskpaths():
    """Aggregate job counts per task path, most populous paths first."""
    pipeline = [
        # Pre-sort works around https://jira.mongodb.org/browse/SERVER-11447
        {"$sort": {"path": 1}},
        {"$group": {"_id": "$path", "jobs": {"$sum": 1}}},
    ]
    stats = sorted(connections.mongodb_jobs.mrq_jobs.aggregate(pipeline),
                   key=lambda row: -row["jobs"])
    data = {
        "aaData": stats,
        "iTotalDisplayRecords": len(stats),
        "sEcho": request.args["sEcho"],
    }
    return jsonify(data)
def post_workergroups():
    """Replace the stored worker groups with the POSTed JSON definition.

    Groups absent from the payload are deleted; the rest are upserted.
    """
    workergroups = json.loads(request.form["workergroups"])
    collection = connections.mongodb_jobs.mrq_workergroups
    current = [
        str(row.pop("_id"))
        for row in collection.find(sort=[("_id", 1)], projection=["_id"])
    ]
    # Delete groups that are not present any more.
    for group_id in current:
        if group_id not in workergroups:
            # Fix: collection.remove() is deprecated (removed in pymongo 4);
            # _id is unique so delete_one is equivalent.
            collection.delete_one({"_id": group_id})
    # Upsert groups. Fix: dict.iteritems() is Python 2 only; the file
    # already uses the pymongo 3 update_one() API, so target .items().
    for group_id, fields in workergroups.items():
        collection.update_one({"_id": group_id}, {"$set": fields}, upsert=True)
    return jsonify({"status": "ok"})
def api_task_exceptions():
    """Count failed jobs grouped by (path, exception type), paginated for datatables."""
    # Fix: pymongo >= 3 returns a CommandCursor from aggregate(); the old
    # dict-with-"result" form (pymongo 2.x) raises TypeError there. The
    # other api_task_exceptions variants in this file already iterate the
    # cursor directly.
    stats = list(connections.mongodb_jobs.mrq_jobs.aggregate([
        {"$match": {"status": "failed"}},
        {"$group": {"_id": {"path": "$path", "exceptiontype": "$exceptiontype"},
                    "jobs": {"$sum": 1}}},
    ]))
    stats.sort(key=lambda x: -x["jobs"])
    start = int(request.args.get("iDisplayStart", 0))
    end = int(request.args.get("iDisplayLength", 20)) + start
    data = {
        "aaData": stats[start:end],
        "iTotalDisplayRecords": len(stats)
    }
    data["sEcho"] = request.args["sEcho"]
    return jsonify(data)
def api_job_traceback(job_id):
    """Return a job's traceback history (or its plain traceback as fallback).

    Which field is sent depends on the "save_traceback_history" config flag.
    """
    # Fix: typo "mrq_jobss" — every other handler in this file reads the
    # mrq_jobs collection; the extra "s" made every lookup come back empty.
    collection = connections.mongodb_jobs.mrq_jobs
    if get_current_config().get("save_traceback_history"):
        field_sent = "traceback_history"
    else:
        field_sent = "traceback"
    job_data = collection.find_one({"_id": ObjectId(job_id)},
                                   projection=[field_sent])
    if not job_data:
        # If a job has no traceback history, we fall back onto traceback.
        if field_sent == "traceback_history":
            field_sent = "traceback"
            job_data = collection.find_one({"_id": ObjectId(job_id)},
                                           projection=[field_sent])
    if not job_data:
        job_data = {}
    return jsonify(
        {field_sent: job_data.get(field_sent, "No exception raised")})
def api_logs():
    """Return concatenated log lines for a job or a worker.

    When the client passes last_log_id, only logs newer than that id are
    returned, enabling incremental tailing.
    """
    collection = connections.mongodb_logs.mrq_logs
    if request.args.get("job"):
        query = {"job": ObjectId(request.args.get("job"))}
    elif request.args.get("worker"):
        query = {"worker": ObjectId(request.args.get("worker"))}
    else:
        raise Exception("No ID")
    if request.args.get("last_log_id"):
        # Fix: the guard checks "last_log_id" but the value read was
        # "min_log_id" (always absent), so ObjectId(None) minted a fresh,
        # current-time id and the $gt filter dropped every existing log.
        query["_id"] = {"$gt": ObjectId(request.args.get("last_log_id"))}
    logs = list(collection.find(query, projection={"_id": 1, "logs": 1}))
    data = {
        "logs": "\n".join([lines["logs"] for lines in logs]),
        # Don't be surprised, this will send unexisting ObjectIds when we're up
        # to date!
        "last_log_id": logs[-1]["_id"] if len(logs) else ObjectId(),
    }
    return jsonify(data)
def api_job_traceback(job_id):
    """Return a job's traceback history, falling back to its plain traceback.

    The field sent is controlled by the "save_traceback_history" config flag.
    """
    # Fix: typo "mrq_jobss" — the jobs collection is mrq_jobs everywhere
    # else in this file; the misspelling made every lookup return nothing.
    collection = connections.mongodb_jobs.mrq_jobs
    if get_current_config().get("save_traceback_history"):
        field_sent = "traceback_history"
    else:
        field_sent = "traceback"
    job_data = collection.find_one(
        {"_id": ObjectId(job_id)}, projection=[field_sent])
    if not job_data:
        # If a job has no traceback history, we fall back onto traceback.
        if field_sent == "traceback_history":
            field_sent = "traceback"
            job_data = collection.find_one(
                {"_id": ObjectId(job_id)}, projection=[field_sent])
    if not job_data:
        job_data = {}
    return jsonify({
        field_sent: job_data.get(field_sent, "No exception raised")
    })
def api_logs():
    """Return concatenated log lines for a job or a worker, newest-first tailable."""
    collection = connections.mongodb_logs.mrq_logs
    if request.args.get("job"):
        query = {"job": ObjectId(request.args.get("job"))}
    elif request.args.get("worker"):
        query = {"worker": ObjectId(request.args.get("worker"))}
    else:
        raise Exception("No ID")
    if request.args.get("last_log_id"):
        # Fix: the condition checks "last_log_id" but the value read was
        # "min_log_id" (never sent), so ObjectId(None) generated a fresh id
        # and the $gt filter silently excluded all existing logs.
        query["_id"] = {"$gt": ObjectId(request.args.get("last_log_id"))}
    logs = list(collection.find(query, projection={"_id": 1, "logs": 1}))
    data = {
        "logs": "\n".join([lines["logs"] for lines in logs]),
        # Don't be surprised, this will send unexisting ObjectIds when we're up
        # to date!
        "last_log_id": logs[-1]["_id"] if len(logs) else ObjectId()
    }
    return jsonify(data)
def api_datatables(unit):
    """Serve datatables-formatted listings for queues, workers, scheduled jobs or jobs.

    `unit` selects the dataset; pagination (iDisplayStart/iDisplayLength),
    sorting and optional time filters come from the query string.
    """
    collection = None
    sort = None
    skip = int(request.args.get("iDisplayStart", 0))
    limit = int(request.args.get("iDisplayLength", 20))
    with_mongodb_size = bool(request.args.get("with_mongodb_size"))
    if unit == "queues":
        queues = []
        for name in Queue.all_known():
            queue = Queue(name)
            jobs = None
            if with_mongodb_size:
                # MongoDB-side job count for this queue (can be expensive).
                jobs = connections.mongodb_jobs.mrq_jobs.count({
                    "queue": name,
                    "status": request.args.get("status") or "queued"
                })
            q = {
                "name": name,
                "jobs": jobs,  # MongoDB size
                "size": queue.size(),  # Redis size
                "is_sorted": queue.is_sorted,
                "is_timed": queue.is_timed,
                "is_raw": queue.is_raw,
                "is_set": queue.is_set
            }
            if queue.is_sorted:
                raw_config = cfg.get("raw_queues", {}).get(name, {})
                # Default graph window: +/- 7 days for timed queues, 0-100 otherwise.
                q["graph_config"] = raw_config.get(
                    "dashboard_graph", lambda: {
                        "start": time.time() - (7 * 24 * 3600),
                        "stop": time.time() + (7 * 24 * 3600),
                        "slices": 30
                    } if queue.is_timed else {
                        "start": 0,
                        "stop": 100,
                        "slices": 30
                    })()
                if q["graph_config"]:
                    q["graph"] = queue.get_sorted_graph(**q["graph_config"])
            if queue.is_timed:
                q["jobs_to_dequeue"] = queue.count_jobs_to_dequeue()
            queues.append(q)
        sSortField, sSortDirection = _get_sort_args(request, 'size', 'desc')
        queues.sort(key=lambda x: x.get(sSortField, 0),
                    reverse=sSortDirection == 'desc')
        data = {"aaData": queues, "iTotalDisplayRecords": len(queues)}
    elif unit == "workers":
        fields = None
        query = {"status": {"$nin": ["stop"]}}
        collection = connections.mongodb_jobs.mrq_workers
        sSortField, sSortDirection = _get_sort_args(request, 'datestarted', 'desc')
        sort = [(sSortField, -1 if sSortDirection == 'desc' else 1)]
        if request.args.get("showstopped"):
            query = {}
    elif unit == "scheduled_jobs":
        collection = connections.mongodb_jobs.mrq_scheduled_jobs
        fields = None
        query = {}
        sSortField, sSortDirection = _get_sort_args(request, 'interval', 'desc')
        sort = [(sSortField, -1 if sSortDirection == 'desc' else 1)]
    elif unit == "jobs":
        fields = None
        query = build_api_datatables_query(request)
        sSortField, sSortDirection = _get_sort_args(request)
        sort = [(sSortField, -1 if sSortDirection == 'desc' else 1)]
        time_s = request.args.get("time_s", '')
        time_e = request.args.get("time_e", '')
        # Optional datestarted window filters.
        # Fix: the original used Python 2 `print` statements, a SyntaxError
        # under Python 3; converted to print() calls.
        if time_s and not time_e:
            print('datestarted', time_s)
            query.update({'datestarted': {'$gte': str2datetime(time_s)}})
        elif time_e and not time_s:
            print('datestarted', time_e)
            query.update({'datestarted': {'$lte': str2datetime(time_e)}})
        elif time_s and time_e:
            print('datestarted', time_s, time_e)
            query.update({
                'datestarted': {
                    '$gte': str2datetime(time_s),
                    '$lte': str2datetime(time_e)
                }
            })
        # We can't search easily params because we store it as decoded JSON in mongo :(
        # Add a string index?
        # if request.args.get("sSearch"):
        #     query.update(json.loads(request.args.get("sSearch")))
        collection = connections.mongodb_jobs.mrq_jobs
    if collection is not None:
        cursor = collection.find(query, projection=fields)
        if sort:
            cursor.sort(sort)
        if skip is not None:
            cursor.skip(skip)
        if limit is not None:
            cursor.limit(limit)
        data = {
            "aaData": list(cursor),
            "iTotalDisplayRecords": collection.find(query).count()
        }
    data["sEcho"] = request.args["sEcho"]
    return jsonify(data)
def api_datatables(unit):
    """Serve datatables-formatted listings for queues, workers, agents,
    scheduled jobs or jobs, selected by `unit`.

    Pagination comes from iDisplayStart/iDisplayLength; extra filters
    (id, status, ip, queue, showstopped) come from the query string.
    """
    # import time
    # time.sleep(5)
    collection = None
    sort = None
    skip = int(request.args.get("iDisplayStart", 0))
    limit = int(request.args.get("iDisplayLength", 20))
    if unit == "queues":
        # Queues are built in-memory from Redis, not via a Mongo cursor.
        queues = []
        for name in Queue.all_known():
            queue = Queue(name)
            q = {
                "name": name,
                "size": queue.size(),  # Redis size
                "is_sorted": queue.is_sorted,
                "is_timed": queue.is_timed,
                "is_raw": queue.is_raw,
                "is_set": queue.is_set
            }
            if queue.is_sorted:
                raw_config = queue.get_config()
                # Default graph window: +/- 7 days for timed queues, 0-100 otherwise.
                q["graph_config"] = raw_config.get(
                    "dashboard_graph", lambda: {
                        "start": time.time() - (7 * 24 * 3600),
                        "stop": time.time() + (7 * 24 * 3600),
                        "slices": 30
                    } if queue.is_timed else {
                        "start": 0,
                        "stop": 100,
                        "slices": 30
                    })()
                if q["graph_config"]:
                    q["graph"] = queue.get_sorted_graph(**q["graph_config"])
            if queue.is_timed:
                q["jobs_to_dequeue"] = queue.count_jobs_to_dequeue()
            queues.append(q)
        # Largest queues first.
        queues.sort(key=lambda x: -x["size"])
        data = {"aaData": queues, "iTotalDisplayRecords": len(queues)}
    elif unit == "workers":
        fields = None
        collection = connections.mongodb_jobs.mrq_workers
        sort = [("datestarted", -1)]
        query = {}
        if request.args.get("id"):
            query["_id"] = ObjectId(request.args["id"])
        else:
            if request.args.get("status"):
                # Dash-separated statuses mean "any of these".
                statuses = request.args["status"].split("-")
                query["status"] = {"$in": statuses}
            if request.args.get("ip"):
                # Match either the local or the external IP of the worker.
                query["$or"] = [{
                    "config.local_ip": request.args["ip"]
                }, {
                    "config.external_ip": request.args["ip"]
                }]
            if request.args.get("queue"):
                query["config.queues"] = request.args["queue"]
    elif unit == "agents":
        fields = None
        # Hide stopped agents unless showstopped is passed.
        query = {"status": {"$nin": ["stop"]}}
        collection = connections.mongodb_jobs.mrq_agents
        sort = [("datestarted", -1)]
        if request.args.get("showstopped"):
            query = {}
    elif unit == "scheduled_jobs":
        collection = connections.mongodb_jobs.mrq_scheduled_jobs
        fields = None
        query = {}
    elif unit == "jobs":
        fields = None
        query = build_api_datatables_query(request)
        sort = None  # TODO [("_id", 1)]
        # We can't search easily params because we store it as decoded JSON in mongo :(
        # Add a string index?
        # if request.args.get("sSearch"):
        #     query.update(json.loads(request.args.get("sSearch")))
        collection = connections.mongodb_jobs.mrq_jobs
    if collection is not None:
        # Shared cursor path for all Mongo-backed units.
        cursor = collection.find(query, projection=fields)
        if sort:
            cursor.sort(sort)
        if skip is not None:
            cursor.skip(skip)
        if limit is not None:
            cursor.limit(limit)
        data = {
            "aaData": list(cursor),
            "iTotalDisplayRecords": collection.find(query).count()
        }
    data["sEcho"] = request.args["sEcho"]
    return jsonify(data)
def _():
    """Expose this worker's report (with its id attached) as a JSON response."""
    payload = self.get_worker_report()
    # Attach the worker id so clients can correlate reports.
    payload.update({"_id": self.id})
    return jsonify(payload)
def api_datatables(unit):
    """Serve datatables-formatted listings for queues, workers, scheduled
    jobs or jobs, selected by `unit`.

    Pagination comes from iDisplayStart/iDisplayLength in the query string.
    """
    # import time
    # time.sleep(5)
    collection = None
    sort = None
    skip = int(request.args.get("iDisplayStart", 0))
    limit = int(request.args.get("iDisplayLength", 20))
    if unit == "queues":
        # Queues are built in-memory (Redis + known-queue counts), not via
        # a Mongo cursor like the other units.
        queues = []
        for name, jobs in Queue.all_known().items():
            queue = Queue(name)
            q = {
                "name": name,
                "jobs": jobs,  # MongoDB size
                "size": queue.size(),  # Redis size
                "is_sorted": queue.is_sorted,
                "is_timed": queue.is_timed,
                "is_raw": queue.is_raw,
                "is_set": queue.is_set
            }
            if queue.is_sorted:
                raw_config = cfg.get("raw_queues", {}).get(name, {})
                # Default graph window: +/- 7 days for timed queues, 0-100 otherwise.
                q["graph_config"] = raw_config.get("dashboard_graph", lambda: {
                    "start": time.time() - (7 * 24 * 3600),
                    "stop": time.time() + (7 * 24 * 3600),
                    "slices": 30
                } if queue.is_timed else {
                    "start": 0,
                    "stop": 100,
                    "slices": 30
                })()
                if q["graph_config"]:
                    q["graph"] = queue.get_sorted_graph(**q["graph_config"])
            if queue.is_timed:
                q["jobs_to_dequeue"] = queue.count_jobs_to_dequeue()
            queues.append(q)
        # Busiest queues (Mongo jobs + Redis size) first.
        queues.sort(key=lambda x: -(x["jobs"] + x["size"]))
        data = {
            "aaData": queues,
            "iTotalDisplayRecords": len(queues)
        }
    elif unit == "workers":
        fields = None
        # Hide stopped workers unless showstopped is passed.
        query = {"status": {"$nin": ["stop"]}}
        collection = connections.mongodb_jobs.mrq_workers
        sort = [("datestarted", -1)]
        if request.args.get("showstopped"):
            query = {}
    elif unit == "scheduled_jobs":
        collection = connections.mongodb_jobs.mrq_scheduled_jobs
        fields = None
        query = {}
    elif unit == "jobs":
        fields = None
        query = build_api_datatables_query(request)
        sort = [("_id", 1)]
        # We can't search easily params because we store it as decoded JSON in mongo :(
        # Add a string index?
        # if request.args.get("sSearch"):
        #     query.update(json.loads(request.args.get("sSearch")))
        collection = connections.mongodb_jobs.mrq_jobs
    if collection is not None:
        # Shared cursor path for all Mongo-backed units.
        cursor = collection.find(query, projection=fields)
        if sort:
            cursor.sort(sort)
        if skip is not None:
            cursor.skip(skip)
        if limit is not None:
            cursor.limit(limit)
        data = {
            "aaData": list(cursor),
            "iTotalDisplayRecords": collection.find(query).count()
        }
    data["sEcho"] = request.args["sEcho"]
    return jsonify(data)
def get_workers():
    """List every worker whose status is not "stop"."""
    workers_collection = connections.mongodb_jobs.mrq_workers
    active = workers_collection.find({"status": {"$ne": "stop"}})
    return jsonify({"workers": list(active)})
def route_index():
    """Serve this worker's report (with its id attached) as JSON."""
    payload = self.get_worker_report()
    # Attach the worker id so clients can correlate reports.
    payload.update({"_id": self.id})
    return jsonify(payload)
def api_job_action():
    """Send a JobAction task built from the POSTed form fields.

    Returns the queued task's id as {"job_id": ...}.
    """
    # Fix: dict.iteritems() does not exist on Python 3; .items() behaves
    # identically here and matches the rest of the file.
    return jsonify({
        "job_id": send_task("mrq.basetasks.utils.JobAction",
                            {k: v for k, v in request.form.items()},
                            queue=get_current_config()["dashboard_queue"])
    })
def post_workergroups():
    """Upsert each worker group from the POSTed JSON payload."""
    workergroups = json.loads(request.form["workergroups"])
    # Fix: dict.iteritems() is Python 2 only; .items() works on both and
    # the block already relies on the pymongo 3 update_one() API.
    for group_id, fields in workergroups.items():
        connections.mongodb_jobs.mrq_workergroups.update_one(
            {"_id": group_id}, {"$set": fields}, upsert=True)
    return jsonify({"status": "ok"})