def call_check(data):
    """Fan out availability checks for ``data['url']`` to every configured location.

    Joins the socket room named after the transaction id, then for each
    (location, testsuite) pair dispatches a celery chain to that location's
    worker queue; DNS suites additionally fan out one task per DNS server.
    """
    join_room(data["transaction_id"])
    url = data["url"]
    # Prepend a default scheme only when none is present.  The original
    # pattern (^http\://) did not recognise https URLs and would mangle
    # "https://example.com" into "http://https://example.com".
    if not re.match(r"^https?://", url):
        data["url"] = "http://%s" % url
    for location in app.config["LOCATIONS"]:
        for testsuite in location["testsuites"]:
            input_data = {
                "transaction_id": data["transaction_id"],
                "location": location["location"],
                "country": location["country"],
                "ISP": location["ISP"],
                "url": data["url"],
            }
            # Queue name convention: "<location>_<isp>", lowercased, spaces -> underscores.
            location_queue = "%s_%s" % (
                location["location"].lower().replace(" ", "_"),
                location["ISP"].lower().replace(" ", "_"),
            )
            logging.warn(location_queue)
            input_data["test_type"] = testsuite
            if testsuite in ("dns_google", "dns_TM", "dns_opendns"):
                # DNS suites fan out further: one task per configured server.
                for server in app.config["TESTSUITES"][testsuite]["servers"]:
                    input_data["task_id"] = str(uuid.uuid4())
                    extra_attr = {
                        "provider": app.config["TESTSUITES"][testsuite]["provider"],
                        "server": server,
                    }
                    input_data["extra_attr"] = extra_attr
                    logging.warn("DNS Check")
                    description = "%s server: %s " % (
                        app.config["TESTSUITES"][testsuite]["description"], server)
                    input_data["description"] = description
                    result_data = ResultData.from_json(input_data, extra_attr=extra_attr)
                    task = chain(
                        call_dns_task.s(result_data.to_json()).set(queue=location_queue),
                        update_entry.s().set(queue="basecamp"),
                        post_update.s().set(queue="basecamp"),
                    ).apply_async()
            else:
                input_data["task_id"] = str(uuid.uuid4())
                input_data["description"] = app.config["TESTSUITES"][testsuite]["description"]
                result_data = ResultData.from_json(input_data)
                if testsuite == "http":
                    task = chain(
                        call_http_task.s(result_data.to_json()).set(queue=location_queue),
                        update_entry.s().set(queue="basecamp"),
                        post_update.s().set(queue="basecamp"),
                    ).apply_async()
                elif testsuite == "http_dpi_tampering":
                    task = chain(
                        call_http_dpi_tampering_task.s(result_data.to_json()).set(queue=location_queue),
                        update_entry.s().set(queue="basecamp"),
                        post_update.s().set(queue="basecamp"),
                    ).apply_async()
            # NOTE(review): emits the last ResultData built for this testsuite
            # iteration back to the transaction's socket room — confirm the
            # intended granularity (per testsuite, not per DNS server).
            emit("result_received", result_data.to_json(), room=result_data.transaction_id)
def dump_json():
    """Return one page of stored results as JSON, with prev/next page links.

    Reads the 1-based page number from the ``page`` query argument
    (defaulting to 1) and returns ``ITEM_PER_PAGE`` entries per page.
    """
    ITEM_PER_PAGE = 10
    page = request.args.get("page")
    if not page:
        page = 1
    page = int(page)
    output = []
    result_data = ResultData.select()
    count = result_data.count()
    result_page = result_data.paginate(page, paginate_by=ITEM_PER_PAGE)
    # Ceiling division, clamped to at least one page.  The original
    # `count / ITEM_PER_PAGE + 1` reported an extra empty page whenever
    # count was an exact multiple of the page size (and float-divides
    # under Python 3).
    num_page = max(1, (count + ITEM_PER_PAGE - 1) // ITEM_PER_PAGE)
    for entry in result_page:
        output.append(entry.to_json())
    json_output = {
        "pages": num_page,
        "total": count,
        "page": page,
        "item_per_page": ITEM_PER_PAGE,
        "results": output,
    }
    if page > 1:
        json_output["prev_url"] = "%s/dump?page=%s" % (app.config["URL"], page - 1)
    if page < num_page:
        json_output["next_url"] = "%s/dump?page=%s" % (app.config["URL"], page + 1)
    return jsonify(json_output)
def dump_json():
    """Serialize every stored result entry into a single JSON payload."""
    entries = [entry.to_json() for entry in ResultData.select()]
    return jsonify({
        "results": entries,
        "total": len(entries)
    })
def fetch_json(transaction_id):
    """Return all result entries belonging to one transaction as JSON."""
    matching = ResultData.select().where(
        ResultData.transaction_id == transaction_id)
    entries = [entry.to_json() for entry in matching]
    return jsonify({
        "results": entries,
        "total": len(entries)
    })
def update_entry(data):
    """Update the stored ResultData row for a finished task.

    Looks the row up by ``task_id`` and copies status fields from the task
    result payload, then returns the row's JSON form.
    """
    # Do not rely that connection will be kept running.
    # If it is known to last as long as the task, just make it global
    logging.warn("Updating database")
    # Replaced the bare Py2 `print data` debug statement with the module's
    # existing logging facility.
    logging.debug(data)
    db.connect()
    result_data = ResultData.get(task_id=data["task_id"])
    result_data.status = data["status_code"]
    result_data.task_status = data["status"]
    result_data.reason = data["reason"]
    result_data.save()
    return result_data.to_json()
def fetch_html(transaction_id):
    """Render the shareable HTML report for one transaction's results."""
    rows = ResultData.select().where(
        ResultData.transaction_id == transaction_id)
    grouped = {}
    target = ""
    for row in rows:
        # Every row of a transaction carries the same URL, so keeping the
        # last one seen is sufficient.
        target = row.url
        grouped.setdefault(row.isp, {}).setdefault(row.location, []).append({
            "description": row.description,
            "status": row.status,
            "reason": row.reason,
            "task_status": row.task_status
        })
    share_url = "%s/%s.html" % (app.config["URL"], transaction_id)
    return render_template("index.html",
                           output=grouped,
                           share=True,
                           url=app.config["URL"],
                           current_url=share_url,
                           target_url=target)
def store_to_db(data, extra_attr=None):
    """Insert or update the ResultData row identified by ``data['task_id']``.

    Immutable identity fields are set only when the row is first created;
    status fields and the raw payload are refreshed on every call.
    """
    try:
        result = ResultData.get(task_id=data["task_id"])
    except ResultData.DoesNotExist:
        # First sighting of this task: create the row and fill in the
        # fields that never change afterwards.
        result = ResultData()
        result.transaction_id = data["transaction_id"]
        result.task_id = data["task_id"]
        result.task_type = data["test_type"]
        result.location = data["location"]
        result.country = data["country"]
        result.url = data["url"]
        if extra_attr:
            result.extra_attr = extra_attr
    # Difference between status and task_status. task_status is for celery task.
    result.task_status = data["status"]
    result.status = str(data["status_code"])
    result.raw_data = data
    result.save()
def initialize_entry(data, extra_attr=None):
    """Create and persist a ResultData entry from a task payload.

    The original signature used a mutable default (``extra_attr={}``),
    which is shared across all calls and can leak state between them;
    default to None and substitute a fresh dict per call instead.
    """
    if extra_attr is None:
        extra_attr = {}
    db.connect()
    # from_json is expected to save the entry as a side effect (per the
    # original note) — confirm against ResultData.from_json.
    result_data = ResultData.from_json(data, extra_attr=extra_attr)
    return result_data.to_json()