async def handle_new_token():
    """Register a new API token from a JSON payload.

    Expects TOKEN, EMAIL, NAME, DESCRIPTION (and optional IS_ADMIN) keys.
    Returns the newly created token as JSON, 403 if the requesting token is
    not an admin, 200 (empty) on missing fields, 500 on any failure.
    """
    try:
        payload = json.loads(await request.data)
        token = payload.get(TOKEN, None)
        email = payload.get(EMAIL, None)
        name = payload.get(NAME, None)
        description = payload.get(DESCRIPTION, None)
        is_admin = payload.get(IS_ADMIN, False)
        if token is None or email is None or name is None or description is None:
            return Response('', status=200)
        # BUG FIX: the original logged `sensor_id`/`sensor_ip`, names that are
        # never defined in this handler (NameError swallowed by the except).
        CA_LOGGER.info("Recv'd registration for {} ({})".format(name, email))
        if notifier_initted() and not get_single_notifier().is_admin(token):
            return Response('', status=403)
        if notifier_initted():
            # BUG FIX: `now` was never defined; compute it like the sibling
            # handlers do.
            now = get_iso_time()
            await get_single_notifier().touch_token(token, now)
            token_info = await get_single_notifier().new_token(
                token, email=email, name=name, description=description,
                is_admin=is_admin)
            return jsonify(token=token_info.token)
    except Exception:
        # Log instead of silently swallowing (original used a bare except).
        CA_LOGGER.info(traceback.format_exc())
        return Response('', status=500)
    return Response('', status=500)
async def handle_ping():
    """Handle a sensor keep-alive ping.

    Validates the token, then updates the token's last-seen time and the
    sensor's ping record.  Returns 200 on success or missing fields,
    403 on invalid token, 500 on error.
    """
    try:
        payload = json.loads(await request.data)
        token = payload.get(TOKEN, None)
        sensor_ip = payload.get(SENSOR_IP, None)
        sensor_id = payload.get(SENSOR_ID, None)
        dt = payload.get(DATETIME, None)
        now = get_iso_time()
        CA_LOGGER.info("Recv'd ping from {} ({})".format(sensor_id, sensor_ip))
        if sensor_id is None or sensor_ip is None or token is None or dt is None:
            return Response('', status=200)
        if notifier_initted() and not await get_single_notifier().is_valid(token):
            return Response('', status=403)
        if notifier_initted():
            await get_single_notifier().touch_token(token, now)
            await get_single_notifier().ping_sensor(sensor_id, sensor_ip,
                                                    token, dt, now)
    except Exception:
        # CLEANUP: removed the unused `events` locals; narrowed bare except.
        traceback.print_exc()
        return Response('', status=500)
    return Response('', status=200)
async def handle_get_event(token, event_id):
    """Return one stored event as JSON.

    :param token: API token; must validate against the notifier.
    :param event_id: id of the event to fetch.
    Returns JSON on success, 500 if the token is invalid, the notifier is
    not initialized, or any exception occurs.
    """
    result_id = event_id
    try:
        # CLEANUP: dropped the no-op ``.format()`` the original chained here.
        CA_LOGGER.info("Recv'd get_event request")
        if notifier_initted() and await get_single_notifier().is_valid(token):
            result = await get_single_notifier().get_event(result_id)
            # IDIOM: replace thirteen `data['x'] = result.x` lines with a
            # comprehension over the field names.
            fields = ('sensor_id', 'sensor_ip', 'src_ip', 'src_port',
                      'dst_ip', 'dst_port', 'created_at', 'rtype',
                      'response', 'request_data', 'api', 'sent', 'event_id')
            data = {field: getattr(result, field) for field in fields}
            return jsonify(data)
    except Exception:
        CA_LOGGER.info("Download ID:{}, exception: {}".format(
            result_id, traceback.format_exc()))
        return Response('', status=500)
    return Response('', status=500)
async def handle_events():
    """Ingest a batch of sensor events and forward them to the notifier."""
    try:
        payload = json.loads(await request.data)
        events = payload.get(EVENTS, None)
        token = payload.get(TOKEN, None)
        sensor_ip = payload.get(SENSOR_IP, None)
        sensor_id = payload.get(SENSOR_ID, None)
        dt = payload.get(DATETIME, None)
        now = get_iso_time()
        # Missing identity fields: acknowledge quietly with 200.
        if any(v is None for v in (sensor_id, sensor_ip, token)):
            return Response('', status=200)
        # Identity present but no events: that is a bad request.
        if events is None or len(events) == 0:
            return Response('', status=400)
        CA_LOGGER.info("Recv'd {} events from {} ({}), initted: {}".format(
            len(events), sensor_id, sensor_ip, notifier_initted()))
        if notifier_initted() and not await get_single_notifier().is_valid(token):
            return Response('', status=403)
        if notifier_initted():
            await get_single_notifier().touch_token(token, now)
            await get_single_notifier().collector_notify(
                sensor_id, sensor_ip, token, dt, now, events)
            CA_LOGGER.debug("Logged {} events from {} ({})".format(
                len(events), sensor_id, sensor_ip))
    except:
        return Response('', status=500)
    return Response('', status=200)
async def make_response_for_chunk(self, url: str) -> Response:
    """Fetch one media chunk from *url* and wrap it in a local Response.

    Returns an empty 404 response when the upstream fetch fails.
    """
    headers = self._get_proxy_headers(url)
    remote = await self.get(url, headers=headers)
    if not remote:
        return Response(b"", status=404)
    body = await remote.read()
    chunk = self.fix_chunk_data(url, body)
    return Response(chunk, headers=dict(remote.headers), status=200)
async def _frontend_movie():
    """Return movie metadata as JSON, serving from the player cache when hot."""
    ct = json_ctype
    form = await request.get_json()
    idx: Optional[str] = form.get("id")
    if not idx:
        return Response(
            json.dumps({"error": "no id provided"}), content_type=ct, status=400
        )
    cached: Optional[dict] = _get_data_from_player_cache(idx)
    if cached:
        return Response(json.dumps(cached), content_type=ct)
    # Cache miss: fall back to the database.
    meta_ = movieData.query.filter_by(mid=idx).first()
    if not meta_:
        return Response(json.dumps({"error": "_nomovie_"}), content_type=ct)
    data: str = json.dumps(
        {
            "movie_name": meta_.moviedisplay,
            "thumbnail": meta_.thumb,
            "url": meta_.url,
            "alt1": meta_.alt1,
            "alt2": meta_.alt2,
        }
    )
    # Persist the freshly built payload so the next lookup hits the cache.
    open_and_write(os.path.join(".player-cache", f"{idx}.json"), "w", data)
    return Response(data, content_type=ct)
async def make_response(self, range_field: str = None):
    """Proxy the remote video stream and present it as a local response.

    Notes (translated from the original):
        Consecutive 206 range requests can break the connection; the asyncio
        library raises ConnectionAbortedError on Windows.  Occasional
        LocalProtocolError stems from the RFC2616 / RFC7231 HEAD-request
        conflict.
        See: https://bugs.python.org/issue26509
             https://gitlab.com/pgjones/quart/-/issues/45
    """
    # BUG FIX: the original returned "resource not available" (404) when
    # `is_available()` was TRUE; the condition must be negated.
    if not self._url.is_available():
        return Response("resource not available", status=404)
    if self._url.format == "hls":
        # m3u8 playlists are not proxied
        return redirect(self._url.real_url)
    url = self._url.real_url
    proxy_headers = self._get_proxy_headers(url)
    if range_field is not None:
        proxy_headers["range"] = range_field
        logger.debug(f"Client request stream range: {range_field}")
    await self.init_session()
    resp = await self.get(url, headers=proxy_headers)
    if not resp:
        return Response(b"", status=404)
    if self._url.format == "hls":
        # urls not ending in m3u8 skip Content-Type sniffing
        return redirect(url)

    @stream_with_context
    async def stream_iter():
        while chunk := await resp.content.read(4096):
            yield chunk
    # NOTE(review): the visible body defines stream_iter but never returns a
    # streaming Response — this chunk may be truncated; confirm downstream.
async def register_workflow(storeId):
    '''REST API for registering workflow to stock-analyzer service

    Returns 201 on success, 409 if the store already has a workflow,
    422 if the mandatory "cass" component is missing.
    '''
    data = await request.get_json()
    data = json.loads(data)
    logger.info("Received workflow request for store::{},\nspecs:{}\n".format(
        storeId, data))
    if storeId in workflows:
        logger.info("Workflow for store::{} already registered!!\nRequest Denied.\n".format(
            storeId))
        return Response(
            status=409,
            response="Oops! A workflow already exists for this client!\n" +
                     "Please teardown existing workflow before deploying " +
                     "a new one\n"
        )
    if "cass" not in data["component-list"]:
        # CONSISTENCY FIX: this call used the root `logging` module while the
        # rest of this handler logs via the module-level `logger`.
        logger.info("Workflow-request rejected, cass is a required workflow component\n")
        return Response(status=422,
                        response="workflow-request rejected, cass is a required workflow component\n")
    workflows[storeId] = data
    history[storeId] = {}
    logger.info("Workflow request for store::{} accepted\n".format(storeId))
    return Response(
        status=201,
        response='Valid Workflow registered to stock-analyzer component\n')
async def register_workflow(storeId):
    '''REST API for registering workflow to delivery assigner service'''
    spec = json.loads(await request.get_json())
    logger.info("Received workflow request for store::{},\nspecs:{}\n".format(
        storeId, spec))
    # Reject duplicates: one workflow per store.
    if storeId in workflows:
        logger.info("Workflow for store::{} already registered!!\nRequest Denied.\n".format(
            storeId))
        return Response(
            status=409,
            response="Oops! A workflow already exists for this client!\n" +
                     "Please teardown existing workflow before deploying " +
                     "a new one\n"
        )
    workflows[storeId] = spec
    logger.info("Workflow request for store::{} accepted\n".format(storeId))
    return Response(
        status=201,
        response='Valid Workflow registered to delivery assigner component\n')
def create_http_json_response(
    request: Request, query_response: Message
) -> Response:
    """Build the JSON (application/dns-json style) DoH response.

    :param request: incoming HTTP request (used for logging and headers).
    :param query_response: a resolved dns Message, or anything else on failure.
    Returns a header-decorated JSON response; non-Message inputs are echoed
    back as ``{"content": str(query_response)}``.
    """
    logger = logging.getLogger("doh-server")
    logger.debug(
        "[HTTP] " + str(request.method) + " " + str(request.headers.get("Accept"))
    )
    response = Response(json.dumps({}), content_type=DOH_JSON_CONTENT_TYPE)
    if isinstance(query_response, Message):
        if query_response.answer:
            # IDIOM: comprehension instead of the original append loop.
            answers = [str(answer) for answer in query_response.answer]
            with open(dir_path + "/template.json", "r", encoding="UTF-8") as template:
                s = Template(template.read())
            response.content = json.dumps(
                s.substitute(
                    data=json.dumps(answers),
                    name=query_response.answer[0].name,
                    type=query_response.answer[0].rdtype,
                    ttl=query_response.answer[0].ttl,
                )
            )
        # NOTE(review): placed at the isinstance level so a Message with an
        # empty answer section still returns the default (headed) response
        # instead of falling through to None — confirm against the original
        # file's indentation.
        return set_headers(request, response, query_response)
    else:
        return Response(json.dumps({"content": str(query_response)}), status=200)
async def setup_workflow(storeId):
    """PUT /workflow-requests/<storeId>: validate and register a new workflow."""
    logging.info("{:*^74}".format(" PUT /workflow-requests/" + storeId + " "))
    data = json.loads(await request.get_json())
    # Reject malformed workflow-requests up front.
    valid, mess = await verify_workflow(data)
    if not valid:
        logging.info("workflow-request ill formatted")
        return Response(
            status=400,
            response="workflow-request ill formatted\n" + mess
        )
    # One workflow per store.
    if storeId in workflows:
        logging.info("Workflow " + storeId + " already exists")
        return Response(
            status=409,
            response="Workflow " + storeId + " already exists\n"
        )
    workflows[storeId] = data
    logging.info("Workflow started for Store " + storeId)
    return Response(
        status=201,
        response="Restocker deployed for {}\n".format(storeId)
    )
async def steps():
    """GET returns the controller's current step; POST replaces the steps."""
    if request.method == "GET":
        return Response(json.dumps(controller.get_current_step()),
                        mimetype="application/json")
    if request.method == "POST":
        return Response(json.dumps(controller.set_step(await request.json)),
                        mimetype="application/json")
async def pump(id):
    """GET reads pump *id*'s state; POST cycles it to the next state."""
    if request.method == "POST":
        state = controller.cycle_pump_state(id)
        return Response(json.dumps(state), mimetype="application/json")
    if request.method == "GET":
        state = controller.get_pump_state(id)
        return Response(json.dumps(state), mimetype="application/json")
async def handle_remote_commands():
    """Authenticate a remote-command request against the server secret key
    and hand the payload to the notifier for processing.

    Returns 200 on success or missing fields, 403 on auth failure,
    500 on error.
    """
    try:
        payload = json.loads(await request.data)
        token = payload.get(TOKEN, '')
        sensor_ip = payload.get(SENSOR_IP, None)
        sensor_id = payload.get(SENSOR_ID, None)
        dt = payload.get(DATETIME, None)
        LOGGER.info("Recv'd remote commands requests from {} ({})".format(
            sensor_id, sensor_ip))
        if sensor_id is None or sensor_ip is None or token is None or dt is None:
            return Response('', status=200)
        secret_target_token = get_single_notifier().server_secret_key
        if secret_target_token != token:
            return Response('', status=403)
        # SECURITY FIX: the original logged the server secret key and the
        # client token in clear text (with a malformed "token{}" format)
        # BEFORE checking the match; log only the auth outcome.
        LOGGER.info("Authenticated incoming request with 'server_secret_key'")
        if notifier_initted():
            get_single_notifier().start_process_commands(
                sensor_id, sensor_ip, token, payload)
    except Exception:
        # CLEANUP: removed unused `global NOTIFIER`, `events`, and `now`.
        traceback.print_exc()
        return Response('', status=500)
    return Response('', status=200)
async def command_save_programs():
    """Persist the posted program list; 400 on compilation errors."""
    programs = await request.json
    if storage.set_programs(programs):
        return Response('Ok', mimetype='text/plain')
    return Response('Compilation errors', status=400, mimetype='text/plain')
async def update_workflow(storeId):
    """PUT /workflow-update/<storeId>: validate and replace a workflow."""
    logging.info("{:*^74}".format(" PUT /workflow-update/" + storeId + " "))
    data = json.loads(await request.get_json())
    # Reject malformed requests before touching state.
    valid, mess = await verify_workflow(data)
    if not valid:
        logging.info("workflow-request ill formatted")
        return Response(status=400,
                        response="workflow-request ill formatted\n" + mess)
    # "cass" is a mandatory component of every workflow.
    if "cass" not in data["component-list"]:
        logging.info(
            "workflow-request rejected, cass is a required workflow component")
        return Response(
            status=422,
            response="workflow-request rejected, cass is a required workflow component\n"
        )
    workflows[storeId] = data
    logging.info("Workflow updated for {}".format(storeId))
    return Response(
        status=200,
        response="Order Processor updated for {}\n".format(storeId))
def _set_auth_info(user: User, res: Response = None, save_user=True):
    """Set the session's auth info for *user* and update last-login records.

    (Docstring translated from the original Chinese.)
    :param user: user model
    :param save_user: whether to call user.save() automatically
    :param res: pass the outgoing response to refresh the auth cookie
    """
    now = int(time.time())
    session['yobot_user'] = user.qqid
    session['csrf_token'] = rand_string(16)
    # Expose the PREVIOUS login info to the session before overwriting it.
    session['last_login_time'] = user.last_login_time
    session['last_login_ipaddr'] = user.last_login_ipaddr
    user.last_login_time = now
    user.last_login_ipaddr = request.headers.get('X-Real-IP',
                                                 request.remote_addr)
    if res:
        new_key = rand_string(32)
        # CLEANUP: the original bound the created row to an unused local
        # `userlogin`; only the side effect (DB insert) matters.
        User_login.create(
            qqid=user.qqid,
            auth_cookie=_add_salt_and_hash(new_key, user.salt),
            auth_cookie_expire_time=now + EXPIRED_TIME,
        )
        new_cookie = f'{user.qqid}:{new_key}'
        res.set_cookie(LOGIN_AUTH_COOKIE_NAME, new_cookie,
                       max_age=EXPIRED_TIME)
    if save_user:
        user.save()
async def index():
    """DoH endpoint: accept a DNS query over POST (wire format) or GET/HEAD
    (base64url `dns` parameter), resolve it over UDP, and return the wire
    response.  Returns 400 on unparsable queries; aborts 415/405 on bad
    content-type / method.
    """
    if request.method == 'POST':
        ct = request.headers.get('content-type')
        if ct != "application/dns-udpwireformat":
            abort(415)
        data = await request.get_data()
        r = bytes(data)
        # BUG FIX: "%d" % r raised TypeError (r is bytes); log the length.
        info("Received %d bytes" % len(r))
    elif request.method == 'HEAD' or request.method == 'GET':
        form = request.args
        if check and "ct" not in form:
            # TODO an empty ct seems to be treated as missing? https://gitlab.com/pgjones/quart/issues/72
            error("ct parameter is missing from the URI")
        # TODO test its value
        # TODO test the Accept:
        if "dns" not in form:
            if check:
                error("dns parameter is missing from the URI")
            return Response("dns parameter missing from the URI", status=400,
                            mimetype='text/plain')
        # base64url payloads arrive unpadded; restore padding before decoding.
        padding = '=' * (-len(form['dns']) % 4)
        r = base64.urlsafe_b64decode(form['dns'] + padding)
    else:
        abort(405)
    try:
        message = dns.message.from_wire(r)
        if check and message.id != 0:
            error("Query ID not null (%i)" % message.id)
    except Exception:
        error("exception %s" % sys.exc_info()[0])
        return Response("Cannot parse your DNS request", status=400,
                        mimetype='text/plain')
    return (dns.query.udp(message, resolver).to_wire(),
            {'Content-Type': 'application/dns-udpwireformat',
             'Cache-Control': 'no-cache'})  # TODO cache-control max-age
async def route_dns_query() -> Response:
    """Resolve the incoming DoH question and reply in wire or JSON format."""
    logger = logging.getLogger("doh-server")
    accept_header = request.headers.get("Accept")
    message = await get_name_and_type_from_dns_question(request)
    if not message:
        return Response("", status=400)
    try:
        loop = asyncio.get_running_loop()
        query_response = None
        try:
            # Run the blocking resolver off the event loop.
            query_response = await loop.run_in_executor(
                None, functools.partial(resolver_dns.resolve, message))
        except asyncio.CancelledError:
            pass
        if not isinstance(query_response, Message):
            # No usable reply from the executor: synthesize a SERVFAIL.
            logger.warning("[DNS] Timeout on " + resolver_dns.name_server)
            query_response = dns.message.make_response(message)
            query_response.set_rcode(dns.rcode.SERVFAIL)
        elif query_response.answer:
            logger.debug("[DNS] " + str(query_response.answer[0]))
        else:
            logger.debug("[DNS] " + str(query_response.question[0]))
    except Exception as ex:
        logger.exception(str(ex))
        return Response("", status=400)
    if request.method == "GET" and accept_header == DOH_JSON_CONTENT_TYPE:
        return await create_http_json_response(request, query_response)
    return await create_http_wire_response(request, query_response)
async def retrieve_workflow(storeId):
    """GET /workflow-requests/<storeId>: return the stored workflow as JSON."""
    logging.info("{:*^74}".format(" GET /workflow-requests/" + storeId + " "))
    if storeId in workflows:
        return Response(status=200, response=json.dumps(workflows[storeId]))
    return Response(
        status=404,
        response="Workflow doesn't exist. Nothing to retrieve.\n")
async def _get(topics: dict, topic: str, queue: str) -> Response:
    """Return the item found at the requested offset."""
    try:
        offset = _get_offset()
    except ValueError as err:
        # Bad offset parameter: surface the parse error to the client.
        return Response(str(err), 400)
    item = await topics[topic][queue].pop_wait(offset)
    return Response(item, 200)
async def resp_headers(resp: Response):
    """Attach CORS headers: echo localhost origins, else pin the prod origin."""
    origin = request.headers.get("origin", "")
    if "localhost" in origin:
        resp.headers["access-control-allow-origin"] = request.headers["origin"]
    else:
        resp.headers["access-control-allow-origin"] = "https://files.pycode.tk"
    requested = request.headers.get("Access-Control-Request-Headers", "*")
    resp.headers["Access-Control-Allow-Headers"] = requested
    resp.headers["access-control-allow-credentials"] = "true"
    return resp
async def teardown_workflow(storeId):
    """DELETE /workflow-requests/<storeId>: remove a registered workflow."""
    logging.info("{:*^74}".format(" DELETE /workflow-requests/" + storeId + " "))
    if storeId in workflows:
        del workflows[storeId]
        return Response(status=204, response="")
    return Response(
        status=404,
        response="Workflow doesn't exist. Nothing to teardown.\n")
async def step():
    """GET reads the step, POST sets it from JSON, PUT answers "True"."""
    method = request.method
    if method == "GET":
        return Response(json.dumps(controller.get_step()),
                        mimetype="application/json")
    if method == "POST":
        payload = await request.get_json()
        return Response(json.dumps(controller.set_step(payload)),
                        mimetype="application/json")
    if method == "PUT":
        return Response("True")
async def upload():
    """Load multipart data and store it as a file."""
    formdata = await request.files
    uploaded = formdata.get('file')
    if uploaded is None:
        return Response('ERROR', status=400)
    # Write the upload under a fresh random name in /tmp.
    with open(f"/tmp/{uuid4().hex}", 'wb') as target:
        target.write(uploaded.read())
    return Response(target.name, content_type='text/plain')
def set_headers(
    request: Request, response: Response, query_response: Message
) -> Response:
    """Copy request metadata onto *response*; derive cache-control from TTLs."""
    response.headers["authority"] = AUTHORITY
    response.headers["method"] = request.method
    response.headers["scheme"] = get_scheme(request)
    answers = query_response.answer
    if answers:
        # Cache no longer than the shortest answer TTL.
        shortest = min(record.ttl for record in answers)
        response.headers["cache-control"] = "max-age=" + str(shortest)
    return response
async def assign_entity(store_id, order):
    '''Assigns the best delivery entity to an order and updates orderTable
    in the DB.

    Parameters:
        store_id (string): Store ID of workflow.
        order (dict): Order dictionary.
    Returns:
        Response (object): Response object for POST Request.
    '''
    try:
        store_info = await _get_store_info(store_id)
    except Exception:
        return Response(
            status=409,
            response="Store ID not found in Database!\n" +
                     "Please request with valid store ID."
        )
    try:
        entities = await _get_entities(store_id)
        if len(entities) == 0:
            # BUG FIX: the original referenced the undefined name `storeID`
            # here (NameError); the parameter is `store_id`.
            return Response(
                status=204,
                response="No Avaiblabe delivery entities for storeID::" +
                         str(store_id) + "\n" +
                         "Please update delivery entities or " +
                         "wait for entities to finish active deliveries!"
            )
    except Exception:
        return Response(
            status=502,
            response="Entities table in database corrupted!\n" +
                     "Please recreate delivery entities table."
        )
    customer_info = (order['pizza-order']['custLocation']['lat'],
                     order['pizza-order']['custLocation']['lon'])
    try:
        time, entity = await _get_delivery_time(entities, customer_info,
                                                store_info)
    except Exception:
        return Response(
            status=502,
            response="Error in Google API!\n" + "Please contact admin."
        )
    order['assignment'] = {}
    order['assignment']['deliveredBy'] = entity
    order['assignment']['estimatedTime'] = time
    return Response(
        status=200,
        response=json.dumps(order)
    )
async def order_funct():
    """POST /order: validate a pizza order and forward it along the workflow.

    Status codes:
        200 - order valid; either the final component, or the next component
              answered 200 (that response is passed through unchanged)
        208 - a downstream error was already recorded (passed through), or a
              new downstream error was just attached under order['error']
        400 - the pizza-order payload failed verify_order
        422 - no workflow is registered for the order's storeId
    """
    logging.info("{:*^74}".format(" POST /order "))
    request_data = await request.get_json()
    order = json.loads(request_data)
    # Orders are only accepted for stores with a registered workflow.
    if order["pizza-order"]["storeId"] not in workflows:
        message = "Workflow does not exist. Request Rejected."
        logging.info(message)
        return Response(status=422, response=message)
    store_id = order["pizza-order"]["storeId"]
    cust_name = order["pizza-order"]["custName"]
    logging.info("Store " + store_id + ":")
    logging.info("Verifying order from " + cust_name + ".")
    valid, mess = await verify_order(order["pizza-order"])
    if not valid:
        # failure of some kind, return error message
        error_mess = "Request rejected, pizza-order is malformed: " + mess
        logging.info(error_mess)
        return Response(status=400, response=error_mess)
    order.update({"valid": valid})
    log_mess = "Order from " + cust_name + " is valid."
    next_comp = await get_next_component(store_id)
    if next_comp is not None:
        # send order to next component in workflow
        next_comp_url = await get_component_url(next_comp, store_id)
        resp = await send_order_to_next_component(next_comp_url, order)
        if resp.status_code == 200:
            # successful response from next component, return same response
            logging.info(log_mess + " Order sent to next component.")
            return resp
        elif resp.status_code == 208:
            # an error occurred in the workflow but has been handled already
            # return the response unchanged
            return resp
        else:
            # an error occurred in the next component, add the response status
            # code and text to 'error' key in order dict and return it
            logging.info(log_mess + " Issue sending order to next component:")
            logging.info(resp.text)
            order.update({"error": {"status-code": resp.status_code,
                                    "text": resp.text}})
            return Response(status=208, response=json.dumps(order))
    # last component, print successful log message and return processed order
    logging.info(log_mess)
    return Response(status=200, response=json.dumps(order))
async def teardown_workflow(storeId):
    """DELETE /workflow-requests/<storeId>: stop the restocker workflow."""
    logging.info("{:*^74}".format(" DELETE /workflow-requests/" + storeId + " "))
    if storeId in workflows:
        del workflows[storeId]
        logging.info("Restocker stopped for {}".format(storeId))
        return Response(status=204, response="restocker stopped")
    return Response(
        status=404,
        response="Workflow doesn't exist. Nothing to teardown.\n"
    )
async def upload_route():
    """Receive an upload and store it as a block at the ``path`` query arg.

    Returns 400 when ``path`` is missing, 409 when the target already
    exists (unless PUT, which overwrites), 500 on any other failure.
    """
    path = request.args.get('path')
    # ROBUSTNESS FIX: the original used `assert path is not None`, which is
    # stripped under `python -O`; reject the bad request explicitly instead.
    if path is None:
        return Response("Missing 'path' parameter", 400)
    app.logger.info(f"Starting upload: {path}")
    # PUT requests are allowed to overwrite an existing block.
    result = await upload_block(path, await request.body,
                                request.method == 'PUT')
    if result == 0:
        return Response("")
    if result == errno.EEXIST:
        return Response("File already exists", 409)
    return Response("File upload failed", 500)