def subscribe(self, destination=""):
    """Return a streamed SSE :class:`Response` subscribed to *destination*.

    A per-subscriber queue is registered; the generator drains it, emitting
    a ``:keep-alive`` event whenever no message arrives within
    ``self.keepalive_interval``.  The queue is deregistered when the
    generator exits or the response is closed.
    """
    def consume():
        try:
            while self.loop():
                try:
                    result = self.session_queues[destination][queue_id].get(
                        timeout=self.keepalive_interval)
                # NOTE: was a bare `except:`, which also swallowed
                # SystemExit/KeyboardInterrupt; Exception still covers the
                # expected queue.Empty timeout and KeyError lookups.
                except Exception:
                    if self.keepalive:
                        ev = ServerSentEvent(":keep-alive")
                        yield ev.encode()
                else:
                    ev = ServerSentEvent(str(result))
                    yield ev.encode()
        except GeneratorExit:
            # Client disconnected mid-stream.
            self.__deleteSessionQueue(destination, queue_id)
        except Exception:
            self.__deleteSessionQueue(destination, queue_id)

    def close():
        # Response closed without the generator raising: still deregister.
        self.__deleteSessionQueue(destination, queue_id)

    queue_id = self.__addSessionQueue(destination)
    r = Response(consume(), mimetype="text/event-stream")
    r.call_on_close(close)
    return r
def user_subscribe():
    """Stream the requested user events to the client as server-sent events.

    Expects a comma-separated ``events`` query parameter; responds 404 when
    no events are requested and 504 when the event source is unreadable.
    """
    def wrapper(listener):
        logger.debug("Beginning streaming of user events")
        try:
            # Initial empty payload so the client sees the stream open.
            yield "data: %s\n\n" % json.dumps({})
            for event_id, data in listener.event_stream():
                message = {"event": event_id, "data": data}
                json_string = json.dumps(message)
                yield "data: %s\n\n" % json_string
        finally:
            # Runs on normal completion and on disconnect alike (the old
            # message wrongly claimed an exception in every case).
            logger.debug("Closing listener from streaming generator")
            listener.stop()

    # BUG fix: "".split(",") yields [""], so the original emptiness check
    # could never fire; drop empty entries before validating.
    events = [e for e in request.args.get("events", "").split(",") if e]
    if not events:
        abort(404)
    try:
        listener = userevents.get_listener(current_user.db_user().username,
                                           events)
    except CannotReadUserEventsException:
        abort(504)

    def on_close():
        logger.debug("Closing listener due to response close")
        listener.stop()

    r = Response(wrapper(listener), mimetype="text/event-stream")
    r.call_on_close(on_close)
    return r
def subscribe(self, destination=""):
    """Subscribe the caller to *destination* via a streamed SSE response.

    Registers a session queue for this subscriber and streams queued
    messages; an idle period of ``self.keepalive_interval`` produces a
    ``:keep-alive`` event instead.  Cleanup happens on generator exit and
    on response close.
    """
    def consume():
        try:
            while self.loop():
                try:
                    result = self.session_queues[destination][
                        queue_id].get(timeout=self.keepalive_interval)
                # Fix: narrowed from a bare `except:` (which also caught
                # SystemExit/KeyboardInterrupt) to Exception — the timeout
                # (queue.Empty) and missing-key cases are still handled.
                except Exception:
                    if self.keepalive:
                        ev = ServerSentEvent(":keep-alive")
                        yield ev.encode()
                else:
                    ev = ServerSentEvent(str(result))
                    yield ev.encode()
        except GeneratorExit:
            self.__deleteSessionQueue(destination, queue_id)
        except Exception:
            self.__deleteSessionQueue(destination, queue_id)

    def close():
        self.__deleteSessionQueue(destination, queue_id)

    queue_id = self.__addSessionQueue(destination)
    r = Response(consume(), mimetype="text/event-stream")
    r.call_on_close(close)
    return r
def proxy(path):
    """Proxy a GET for *path* to the S3 endpoint with a SigV4-signed request.

    Only whitelisted request/response headers and response codes are
    forwarded; a disallowed status yields an empty 500 while still draining
    the upstream body so the connection can be reused.
    """
    logger.debug('Attempt to proxy: %s', request)
    url = endpoint_url + path
    body_hash = hashlib.sha256(b'').hexdigest()
    pre_auth_headers = tuple(
        (name, request.headers[name])
        for name in proxied_request_headers
        if name in request.headers
    )
    split_url = urllib.parse.urlsplit(url)
    signed_headers = aws_sigv4_headers(
        pre_auth_headers, 's3', split_url.netloc, 'GET', split_url.path, (),
        body_hash,
    )
    upstream = requests.get(url, headers=dict(signed_headers), stream=True)
    response_headers = tuple(
        (name, upstream.headers[name])
        for name in proxied_response_headers
        if name in upstream.headers
    )
    allow_proxy = upstream.status_code in proxied_response_codes
    logger.debug('Response: %s', upstream)
    logger.debug('Allowing proxy: %s', allow_proxy)

    def body_upstream():
        yield from upstream.iter_content(16384)

    def body_empty():
        # Yield nothing (this must still be a generator) ...
        yield from ()
        # ... but consume the upstream body so the connection is reusable.
        for _ in upstream.iter_content(16384):
            pass

    if allow_proxy:
        downstream_response = Response(body_upstream(),
                                       status=upstream.status_code,
                                       headers=response_headers)
    else:
        downstream_response = Response(body_empty(), status=500)
    downstream_response.call_on_close(upstream.close)
    return downstream_response
def streamed_response(self):
    """
    Issue the request and return a streamed :py:class:`flask.Response`.
    """
    self._request()
    # XXX(damb): Only return a streamed response as soon as valid data
    # is available. Use a timeout and process errors here.
    self._wait()

    response = Response(
        stream_with_context(self),
        mimetype=self.mimetype,
        content_type=self.content_type,
    )
    response.call_on_close(self._call_on_close)
    return response
def sse():
    """Server-sent-events endpoint streaming queued results to the client."""
    def gen():
        # Reading a module-level name needs no `global` declaration
        # (the original's `global _queue` was redundant).
        yield ServerSentEvent('INIT').encode()
        for result in _queue:
            # Serialise container payloads; everything else goes out as str().
            if isinstance(result, (dict, list)):
                result = json.dumps(transform_json_types(result))
            yield ServerSentEvent(str(result)).encode()

    def onclose():
        # Close the generator so iteration over _queue is abandoned.
        stream.close()

    stream = gen()
    resp = Response(stream, mimetype="text/event-stream")
    resp.call_on_close(onclose)
    return resp
def webhook_route(webhook_token=''):
    """Validate a webhook call and hand its payload to the bot.

    For callback observations the bot is consulted synchronously; otherwise
    analysis is deferred via ``Response.call_on_close`` so the webhook
    caller is answered immediately.
    """
    # ingest request
    request_details = extract_request_details(request)
    # flask_app.logger.debug(request_details)
    response_details = construct_response(request_details)
    call_on_close = None
    # `in` on a dict already tests its keys; `.keys()` was redundant.
    if webhook_token not in webhook_map:
        response_details['error'] = 'Invalid webhook token.'
        response_details['code'] = 404

    # send webhook content to bot
    if not response_details['error']:
        if request_details['json']:
            observation_details = {
                'callback': False,
                'gateway': 'webhook',
                'details': request_details
            }
            # Route-specific settings override the defaults above.
            observation_details.update(webhook_map[webhook_token])
            if observation_details['callback']:
                response_details = bot_client.analyze_observation(
                    **observation_details)
            else:
                def webhook_callable():
                    bot_client.analyze_observation(**observation_details)

                # add placeholder for telegram
                if observation_details['service'] == 'telegram':
                    telegram_client.send_message(
                        request_details['json']['message']['chat']['id'],
                        message_text='Gotcha. Working on it...')
                call_on_close = webhook_callable

    # response to request: deferred analysis requires a manual Response so
    # the callable can be attached to it.
    if call_on_close:
        response_kwargs = {
            'response': json.dumps(response_details),
            'status': response_details['code'],
            'mimetype': 'application/json'
        }
        response_object = Response(**response_kwargs)
        response_object.call_on_close(call_on_close)
        # flask_app.logger.debug(response_details)
        return response_object
    # flask_app.logger.debug(response_details)
    return jsonify(response_details), response_details['code']
def events_for(address):
    """Stream trader events for *address* as newline-delimited JSON."""
    listener = trader.listen_for_events(address)

    def generate():
        while True:
            event = listener.event_queue_async.get()
            payload = {'data': {'event': event.as_dict(), 'type': event.type}}
            yield json.dumps(payload) + '\n'

    def on_close():
        # stop listener on closed connection
        trader.stop_listen(listener)

    response = Response(generate(), content_type='application/x-json-stream')
    response.call_on_close(on_close)
    return response
def messages_for(topic):
    """Stream broker messages for *topic* as newline-delimited JSON,
    keeping a global count of active listeners."""
    global nof_listeners
    listener = MessageListener(message_broker, topic)
    listener.start()

    def generate():
        while True:
            yield json.dumps({'data': listener.get()}) + '\n'

    def on_close():
        # stop listener on closed connection
        global nof_listeners
        nof_listeners -= 1
        print('Nof-listeners: {}'.format(nof_listeners))
        listener.stop()

    response = Response(generate(), content_type='application/x-json-stream')
    nof_listeners += 1
    print('Nof-listeners: {} new for topic: {}'.format(nof_listeners, topic))
    response.call_on_close(on_close)
    return response
def main():
    """Serve a streamed animated PNG in chat mode; otherwise redirect to the
    static default image."""
    if get_mode() != 'chat':
        return redirect(url_for('static', filename='default.png'))

    controller = StreamController(870, 44)
    controllers[controller.uuid] = controller
    # Replace any controller left over from a previous session.
    if 'controller' in session:
        print('closing existing controller')
        close(session['controller'])
    session['controller'] = controller.uuid
    response = Response(controller.generate_stream(), mimetype='image/apng')
    print(response.call_on_close(lambda: close(controller.uuid)))
    return response
def start_downloads(self, format_id, file_detail, user_info):
    """Start a download job and return a streaming attachment Response.

    Spawns a greenlet running the actual download, then peeks the shared
    queue: the first item decides whether we stream the file, report an
    error, or time out.
    """
    queue = Queue()  # queue used to pass download status between greenlets
    ext = file_detail['file_ext']
    title = file_detail['file_title']
    dl_status = DownloadStatus()  # tracks the current download state
    dl_status.url = self.video_url
    dl_status.title = title
    gl = gevent.spawn(self.download_work, format_id, queue, dl_status)
    try:
        rst = queue.peek(timeout=15)  # switch to gl
        if isinstance(rst, DownloadStatus):
            content_type = self.get_content_type(ext)
            file_name = '{}.{}'.format(title, ext)
            resp = Response(self.response_data_generator(queue), content_type=content_type)
            # Percent-encode the filename for the header.
            resp.headers["Content-Disposition"] = "attachment; filename={}".format(quote(file_name.encode('utf-8')))
            # NOTE(review): on_close(...) is invoked here and its return
            # value registered — presumably it returns the real close
            # callback; confirm against its definition.
            resp.call_on_close(self.on_close(gl, dl_status, user_info))
            # The free quota is deducted as soon as the download starts,
            # without waiting for it to finish.
            if is_free_download(current_user):  # current_user.is_anonymous or current_user.brick <= 0:
                info = current_user, user_info[1], user_info[-1]
                set_free_download_flag(info)
                size = get_file_size_of_format_id(self.video_url, format_id)
                if size > 0:
                    minus_free_download_size(size)  # deduct the free quota
            return resp
        elif isinstance(rst, Exception):
            print("request occur exception:{}".format(rst))
            gl.kill()  # close greenlet
            return _("request occur exception")  # the request raised an exception
        else:
            print("youtube-dl occur an unknown error")
            gl.kill()  # close greenlet
            return _("unknown error")  # unknown failure mode
    except Empty:
        # Nothing arrived on the queue within 15s.
        print("request timeout with url:{0}".format(self.video_url))
        gl.kill()  # close greenlet
        return _("request timeout")  # request timed out
def start_query_download_tool(trans_id):
    """Stream the given transaction's query results as a CSV attachment.

    Opens a dedicated synchronous connection for the export; the connection
    is released when the response is closed (or on error).
    """
    sync_conn = None
    status, error_msg, conn, trans_obj, session_obj = check_transaction_status(
        trans_id)
    if not (status and conn is not None
            and trans_obj is not None and session_obj is not None):
        return internal_server_error(
            errormsg=gettext("Transaction status check failed."))

    def cleanup():
        # Release the ad-hoc synchronous connection.  Guarded so the
        # exception handler can register it even when the connection was
        # never established — the original referenced `cleanup` before it
        # was defined in that path, raising NameError.
        if sync_conn is not None:
            conn.manager.connections[sync_conn.conn_id]._release()
            del conn.manager.connections[sync_conn.conn_id]

    data = request.args if request.args else None
    try:
        if data and 'query' in data:
            sql = data['query']
            conn_id = str(random.randint(1, 9999999))
            # 'async' is a reserved word since Python 3.7, so it can only be
            # passed as a keyword argument via dict expansion.
            sync_conn = conn.manager.connection(
                did=trans_obj.did, conn_id=conn_id, auto_reconnect=False,
                **{'async': False})
            sync_conn.connect(autocommit=False)
            # This returns generator of records.
            status, gen = sync_conn.execute_on_server_as_csv(sql, records=2000)
            if not status:
                r = Response('"{0}"'.format(gen), mimetype='text/csv')
                r.headers[
                    "Content-Disposition"] = "attachment;filename=error.csv"
                r.call_on_close(cleanup)
                return r
            r = Response(gen(), mimetype='text/csv')
            if 'filename' in data and data['filename'] != "":
                filename = data['filename']
            else:
                import time
                filename = str(int(time.time())) + ".csv"
            r.headers[
                "Content-Disposition"] = "attachment;filename={0}".format(
                    filename)
            r.call_on_close(cleanup)
            return r
    except Exception as e:
        r = Response('"{0}"'.format(e), mimetype='text/csv')
        r.headers["Content-Disposition"] = "attachment;filename=error.csv"
        r.call_on_close(cleanup)
        return r
def _create_response(self, stream_wrapper=None, **wrapper_kwargs):
    """Route the request, wait for data, and build a streamed Response.

    Raises an FDSN no-data error when routing yields no routes; an optional
    callable *stream_wrapper* may transform the response generator.
    """
    self._route()
    if not self._num_routes:
        raise FDSNHTTPError.create(self._nodata)

    self._request()
    # XXX(damb): Only return a streamed response as soon as valid data
    # is available. Use a timeout and process errors here.
    self._wait()

    generator = stream_with_context(self)
    if callable(stream_wrapper):
        generator = stream_wrapper(generator, **wrapper_kwargs)

    response = Response(generator,
                        mimetype=self.mimetype,
                        content_type=self.content_type)
    response.call_on_close(self._call_on_close)
    return response
def telemetry_route():
    """GET: list telemetry records, optionally filtered by user and date
    range.  POST: create a record; after three consecutive anomalous records
    for a user, email that user once the response has been sent.
    """
    # ingest request
    request_details = extract_request_details(request)
    app.logger.debug(request_details)
    call_on_close = None

    # handle get telemetry
    if request_details['method'] == 'GET':
        response_details = construct_response(request_details)
        user_id = request_details['params'].get('user')
        start_date = request_details['params'].get('start')
        end_date = request_details['params'].get('end')
        telemetry_list = []
        if user_id and start_date and end_date:
            query_criteria = {
                '.user_id': {'equal_to': user_id},
                '.date': {'min_value': start_date, 'max_value': end_date}
            }
            for record in sql_tables['telemetry'].list(query_criteria):
                telemetry_list.append(record)
        else:
            for record in sql_tables['telemetry'].list():
                telemetry_list.append(record)
        response_details['details'] = telemetry_list
        app.logger.debug(response_details)
        return jsonify(response_details), response_details['code']

    # handle post telemetry
    if request_details['method'] == 'POST':
        response_details = construct_response(request_details, telemetry_model)
        if not response_details['error']:
            telemetry_dt = request_details['json']['dt']
            telemetry_record = {
                'date': labDT.fromEpoch(telemetry_dt).zulu()[0:10],
            }
            for key, value in request_details['json'].items():
                telemetry_record[key] = value
            telemetry_id = sql_tables['telemetry'].create(telemetry_record)
            response_details['details'] = {'id': telemetry_id}
            # test for consistent anomalies
            if telemetry_record['anomalous']:
                user_id = telemetry_record['user_id']
                query_criteria = {'user_id': user_id}
                sort_criteria = [{'.dt': 'descend'}]
                count = 0
                for record in sql_tables['telemetry'].list(
                        query_criteria, sort_criteria):
                    if not record['anomalous']:
                        break
                    count += 1
                if count > 2:
                    # send alert
                    from server.utils import send_email

                    def alert_user():
                        user_email = ''
                        user_name = ''
                        for user in sql_tables['users'].list(
                                {'user_id': user_id}):
                            user_email = user['email']
                            user_name = user['name']
                            break
                        send_email(email_client, user_email, user_name)

                    # Fix: this assignment was garbled/commented out in the
                    # source, leaving the deferred-alert branch dead; the
                    # email is deferred until the response is closed so the
                    # client is not blocked on the send.
                    call_on_close = alert_user

    # compose response
    if call_on_close:
        response_kwargs = {
            'response': json.dumps(response_details),
            'status': response_details['code'],
            'mimetype': 'application/json'
        }
        response_object = Response(**response_kwargs)
        response_object.call_on_close(call_on_close)
        app.logger.debug(response_details)
        return response_object
    else:
        app.logger.debug(response_details)
        return jsonify(response_details), response_details['code']
def stream_data():
    # Stream a remote video download back to the client: a child process
    # runs the downloader and relays fragments over a multiprocessing pipe.
    printLog('kwargs: {0}'.format(request.args))
    dl_url = request.args.get('url')
    # printLog('download url: {0}'.format(dl_url))
    if dl_url:
        # event = manager.Event()  # client-closed-connection event (unused)
        # printLog('prepare to download')
        parent_conn, child_conn = multiprocessing.Pipe(False)
        result = pool.apply_async(start_download, args=(dl_url, child_conn))  # start the video download in a child process
        if parent_conn.poll(timeout=15):  # try to pull the first data
            # Close our reference to the child's write end here: this drops
            # the refcount without releasing it so early that the pipe handed
            # to the child would be empty.
            child_conn.close()

            def stream_generate(pipe):  # generator yielding small chunks of data
                # Too short a timeout (>=5) breaks transfers on slow
                # connections; too long inflates concurrency after an error.
                while pipe.poll(
                        timeout=15
                ):
                    try:
                        recv_data = pipe.recv()  # receive data sent by the child process
                    except EOFError:
                        printLog('Nothing left or the other end was closed')
                        # NOTE(review): a `return <value>` inside a generator
                        # only stops iteration — the message never reaches
                        # the client; confirm whether that is intended (same
                        # for the returns below).
                        return ('Nothing left or the other end was closed'
                                )  # the child's end of the pipe was closed
                    printLog('recv data from child_conn')
                    if isinstance(recv_data, dict):  # received status data
                        youDLer_status = recv_data
                        if youDLer_status['status'] == 'error':
                            printLog('recv youtube-dl error message')
                            return 'recv youtube-dl error message'
                        else:
                            index = youDLer_status['fragment_index'] - 1
                            # printLog('recv youtube-dl data with index: {0}'.format(index))
                            file_nto_read = youDLer_status[
                                'filename'] + '.part-Frag' + str(index)
                            printLog(
                                'send chunk data: {0}'.format(file_nto_read))
                            with open(file_nto_read, 'rb') as f:
                                yield f.read()
                    elif isinstance(recv_data, Exception):  # received an exception
                        printLog(
                            'an exception occur at sub-process: {0}'.format(
                                recv_data))
                        return 'an exception occur at sub-process'
                    else:
                        printLog('unknow exception occur at sub-process')
                        return 'unknow exception occur at sub-process'
                else:
                    # while/else: poll() timed out without new data.
                    printLog('poll sub-process data timeout')
                    return 'poll sub-process data timeout'

            resp = Response(stream_with_context(stream_generate(parent_conn)),
                            content_type='text/event-stream')
            resp.call_on_close(on_close)
            return resp
        else:
            printLog('poll data timeout')
            return render_template_string("poll data timeout")  # the very first poll timed out
    else:
        printLog('video url error!')
        return render_template_string('video url error!')  # bad parameter
def handle_request():
    # Route-service entry point: authorise the incoming CF-forwarded request
    # against configured routes (hostname regex, client IP range, shared
    # secret header, basic auth) and, if allowed, proxy it to the origin.
    request_id = request.headers.get('X-B3-TraceId') or ''.join(choices(request_id_alphabet, k=8))
    logger.info('[%s] Start', request_id)

    # Must have X-CF-Forwarded-Url to match route
    try:
        forwarded_url = request.headers['X-CF-Forwarded-Url']
    except KeyError:
        logger.error('[%s] Missing X-CF-Forwarded-Url header', request_id)
        return render_access_denied('Unknown', 'Unknown', request_id)

    logger.info('[%s] Forwarded URL: %s', request_id, forwarded_url)
    parsed_url = urllib.parse.urlsplit(forwarded_url)

    # Find x-forwarded-for
    try:
        x_forwarded_for = request.headers['X-Forwarded-For']
    except KeyError:
        logger.error('[%s] X-Forwarded-For header is missing', request_id)
        return render_access_denied('Unknown', forwarded_url, request_id)

    logger.debug('[%s] X-Forwarded-For: %s', request_id, x_forwarded_for)

    def get_client_ip(route):
        # Returns None (implicitly) when the configured index is out of range.
        try:
            return x_forwarded_for.split(',')[int(route['IP_DETERMINED_BY_X_FORWARDED_FOR_INDEX'])].strip()
        except IndexError:
            logger.debug('[%s] Not enough addresses in x-forwarded-for %s', request_id, x_forwarded_for)

    routes = env['ROUTES']

    # Per-route check results, each list indexed in step with `routes`.
    hostname_ok = [
        re.match(route['HOSTNAME_REGEX'], parsed_url.hostname)
        for route in routes
    ]
    client_ips = [
        get_client_ip(route)
        for route in routes
    ]
    ip_ok = [
        any(client_ips[i] and IPv4Address(client_ips[i]) in IPv4Network(ip_range)
            for ip_range in route['IP_RANGES'])
        for i, route in enumerate(routes)
    ]
    shared_secrets = [
        route.get('SHARED_SECRET_HEADER', [])
        for route in routes
    ]
    shared_secret_ok = [
        [
            (
                shared_secret['NAME'] in request.headers
                and constant_time_is_equal(shared_secret['VALUE'].encode(), request.headers[shared_secret['NAME']].encode())
            )
            for shared_secret in shared_secrets[i]
        ]
        for i, _ in enumerate(routes)
    ]

    # In general, any matching basic auth credentials are accepted. However,
    # on authentication paths, only those with that path are accepted, and
    # on failure, a 401 is returned to request the correct credentials
    basic_auths = [
        route.get('BASIC_AUTH', [])
        for route in routes
    ]
    basic_auths_ok = [
        [
            request.authorization
            and constant_time_is_equal(basic_auth['USERNAME'].encode(), request.authorization.username.encode())
            and constant_time_is_equal(basic_auth['PASSWORD'].encode(), request.authorization.password.encode())
            for basic_auth in basic_auths[i]
        ]
        for i, _ in enumerate(routes)
    ]
    on_auth_path_and_ok = [
        [
            basic_auths_ok[i][j]
            for j, basic_auth in enumerate(basic_auths[i])
            if parsed_url.path == basic_auth['AUTHENTICATE_PATH']
        ]
        for i, _ in enumerate(routes)
    ]
    any_on_auth_path_and_ok = any([
        any(on_auth_path_and_ok[i])
        for i, _ in enumerate(routes)
    ])
    # 401-challenge only when some otherwise-passing route has auth paths
    # configured for this path and every credential check on them failed.
    should_request_auth = not any_on_auth_path_and_ok and any(
        (
            hostname_ok[i]
            and ip_ok[i]
            and (not shared_secrets[i] or any(shared_secret_ok[i]))
            and len(on_auth_path_and_ok[i])
            and all(not ok for ok in on_auth_path_and_ok[i])
        )
        for i, _ in enumerate(routes)
    )
    should_respond_ok_to_auth_request = any(
        (
            hostname_ok[i]
            and ip_ok[i]
            and (not shared_secrets[i] or any(shared_secret_ok[i]))
            and len(on_auth_path_and_ok[i])
            and any(on_auth_path_and_ok[i])
        )
        for i, _ in enumerate(routes)
    )
    any_route_with_all_checks_passed = any(
        (
            hostname_ok[i]
            and ip_ok[i]
            and (not shared_secrets[i] or any(shared_secret_ok[i]))
            and (not basic_auths[i] or any(basic_auths_ok[i]))
        )
        for i, _ in enumerate(routes)
    )

    # There is no perfect answer as to which IP to present to the client in
    # the light of multiple routes with different indexes of the
    # x-forwarded-for header. However, in real cases it is likely that if the
    # host matches, then that will be the correct one. If 'Unknown' is then
    # shown to the user, it suggests something has been misconfigured
    client_ip = next(
        (client_ips[i] for i, _ in enumerate(routes) if hostname_ok[i])
        , 'Unknown')

    # Strip secret headers plus hop-by-hop/routing headers before proxying.
    headers_to_remove = tuple(set(
        shared_secret['NAME'].lower()
        for i, _ in enumerate(routes)
        for shared_secret in shared_secrets[i]
    )) + ('host', 'x-cf-forwarded-url', 'connection')

    if should_request_auth:
        return Response(
            'Could not verify your access level for that URL.\n'
            'You have to login with proper credentials', 401,
            {'WWW-Authenticate': 'Basic realm="Login Required"'})

    if should_respond_ok_to_auth_request:
        return 'ok'

    if not any_route_with_all_checks_passed:
        logger.warning(
            '[%s] No matching route; host: %s client ip: %s',
            request_id, parsed_url.hostname, client_ip)
        return render_access_denied(client_ip, forwarded_url, request_id)

    logger.info('[%s] Making request to origin', request_id)

    def downstream_data():
        # Stream the client's request body upstream in 64 KiB chunks.
        while True:
            contents = request.stream.read(65536)
            if not contents:
                break
            yield contents

    origin_response = http.request(
        request.method,
        forwarded_url,
        headers={
            k: v for k, v in request.headers
            if k.lower() not in headers_to_remove
        },
        preload_content=False,
        redirect=False,
        assert_same_host=False,
        body=downstream_data(),
    )

    logger.info('[%s] Origin response status: %s', request_id, origin_response.status)

    def release_conn():
        # Return the upstream connection to its pool once the client is done.
        origin_response.release_conn()
        logger.info('[%s] End', request_id)

    downstream_response = Response(
        origin_response.stream(65536, decode_content=False),
        status=origin_response.status,
        headers=[
            (k, v) for k, v in origin_response.headers.items()
            if k.lower() != 'connection'
        ],
    )
    # Preserve the origin's Location header exactly as sent.
    downstream_response.autocorrect_location_header = False
    downstream_response.call_on_close(release_conn)

    logger.info('[%s] Starting response to client', request_id)
    return downstream_response
def start_query_download_tool(trans_id):
    """Stream the transaction's query results as a downloadable CSV.

    A dedicated synchronous connection performs the export and is released
    when the response closes (or on error).
    """
    sync_conn = None
    (status, error_msg, conn,
     trans_obj, session_obj) = check_transaction_status(trans_id)
    if not (status and conn is not None and
            trans_obj is not None and session_obj is not None):
        return internal_server_error(
            errormsg=gettext("Transaction status check failed.")
        )

    def cleanup():
        # Release the ad-hoc synchronous connection; guarded so the
        # exception handler can register it even when the connection was
        # never created (the original referenced `cleanup` before its
        # definition on that path, raising NameError).
        if sync_conn is not None:
            conn.manager.connections[sync_conn.conn_id]._release()
            del conn.manager.connections[sync_conn.conn_id]

    data = request.args if request.args else None
    try:
        if data and 'query' in data:
            sql = data['query']
            conn_id = str(random.randint(1, 9999999))
            # 'async' became a reserved word in Python 3.7; pass it through
            # dict expansion to keep the call syntactically valid.
            sync_conn = conn.manager.connection(
                did=trans_obj.did,
                conn_id=conn_id,
                auto_reconnect=False,
                **{'async': False}
            )
            sync_conn.connect(autocommit=False)

            # This returns generator of records.
            status, gen = sync_conn.execute_on_server_as_csv(
                sql, records=2000
            )
            if not status:
                r = Response('"{0}"'.format(gen), mimetype='text/csv')
                r.headers[
                    "Content-Disposition"
                ] = "attachment;filename=error.csv"
                r.call_on_close(cleanup)
                return r

            r = Response(
                gen(
                    quote=blueprint.csv_quoting.get(),
                    quote_char=blueprint.csv_quote_char.get(),
                    field_separator=blueprint.csv_field_separator.get()
                ),
                mimetype='text/csv'
            )
            if 'filename' in data and data['filename'] != "":
                filename = data['filename']
            else:
                import time
                filename = str(int(time.time())) + ".csv"

            # We will try to encode report file name with latin-1
            # If it fails then we will fallback to default ascii file name
            # werkzeug only supports latin-1 encoding supported values
            try:
                tmp_file_name = filename
                tmp_file_name.encode('latin-1', 'strict')
            except UnicodeEncodeError:
                filename = "download.csv"

            r.headers[
                "Content-Disposition"
            ] = "attachment;filename={0}".format(filename)
            r.call_on_close(cleanup)
            return r
    except Exception as e:
        r = Response('"{0}"'.format(e), mimetype='text/csv')
        r.headers["Content-Disposition"] = "attachment;filename=error.csv"
        r.call_on_close(cleanup)
        return r