def run(self):
    """Sanitise each JSON measurement from the input target and write the
    results to the output target, one JSON document per line.

    tcp_connect and bridge_reachability entries are sanitised against the
    bridge DB loaded from the configured path; all other test types pass
    through unchanged.
    """
    bridge_db_path = config.get("ooni", "bridge-db-path")
    # Context managers ensure every file is closed even if sanitisation
    # raises part-way through (the original leaked out_file on error).
    with open(bridge_db_path) as bridge_db_file:
        bridge_db = json_loads(bridge_db_file.read())
    with self.input().open('r') as fobj, self.output().open('w') as out_file:
        for line in fobj:
            entry = json_loads(line.strip())
            if entry['test_name'] == 'tcp_connect':
                entry = self._sanitise_tcp_connect(entry, bridge_db)
            elif entry['test_name'] == 'bridge_reachability':
                entry = self._sanitise_bridge_reachability(entry, bridge_db)
            out_file.write(json_dumps(entry))
            out_file.write("\n")
async def auth_param(request=None, rpc_data=None, *args, **kwargs):
    """Collect parameters from a Sanic HTTP request (JSON body for POST,
    query string for GET) or a gRPC payload, then invoke the wrapped
    handler only when every name in ``keys`` has a non-empty value;
    otherwise reply with a generic error response."""
    request_params = {}
    params = []
    if isinstance(request, Request):  # sanic request
        method = request.method
        if method == 'POST':
            try:
                body_data = json_loads(str(request.body, encoding='utf-8'))
            except Exception:
                return response_handle(request, {'info': 'error'})
            request_params.update(body_data)
            params = [k for k, v in body_data.items() if v]
        elif method == 'GET':
            request_params.update(request.args)
            params = [k for k, v in request.args.items() if v]
        else:
            return response_handle(request, {'info': 'error'})
    else:
        # gRPC request: parameters were already decoded by the caller.
        request_params = rpc_data
        params = [k for k, v in request_params.items() if v]
    if not set(keys).issubset(set(params)):
        return response_handle(request, {'info': 'error'})
    kwargs['request_params'] = request_params
    return await dec_func(func, request, *args, **kwargs)
def _extract_memory_info(self, dump_pathname, processor_notes): """Extract and return the JSON data from the .json.gz memory report. file""" def error_out(error_message): processor_notes.append(error_message) return {"ERROR": error_message} try: fd = gzip_open(dump_pathname, "rb") except IOError as x: error_message = "error in gzip for %s: %r" % (dump_pathname, x) return error_out(error_message) try: memory_info_as_string = fd.read() if len(memory_info_as_string) > self.config.max_size_uncompressed: error_message = ( "Uncompressed memory info too large %d (max: %d)" % ( len(memory_info_as_string), self.config.max_size_uncompressed, ) ) return error_out(error_message) memory_info = json_loads(memory_info_as_string) except IOError as x: error_message = "error in gzip for %s: %r" % (dump_pathname, x) return error_out(error_message) except ValueError as x: error_message = "error in json for %s: %r" % (dump_pathname, x) return error_out(error_message) finally: fd.close() return memory_info
async def valid_proxies(ip, port):
    """
    Return all usable proxies without socket 4/5
    :param ip:
    :param port:
    :return:
    """
    # TODO valid socket 4/5
    proxy = "http://{ip}:{port}".format(ip=ip, port=port)
    html = await request_url_by_aiohttp(
        url=CONFIG.TEST_URL['http'],
        proxy=proxy,
        timeout=CONFIG.TEST_URL['timeout'])
    if not html:
        return False, None
    try:
        headers = json_loads(html).get('headers', {})
        forwarded_for = headers.get('X-Forwarded-For')
        proxy_connection = headers.get('Proxy-Connection')
        # Classify anonymity: 3 = transparent chain, 2 = anonymous,
        # 1 = elite (no tell-tale headers).
        if forwarded_for and ',' in forwarded_for:
            anonymity = 3
        elif proxy_connection:
            anonymity = 2
        else:
            anonymity = 1
        return True, {
            'proxy': "{ip}:{port}".format(ip=ip, port=port),
            'types': anonymity,
        }
    except Exception:
        return False, None
async def auth_param(request, *args, **kwargs):
    """Check that every key required by ``keys`` arrives with a non-empty
    value, then delegate to the wrapped handler.

    POST/DELETE parameters come from the JSON body, GET parameters from
    the query string; any other method is rejected with a 400."""
    request_params = {}
    method = request.method
    if method in ('POST', 'DELETE'):
        try:
            body_data = json_loads(str(request.body, encoding='utf-8'))
        except Exception as e:
            LOGGER.exception(e)
            return response_handle(request, UniResponse.PARAM_PARSE_ERR, status=400)
        request_params.update(body_data)
        params = [k for k, v in body_data.items() if v]
    elif method == 'GET':
        request_params.update(request.args)
        params = [k for k, v in request.args.items() if v]
    else:
        # TODO
        return response_handle(request, UniResponse.PARAM_UNKNOWN_ERR, status=400)
    if not set(keys).issubset(set(params)):
        return response_handle(request, UniResponse.PARAM_ERR, status=400)
    try:
        kwargs['request_params'] = request_params
        return await func(request, *args, **kwargs)
    except Exception as e:
        LOGGER.exception(e)
        return response_handle(request, UniResponse.SERVER_UNKNOWN_ERR, 500)
def json(self):
    """Lazily parse the request body as JSON and cache the result.

    Raises InvalidUsage when the body is not valid JSON."""
    if self.parsed_json is not None:
        return self.parsed_json
    try:
        parsed = json_loads(self.body)
    except Exception:
        raise InvalidUsage("Failed when parsing body as json")
    self.parsed_json = parsed
    return parsed
def json(self):
    """Best-effort lazy JSON parse of the body.

    On failure, swallows the error and returns self.parsed_json unchanged
    (presumably None before first successful parse — confirm in caller)."""
    if self.parsed_json:
        return self.parsed_json
    try:
        self.parsed_json = json_loads(self.body)
    except Exception:
        # Invalid body: deliberately leave parsed_json untouched.
        pass
    return self.parsed_json
def json(self):
    """Parse the request body as JSON on first access and cache it.

    Raises InvalidUsage if the body cannot be decoded."""
    if not self.parsed_json:
        try:
            # Decode the raw body content.
            parsed = json_loads(self.body)
        except Exception:
            raise InvalidUsage("Failed when parsing body as json")
        self.parsed_json = parsed
    return self.parsed_json
def json(self):
    """Lazily parse the body as JSON; on failure, log the exception and
    return self.parsed_json as-is rather than raising."""
    if self.parsed_json:
        return self.parsed_json
    try:
        self.parsed_json = json_loads(self.body)
    except Exception:
        log.exception("failed when parsing body as json")
    return self.parsed_json
def json(self):
    """Lazily parse and cache the request body as JSON.

    An empty body yields None; any other unparsable body raises the
    exception built by self._invalid_usage."""
    if self.parsed_json is not None:
        return self.parsed_json
    try:
        self.parsed_json = json_loads(self.body)
    except Exception:
        if self.body:
            raise self._invalid_usage('Failed when parsing body as json')
        return None
    return self.parsed_json
def json(self):
    """Lazily parse the body as JSON and cache it.

    On parse failure, returns a 400 HTTPResponse echoing the body instead
    of raising (unusual contract — callers must handle either type)."""
    if self.parsed_json:
        return self.parsed_json
    try:
        self.parsed_json = json_loads(self.body)
    except Exception:
        return HTTPResponse(
            json_dumps(self.body),
            headers=self.headers,
            status=400,
            content_type="application/json")
    return self.parsed_json
async def post_rss_json(request, **kwargs):
    """Return the RSS feed data as a JSON response when the posted name
    matches the expected owner; otherwise reply with an error payload."""
    payload = json_loads(str(request.body, encoding='utf-8'))
    if payload.get('name') != 'howie6879':
        return json({'info': '参数错误'})
    return json({
        'info': await get_rss(),
        'status': 1,
    })
async def auth_param(request, *args, **kwargs):
    """Merge request parameters (proxy header, query string, JSON body)
    into the decorator's ``data_`` defaults and call the wrapped handler
    when every key of ``data_`` is supplied; otherwise return a 400/500
    JSON error payload.

    NOTE(review): relies on closure variables (``data_``, ``func``,
    ``logger``, ``UniResponse``, ``response_handle``, ``json_encoder``)
    from the enclosing decorator, which is outside this view.
    """
    keys = data_.keys()
    request_params = {}
    # Client IP as forwarded by the reverse proxy (empty string if absent).
    ip = request.headers.get('X-Real-IP', '')
    request_params["ip"] = ip
    # Query-string args arrive as lists; keep only the first value of each.
    request_params.update({k: v[0] for k, v in request.args.items()})
    path_request = request.path
    logger.info("request.path:%s", path_request)
    # Names considered "provided": every query arg plus every data_ key
    # that already carries a non-None default.
    params = [key for key, value in request.args.items()
              ] + [k for k, v in data_.items() if v is not None]
    # POST request
    if request.method == 'POST' or request.method == 'DELETE':
        try:
            post_data = json_loads(str(request.body, encoding='utf-8'))
        except Exception as e:
            logger.error("err %s" % e)
            return response_handle(request, UniResponse.PARAM_PARSE_ERR, status=400)
        else:
            request_params.update(post_data)
            # Body keys count as provided even when their values are falsy.
            params += [key for key, value in post_data.items()]
    elif request.method == 'GET':
        request_params.update(
            {k: v[0] for k, v in request.args.items()})
    else:
        # TODO: other HTTP methods are rejected for now.
        return response_handle(request, UniResponse.PARAM_UNKNOWN_ERR, status=400)
    if set(keys).issubset(set(params)):
        try:
            # Overlay the collected parameters onto the defaults and hand
            # the merged dict to the wrapped handler.
            data_.update(request_params)
            kwargs['request_params'] = data_
            logger.info("request_params %s" % data_)
            response = await func(request, *args, **kwargs)
            return response
        except Exception as e:
            logger.error(e)
            return json_encoder({
                'code': 500,
                'msg': 'err ',
                'data': {}
            })
    else:
        return json_encoder({
            'code': 400,
            'msg': 'miss params %s' % list(set(keys).difference(set(params))),
            'data': {}
        })
def load_json(body):
    """Decode *body* as JSON.

    Returns None for an empty/falsy body; wraps any decoding failure in
    exceptions.RpcParseError carrying the offending data."""
    if not body:
        return None
    try:
        return json_loads(body)
    except Exception as exc:
        detail = {
            'exception': str(type(exc)),
            'data': body,
            'message': str(exc),
        }
        raise exceptions.RpcParseError(data=detail)
def test_json():
    """The app should serve a JSON payload that round-trips intact."""
    app = Luya('test_json')

    @app.route('/')
    async def handler(request):
        return json({"test": True})

    body = json_loads(app.test_client.get('/').text)
    assert body.get('test') == True
async def get_all(self, default=None, **kwargs):
    """
    Return all values
    """
    try:
        raw = await self._client_conn.hgetall(self.name)
        return {
            field.decode('utf-8'): json_loads(raw_value.decode('utf-8'))
            for field, raw_value in raw.items()
        }
    except Exception:
        # Any failure (connection, decode, JSON) falls back to `default`.
        return default
class OutOfMemoryBinaryRule(Rule):
    """Processor rule that extracts the gzipped JSON memory report
    (the 'memory_report' dump) attached to an out-of-memory crash.

    NOTE: this block uses Python 2 exception syntax ('except IOError, x').
    """

    required_config = Namespace()
    required_config.add_option(
        'max_size_uncompressed',
        default=20 * 1024 * 1024,  # ~20 Mb
        doc=(
            "Number of bytes, max, that we accept memory info payloads "
            "as JSON."
        )
    )

    #--------------------------------------------------------------------------
    def version(self):
        # Version string reported to the processor's rule framework.
        return '1.0'

    #--------------------------------------------------------------------------
    def _predicate(self, raw_crash, raw_dumps, processed_crash, proc_meta):
        # Apply this rule only when a memory report dump is present.
        return 'memory_report' in raw_dumps

    #--------------------------------------------------------------------------
    def _extract_memory_info(self, dump_pathname, processor_notes):
        """Extract and return the JSON data from the .json.gz memory report.
        file"""
        try:
            fd = gzip_open(dump_pathname, "rb")
        except IOError, x:
            # Could not open/read the gzip file: note it and return an
            # error marker instead of raising.
            error_message = "error in gzip for %s: %r" % (dump_pathname, x)
            processor_notes.append(error_message)
            return {"ERROR": error_message}
        try:
            memory_info_as_string = fd.read()
            # Guard against decompression bombs / oversized payloads.
            if len(memory_info_as_string) > self.config.max_size_uncompressed:
                error_message = (
                    "Uncompressed memory info too large %d (max: %d)" % (
                        len(memory_info_as_string),
                        self.config.max_size_uncompressed,
                    )
                )
                processor_notes.append(error_message)
                return {"ERROR": error_message}
            memory_info = json_loads(memory_info_as_string)
        except ValueError, x:
            error_message = "error in json for %s: %r" % (dump_pathname, x)
            processor_notes.append(error_message)
            return {"ERROR": error_message}
        # NOTE(review): in the visible span, `memory_info` is never returned
        # and `fd` is never closed on the success path — this method appears
        # truncated here; confirm against the full source before changing.
def format_and_send_message():
    """Pull one queued message, render its %-style subject/body templates
    with the stored JSON context, attempt delivery, and record the result
    in the metrics counters."""
    msg_info = send_queue.get()
    # The context was stored JSON-encoded alongside the message.
    context = json_loads(msg_info['context'])
    msg = {
        'user': msg_info['user'],
        'mode': msg_info['mode'],
        'subject': msg_info['subject'] % context,
        'body': msg_info['body'] % context,
    }
    try:
        send_message(msg)
    except Exception:
        # Bug fix: was a bare `except:`, which also swallowed SystemExit
        # and KeyboardInterrupt; only real send errors should be counted.
        logger.exception('Failed to send message %s', msg)
        mark_message_as_unsent(msg_info)
        metrics.stats['message_fail_cnt'] += 1
    else:
        mark_message_as_sent(msg_info)
        metrics.stats['message_sent_cnt'] += 1
def skip_test_multiprocessing():
    """Serve the app with two workers, fetch '/' from inside after_start,
    and verify the handler's JSON payload survives the round trip."""
    app = Sanic('test_json')
    # Shared buffer so a worker process can hand the response body back.
    response = Array('c', 50)

    @app.route('/')
    async def handler(request):
        return json({"test": True})

    stop_event = Event()

    async def after_start(*args, **kwargs):
        http_response = await local_request('get', '/')
        response.value = http_response.text.encode()
        stop_event.set()

    def rescue_crew():
        # Safety net: force shutdown if the request never completes.
        sleep(5)
        stop_event.set()

    rescue_process = Process(target=rescue_crew)
    rescue_process.start()
    app.serve_multiple(
        {
            'host': HOST,
            'port': PORT,
            'after_start': after_start,
            'request_handler': app.handle_request,
            'request_max_size': 100000,
        },
        workers=2,
        stop_event=stop_event)
    rescue_process.terminate()
    try:
        results = json_loads(response.value)
    except Exception:
        # Bug fix: was a bare `except:`, which would also convert
        # SystemExit/KeyboardInterrupt into a ValueError.
        raise ValueError(
            "Expected JSON response but got '{}'".format(response))
    assert results.get('test') == True
def _format_record(self, line, idx): try: record = json_loads(line) except Exception: logger.error("%s:%s error in parsing JSON" % (self.report_path, idx)) logger.error(traceback.format_exc()) row = [] for key, data_type in self.columns: try: value = record[key] if data_type == 'JSONB': value = json_dumps(value) except KeyError: logger.error("%s:%s could not find key %s" % (self.report_path, idx, key)) logger.debug(record) raise Exception("Could not find key in report") row.append(value) return row
def load_body(self):
    """
    Load http request body and returns form data and files.
    """
    environ = self.environ
    content_length = environ['CONTENT_LENGTH']
    length = int(content_length)
    # NOTE: attribute name `max_content_lenght` (sic) is defined elsewhere
    # in the class; kept as-is.
    if length > self.max_content_lenght:
        raise ValueError('Maximum content length exceeded')
    stream = environ['wsgi.input']
    content_type = environ['CONTENT_TYPE']
    if '/x' in content_type:
        # application/x-www-form-urlencoded
        return parse_qs(stream.read(length).decode(self.encoding)), None
    if '/j' in content_type:
        # application/json
        return json_loads(stream.read(length).decode(self.encoding)), None
    if content_type.startswith('m'):
        # multipart/form-data
        return parse_multipart(stream, content_type, content_length,
                               self.encoding)
    return None, None
def jsonrpc(self) -> Optional[JrpcRequest]:
    """Lazily parse and validate this HTTP request as JSON-RPC.

    Returns None for non-POST methods. On first access, parses the body,
    validates it, and caches either a single JrpcRequest (dict body) or a
    list of them (batch body) in self._parsed_jsonrpc.

    Raises:
        ParseError: blank body, or JSON decoding failure.
        InvalidRequest: any other validation/conversion failure.
    """
    # ignore body and json if HTTP method is not POST
    if self.method != 'POST':
        return None
    if self._parsed_jsonrpc is _empty:
        self._parsed_jsonrpc = None
        # Imported here rather than at module level — presumably to avoid
        # a circular import; confirm before hoisting.
        from jussi.errors import ParseError
        from jussi.errors import InvalidRequest
        from jussi.validators import validate_jsonrpc_request
        try:
            # raise ParseError for blank/empty body
            if self.body is _empty:
                raise ParseError(http_request=self)
            # raise ParseError if parsing fails
            try:
                self._parsed_json = json_loads(self.body)
            except Exception as e:
                raise ParseError(http_request=self, exception=e)
            # validate jsonrpc
            jsonrpc_request = self._parsed_json
            validate_jsonrpc_request(jsonrpc_request)
            if isinstance(jsonrpc_request, dict):
                # Single request: wrapped with batch index 0.
                self._parsed_jsonrpc = jsonrpc_from_request(
                    self, 0, jsonrpc_request)
                self.is_single_jrpc = True
            elif isinstance(jsonrpc_request, list):
                # Batch request: wrap each entry with its position.
                self._parsed_jsonrpc = [
                    jsonrpc_from_request(self, batch_index, req)
                    for batch_index, req in enumerate(jsonrpc_request)
                ]
                self.is_batch_jrpc = True
        except ParseError as e:
            raise e
        except Exception as e:
            raise InvalidRequest(http_request=self, exception=e)
    return self._parsed_jsonrpc
def json(self):
    """Parse the request body as JSON, caching it in self._parsed['json'].

    Raises:
        panic_exceptions.ServerError: Content-Type is not application/json.
        panic_exceptions.BadRequest: the body cannot be decoded as JSON.
    """
    if self.headers['content-type'] not in ['application/json']:
        raise panic_exceptions.ServerError(
            f'Content-Type[{self.headers["content-type"].value}] not supported'
        )
    if 'json' not in self._parsed:
        try:
            self._parsed['json'] = json_loads(
                self.body.extract.decode(self._encoding))
        except ValueError:
            raise panic_exceptions.BadRequest(
                f'Unable to decode request-body of Content-Type[{self.headers["content-type"].value}]'
            )
        except Exception:
            # Bug fix: removed leftover debugging hooks (a print() and an
            # interactive ipdb.set_trace() breakpoint) that would hang a
            # production server; unexpected errors now propagate unchanged.
            raise
    return self._parsed['json']
async def beacons(request, ws):
    """Websocket loop: for each incoming JSON message carrying latitude and
    longitude, reply with the nearby beacons that share the same geohash
    cell (precision 7); malformed messages get an error reply."""
    while True:
        data = await ws.recv()
        try:
            data = json_loads(data)
            # Bug fix: was `assert 'latitude' in data` etc. — asserts are
            # stripped under `python -O`, silently skipping validation.
            if 'latitude' not in data or 'longitude' not in data:
                raise ValueError('missing coordinates')
        except Exception:
            await ws.send(json_dumps({"error": "Illegal data"}))
            continue
        data['latitude'] = round(float(data['latitude']), 6)
        data['longitude'] = round(float(data['longitude']), 6)
        limit = data.get('limit', 10)
        geohash = geohash2.encode(
            data['latitude'], data['longitude'], precision=7)
        beacons = await Item.filter(geohash=geohash).limit(limit)
        beacons_data = [
            dict(id=beacon.id, type=beacon.type, content=beacon.content)
            for beacon in beacons
        ]
        # Bug fix: removed leftover debug print of beacons_data.
        await ws.send(json_dumps(beacons_data))
def load_json_body(req):
    """Parse the raw body stored in req.context['body'] as JSON.

    Raises HTTPBadRequest when decoding fails."""
    raw = req.context['body']
    try:
        return json_loads(raw)
    except ValueError as decode_err:
        message = 'failed to decode json: %s' % str(decode_err)
        raise HTTPBadRequest('invalid JSON', message)
def _json_report_iterator(self, fobj): for line in fobj: yield json_loads(line.strip())