def test_datetime_mode_loads():
    """Aware datetimes round-trip through ISO8601 mode in every load variant."""
    import pytz

    utc = datetime.now(pytz.utc)
    as_json = rapidjson.dumps(utc, datetime_mode=rapidjson.DATETIME_MODE_ISO8601)
    assert as_json == '"%s"' % utc.isoformat()
    assert rapidjson.loads(as_json, datetime_mode=rapidjson.DATETIME_MODE_ISO8601) == utc

    local = utc.astimezone(pytz.timezone('Europe/Rome'))
    as_json = rapidjson.dumps(local, datetime_mode=rapidjson.DATETIME_MODE_ISO8601)
    assert as_json == '"%s"' % local.isoformat()
    # Without a datetime mode the value is left as a plain string.
    assert rapidjson.loads(as_json) == local.isoformat()
    assert rapidjson.loads(as_json, datetime_mode=rapidjson.DATETIME_MODE_ISO8601) == local

    as_utc = rapidjson.loads(as_json, datetime_mode=rapidjson.DATETIME_MODE_ISO8601_UTC)
    assert as_utc == utc
    # UTC offset of a UTC datetime is timedelta(0), which is falsy.
    assert not as_utc.utcoffset()

    as_naive = rapidjson.loads(as_json, datetime_mode=rapidjson.DATETIME_MODE_ISO8601_IGNORE_TZ)
    assert as_naive == local.replace(tzinfo=None)
def test_max_recursion_depth():
    """Exceeding max_recursion_depth during dumps raises OverflowError."""
    nested = {'a': {'b': {'c': 1}}}
    assert rapidjson.dumps(nested) == '{"a":{"b":{"c":1}}}'
    with pytest.raises(OverflowError):
        rapidjson.dumps(nested, max_recursion_depth=2)
def test_datetime_values(value):
    """Datetime values need an explicit datetime_mode; ISO8601 round-trips."""
    with pytest.raises(TypeError):
        rapidjson.dumps(value)
    serialized = rapidjson.dumps(value, datetime_mode=rapidjson.DATETIME_MODE_ISO8601)
    deserialized = rapidjson.loads(serialized, datetime_mode=rapidjson.DATETIME_MODE_ISO8601)
    assert deserialized == value
def test_allow_nan():
    """allow_nan (default True) permits NaN/Infinity; False rejects them."""
    values = [1.1, float("inf"), 2.2, float("nan"), 3.3, float("-inf"), 4.4]
    expected = '[1.1,Infinity,2.2,NaN,3.3,-Infinity,4.4]'
    assert rapidjson.dumps(values) == expected
    assert rapidjson.dumps(values, allow_nan=True) == expected
    with pytest.raises(ValueError):
        rapidjson.dumps(values, allow_nan=False)

    nan_text = "NaN"
    assert math.isnan(rapidjson.loads(nan_text))
    assert math.isnan(rapidjson.loads(nan_text, allow_nan=True))
    with pytest.raises(ValueError):
        rapidjson.loads(nan_text, allow_nan=False)

    # Infinities compare by equality, so both signs fit one loop.
    for text, value in (("Infinity", float("inf")), ("-Infinity", float("-inf"))):
        assert rapidjson.loads(text) == value
        assert rapidjson.loads(text, allow_nan=True) == value
        with pytest.raises(ValueError):
            rapidjson.loads(text, allow_nan=False)
def test_sort_keys():
    """sort_keys orders object keys, with and without indentation."""
    obj = {"a": 1, "z": 2, "b": 3}
    assert rapidjson.dumps(obj, sort_keys=True) == '{"a":1,"b":3,"z":2}'
    pretty = '{\n    "a": 1,\n    "b": 3,\n    "z": 2\n}'
    assert rapidjson.dumps(obj, sort_keys=True, indent=4) == pretty
def test_circular_composite():
    """A self-referencing container is rejected with OverflowError."""
    container = {'a': []}
    container['a'].append(container)
    with pytest.raises(OverflowError):
        rapidjson.dumps(container)
def test_unicode(u):
    """A str and its UTF-8 bytes encode identically, matching stdlib json."""
    from_str = rapidjson.dumps(u)
    from_bytes = rapidjson.dumps(u.encode('utf-8'))
    assert from_str == from_bytes
    assert from_str == json.dumps(u)
    assert rapidjson.dumps(u, ensure_ascii=False) == json.dumps(u, ensure_ascii=False)
def test_serialize_sets():
    """Sets serialize only through a default converter; bare sets raise."""
    def as_list(obj):
        return list(obj)

    rapidjson.dumps([set()], default=as_list)
    with pytest.raises(TypeError):
        rapidjson.dumps([set()])
def test_unicode_3():
    """BMP/astral characters and control chars encode like stdlib json."""
    text = 'asdf \U0010ffff \U0001ffff qwert \uffff \u10ff \u00ff \u0080 \u7fff \b\n\r'
    dumped = rapidjson.dumps(text)
    assert dumped == rapidjson.dumps(text.encode('utf-8'))
    # Escape-sequence hex digits may differ in case between implementations.
    assert dumped.lower() == json.dumps(text).lower()
    assert rapidjson.dumps(text, ensure_ascii=False) == json.dumps(text, ensure_ascii=False)
def test_unicode_1():
    """Greek letters encode identically as str and UTF-8 bytes."""
    text = '\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
    dumped = rapidjson.dumps(text)
    assert dumped == rapidjson.dumps(text.encode('utf-8'))
    # Escape-sequence hex digits may differ in case between implementations.
    assert dumped.lower() == json.dumps(text).lower()
    assert rapidjson.dumps(text, ensure_ascii=False) == json.dumps(text, ensure_ascii=False)
def test_unicode_2():
    """The maximum code point encodes identically as str and UTF-8 bytes."""
    text = '\U0010ffff'
    dumped = rapidjson.dumps(text)
    assert dumped == rapidjson.dumps(text.encode('utf-8'))
    # Escape-sequence hex digits may differ in case between implementations.
    assert dumped.lower() == json.dumps(text).lower()
    assert rapidjson.dumps(text, ensure_ascii=False) == json.dumps(text, ensure_ascii=False)
def test_skipkeys():
    """Non-string keys raise TypeError unless skipkeys drops them."""
    bad_keys = {True: False, 1: 1, 1.1: 1.1, (1, 2): "foo", b"asdf": 1, None: None}
    with pytest.raises(TypeError):
        rapidjson.dumps(bad_keys)
    with pytest.raises(TypeError):
        rapidjson.dumps(bad_keys, skipkeys=False)
    assert rapidjson.dumps(bad_keys, skipkeys=True) == '{}'
def test_infinity():
    """Infinity round-trips both as a plain float and in use_decimal mode."""
    inf = float("inf")
    dumped = rapidjson.dumps(inf)
    loaded = rapidjson.loads(dumped)
    assert loaded == inf

    d = Decimal(inf)
    # Bug fix: the Decimal value was created but the plain float was dumped,
    # leaving the use_decimal serialization path untested.
    dumped = rapidjson.dumps(d, use_decimal=True)
    loaded = rapidjson.loads(dumped, use_decimal=True)
    assert loaded == inf
def test_nan():
    """NaN round-trips both as a plain float and in use_decimal mode."""
    nan = float("nan")
    dumped = rapidjson.dumps(nan)
    loaded = rapidjson.loads(dumped)
    assert math.isnan(nan)
    assert math.isnan(loaded)

    d = Decimal(nan)
    # Bug fix: the Decimal value was created but the plain float was dumped,
    # leaving the use_decimal serialization path untested.
    dumped = rapidjson.dumps(d, use_decimal=True)
    loaded = rapidjson.loads(dumped, use_decimal=True)
    assert math.isnan(d)
    assert math.isnan(loaded)
def _load_schema(name):
    """Load a schema from disk.

    Reads ``<name>.yaml`` from the directory containing this file and also
    compiles it into a rapidjson validator for fast validation.

    Returns:
        tuple: ``(path, (schema, fast_schema))`` where ``schema`` is the
        parsed YAML document and ``fast_schema`` its compiled form.
    """
    path = os.path.join(os.path.dirname(__file__), name + '.yaml')
    with open(path) as handle:
        schema = yaml.safe_load(handle)
    # Round-trip through rapidjson to feed the schema compiler a JSON string.
    fast_schema = rapidjson_schema.loads(rapidjson.dumps(schema))
    return path, (schema, fast_schema)
def test_doubles():
    """Random doubles survive a dumps/loads round-trip exactly.

    Fix: the original accumulated into a ``doubles`` list that was never
    read, and named a loop index it never used.
    """
    for _ in range(100000):
        value = sys.maxsize * random.random()
        assert rapidjson.loads(rapidjson.dumps(value)) == value
def wshandler(request):
    """WebSocket endpoint that proxies client messages to the web backend.

    Each text frame is expected to be a JSON object with at least ``url``,
    ``uid`` and ``cookie`` keys and an optional ``payload``; the payload is
    forwarded to ``host_web`` via POST (GET when absent) and the response is
    pushed back to the client tagged with the same ``uid``. The socket is
    registered in ``request.app['sockets']`` for the connection's lifetime.
    """
    env, error = yield from get_env(request)
    if error:
        return error

    ws = web.WebSocketResponse()
    ws.start(request)
    request.app['sockets'].append((env.username, ws))
    session = request.cookies.get('session')
    while True:
        msg = yield from ws.receive()
        if msg.tp == web.MsgType.text:
            log.debug(msg.data)
            data = json.loads(msg.data)
            payload = data.get('payload')
            if payload:
                payload = json.dumps(payload)
            # POST only when there is a payload to send, otherwise GET.
            resp = yield from aiohttp.request(
                'POST' if payload else 'GET',
                env('host_web') + data['url'],
                headers={
                    'X-Requested-With': 'XMLHttpRequest',
                    'Cookie': data['cookie']
                },
                data=payload,
            )
            log.debug('%s %s', resp.status, msg.data)
            if resp.status == 200:
                p = (yield from resp.read()).decode()
                ws.send_str(json.dumps({'uid': data['uid'], 'payload': p}))
            # Propagate a refreshed backend session cookie to the client.
            # NOTE(review): ``session`` holds a string but is compared against
            # a cookie Morsel here — looks like it always differs; confirm.
            new_session = resp.cookies.get('session')
            if new_session and session != new_session:
                session = new_session.value
                msg = {'session': new_session.output(header='').strip()}
                ws.send_str(json.dumps(msg))
                log.debug('sent new session')
        elif msg.tp == web.MsgType.close:
            log.debug('ws closed')
            yield from ws.close()
            break
        elif msg.tp == web.MsgType.error:
            log.exception(ws.exception())
    request.app['sockets'].remove((env.username, ws))
    return ws
def test_unicode():
    """Arabic and Chinese text survives a dumps/loads round-trip."""
    samples = (
        'بينهم ان يكون مسلما رشيدا عاقلا ًوابنا شرعيا لابوين عمانيين',
        '本站所提供的資料和服務都不收費,因此網站所需要的資金全來自廣告及捐款。若您願意捐款補助',
    )
    for text in samples:
        assert rapidjson.loads(rapidjson.dumps(text)) == text
def test_uuid_and_datetime_mode_together():
    """datetime_mode and uuid_mode can be combined in one round-trip."""
    value = [date.today(), uuid.uuid1()]
    modes = dict(datetime_mode=rapidjson.DATETIME_MODE_ISO8601,
                 uuid_mode=rapidjson.UUID_MODE_CANONICAL)
    assert rapidjson.loads(rapidjson.dumps(value, **modes), **modes) == value
def test_default():
    """A default callable can serialize complex numbers."""
    def encode_complex(obj):
        if isinstance(obj, complex):
            return [obj.real, obj.imag]
        raise TypeError(repr(obj) + " is not JSON serializable")

    assert rapidjson.dumps(2 + 1j, default=encode_complex) == '[2.0,1.0]'
def write_block(self, block, durability='soft'):
    """Write a block to bigchain.

    Args:
        block (dict): block to write to bigchain.
        durability (str): RethinkDB write durability, defaults to 'soft'.
    """
    serialized = rapidjson.dumps(block)
    query = r.table('bigchain').insert(r.json(serialized), durability=durability)
    query.run(self.conn)
def format(self, record):
    """Render a log record as a JSON string.

    Builds the record dict (message, extras, optional formatted traceback),
    lets subclasses mutate it, then serializes with rapidjson.
    """
    traceback = self.formatException(record.exc_info) if record.exc_info else None
    json_record = self.json_record(record.getMessage(),
                                   self.extra_from_record(record),
                                   record,
                                   traceback)
    self.mutate_json_record(json_record)
    return rapidjson.dumps(json_record)
def test_indent():
    """indent=4 pretty-prints; a non-int indent raises TypeError."""
    obj = {"a": 1, "z": 2, "b": 3}
    # Key order is not guaranteed, so accept every permutation.
    permutations = (
        '{\n    "a": 1,\n    "z": 2,\n    "b": 3\n}',
        '{\n    "a": 1,\n    "b": 3,\n    "z": 2\n}',
        '{\n    "b": 3,\n    "a": 1,\n    "z": 2\n}',
        '{\n    "b": 3,\n    "z": 2,\n    "a": 1\n}',
        '{\n    "z": 2,\n    "a": 1,\n    "b": 3\n}',
        '{\n    "z": 2,\n    "b": 3,\n    "a": 1\n}',
    )
    assert rapidjson.dumps(obj, indent=4) in permutations
    with pytest.raises(TypeError):
        rapidjson.dumps(obj, indent="\t")
def test_uuid_mode():
    """uuid_mode constants, validation, and canonical/hex round-trips."""
    assert rapidjson.UUID_MODE_NONE == 0
    assert rapidjson.UUID_MODE_CANONICAL == 1
    assert rapidjson.UUID_MODE_HEX == 2

    value = uuid.uuid1()
    with pytest.raises(TypeError):
        rapidjson.dumps(value)
    with pytest.raises(ValueError):
        rapidjson.dumps(value, uuid_mode=42)
    with pytest.raises(ValueError):
        rapidjson.loads('""', uuid_mode=42)

    canonical = rapidjson.dumps(value, uuid_mode=rapidjson.UUID_MODE_CANONICAL)
    assert rapidjson.loads(canonical, uuid_mode=rapidjson.UUID_MODE_CANONICAL) == value
    # When loading, hex mode implies canonical format
    assert rapidjson.loads(canonical, uuid_mode=rapidjson.UUID_MODE_HEX) == value

    hexed = rapidjson.dumps(value, uuid_mode=rapidjson.UUID_MODE_HEX)
    assert rapidjson.loads(hexed, uuid_mode=rapidjson.UUID_MODE_HEX) == value
def test_default():
    """default handles known types; unknown ones still raise TypeError."""
    class Bar:
        pass

    class Foo:
        def __init__(self):
            self.foo = "bar"

    def default(obj):
        if isinstance(obj, Foo):
            return {"foo": obj.foo}
        raise TypeError("default error")

    assert rapidjson.dumps({"asdf": Foo()}, default=default) == '{"asdf":{"foo":"bar"}}'

    mixed = {"asdf": Foo(), "qwer": Bar()}
    with pytest.raises(TypeError):
        rapidjson.dumps(mixed, default=default)
    with pytest.raises(TypeError):
        rapidjson.dumps(mixed)
def notify(env, ids, last_sync=False):
    """POST a notification for the given ids to the local notify service.

    Does nothing when there is nothing to report; network errors are
    logged and swallowed (best-effort delivery).
    """
    if not ids and not last_sync:
        return
    url = 'http://localhost:9000/notify/'
    body = json.dumps({
        'notify': True,
        'ids': list(set(ids)),
        'last_sync': last_sync,
    })
    try:
        requests.post(url, data=body, timeout=5, auth=(env.username, env.token))
    except IOError as exc:
        log.error(exc)
def test_object_hook():
    """object_hook rebuilds objects; combined with default it round-trips."""
    class Foo:
        def __init__(self, foo):
            self.foo = foo

    def hook(d):
        return Foo(d['foo']) if 'foo' in d else d

    def default(obj):
        return {'foo': obj.foo}

    decoded = rapidjson.loads('{"foo": 1}', object_hook=hook)
    assert isinstance(decoded, Foo)
    assert decoded.foo == 1
    assert rapidjson.dumps(rapidjson.loads('{"foo": 1}', object_hook=hook),
                           default=default) == '{"foo":1}'

    decoded = rapidjson.loads(rapidjson.dumps(Foo(foo="bar"), default=default),
                              object_hook=hook)
    assert isinstance(decoded, Foo)
    assert decoded.foo == "bar"
def clean(key, value):
    """Normalize one message-header field for storage.

    Address-list fields become formatted lists, id fields are stripped,
    attachments are JSON-encoded, and references are split into bracketed
    ids. Falsy values collapse to [] for list-valued fields and are
    otherwise returned untouched.
    """
    if not value:
        list_fields = ('to', 'fr', 'cc', 'bcc', 'reply_to', 'sender', 'refs')
        return [] if key in list_fields else value
    if key in ('to', 'fr', 'cc', 'bcc', 'reply_to', 'sender'):
        return [format_addr(v) for v in value]
    if key in ('msgid', 'in_reply_to'):
        return value.strip()
    if key in ('attachments',):
        return json.dumps(value)
    if key in ('refs',):
        return ['<%s>' % v for v in re.split('[<>\s]+', value) if v]
    return value
def test_larger_structure():
    """A heterogeneous nested structure round-trips unchanged."""
    value = {
        'words': """
            Lorem ipsum dolor sit amet, consectetur adipiscing
            elit. Mauris adipiscing adipiscing placerat.
            Vestibulum augue augue,
            pellentesque quis sollicitudin id, adipiscing.
            """,
        'list': list(range(200)),
        'dict': {str(i): 'a' for i in range(200)},
        'int': 100100100,
        'float': 100999.123456,
    }
    assert rapidjson.loads(rapidjson.dumps(value)) == value
def serialize(data):
    """Serialize a dict into a JSON formatted string.

    This function enforces rules like the separator and order of keys.
    This ensures that all dicts are serialized in the same way.

    This is specially important for hashing data. We need to make sure
    that everyone serializes their data in the same way so that we do
    not have hash mismatches for the same structure due to serialization
    differences.

    Args:
        data (dict): dict to serialize

    Returns:
        str: JSON formatted string
    """
    # sort_keys=True makes the output deterministic; ensure_ascii=False
    # keeps non-ASCII text verbatim so the bytes being hashed are stable.
    return rapidjson.dumps(data, skipkeys=False, ensure_ascii=False,
                           sort_keys=True)
def get_payload(self, request, flat_args=False):
    """Validate the incoming webhook request and build its JSON payload.

    Args:
        request: incoming HTTP request holding the raw payload data.
        flat_args: when True the payload is replaced by the flattened
            dict arguments; otherwise they are merged into it.

    Returns:
        tuple: ``(payload_data, trace_args)`` — the JSON-encoded payload
        and a copy of the validated data for tracing.

    Raises:
        UnsupportedPayload: when the payload cannot be JSON-encoded.
    """
    data = prepare_payload_data(request)
    serializer = WebhookRunSerializer(data=data)
    serializer.is_valid(raise_exception=True)
    payload = serializer.data
    trace_args = payload.copy()

    # Combine arguments and make them flat
    args = {}
    for value in list(payload.values()):
        if isinstance(value, dict):
            args.update(value)
    if flat_args:
        payload = args
    else:
        payload.update(args)

    try:
        payload_data = json.dumps(payload)
    except (OverflowError, UnicodeDecodeError):
        raise UnsupportedPayload()
    return payload_data, trace_args
def _init_json(self): # rapidjson is well maintained library with acceptable performance, good choice if self.options['json_lib'] == 'rapidjson': import rapidjson self._json_encode = lambda x: rapidjson.dumps(x, number_mode=rapidjson.NM_NATIVE) self._json_decode = lambda x: rapidjson.loads(x, number_mode=rapidjson.NM_NATIVE) # ujson provides best performance in our tests, but it is abandoned by maintainers elif self.options['json_lib'] == 'ujson': import ujson self._json_encode = ujson.dumps self._json_decode = ujson.loads # json from Python stdlib, very safe choice, but slow elif self.options['json_lib'] == 'json': import json self._json_encode = json.dumps self._json_decode = json.loads else: raise ValueError(f"Unsupported json library [{self.options['json_lib']}]")
def get_writer(self, options=None, table_name=None, rapidjson_serialize=False) -> BatchWriter:
    """Create an HTTP batch writer for this table schema.

    Args:
        options: extra writer options forwarded to HTTPBatchWriter.
        table_name: optional override of the destination table.
        rapidjson_serialize: serialize rows with rapidjson instead of the
            stdlib json module (faster, same output shape).
    """
    from snuba import settings
    from snuba.clickhouse.http import HTTPBatchWriter

    def default(value):
        # Datetimes use the ClickHouse format; any other unknown type is an error.
        if isinstance(value, datetime):
            return value.strftime(DATETIME_FORMAT)
        else:
            raise TypeError

    return HTTPBatchWriter(
        self.__table_schema,
        settings.CLICKHOUSE_HOST,
        settings.CLICKHOUSE_HTTP_PORT,
        # Each row is JSON-encoded (library chosen by flag) then UTF-8 encoded.
        lambda row: (rapidjson.dumps(row, default=default)
                     if rapidjson_serialize else
                     json.dumps(row, default=default)).encode("utf-8"),
        options,
        table_name,
        chunk_size=settings.CLICKHOUSE_HTTP_CHUNK_SIZE,
    )
def print_epoch_details(results, total_epochs: int, print_json: bool,
                        no_header: bool = False, header_str: str = None) -> None:
    """
    Display details of the hyperopt result
    """
    params = results.get('params_details', {})

    # Default header string
    if header_str is None:
        header_str = "Best result"

    if not no_header:
        explanation_str = Hyperopt._format_explanation_string(results, total_epochs)
        print(f"\n{header_str}:\n\n{explanation_str}\n")

    if print_json:
        result_dict: Dict = {}
        for s in ['buy', 'sell', 'roi', 'stoploss', 'trailing']:
            Hyperopt._params_update_for_json(result_dict, params, s)
        # NM_NATIVE keeps numbers in their native representation;
        # default=str stringifies anything non-JSON-native.
        print(rapidjson.dumps(result_dict, default=str,
                              number_mode=rapidjson.NM_NATIVE))
    else:
        Hyperopt._params_pretty_print(params, 'buy', "Buy hyperspace params:")
        Hyperopt._params_pretty_print(params, 'sell', "Sell hyperspace params:")
        Hyperopt._params_pretty_print(params, 'roi', "ROI table:")
        Hyperopt._params_pretty_print(params, 'stoploss', "Stoploss:")
        Hyperopt._params_pretty_print(params, 'trailing', "Trailing stop:")
async def get_fcm_tokens(account: str, r: web.Request, v2: bool = False) -> list:
    """Return list of FCM tokens that belong to this account"""
    redisInst = r.app['rdata']
    tokens = await redisInst.get(account)
    if tokens is None:
        return []
    # Stored value may use single quotes; normalize before JSON-decoding.
    tokens = json.loads(tokens.replace('\'', '"'))
    # Rebuild the list for this account removing tokens that dont belong anymore
    new_token_list = {}
    new_token_list['data'] = []
    if 'data' not in tokens:
        return []
    for t in tokens['data']:
        account_list = await get_or_upgrade_token_account_list(account, t, r, v2=v2)
        if account not in account_list:
            continue
        new_token_list['data'].append(t)
    # Persist the pruned list back to redis.
    await redisInst.set(account, json.dumps(new_token_list))
    return new_token_list['data']
def test_allow_nan():
    """allow_nan and the equivalent number_mode flags accept or reject NaN/Inf."""
    values = [1.1, float("inf"), 2.2, float("nan"), 3.3, float("-inf"), 4.4]
    expected = '[1.1,Infinity,2.2,NaN,3.3,-Infinity,4.4]'
    assert rj.dumps(values) == expected
    assert rj.dumps(values, number_mode=rj.NM_NAN) == expected
    assert rj.dumps(values, allow_nan=True) == expected
    with pytest.raises(ValueError):
        rj.dumps(values, number_mode=None)
    with pytest.raises(ValueError):
        rj.dumps(values, allow_nan=False)

    nan_text = "NaN"
    assert math.isnan(rj.loads(nan_text))
    assert math.isnan(rj.loads(nan_text, number_mode=rj.NM_NAN))
    assert math.isnan(rj.loads(nan_text, allow_nan=True))
    with pytest.raises(ValueError):
        rj.loads(nan_text, number_mode=rj.NM_NONE)
    with pytest.raises(ValueError):
        rj.loads(nan_text, allow_nan=False)

    # Infinities compare by equality, so both signs fit one loop.
    for text, value in (("Infinity", float("inf")), ("-Infinity", float("-inf"))):
        assert rj.loads(text) == value
        assert rj.loads(text, number_mode=rj.NM_NAN) == value
        assert rj.loads(text, allow_nan=True) == value
        with pytest.raises(ValueError):
            rj.loads(text, number_mode=rj.NM_NONE)
        with pytest.raises(ValueError):
            rj.loads(text, allow_nan=False)
def _fast_tojson(obj):
    """Serialize *obj* with rapidjson and wrap the result as markup-safe."""
    rendered = rapidjson.dumps(obj)
    return Markup(rendered)
def test_parse():
    """Parsing then re-dumping yields an equivalent structure."""
    decoded = rj.loads(JSON)
    assert decoded == rj.loads(rj.dumps(decoded))
def dumps(obj: object, *args, **kwargs) -> bytes:
    """Serialize *obj* with project defaults, using the xid encoder
    as the fallback ``default`` unless the caller supplied one."""
    kwargs = add_settings_to_kwargs(kwargs)
    kwargs.setdefault("default", xid_encoder)
    return rapidjson.dumps(obj, *args, **kwargs)
def test_decode_vertex_properties(self, db_vertex_vertex_properties_response):
    """Vertex-property payloads decode via TridentDecoder.object_hook."""
    round_tripped = rapidjson.dumps(db_vertex_vertex_properties_response)
    decoded = rapidjson.loads(round_tripped,
                              object_hook=TridentDecoder.object_hook)
    assert decoded
def process_grpc(self, instance, incentive, spec):
    """Run a codebox script through the gRPC script-runner broker.

    Builds a RunRequest from the socket/codebox described by *incentive*
    and the run *spec*, then submits it with a bounded retry loop. When
    the socket's environment exists but is not ready, the run is blocked
    with a failure trace instead of being submitted.
    """
    logger = self.get_logger()
    from apps.codeboxes.proto import broker_pb2, broker_pb2_grpc

    # Lazily create the channel/stub once and reuse it for later runs.
    if self.runner is None:
        self.channel = grpc.insecure_channel(settings.CODEBOX_BROKER_GRPC, settings.CODEBOX_GRPC_OPTIONS)
        self.runner = broker_pb2_grpc.ScriptRunnerStub(self.channel)

    socket = incentive.socket
    entrypoint = socket.get_local_path(incentive.codebox.path)

    # Add environment
    environment_hash = ''
    environment_url = ''
    if socket.environment_id:
        environment = Cached(SocketEnvironment, kwargs={
            'pk': socket.environment_id
        }).get()
        if not environment.is_ready:
            self.block_run('Environment is not yet ready.', incentive, instance, spec,
                           status=Trace.STATUS_CHOICES.FAILURE)
            return
        environment_hash = environment.get_hash()
        environment_url = environment.get_url()

    req = broker_pb2.RunRequest(
        meta={
            'files': socket.get_files(),
            'environmentURL': environment_url,
            'trace': json.dumps(spec['trace']).encode(),
            'traceID': spec['trace']['id'],
        },
        lbMeta={
            'concurrencyKey': str(instance.pk),
            'concurrencyLimit': spec['run']['concurrency_limit'],
        },
        request=[{
            'meta': {
                'runtime': spec['run']['runtime_name'],
                'sourceHash': socket.get_hash(),
                'userID': str(instance.pk),
                'environment': environment_hash,
                'options': {
                    'entryPoint': entrypoint,
                    'outputLimit': settings.CODEBOX_RESULT_SIZE_LIMIT,
                    # Spec timeout is in seconds; broker expects milliseconds.
                    'timeout': int(spec['run']['timeout'] * 1000),
                    'args': spec['run']['additional_args'].encode(),
                    'config': spec['run']['config'].encode(),
                    'meta': spec['run']['meta'].encode(),
                },
            },
        }])

    # Retry grpc Run if needed.
    metadata = create_headers_from_zipkin_attrs(get_tracing_attrs())
    if metadata is not None:
        metadata = metadata.items()
    for i in range(self.grpc_run_retries + 1):
        try:
            self.runner.Run(req, timeout=GRPC_RUN_TIMEOUT, metadata=metadata)
            return
        except Exception:
            # Re-raise after the final attempt; otherwise log and retry.
            if i + 1 > self.grpc_run_retries:
                raise
            logger.warning("gRPC run failed, retrying (try #%d out of %d)",
                           i + 1, self.grpc_run_retries, exc_info=1)
            time.sleep(1)
def _dumps(self, value):
    """Serialize *value* to UTF-8-encoded JSON bytes."""
    serialized = rapidjson.dumps(value)
    return serialized.encode('utf_8')
async def process_defer(self, r : web.Request, uid : str, block : dict, do_work : bool, subtype: str = None) -> dict:
    """Pre-process and submit a block to the node's ``process`` RPC.

    Caches send links, guards against a receive race condition, optionally
    generates work for the block, then forwards it with the appropriate
    subtype. Returns the node's response dict, or an ``{'error': ...}``
    dict when a precondition fails.
    """
    # Let's cache the link because, due to callback delay it's possible a client can receive
    # a push notification for a block it already knows about
    is_change = True if subtype == 'change' else False
    if not is_change and 'link' in block:
        # An all-zero link also marks a change block.
        if block['link'].replace('0', '') == '':
            is_change = True
        else:
            await r.app['rdata'].set(f"link_{block['link']}", "1", expire=3600)

    # check for receive race condition
    # if block['type'] == 'state' and block['previous'] and block['balance'] and block['link']:
    if block['type'] == 'state' and {'previous', 'balance', 'link'} <= set(block):
        try:
            prev_response = await self.json_post({
                'action': 'blocks_info',
                'hashes': [block['previous']],
                'balance': 'true'
            })
            try:
                prev_block = json.loads(prev_response['blocks'][block['previous']]['contents'])
                # Legacy (non-state) blocks store the balance as hex; fall back
                # to the blocks_info balance when the field is absent.
                if prev_block['type'] != 'state' and ('balance' in prev_block):
                    prev_balance = int(prev_block['balance'], 16)
                elif prev_block['type'] != 'state' and ('balance' not in prev_block):
                    prev_balance = int(prev_response['blocks'][block['previous']]['balance'])
                else:
                    prev_balance = int(prev_block['balance'])
                if int(block['balance']) < prev_balance:
                    link_hash = block['link']
                    link_hash = self.util.address_decode(link_hash)
                    # this is a send
                    link_response = await self.json_post({
                        'action': 'block',
                        'hash': link_hash
                    })
                    # print('link_response',link_response)
                    if 'error' not in link_response and 'contents' in link_response:
                        log.server_logger.error(
                            'rpc process receive race condition detected;%s;%s;%s',
                            self.util.get_request_ip(r), uid, str(r.headers.get('User-Agent')))
                        return {
                            'error':'receive race condition detected'
                        }
            except Exception:
                # no contents, means an error was returned for previous block. no action needed
                log.server_logger.exception('in process_defer')
                pass
        except Exception:
            log.server_logger.error('rpc process receive race condition exception;%s;%s;%s;User-Agent:%s',
                                    str(sys.exc_info()), self.util.get_request_ip(r), uid,
                                    str(r.headers.get('User-Agent')))
            pass

    # Do work if we're told to
    if 'work' not in block and do_work:
        try:
            # Open blocks (previous == 0) work on the account public key.
            if block['previous'] == '0' or block['previous'] == '0000000000000000000000000000000000000000000000000000000000000000':
                workbase = self.util.pubkey(block['account'])
            else:
                workbase = block['previous']
            # Difficulty depends on network (banano) and block subtype.
            difficulty = 'fffffe0000000000' if self.banano_mode else 'ffffffc000000000' if subtype == 'receive' else 'fffffff800000000'
            work_response = await self.work_request({
                'action': 'work_generate',
                'hash': workbase,
                'difficulty': difficulty,
                'reward': False
            })
            if work_response is None or 'work' not in work_response:
                return {
                    'error':'failed work_generate in process request'
                }
            block['work'] = work_response['work']
        except Exception:
            log.server_logger.exception('in work process_defer')
            return {
                'error':"Failed work_generate in process request"
            }

    process_request = {
        'action':'process',
        'block': json.dumps(block)
    }
    if subtype is not None:
        process_request['subtype'] = subtype
    elif is_change:
        process_request['subtype'] = 'change'

    return await self.json_post(process_request)
def ajax_firewall_detail_events(self):
    """Build per-event detail rows for Windows Firewall audit events.

    Filters the collected events down to the firewall provider's event IDs
    2003 (firewall on/off), 2004/2005 (rule added/modified) and 2006
    (rule deleted), attaches a ``_PluginResult`` summary dict to each
    matching event, and returns the list serialized with rapidjson.
    """
    filtered = list(filter(
        lambda x: "Microsoft-Windows-Windows Firewall With Advanced Security"
        == x.get("System", {}).get("Provider", {}).get("@Name", None)
        and (x.get("System", {}).get("EventID", {}).get("#text", None)
             in ("2003", "2004", "2005", "2006")),
        self.events.values()
    ))
    data = []
    for event in filtered:
        event["_PluginResult"] = {}
        EventID = event.get("System", {}).get("EventID", {}).get("#text", None)

        # Firewall turned on/off.
        if EventID == "2003":
            Profiles = event.get("EventData", {}).get("Data", [])[0].get("#text", None)
            SettingValue = event.get("EventData", {}).get("Data", [])[3].get("#text", None)
            ModifyingApplication = event.get("EventData", {}).get("Data", [])[7].get("#text", None)
            # "01000000" means the firewall was enabled.
            if SettingValue == "01000000":
                event["_PluginResult"]["Status"] = "켜기"
            else:
                event["_PluginResult"]["Status"] = "끄기"
            # Bug fix: the original assigned an undefined local ``RuleName``
            # here (NameError unless a 2004/2005 event ran earlier in the
            # loop); 2003 events carry no rule name, so none is reported.
            # NOTE(review): mapping the profile value through dic_Action is
            # preserved as-is, but looks like a copy-paste slip — confirm.
            event["_PluginResult"]["Action"] = dic_Action.get(Profiles)
            event["_PluginResult"]["ModifyingApplication"] = ModifyingApplication
            data.append(event)

        # Firewall exception rule added (2004) or modified (2005).
        # EventData layout by index: 0 RuleId, 1 RuleName, 3 ApplicationPath,
        # 5 Direction, 6 Protocol, 7 LocalPorts, 8 RemotePorts, 9 Action,
        # 11 LocalAddresses, 12 RemoteAddresses, 22 ModifyingApplication.
        elif EventID == "2004" or EventID == "2005":
            RuleID = event.get("EventData", {}).get("Data", [])[0].get("#text", None)
            RuleName = event.get("EventData", {}).get("Data", [])[1].get("#text", None)
            ApplicationPath = event.get("EventData", {}).get("Data", [])[3].get("#text", "*")
            Direction = event.get("EventData", {}).get("Data", [])[5].get("#text", None)
            Protocol = event.get("EventData", {}).get("Data", [])[6].get("#text", None)
            LocalPorts = event.get("EventData", {}).get("Data", [])[7].get("#text", "*")
            RemotePorts = event.get("EventData", {}).get("Data", [])[8].get("#text", "*")
            Action = event.get("EventData", {}).get("Data", [])[9].get("#text", None)
            LocalAddresses = event.get("EventData", {}).get("Data", [])[11].get("#text", "*")
            RemoteAddresses = event.get("EventData", {}).get("Data", [])[12].get("#text", "*")
            ModifyingApplication = event.get("EventData", {}).get("Data", [])[22].get("#text", None)

            if EventID == "2004":
                event["_PluginResult"]["Status"] = "추가"
            else:
                event["_PluginResult"]["Status"] = "수정"
            event["_PluginResult"]["RuleID"] = RuleID
            event["_PluginResult"]["RuleName"] = RuleName
            # Bug fix: ApplicationPath was mistakenly filled from LocalAddresses.
            event["_PluginResult"]["ApplicationPath"] = ApplicationPath.replace("*", "모두")
            event["_PluginResult"]["Action"] = dic_Action.get(Action)
            event["_PluginResult"]["Direction"] = dic_Direction.get(Direction)
            event["_PluginResult"]["Protocol"] = dic_Protocol.get(Protocol)
            event["_PluginResult"]["LocalAddresses"] = LocalAddresses.replace("*", "모두")
            event["_PluginResult"]["RemoteAddresses"] = RemoteAddresses.replace("*", "모두")
            event["_PluginResult"]["LocalPorts"] = LocalPorts.replace("*", "모두")
            event["_PluginResult"]["RemotePorts"] = RemotePorts.replace("*", "모두")
            event["_PluginResult"]["ModifyingApplication"] = ModifyingApplication
            data.append(event)

        # Firewall exception rule deleted.
        elif EventID == "2006":
            RuleID = event.get("EventData", {}).get("Data", [])[0].get("#text", None)
            RuleName = event.get("EventData", {}).get("Data", [])[1].get("#text", None)
            ModifyingApplication = event.get("EventData", {}).get("Data", [])[3].get("#text", None)
            event["_PluginResult"]["Status"] = "삭제"
            event["_PluginResult"]["RuleID"] = RuleID
            event["_PluginResult"]["RuleName"] = RuleName
            event["_PluginResult"]["ModifyingApplication"] = ModifyingApplication
            data.append(event)
    return rapidjson.dumps(data)
def ajax_firewall_status_events(self):
    """Summarize current firewall on/off state per profile and the active
    network interfaces, from events 2003 (setting change) and 2010
    (profile change), serialized with rapidjson.
    """
    filtered = list(filter(
        lambda x: "Microsoft-Windows-Windows Firewall With Advanced Security" == x.get("System", {}).get("Provider",{}).get("@Name", None)
        and (
            x.get("System", {}).get("EventID", {}).get("#text", None) in ("2003", "2010"))
        , self.events.values()
    ))

    # Current firewall state (on/off) and currently-connected profiles.
    network_profile_dict = {}
    network_profile_dict["2"] = "Disconnected"
    network_profile_dict["4"] = "Disconnected"
    firewall_result = []
    firewall_private_status = ""
    firewall_public_status = ""
    firewall_domain_status = ""
    # Must be sorted oldest-first so the latest event wins for each profile.
    filtered = sort_by_timecreated(filtered, reverse=False)
    for event in filtered:
        EventID = event.get("System", {}).get("EventID", {}).get("#text", None)
        if EventID == "2003":
            SettingValue = event.get("EventData", {}).get("Data", [])[3].get("#text", None)
            Profile = event.get("EventData", {}).get("Data", [])[0].get("#text", None)
            # Profile codes: 1 = domain, 2 = private, 4 = public.
            if Profile == "1":
                firewall_domain_status = SettingValue
            elif Profile == "2":
                firewall_private_status = SettingValue
            elif Profile == "4":
                firewall_public_status = SettingValue
        if EventID == "2010":
            OldProfile = event.get("EventData", {}).get("Data", [])[2].get("#text", None)
            NewProfile = event.get("EventData", {}).get("Data", [])[3].get("#text", None)
            InterfaceName = event.get("EventData", {}).get("Data", [])[1].get("#text", None)
            if OldProfile == "2":
                network_profile_dict["2"] = InterfaceName
            elif OldProfile == "4":
                network_profile_dict["4"] = InterfaceName
    # Convert the profile dict's values into a list and return everything.
    profile_list = []
    for key, value in network_profile_dict.items():
        profile_list.append(value)
    firewall_result.append(profile_list)
    firewall_result.append(firewall_domain_status)
    firewall_result.append(firewall_private_status)
    firewall_result.append(firewall_public_status)
    return rapidjson.dumps(firewall_result)
def test_telemetry_ingestion_append_version(client, ping, default_values):
    """Posting a ping with an explicit ?v=4 query string succeeds."""
    spec = TelemetryURISpec(**default_values)
    response = client.post(build_route(spec) + "?v=4",
                           data=json.dumps(ping),
                           content_type='application/json')
    assert response.status_code == 200
def ajax_firewall_list_events(self):
    """Reconstruct the current set of firewall exception rules by replaying
    rule add/modify (2004/2005) and delete (2006) events in time order,
    returning the surviving rules serialized with rapidjson.
    """
    # Current state of the firewall exception rules.
    filtered = list(filter(
        lambda x: "Microsoft-Windows-Windows Firewall With Advanced Security" == x.get("System", {}).get("Provider", {}).get("@Name", None)
        and (x.get("System", {}).get("EventID", {}).get("#text", None) in ("2004", "2005", "2006"))
        , self.events.values()
    ))
    firewall_dict = {}
    # Replay oldest-first so later modifications/deletions win.
    filtered = sort_by_timecreated(filtered, reverse=False)
    for event in filtered:
        EventID = event.get("System", {}).get("EventID", {}).get("#text", None)
        # Rule created or modified.
        if EventID == "2004" or EventID == "2005":
            RuleID = event.get("EventData", {}).get("Data", [])[0].get("#text", None)
            RuleName = event.get("EventData", {}).get("Data", [])[1].get("#text", None)
            ApplicationPath = event.get("EventData", {}).get("Data", [])[3].get("#text", "*")
            Direction = event.get("EventData", {}).get("Data", [])[5].get("#text", None)
            Protocol = event.get("EventData", {}).get("Data", [])[6].get("#text", None)
            LocalPorts = event.get("EventData", {}).get("Data", [])[7].get("#text", "*")
            RemotePorts = event.get("EventData", {}).get("Data", [])[8].get("#text", "*")
            Action = event.get("EventData", {}).get("Data", [])[9].get("#text", None)
            LocalAddresses = event.get("EventData", {}).get("Data", [])[11].get("#text", "*")
            RemoteAddresses = event.get("EventData", {}).get("Data", [])[12].get("#text", "*")
            Active = event.get("EventData", {}).get("Data", [])[17].get("#text", None)
            ModifyingApplication = event.get("EventData", {}).get("Data", [])[22].get("#text", None)
            # Row fields: name, profile, action, direction, program,
            # local/remote address and port.
            rule = {}
            # Prefix the rule name with an icon reflecting action + enabled state.
            if dic_Action.get(Action) == "허용":
                if Active == "1":
                    RuleName = "<img src='./static/images/Firewall_allow_enable.png' /> " + RuleName
                else:
                    RuleName = "<img src='./static/images/Firewall_allow_disable.png' /> " + RuleName
            elif dic_Action.get(Action) == "거부":
                if Active == "1":
                    RuleName = "<img src='./static/images/Firewall_deny_enable.png' /> " + RuleName
                else:
                    RuleName = "<img src='./static/images/Firewall_deny_disable.png' /> " + RuleName
            rule["RuleName"] = RuleName
            rule["ApplicationPath"] = ApplicationPath.replace("*", "모두")
            rule["Action"] = dic_Action.get(Action)
            rule["Direction"] = dic_Direction.get(Direction)
            rule["Protocol"] = dic_Protocol.get(Protocol)
            rule["LocalAddresses"] = LocalAddresses.replace("*", "모두")
            rule["RemoteAddresses"] = RemoteAddresses.replace("*", "모두")
            rule["LocalPorts"] = LocalPorts.replace("*", "모두")
            rule["RemotePorts"] = RemotePorts.replace("*", "모두")
            firewall_dict[RuleID] = rule
        # Rule deleted.
        if EventID == "2006":
            RuleID = event.get("EventData", {}).get("Data", [])[0].get("#text", None)
            try:
                del firewall_dict[RuleID]
            except:
                # The original "add" log was purged, so the rule is unknown here.
                pass
    # Convert the dict to a list and return.
    firewall_list = []
    for key, value in firewall_dict.items():
        firewall_list.append(value)
    return rapidjson.dumps(firewall_list)
def dumps(obj):
    """Serialize *obj* to a JSON string via rapidjson."""
    encoded = rapidjson.dumps(obj)
    return encoded
def test_intenums_as_ints():
    """IntEnum members must serialize as their plain integer values."""
    class IE(enum.IntEnum):
        val = 123
        bigval = 123123123123123123123123

    members = [IE.val, IE.bigval]
    expected = "[123,123123123123123123123123]"
    assert rj.dumps(members) == expected
async def json_post(self, request_json : dict, timeout : int = 30, is_work : bool = False) -> dict:
    """POST *request_json* to the node (or work) endpoint and return the decoded JSON reply.

    Returns None on any network failure or non-2xx/3xx-boundary response;
    the error is logged rather than propagated.
    """
    try:
        # Route work requests to the dedicated work URL when one is configured.
        if is_work and self.work_url is not None:
            target = self.work_url
        else:
            target = self.node_url
        async with ClientSession() as session:
            async with session.post(target, json=request_json, timeout=timeout) as resp:
                if resp.status > 299:
                    log.server_logger.error('Received status code %d from request %s', resp.status, json.dumps(request_json))
                    raise Exception
                return await resp.json(content_type=None)
    except Exception:
        log.server_logger.exception("exception in json_post")
        return None
def _handle_work(r, topic, msg):
    """Store an incoming work request in a hashmap keyed by work hash."""
    work_type = topic[1]
    message = msg.payload.decode().split(',')
    work_hash = message[0]
    work_difficulty = message[1]
    mapping = {
        'work_type': work_type,
        'work_difficulty': work_difficulty,
        'timestamp': str(datetime.utcnow())
    }
    # NOTE(review): hmset is deprecated in redis-py >= 3.5 in favour of
    # hset(name, mapping=...); kept to avoid requiring a newer client.
    r.hmset(work_hash, mapping)


def _handle_result(r, msg):
    """Record a completed work result: client activity, PoW count, timing chart."""
    message = msg.payload.decode().split(',')
    work_hash = message[0]
    work_client = message[2]
    # Set client activity
    r.hset(
        "clientactivity", work_client,
        json.dumps({"last_active": datetime.utcnow()}, datetime_mode=dt_mode))
    hmreturn = r.hmget(work_hash, ['work_type', 'timestamp', 'work_difficulty'])
    # Multiple results are sent for 1 work hash, ignore if the result has
    # already been logged.
    if hmreturn == [None, None, None]:
        return
    request_time = datetime.strptime(hmreturn[1].decode(), '%Y-%m-%d %H:%M:%S.%f')
    # Set PoW keys, 1 with expiry 2 days, one with expiry 1 days
    r.hset(
        "pow_count", work_hash,
        json.dumps({"dt": datetime.utcnow()}, datetime_mode=dt_mode))
    # Elapsed time for this request, in seconds.
    # BUG FIX: the original used timedelta.microseconds, which holds only the
    # sub-second component (0-999999) and silently drops whole seconds;
    # total_seconds() gives the full elapsed duration.
    time_difference = round((datetime.utcnow() - request_time).total_seconds(), 4)
    """
    avg = r.get("avgresponse")
    new = {}
    if avg is not None:
        avg = json.loads(avg)
        # Reset average after an hour
        if (datetime.utcnow() - avg['created']).total_seconds() > 3600:
            r.delete("avgresponse")
            new['created'] = datetime.utcnow()
            new['count'] = 1
            new['time_total'] = time_difference
        else:
            new['count'] = avg['count'] + 1
            new['time_total'] += avg['time_total'] + 1
    else:
        new = {
            'created': datetime.utcnow(),
            'count': 1,
            'time_total': time_difference
        }
    r.set("avgresponse", json.dumps(new, datetime_mode=dt_mode))
    """
    # Set live chart data
    r.lpush("live_chart_prefill", str(time_difference))
    r.ltrim("live_chart_prefill", 0, 25)
    # Once logged successfully, delete the work hash from redis.
    r.delete(work_hash)


def _handle_statistics(r, msg):
    """Persist server statistics (total paid aggregate, service list) to redis."""
    stats = json.loads(msg.payload.decode())
    logger.info("Stats call received: {}".format(stats))
    # It just seems easier/faster to store the total paid aggregate in redis
    if 'total_paid_banano' in stats:
        r.set("bpowdash:totalpaidban", str(stats['total_paid_banano']))
    r.set("services", json.dumps(stats['services']))


def _handle_client(r, topic, msg):
    """Update a client's precache/ondemand totals from a client message."""
    try:
        # Messages on client update their totals without us having to track.
        # Keeps in sync with the server.
        result = json.loads(msg.payload.decode())
        address = topic[1]
        precache = int(result['precache']) if 'precache' in result else 0
        ondemand = int(result['ondemand']) if 'ondemand' in result else 0
        r.hset(
            "clientstats", address,
            json.dumps({
                "total": ondemand + precache,
                "precache": precache,
                "ondemand": ondemand
            }))
    except Exception as e:
        logger.exception("error logging client info: {}".format(e))
        logger.info(msg.payload)


def on_message(client, userdata, msg):
    """ On messages from MQTT, handle the request. """
    try:
        r = redis.Redis(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB)
        # Topic segments select the handler: work / result / statistics / client.
        topic = msg.topic.split('/')
        if topic[0] == 'work':
            _handle_work(r, topic, msg)
        elif topic[0] == 'result':
            _handle_result(r, msg)
        elif topic[0] == 'statistics':
            _handle_statistics(r, msg)
        elif topic[0] == 'client':
            _handle_client(r, topic, msg)
        else:
            try:
                logger.info("UNEXPECTED MESSAGE")
                logger.info("TOPIC: {}".format(topic[0].upper()))
                logger.info("message: {}".format(msg.payload))
            except Exception as e:
                logger.info("exception: {}".format(e))
    except Exception as e:
        # Top-level boundary: an MQTT callback must never raise.
        logger.exception("Error: {}".format(e))
def encode(self, value: Result) -> bytes:
    """Serialize *value* to UTF-8 encoded JSON bytes."""
    serialized = cast(str, rapidjson.dumps(value))
    return serialized.encode("utf-8")
async def emit(self, event, payload):
    """Send a socket.io EVENT frame carrying *event* and *payload* over the websocket."""
    log = logging.getLogger('socketio')
    body = rapidjson.dumps([event, payload])
    frame = ENGINEIO_MESSAGE + SOCKETIO_EVENT + body
    log.debug("Send '%s'", frame)
    await self.ws.send(frame)
async def get_event(message):
    """Echo the incoming message back to its sender as pretty-printed JSON."""
    print(message)
    pretty = rapidjson.dumps(message, indent=2)
    await message.reply(str(pretty))
def json_dumps(obj: PY_JSON_TYPES) -> str:
    """Serialize *obj* to a JSON string, delegating unknown types to to_serializable."""
    encoded = json.dumps(obj, default=to_serializable)
    return encoded
# ################################################################################################################################ logger = getLogger('zato') # ################################################################################################################################ xml_error_template = '<?xml version="1.0" encoding="utf-8"?><error>{}</error>' copy_forbidden = b'You are not authorized to access this resource' copy_not_found = b'Not found' error_response = { NOT_FOUND: { DATA_FORMAT.JSON: dumps({'error': copy_not_found}), DATA_FORMAT.XML: xml_error_template.format(copy_not_found) } } # ################################################################################################################################ class MSG_TYPE: _COMMON = 'zwsx.{}' REQ_TO_CLIENT = _COMMON.format('rqc') RESP_AUTH = _COMMON.format('rspa') RESP_OK = _COMMON.format('rspok') # A message from server indicating an error, no response from client is expected
def to_json(self, sort_keys=False, indent=None):
    """Recursively serialize any MISPAbstract-derived object to a JSON string."""
    return dumps(
        self,
        default=pymisp_json_default,
        sort_keys=sort_keys,
        indent=indent,
    )
def to_json(self, response, **kw):
    """Serialize *response* to JSON and wrap it in a Response object.

    Defaults content_type to 'application/json' unless the caller overrides it.
    """
    if 'content_type' not in kw:
        kw['content_type'] = 'application/json'
    body = json.dumps(response, ensure_ascii=False, default=str)
    return self.Response(body, **kw)
def write(self, key, value):
    """Serialize *value* to JSON and return it as UTF-8 bytes (*key* is unused)."""
    payload = rapidjson.dumps(value)
    return payload.encode('utf_8')
def encode(self, value: WriterTableRow) -> JSONRow:
    """Encode *value* as UTF-8 JSON bytes, delegating non-native types to __default."""
    text = rapidjson.dumps(value, default=self.__default)
    return text.encode("utf-8")