def test_ujson(everything: Everything):
    """Round-trip ``everything`` through the ujson-backed converter."""
    conv = ujson_make_converter()
    unstructured = conv.unstructure(everything)
    serialized = ujson_dumps(unstructured)
    restructured = conv.structure(ujson_loads(serialized), Everything)
    assert restructured == everything
async def _decorated(request):
    """Render the wrapped handler's result through a Jinja template.

    Awaits the decorated handler ``to_decorate`` to obtain the template
    context, renders the closure-captured ``template`` with the app's Jinja
    environment, and wraps the rendered output in the Sanic response type
    selected by ``render_as``.

    NOTE: ``to_decorate``, ``template``, ``render_as``,
    ``update_template_with_its_context`` and the other free names are
    captured from the enclosing decorator scope (not visible in this chunk).

    :param request: the incoming Sanic request
    :return: the response built by ``_sanic_responses[render_as]``
    """
    # The Jinja environment is stored on the Sanic app config under the
    # well-known key ``jinja_template_env_name``.
    _jinja_env = request.app.config[jinja_template_env_name]
    # The decorated handler produces the context dict for the template.
    template_context = await to_decorate(request)
    template_renderer = _jinja_env.get_template(template)
    if update_template_with_its_context:
        # Expose a helper inside the template for fetching its own context.
        template_renderer.globals.update(
            {'get_template_context': get_jinja_template_context})
    template_renderer.globals.update(update_template_globals_with)
    # Prefer async rendering when the environment was created with it enabled.
    if _jinja_env.enable_async:
        rendered_template = await template_renderer.render_async(
            template_context)
    else:
        rendered_template = template_renderer.render(template_context)
    # Re-parse the rendered text as JSON for the "json" response type —
    # presumably skipped for NativeEnvironment because it already yields
    # Python objects (TODO confirm).
    if render_as == "json" \
            and not isinstance(_jinja_env, jinja_NativeEnvironment):
        rendered_template = ujson_loads(rendered_template)
    return _sanic_responses[render_as](rendered_template)
def test_loads_citm_catalog_ujson(benchmark):
    """Benchmark ujson deserialization of the citm_catalog fixture."""
    benchmark.group = "citm_catalog.json deserialization"
    benchmark.extra_info["lib"] = "ujson"
    data = read_fixture_str("citm_catalog.json.xz")
    # Record whether a ujson round-trip preserves the document exactly.
    round_trip = json_loads(ujson_dumps(ujson_loads(data)))
    benchmark.extra_info["correct"] = round_trip == json_loads(data)
    benchmark(ujson_loads, data)
def test_ujson(everything: Everything):
    """Round-trip ``everything`` through a ujson converter and compare."""
    from ujson import dumps as ujson_dumps
    from ujson import loads as ujson_loads

    conv = ujson_make_converter()
    payload = ujson_dumps(conv.unstructure(everything))
    assert conv.structure(ujson_loads(payload), Everything) == everything
def test_loads_citm_catalog_ujson(benchmark):
    """Benchmark ujson deserialization of the citm_catalog fixture."""
    benchmark.group = 'citm_catalog.json deserialization'
    benchmark.extra_info['lib'] = 'ujson'
    data = read_fixture_str("citm_catalog.json.xz")
    # Correctness flag: does a ujson round-trip preserve the document?
    round_trip = json_loads(ujson_dumps(ujson_loads(data)))
    benchmark.extra_info['correct'] = round_trip == json_loads(data)
    benchmark(ujson_loads, data)
def env(
    key: str,
    type_: Type[Union[str, bool, int, dict, float]] = str,
    default: Optional[Any] = None,
) -> Union[str, int, bool, float, Dict]:
    """Returns the value of the supplied env key name converting
    the env key's value to the specified type.

    If the env key does not exist the default value is returned.

    Boolean values for env keys are expected to be:

    - true: 1, true, yes, y, ok, on
    - false: 0, false, no, n, nok, off

    :param key: The name of the environment variable
    :param type_: What type should the env key's value be converted to,
        defaults to str
    :param default: The default value of the env key, defaults to None
    :raises ValueError: if the value cannot be converted to ``type_``
    :return: The value of the env key or the supplied default
    """
    if key not in environ:
        return default

    val = environ[key]

    # Compare against the type objects by identity, not equality.
    if type_ is str:
        return val
    if type_ is bool:
        # Normalise once, then match the accepted truthy/falsy tokens.
        lowered = val.lower()
        if lowered in ('1', 'true', 'yes', 'y', 'ok', 'on'):
            return True
        if lowered in ('0', 'false', 'no', 'n', 'nok', 'off'):
            return False
        raise ValueError(
            f'Invalid environment variable "{key}" (expected a boolean): "{val}"'
        )
    if type_ is int:
        try:
            return int(val)
        except ValueError as exc:
            raise ValueError(
                f'Invalid environment variable "{key}" (expected a integer): "{val}"'
            ) from exc
    if type_ is float:
        try:
            return float(val)
        except ValueError as exc:
            raise ValueError(
                f'Invalid environment variable "{key}" (expected a float): "{val}"'
            ) from exc
    if type_ is dict:
        # Dict values are stored in the environment as JSON documents.
        return ujson_loads(val)
    # Unsupported type_ values fall through to None (original behaviour).
    return None
def get(self, tablename, key, hint=None):
    """
    Get an object from a given table.

    :param tablename: a table name
    :param key: an object key
    :param hint: (facultative) a secondary index hint, a ``(field, value)``
        pair resolved through the ``sec_index`` set
    :return: a python dictionary, or None when the key does not exist
    """
    redis_key = "%s:id:%s" % (tablename, key)
    if hint is not None:
        hint_key = "sec_index:%s:%s:%s" % (tablename, hint[0], hint[1])
        redis_keys = self.redis_client.smembers(hint_key)
        # BUGFIX: smembers returns a set, which is not subscriptable —
        # indexing with [0] raised TypeError. Take an arbitrary member
        # instead (works for sets and lists alike).
        redis_key = next(iter(redis_keys))
    fetched = self.redis_client.hget(tablename, redis_key)
    # Parse result from JSON to python dict.
    result = ujson_loads(fetched) if fetched is not None else None
    return result
def _resolve_keys(self, tablename, keys):
    """
    Return every object stored under a table prefix in etcd.

    :param tablename: a table name
    :param keys: a list of keys (not consulted here — all children under
        the table prefix are fetched)
    :return: the decoded python objects, with None entries filtered out
    """
    nodes = list(self.etcd_client.get(tablename + "/").children)
    if len(nodes) == 0:
        return []
    # Collect the raw JSON strings, dropping deleted entries (value None).
    json_strings = filter(lambda value: value is not None,
                          map(lambda node: node.value, nodes))
    # Join everything into one JSON array so a single parse call suffices
    # (boosts performance versus parsing each string separately).
    json_array = "[%s]" % (",".join(json_strings))
    decoded = ujson_loads(json_array)
    utf8_objects = map(lambda obj: convert_unicode_dict_to_utf8(obj), decoded)
    return filter(lambda obj: obj != None, utf8_objects)
def get(self, tablename, key, hint=None):
    """
    Get an object from a given table (etcd backend).

    :param tablename: a table name
    :param key: an object key
    :param hint: (facultative) a secondary index hint
    :return: a python dictionary, or None when the key does not exist
    """
    etcd_key = "/%s/%s" % (tablename, key)
    if hint is not None:
        # Resolve the real key through the secondary index.
        # NOTE(review): indexing the read() result with [0] looks suspect —
        # python-etcd's read() returns a Result object, not a list; confirm
        # this hint path is actually exercised/working.
        redis_keys = self.etcd_client.read("sec_index/%s/%s/%s" %
                                           (tablename, hint[0], hint[1]),
                                           recursive=True)
        etcd_key = redis_keys[0]
    try:
        fetched = self.etcd_client.read(etcd_key)
        # Parse result from JSON to python dict.
        value = self._get_value_from_results(fetched)
        result = ujson_loads(value) if value is not None else None
        return result
    except etcd.EtcdKeyNotFound:
        # Missing keys are reported as None rather than as an exception.
        return None
def _resolve_keys(self, tablename, keys):
    """
    Return the objects that match a list of keys in a specified table.

    :param tablename: a table name
    :param keys: a list of keys
    :return: the decoded python objects, with None entries filtered out
    """
    if len(keys) == 0:
        return []
    # Drop placeholder/absent keys before querying redis.
    usable_keys = filter(lambda k: k != "None" and k != None, keys)
    ordered_keys = sorted(usable_keys, key=lambda k: k.split(":")[-1])
    raw_values = self.redis_client.hmget(tablename, ordered_keys)
    # Deleted objects come back as None from the redis driver — skip them.
    present_values = filter(lambda v: v is not None, raw_values)
    # Join everything into one JSON array so a single parse call suffices
    # (boosts performance versus parsing each string separately).
    json_array = "[%s]" % (",".join(present_values))
    decoded = ujson_loads(json_array)
    utf8_objects = map(lambda obj: convert_unicode_dict_to_utf8(obj), decoded)
    return filter(lambda obj: obj != None, utf8_objects)
def parse_settings(args: argparse.Namespace) -> Tuple[TargetConfig, AppConfig]:
    """Build the target/app configuration pair from parsed CLI arguments.

    When ``--settings`` points at a settings file it wins outright and every
    other option is ignored. Otherwise the individual CLI options are
    validated and folded into a ``TargetConfig`` (per-request options) and an
    ``AppConfig`` (process-wide options).

    :param args: the argparse namespace produced by the CLI parser
    :return: a ``(TargetConfig, AppConfig)`` tuple
    """
    # A settings file overrides every other CLI option.
    if args.settings:
        return parse_settings_file(args.settings)
    # At least one target source (stdin, file or explicit targets) is required.
    if not args.input_stdin and not args.input_file and not args.single_targets:
        print("""errors, set input source:
--stdin read targets from stdin;
-t,--targets set targets, see -h;
-f,--input-file read from file with targets, see -h""")
        exit(1)
    payloads = []
    search_values = []
    input_file = None
    if args.input_file:
        input_file = args.input_file
        if not path.isfile(input_file):
            abort(f'ERROR: file not found: {input_file}')
    # Default output is stdout opened binary; a real file is appended to.
    if not args.output_file:
        output_file, write_mode = '/dev/stdout', 'wb'
    else:
        output_file, write_mode = args.output_file, 'a'
    if args.list_payloads:
        payloads = list(return_payloads_from_files(args.list_payloads))
    # endregion
    # region about proxy
    proxy_connections = []
    if args.proxy_connection_string:
        # TODO: validate the connection string
        proxy_connection_string = [
            connection
            for connection in args.proxy_connection_string.split(';')
            if connection
        ]
        proxy_connections.extend(proxy_connection_string)
    if proxy_connections:
        # Round-robin over the configured proxies.
        proxy_connections = cycle(proxy_connections)
    # endregion
    # The three --single-contain* options are mutually exclusive ways of
    # supplying the byte string a response must contain.
    if args.single_contain:
        try:
            search_value = decode_base64_string(args.single_contain)
            assert search_value is not None
            search_values.append(search_value)
        except Exception as e:
            abort('errors with --single-contain options', e)
    elif args.single_contain_string:
        try:
            search_value = str(args.single_contain_string).encode('utf-8')
            assert search_value is not None
            search_values.append(search_value)
        except Exception as e:
            abort('errors with --single-contain-string options', e)
    elif args.single_contain_hex:
        try:
            search_value = bytes.fromhex(args.single_contain_hex)
            assert search_value is not None
            search_values.append(search_value)
        except Exception as e:
            abort('errors with --single-contain-hex options', e)
    # A single payload may arrive base64-, hex- or pickle-hex-encoded.
    single_payload = None
    if args.single_payload:
        single_payload = decode_base64_string(args.single_payload)
    elif args.single_payload_hex:
        try:
            single_payload: bytes = bytes.fromhex(args.single_payload_hex)
        except BaseException:
            # NOTE(review): malformed hex is silently ignored and the
            # payload stays None — confirm this is intended.
            pass
    elif args.single_payload_pickle_hex:
        try:
            single_payload: bytes = bytes.fromhex(
                args.single_payload_pickle_hex)
        except:
            pass
    if single_payload:
        payloads.append(single_payload)
    # Cookies: either a hex-encoded JSON document or a plain JSON string.
    cookies = None
    if not args.full_cookies:
        if args.full_cookies_hex:
            try:
                _cookies_hex: bytes = bytes.fromhex(args.full_cookies_hex)
                _cookies_string: str = _cookies_hex.decode('utf-8')
                cookies = ujson_loads(_cookies_string)
            except Exception as e:
                print(
                    f'errors with full cookies from hex. {e}, cookies set to None'
                )
    else:
        try:
            cookies = ujson_loads(args.full_cookies)
        except Exception as e:
            print(
                f'errors with full cookies from string(json). {e}, cookies set to None'
            )
    # Headers: explicit JSON (plain or hex) wins; otherwise a User-Agent
    # header is synthesised from --user-agent.
    if not args.full_headers:
        if args.full_headers_hex:
            try:
                _headers_hex: bytes = bytes.fromhex(args.full_headers_hex)
                _headers_string: str = _headers_hex.decode('utf-8')
                headers = ujson_loads(_headers_string)
            except Exception as e:
                print(
                    f'errors with full headers from hex. {e}, headers set to None'
                )
                headers = {}
        elif args.user_agent.lower() == 'no':
            headers = {}
        elif args.user_agent == 'random':
            headers = {'User-Agent': return_user_agent()}
        else:
            headers = {'User-Agent': args.user_agent}
    else:
        try:
            headers = ujson_loads(args.full_headers)
        except Exception as e:
            print(f'errors with full headers. {e}, headers set to None')
            headers = {}
    if args.ssl_check:
        scheme = 'https'
    else:
        scheme = 'http'
    target_settings = TargetConfig(
        **{
            'port': args.port,
            'ssl_check': args.ssl_check,
            'total_timeout': args.total_timeout,
            'list_payloads': payloads,
            'search_values': search_values,
            'max_size': args.max_size,
            'python_payloads': args.python_payloads,
            'generator_payloads': args.generator_payloads,
            'headers': headers,
            'cookies': cookies,
            'scheme': scheme,
            'endpoint': args.endpoint,
            'method': args.method,
            'hostname': '',
            'single_payload_type': args.single_payload_type,
            'allow_redirects': args.allow_redirects
        })
    app_settings = AppConfig(
        **{
            'senders': args.senders,
            'queue_sleep': args.queue_sleep,
            'statistics': args.statistics,
            'total_timeout': args.total_timeout,
            'input_file': input_file,
            'input_stdin': args.input_stdin,
            'single_targets': args.single_targets,
            'output_file': output_file,
            'write_mode': write_mode,
            'show_only_success': args.show_only_success,
            'endpoint': args.endpoint,
            'status_code': args.status_code,
            'without_base64': args.without_base64,
            'without_hashs': args.without_hashs,
            'without_certraw': args.without_certraw,
            'proxy_connections': proxy_connections
        })
    return target_settings, app_settings
async def do(self, target: Target):
    """Probe a MongoDB service: isMaster, buildInfo, listDatabases, getLog.

    Connects to ``target``, issues an ``isMaster`` query and, when the
    server answers, requests build info, the database list and the server
    log, accumulating everything into ``addition_info`` and a raw
    ``result_payload``. The outcome (success document or error template)
    is pushed through ``self.send_result``.

    :param target: the host/port/timeout description to probe
    """
    protocol_name_like_filename = Path(__file__).stem
    async with self.semaphore:
        result = None
        future_connection = asyncio.open_connection(target.ip, target.port)
        try:
            reader, writer = await asyncio.wait_for(
                future_connection, timeout=target.conn_timeout)
        except Exception as e:
            # Connection failed: best-effort cleanup, record the error.
            await asyncio.sleep(0.005)
            try:
                future_connection.close()
                del future_connection
            except Exception as e:
                pass
            result = create_error_template(target, str(e))
        else:
            try:
                # 1. getIsMaster
                message_out_is_master: bytes = get_is_master_msg()
                writer.write(message_out_is_master)
                await writer.drain()
                await asyncio.sleep(0.001)
                desc_text = "check isMaster"
                status_data, answer = await single_read(
                    reader, target,
                    custom_max_size=65535,
                    operation_description=desc_text)
                if status_data:
                    # Reply layout: message header, 20 bytes of OP_REPLY
                    # fields, then the BSON document.
                    doc_offset = MSGHEADER_LEN + 20
                    if len(answer) < doc_offset + 4:
                        error_message = f'Server truncated message - no query reply ' \
                                        f'({len(answer)} bytes: {answer.hex()})'
                        raise CustomError(error_message)
                    resp_flags = int.from_bytes(
                        answer[MSGHEADER_LEN:MSGHEADER_LEN + 4],
                        byteorder='little')
                    if resp_flags & QUERY_RESP_FAILED != 0:
                        error_message = "isMaster query failed"
                        raise CustomError(error_message)
                    doclen = int.from_bytes(answer[doc_offset:doc_offset + 4],
                                            byteorder='little')
                    if len(answer[doc_offset:]) < doclen:
                        error_message = f'Server truncated BSON reply doc ' \
                                        f'({len(answer[doc_offset:])} bytes: {answer.hex()})'
                        raise CustomError(error_message)
                    try:
                        decoded_doc = bson_decode(answer[doc_offset:])
                    except Exception as exp:
                        error_message = f'Server sent invalid BSON reply doc ' \
                                        f'({len(answer[doc_offset:])} bytes: {answer.hex()})'
                        raise CustomError(error_message)
                    else:
                        if decoded_doc:
                            addition_info = {}
                            _document_is_master = bson_json_dumps(decoded_doc)
                            result_payload = _document_is_master.encode('utf-8')
                            addition_info['is_master'] = ujson_loads(
                                _document_is_master)
                            # Gleaned from wireshark - if "MaxWireVersion" is
                            # less than 7, then "build info" command should be
                            # sent in an OP_COMMAND with the query sent and
                            # response retrieved at "metadata" offset. At 7 and
                            # above, should be sent as an OP_MSG in the
                            # "section" field, and response is at "body" offset
                            if decoded_doc['maxWireVersion'] < 7:
                                query: bytes = get_build_info_command_msg()
                                resplen_offset = 4
                                resp_offset = 0
                            else:
                                query = get_build_info_op_msg()
                                resplen_offset = 5
                                resp_offset = 5
                            # 2. buildInfo
                            writer.write(query)
                            await writer.drain()
                            await asyncio.sleep(0.001)
                            status_data, answer = await single_read(
                                reader, target,
                                custom_max_size=65535,
                                operation_description=desc_text)
                            if status_data:
                                if len(answer) < MSGHEADER_LEN + resplen_offset:
                                    error_message = f'Server truncated message - no metadata doc ' \
                                                    f'({len(answer)} bytes: {answer.hex()})'
                                    raise CustomError(error_message)
                                _tmp_value = answer[MSGHEADER_LEN:MSGHEADER_LEN + resplen_offset]
                                responselen = int.from_bytes(
                                    _tmp_value, byteorder='little')
                                if len(answer[MSGHEADER_LEN:]) < responselen:
                                    error_message = f'Server truncated BSON response doc ' \
                                                    f'({len(answer[MSGHEADER_LEN:])} bytes: {answer.hex()})'
                                    raise CustomError(error_message)
                                try:
                                    _document_bytes = answer[MSGHEADER_LEN + resp_offset:]
                                    _data_buildinfo = bson_decode_all(
                                        _document_bytes)
                                except Exception as exp:
                                    error_message = f'Server sent invalid BSON reply doc ' \
                                                    f'({len(answer[doc_offset:])} bytes: {answer.hex()})'
                                    raise CustomError(error_message)
                                else:
                                    _document_buildinfo = bson_json_dumps(
                                        _data_buildinfo)
                                    try:
                                        addition_info['build_info'] = ujson_loads(
                                            _document_buildinfo)
                                        _document_buildinfo_str_bytes = _document_buildinfo.encode(
                                            'utf-8')
                                        # BUGFIX: append inside the try so a
                                        # parse failure cannot raise NameError
                                        # on an unbound variable afterwards.
                                        result_payload = result_payload + b'\n' + _document_buildinfo_str_bytes
                                    except:
                                        pass
                            # 3. try return list databases
                            query: bytes = get_list_db_op_msg()
                            resplen_offset = 5
                            resp_offset = 5
                            writer.write(query)
                            await writer.drain()
                            await asyncio.sleep(0.001)
                            status_data, answer = await single_read(
                                reader, target,
                                custom_max_size=65535,
                                operation_description=desc_text)
                            if status_data:
                                if len(answer) < MSGHEADER_LEN + resplen_offset:
                                    error_message = f'Server truncated message - no metadata doc ' \
                                                    f'({len(answer)} bytes: {answer.hex()})'
                                    raise CustomError(error_message)
                                responselen = int.from_bytes(
                                    answer[MSGHEADER_LEN:MSGHEADER_LEN + resplen_offset],
                                    byteorder='little')
                                if len(answer[MSGHEADER_LEN:]) < responselen:
                                    error_message = f'Server truncated BSON response doc ' \
                                                    f'({len(answer[MSGHEADER_LEN:])} bytes: {answer.hex()})'
                                    raise CustomError(error_message)
                                try:
                                    _data_listdb = bson_decode_all(
                                        answer[MSGHEADER_LEN + resp_offset:])
                                except Exception as exp:
                                    error_message = f'Server sent invalid BSON reply doc ' \
                                                    f'({len(answer[doc_offset:])} bytes: {answer.hex()})'
                                    raise CustomError(error_message)
                                else:
                                    _document_listdb = bson_json_dumps(
                                        _data_listdb)
                                    try:
                                        _dbs = ujson_loads(_document_listdb)
                                        if len(_dbs) == 1:
                                            addition_info['list_db'] = {
                                                'databases': _dbs[0]['databases'],
                                                'total_size': _dbs[0]['totalSize']
                                            }
                                        else:
                                            addition_info['list_dbs'] = _dbs
                                        _document_listdb_str_bytes = _document_listdb.encode(
                                            'utf-8')
                                        # BUGFIX: append the listDatabases
                                        # payload — the original appended the
                                        # buildInfo bytes a second time and
                                        # never used _document_listdb_str_bytes.
                                        result_payload = result_payload + b'\n' + _document_listdb_str_bytes
                                    except:
                                        pass
                            # 4. try return logs mongodb
                            query: bytes = get_logs_db_op_msg()
                            resplen_offset = 5
                            resp_offset = 5
                            writer.write(query)
                            await writer.drain()
                            await asyncio.sleep(0.001)
                            status_data, answer = await multi_read(
                                reader, target)
                            if status_data:
                                try:
                                    # NOTE(review): debug prints left in place
                                    # to preserve observable behaviour.
                                    print(len(answer))
                                    _data_logs_mongodb = bson_decode(
                                        answer[MSGHEADER_LEN + resp_offset:])
                                except Exception as exp:
                                    print(exp)
                                else:
                                    read_logs(_data_logs_mongodb)
                            # -----------------------------------------------------
                            result = make_document_from_response(
                                result_payload,
                                target,
                                addition_dict=addition_info,
                                protocol=protocol_name_like_filename)
                await asyncio.sleep(0.005)
                try:
                    writer.close()
                except BaseException:
                    pass
            except Exception as exp:
                # Any probe error becomes an error document for this target.
                result = create_error_template(target, str(exp))
                try:
                    future_connection.close()
                except Exception:
                    pass
                await asyncio.sleep(0.005)
                try:
                    writer.close()
                except Exception:
                    pass
        await self.send_result(result)
def parse_record(row: str) -> dict:
    """
    Re-parse one JSON text record into a dict shaped for MongoDB insertion.

    :param row: a raw JSON line
    :return: dict record for insert to MongoDB

    Example return records:

    First schema (value is an IPv4 address, stored as an int)
    {
        "timestamp" : NumberLong(1601597605),
        "type" : "a",                          - without changes from file
        "value" : NumberLong(416915262),       - Int represent of IPv4
        "tld" : "com",
        "domain" : "spectrum",
        "subdomain" : "024-217-159-062.res"
    }

    Second schema (value is not an IP; kept verbatim as value_str)
    {
        "timestamp" : NumberLong(1601652544),
        "type" : "cname",
        "tld" : "be",
        "domain" : "stylingcorner",
        "subdomain" : "mail",
        "value_str" : "stylingcorner.be"
    }
    """

    def _to_int64(raw) -> Int64:
        # Timestamps become 64-bit ints; unparsable input yields None.
        try:
            return Int64(raw)
        except:
            pass

    def _ip_to_int64(raw) -> Int64:
        # IPv4 dotted-quad -> 64-bit int; non-IP values yield None.
        try:
            return Int64(IPv4Address(raw))
        except:
            pass

    def _split_domain(raw):
        # "a.b.c.tld" -> (tld, registered domain, remaining subdomain part).
        try:
            parts = raw.split('.')
            tld, domain = parts[-1], parts[-2]
            subdomain = raw[:-len(tld) - len(domain) - 1].strip('.')
            return tld, domain, subdomain
        except:
            pass

    record = {}
    try:
        source: dict = ujson_loads(row)
        converters = {
            'timestamp': _to_int64,
            'name': _split_domain,
            'type': None,
            'value': _ip_to_int64,
            'domain': _split_domain
        }
        for field, converter in converters.items():
            if field in source:
                record[field] = converter(source[field]) if converter else source[field]
        if 'name' in record:
            name_parts = record.pop('name')
            if name_parts:
                record['tld'] = name_parts[0]
                record['domain'] = name_parts[1]
                record['subdomain'] = name_parts[2]
        # No IP parsed from 'value': keep the raw string instead
        # (raises KeyError into the outer except when 'value' is absent,
        # exactly as before).
        if not record['value']:
            record['value_str'] = source['value']
        # Drop every falsy field from the final record.
        for field in list(record.keys()):
            if not record[field]:
                record.pop(field)
    except:
        pass
    return record