def test_adds_and_returns_valid_events():
    """End-to-end check: POST an event through /collect, then confirm
    /view_all returns it with exactly the parameters that were sent.

    Fixes over the original: removes a duplicated `"request_uid" in item`
    assertion, renames the typo'd local `requies_uid`, and uses a distinct
    variable in the parameters comprehension instead of shadowing the
    outer loop variable `item`.
    """
    app = setup_application(force_recreate_database=True,
                            force_initialize_database=True)
    headers = {"access_token": DEMO_DEVICE_WRITE_TOKEN}
    expected_uid = ksuid.ksuid().toBase62()
    params = {
        "id": expected_uid,
        "foo": "bar",
        "bizz": "bazz",
        "boo": "poo",
        "t": "special",
    }

    resp = app.post("/collect", params=params, headers=headers)
    assert resp.status_code == 200
    assert resp.json is not None
    request_uid = resp.json["request"]["uid"]
    assert request_uid is not None

    headers = {"access_token": DEMO_DEVICE_READ_TOKEN}
    resp = app.get("/view_all", headers=headers)
    assert resp.status_code == 200
    assert resp.json is not None
    json_resp = resp.json
    assert "data" in json_resp
    json_data = json_resp["data"]
    assert isinstance(json_data, list)

    found = False
    for item in json_data:
        # Every returned event row must carry the full envelope.
        assert "request_uid" in item
        assert "request_id" in item
        assert "adding_dts" in item
        assert "method" in item
        assert "parameters" in item
        if item["request_uid"] == request_uid:
            # Rebuild the name->value mapping from the stored parameters
            # and require it to match what was posted.
            stored = {p["name"]: p["value"] for p in item["parameters"]}
            assert stored == params
            found = True
            break
    assert found
def save_response_with_id(self, response, existing_id=None):
    """Persist *response* as JSON in Redis with a one-hour TTL.

    Args:
        response: JSON-serializable object to store.
        existing_id: key to store under; a fresh ksuid string is generated
            when falsy.

    Returns:
        The id the response was stored under, or None on failure.

    Fixes over the original: narrows the bare ``except:`` to
    ``except Exception`` (so SystemExit/KeyboardInterrupt propagate),
    drops the unused ``status`` local, and replaces the redundant
    ``"{0}".format(...)`` wrappers with ``str(...)``.
    """
    try:
        if not existing_id:
            existing_id = str(ksuid())
        key = str(existing_id)
        key_ttl = 60 * 60  # one hour, in seconds
        self.redis_client.set(key, json.dumps(response))
        self.redis_client.expire(key, key_ttl)
        return existing_id
    except redis.ConnectionError:
        logger_object.log_message(ERROR, error_logging(sys.exc_info()))
        # Rebuild the client so the next call gets a fresh connection
        # from the pool instead of reusing the broken one.
        self.redis_client = redis.StrictRedis(
            connection_pool=self.get_redis_pool(self.db))
    except Exception:
        logger_object.log_message(ERROR, error_logging(sys.exc_info()))
    return None
def test_integration_test():
    """Round-trip a ksuid through its bytes and base62 encodings and verify
    every representation survives both round trips.

    Fixes a copy-paste bug in the original: after decoding ``uid3`` from
    base62, one assertion compared ``uid1.bytes()`` against ``uid2.bytes()``
    instead of ``uid3.bytes()``.
    """
    uid1 = ksuid()

    raw = uid1.toBytes()
    assert raw is not None
    uid2 = ksuid.fromBytes(raw)
    assert str(uid1) == str(uid2)
    assert uid1.bytes() == uid2.bytes()
    assert uid1.toBytes() == uid2.toBytes()
    assert uid1.toBase62() == uid2.toBase62()

    b62 = uid1.toBase62()
    uid3 = ksuid.fromBase62(b62)
    assert str(uid1) == str(uid3)
    assert uid1.bytes() == uid3.bytes()  # was uid2 in the original
    assert uid1.toBytes() == uid3.toBytes()
    assert uid1.toBase62() == uid3.toBase62()

    assert uid1.bytes() is not None
def export_hits(self, hits):
    """Dump *hits* to a ksuid-named jsonlines (ndjson) file and, when an
    S3 client is configured, optionally gzip-tar it, upload it, and remove
    the local copies.
    """
    file_name = ksuid().toBase62()
    # jsonlines/ndjson format: one compact JSON document per line,
    # so no indent or pretty-printing here.
    with jsonlines.open(file_name, mode='w') as writer:
        for hit in hits:
            writer.write(hit)

    if not self.s3:
        # No S3 target configured: leave the local file in place.
        return

    if self.compress:
        plain_file = file_name
        file_name = plain_file + ".tar.gz"
        archive = tarfile.open(file_name, mode='w:gz')
        try:
            archive.add(plain_file)
        finally:
            archive.close()
        os.remove(plain_file)

    self.s3.meta.client.upload_file(file_name, self.s3_bucket, file_name)
    os.remove(file_name)
"dcn_name": "Third DCN (W/O)", "dcn_description": "Third DCN for demo configuration (write only)", "devices": [{ "dcd_name": "Third device (W/O)", "dcd_description": "Third device for demo configuration (W/O)", "write_token": "rYkw7ohCQOD33VqSA1CyAWX0ey", "read_token": None }, { "dcd_name": "Fourth device (W/O)", "dcd_description": "Fourth device for demo configuration (W/O)", "write_token": "rYl62gSLXLsalya2mNzO1atFZ9", "read_token": None }] }] POSSIBLE_TEST_PARAMS_NAME = [ "foo", "boo", "zoo", "boom", "badum", "tsss", "test", "temp", "alfa", "bravo", "charlie", "units" ] POSSIBLE_PARAMS_VALUE = [ "value", "rpx", (lambda: str(ksuid.ksuid())), (lambda: ksuid.ksuid().toBase62()), (lambda: str(random.randint(1, 1000000000))), (lambda: random.choice(["true", "false"])), (lambda: random.choice(["C", "F"])), (lambda: str(round(random.uniform(-20, 40), 2))) ]
def get_base62_ksuid():
    """Generate a fresh ksuid and return its base62 string encoding."""
    fresh_uid = ksuid.ksuid()
    return fresh_uid.toBase62()
def get_string_ksuid():
    """Generate a fresh ksuid and return its default string form."""
    fresh_uid = ksuid.ksuid()
    return str(fresh_uid)
def str_ksuid():
    """Return a newly generated ksuid as a plain string.

    The import is kept function-local, matching the original, so the module
    loads even when the ksuid package is absent until this is called.
    """
    from ksuid import ksuid
    new_uid = ksuid()
    return str(new_uid)
def parse_swagger_spec(file_name, blacklist):
    """Parse a Swagger 2.0 / OpenAPI 3 spec file into the internal schema.

    The format (JSON vs YAML) is chosen from the file name. Raises Exception
    for unparseable or non-dict specs and for blacklisted hosts; returns
    None when no usable paths were extracted.

    Fixes over the original:
    - ``spec.get("servers", [])[0]`` raised IndexError when ``servers`` was
      missing or empty; now guarded.
    - the ``paths is None`` check was unreachable after the
      ``type(paths) != dict`` raise; folded into one isinstance check.
    - each HTTP method overwrote ``custom_schema["paths"][full_path]``
      wholesale, so a path with several methods kept only the last one;
      now merged with ``setdefault``.
    """
    with open(file_name) as f:
        content = f.read()
    try:
        spec = None
        if "json" in file_name.lower():
            spec = json.loads(content)
        if "yaml" in file_name.lower():
            spec = yaml.safe_load(content)
    except JSONDecodeError:
        raise Exception(
            f"Failed to parse invalid spec (invalid JSON): {file_name}")
    except yaml.YAMLError:
        raise Exception(
            f"Failed to parse invalid spec (invalid YAML): {file_name}")
    if not isinstance(spec, dict):
        raise Exception(
            f"Failed to parse invalid spec (invalid spec, invalid format): {file_name}"
        )

    host = None
    security_definitions = None
    if spec.get("openapi") is not None:
        # OpenAPI 3: first server entry supplies the host URL, if present.
        servers = spec.get("servers") or []
        if servers and isinstance(servers[0], dict):
            host = servers[0].get("url")
        security_definitions = parse_security_definitions(spec)
    if spec.get("swagger") is not None:
        host = spec.get("host")
        if isinstance(host, list) and len(host) > 0:
            host = host[0]
        security_definitions = parse_security_schemes(spec)
    if host is not None and any(_host in host for _host in blacklist):
        raise Exception(
            f"Failed to parse invalid spec (blacklisted host): {file_name}")

    paths = spec.get("paths", {})
    if not isinstance(paths, dict):
        raise Exception(
            f"Failed to parse invalid spec (invalid spec, missing paths): {file_name}"
        )

    base_path = spec.get('basePath', '/')
    if base_path is None:
        raise Exception(
            f"Failed to parse invalid spec (invalid spec, missing basePath)")
    # don't allow go templated base path, default to /
    if "{{." in base_path:
        base_path = '/'

    custom_schema = {
        "ksuid": str(ksuid.ksuid()),
        "url": host,
        "securityDefinitions": security_definitions,
        "paths": {}
    }
    for path in paths:
        if isinstance(paths[path], str):
            continue
        full_path = f"{base_path}{path}".replace('//', '/')
        extra_path_params = resolve_extra_path_params(full_path)
        try:
            for method, endpoint_data in paths[path].items():
                if not isinstance(endpoint_data, dict):
                    continue
                path_params = endpoint_data.get("parameters", [])
                if not isinstance(path_params, list):
                    continue
                description = endpoint_data.get("description", None)
                if description is None:
                    description = endpoint_data.get("summary", "")
                # endpoint_data is spec["paths"][path][method], so read the
                # operationId from it directly.
                operation_id = endpoint_data.get("operationId", None)
                consumes = endpoint_data.get("consumes", None)
                if consumes is None:
                    consumes = []
                parameters = [
                    parameter
                    for parameter in (
                        resolve_parameter(spec, consumes, param_fields)
                        for param_fields in path_params)
                    if parameter is not None
                ]
                parameters.extend(extra_path_params)

                # seen_hashes is module-level state shared across calls —
                # TODO(review): confirm it is intentionally cross-file.
                seen_route_hashes = seen_hashes.get(path, {}).get(method, [])
                # Skip this spec entirely if circular references make the
                # parameter list unserializable.
                try:
                    json_params = json.dumps(
                        parameters, sort_keys=True).encode('utf-8')
                    param_hash = hashlib.md5(json_params).hexdigest()
                except ValueError:
                    logging.warning(
                        f"Failed to parse invalid spec (circular references): {file_name}"
                    )
                    return
                if param_hash in seen_route_hashes:
                    raise ParserSpecPathDuplicateException
                seen_route_hashes.append(param_hash)
                if seen_hashes.get(path, None) is None:
                    seen_hashes[path] = {}
                seen_hashes[path][method] = seen_route_hashes

                # Merge per-method so GET/POST/... on one path all survive.
                custom_schema["paths"].setdefault(full_path, {})[method] = {
                    "description": description,
                    "operationId": operation_id,
                    "parameters": parameters,
                    "consumes": consumes,
                    "produces": endpoint_data.get("produces", None),
                }
        except ParserSpecPathDuplicateException:
            continue
        except RecursionError:
            continue

    if len(custom_schema.get("paths", [])) == 0:
        return
    return custom_schema