def test_json_init(module_name):
    """Load the input/output JSON5 fixtures for *module_name* into ``server``
    and validate them.

    Reads ``../input_json/<test.name>/<module_name>.json5`` and the matching
    file under ``../output_json`` into ``server.input_dict`` /
    ``server.output_dict``.  Every top-level key must start with ``test_``
    and both files must contain exactly the same set of test-case keys.

    :param module_name: base name (without extension) of the fixture files
    :return: True when both fixtures load and match, False otherwise
    """
    check_name = "test_"
    base_dir = os.path.dirname(__file__)
    # FIX: paths were previously built by string concatenation inside a
    # single-argument os.path.join ("dirname + './../input_json/...'"),
    # which produces an invalid component such as "pkg./.." whenever
    # dirname is non-empty.  Join the components properly instead.
    input_file = os.path.join(base_dir, "..", "input_json", test.name,
                              module_name + ".json5")
    output_file = os.path.join(base_dir, "..", "output_json", test.name,
                               module_name + ".json5")

    def _load(path):
        # Parse one JSON5 fixture file.
        with open(path, 'r', encoding="utf8") as load_f:
            return json5.loads(load_f.read())

    def _keys_ok(data, label):
        # Every top-level key must start with "test_".
        for key in data:
            if not str(key).startswith(check_name):
                logging.error("%s not start with test_", label)
                return False
        return True

    server.input_dict = _load(input_file)
    if not _keys_ok(server.input_dict, "input_dict"):
        return False
    server.output_dict = _load(output_file)
    if not _keys_ok(server.output_dict, "output_dict"):
        return False
    # input and output must describe exactly the same test cases
    if len(server.input_dict) != len(server.output_dict):
        return False
    for key in server.input_dict:
        if key not in server.output_dict:
            logging.error("input_json is different with output_json")
            return False
        logging.info("test case:%s", key)
    return True
def parse_config(game_dir, language, progress):
    """Parse the game's bundled album/song configuration into Song objects.

    Loads the "others" unity bundle (and the language-specific bundle when a
    localization suffix exists), overlays localized album/song entries onto
    the base entries, and builds one Song per track.

    :param game_dir: root directory of the game installation
    :param language: language key looked up in LANGUAGES; may have no mapping
    :param progress: callback invoked with a completion percentage (0-100)
    :return: list of Song objects sorted by (album_number, track_number)
    """
    language_suffix = LANGUAGES.get(language)
    with contextlib.ExitStack() as stack:
        others_path = os.path.join(game_dir, CONFIGS_DIR, "others")
        others_file = stack.enter_context(open(others_path, "rb"))
        others_bundle = unitypack.load(others_file)
        if language_suffix is not None:
            language_path = os.path.join(game_dir, CONFIGS_DIR,
                                         language_suffix.lower())
            language_file = stack.enter_context(open(language_path, "rb"))
            language_bundle = unitypack.load(language_file)
        # find and parse albums JSON
        albums_data = find_asset(others_bundle, "TextAsset", "albums")
        albums_json = json5.loads(albums_data.script)
        if language_suffix is not None:
            l_albums_data = find_asset(language_bundle, "TextAsset",
                                       "albums_" + language_suffix)
            l_albums_json = json5.loads(l_albums_data.script)
        else:
            # No localization: overlay empty dicts so update() is a no-op.
            l_albums_json = [{}] * len(albums_json)
        songs = []
        # find and parse individual ALBUM* JSONs
        for album_num, (album_entry, l_album_entry) in enumerate(
                zip(albums_json, l_albums_json), 1):
            album_entry.update(l_album_entry)
            if not album_entry["jsonName"]:
                # Just as Planned is listed as an album without a jsonName
                continue
            # FIX: the original used lstrip("ALBUM"), which strips any of the
            # characters A/L/B/U/M rather than the literal prefix, so a name
            # like "ALBUMB2" would also lose the leading "B".  Slice off the
            # exact prefix instead.
            album_number = int(album_entry["jsonName"][len("ALBUM"):])
            entry_data = find_asset(others_bundle, "TextAsset",
                                    album_entry["jsonName"])
            entry_json = json5.loads(entry_data.script)
            if language_suffix is not None:
                asset_name = album_entry["jsonName"] + "_" + language_suffix
                l_entry_data = find_asset(language_bundle, "TextAsset",
                                          asset_name)
                l_entry_json = json5.loads(l_entry_data.script)
            else:
                l_entry_json = [{}] * len(entry_json)
            for track_num, (song_entry, l_song_entry) in enumerate(
                    zip(entry_json, l_entry_json), start=1):
                song_entry.update(l_song_entry)
                songs.append(
                    Song(
                        title=song_entry["name"],
                        artist=song_entry["author"],
                        album_number=album_number,
                        album_name=album_entry["title"],
                        track_number=track_num,
                        track_total=len(entry_json),
                        music_name=song_entry["music"],
                        cover_name=song_entry["cover"],
                    ))
            progress(album_num / len(albums_json) * 100)
        return sorted(songs,
                      key=lambda song: (song.album_number, song.track_number))
def check_fail(self, s, err=None):
    """Assert that parsing *s* raises ValueError.

    When *err* is given, the exception message must equal it exactly.
    """
    try:
        json5.loads(s)
    except ValueError as exc:
        if err:
            self.assertEqual(err, str(exc))
    else:
        self.fail()  # pragma: no cover
def load_config(path: str) -> OrderedDict:
    """Load the configuration file.

    An explicit *path* is opened directly; otherwise the first existing
    default location is used.  ``./config/config.local.json`` is then
    overlaid on top of (or substituted for) whatever was loaded.

    :param path: explicit config path, or a falsy value to use the defaults
    :raises RuntimeError: when no configuration file can be found
    :return: parsed configuration with key order preserved
    """
    config = None
    if path:
        with open(path, 'r', encoding='utf-8') as fp:
            config = loads(fp.read(), object_pairs_hook=OrderedDict)
    else:
        # FIX: the search loop previously rebound the *path* parameter.
        for candidate in ('./config/config.json', './config.json',
                          '/etc/BiliExp/config.json'):
            if os.path.exists(candidate):
                with open(candidate, 'r', encoding='utf-8') as fp:
                    config = loads(fp.read(), object_pairs_hook=OrderedDict)
                break
    # FIX: the original reused one variable for both the local-config path
    # and its parsed contents; use distinct names for clarity.
    local_path = './config/config.local.json'
    if os.path.exists(local_path):
        with open(local_path, 'r', encoding='utf-8') as fp:
            local_config = loads(fp.read(), object_pairs_hook=OrderedDict)
        if config is None:
            config = local_config
        else:
            # Local values override the base configuration.
            config.update(local_config)
    if config is None:
        raise RuntimeError('未找到配置文件')
    return config
def _CollectDataFromPage(self, num):
    """Collect product information from one listing page.

    :param num: page number to scrape
    :return: list of [brand, name, price, id, screen size, resolution,
             matrix type, smartTV] rows for products on the page
    """
    # FIX: renamed from "list"/"id", which shadowed builtins.
    products = []
    if self._Connect_wPage(num):
        product_list = self.soup_euro.find("div", {"id": "product-list"})
        product_script = product_list.find_all("script",
                                               {"type": "text/javascript"})
        # The product data lives inside a JS call; grab the (...) argument.
        norm = str(product_script).replace("\t", "").replace("\n", "")
        data = re.search(r'\((.*?)\)', norm)
        js_script = json5.loads(data.group(1))
        js_prod = json5.loads(js_script['products'])
        for tv in js_prod:
            js = json5.loads(tv)
            brand = js['brand']
            name = js['name']
            price = js['price']
            product_id = js['id']
            link = js['link']
            try:
                new_resp = requests.get(url=self.url_euro_base + link)
                soup = bs(new_resp.content, 'html.parser')
                s_size, resolution, matrixType, smartTV = \
                    self._GetProductData(soup)
                products.append([brand, name, price, product_id, s_size,
                                 resolution, matrixType, smartTV])
            except Exception:
                # FIX: was a bare "except:"; keep the best-effort behavior
                # but stop swallowing SystemExit/KeyboardInterrupt.
                self.licznik_niedostepnych += 1
    else:
        # Page could not be reached; count it as unavailable.
        self.licznik_niedostepnych += 1
    return products
def test_numbers(self):
    """Numeric literals: decimal, hex, float, named values, syntax errors."""
    cases = [
        # decimal literals
        ('1', 1), ('-1', -1), ('+1', 1),
        # hex literals
        ('0xf', 15), ('0xfe', 254), ('0xfff', 4095),
        ('0XABCD', 43981), ('0x123456', 1193046),
        # floats
        ('1.5', 1.5), ('1.5e3', 1500.0), ('-0.5e-2', -0.005),
        # names
        ('Infinity', float('inf')), ('-Infinity', float('-inf')),
    ]
    for source, expected in cases:
        self.check(source, expected)
    # NaN never compares equal to itself, so test it via math.isnan.
    self.assertTrue(math.isnan(json5.loads('NaN')))
    self.assertTrue(math.isnan(json5.loads('-NaN')))
    # syntax errors
    self.check_fail('14d', '<string>:1 Unexpected "d" at column 3')
def test_parse_constant_nan():
    """json5 must route NaN through parse_constant, matching json's result."""
    json_string = """{"foo": NaN}"""
    expected = {'foo': 'Something Silly NaN'}
    assert json5.loads(json_string, parse_constant=const_to_silly) == expected
    via_json = json.loads(json_string, parse_constant=const_to_silly)
    assert json5.loads(json_string, parse_constant=const_to_silly) == via_json
def test_parse_constant_negative_infinity():
    """json5 must route -Infinity through parse_constant like json does."""
    json_string = """{"foo": -Infinity}"""
    expected = {'foo': 'Something Silly -Infinity'}
    assert json5.loads(json_string, parse_constant=const_to_silly) == expected
    via_json = json.loads(json_string, parse_constant=const_to_silly)
    assert json5.loads(json_string, parse_constant=const_to_silly) == via_json
def test_json5_loads(input_object):
    """Objects dumped by json or json5 must parse back via json5.loads."""
    from_json = json5.loads(json.dumps(input_object))
    from_json5 = json5.loads(json5.dumps(input_object))
    assert from_json == input_object
    assert from_json5 == input_object
def test_object_roundtrip(self, input_object):
    """Round-trip: json5 parses both json- and json5-dumped forms back."""
    via_json = json5.loads(json.dumps(input_object))
    via_json5 = json5.loads(json5.dumps(input_object))
    assert via_json == input_object
    assert via_json5 == input_object
def test_arrays(self):
    """Array parsing, whitespace tolerance, and a leading-comma error."""
    for source, expected in (('[]', []),
                             ('[0]', [0]),
                             ('[0,1]', [0, 1]),
                             ('[ 0 , 1 ]', [0, 1])):
        self.check(source, expected)
    # A leading comma must be rejected with a position-specific message.
    try:
        json5.loads('[ ,]')
    except ValueError as exc:
        self.assertIn('Unexpected "," at column 3', str(exc))
    else:
        self.fail()
def load_config(path: str) -> OrderedDict:
    """Load the configuration file.

    An explicit *path* is opened directly; otherwise the first existing
    default location wins.

    :raises RuntimeError: when no configuration file is found
    """
    if path:
        with open(path, 'r', encoding='utf-8') as fp:
            return loads(fp.read(), object_pairs_hook=OrderedDict)
    for candidate in ('./config/config.json', './config.json',
                      '/etc/BiliExp/config.json'):
        if os.path.exists(candidate):
            with open(candidate, 'r', encoding='utf-8') as fp:
                return loads(fp.read(), object_pairs_hook=OrderedDict)
    raise RuntimeError('未找到配置文件')
def cpush(to: str):
    """Push code to remote directory.

    pb cpush <remote>
    """
    try:
        # Load and validate the global config (backup locations etc.).
        gconfig = GLOBAL_CONFIG_FILE.read_text()
        gconfig = json5.loads(gconfig)
        gconfig = GlobalConfig.parse_obj(gconfig)
        # Locate the project's local config; its parent is the project root.
        lconfig = find_config(Path.cwd())
        project_dir = lconfig.parent
        lconfig = lconfig.read_text()
        lconfig = json5.loads(lconfig)
        lconfig = LocalConfig.parse_obj(lconfig)
        # Resolve which remote to push to: an unambiguous sole remote may be
        # implied, otherwise the name must be given and must exist.
        if to == "":
            if len(lconfig.remotes) == 1:
                remote = list(lconfig.remotes.keys())[0]
            else:
                raise click.UsageError("Multiple remotes available; must specify one.")
        else:
            if to in lconfig.remotes:
                remote = to
            else:
                raise click.UsageError(f"Remote {to} not defined in local config.")
        click.secho(f"Using remote: {remote}", fg="yellow")
        remote_dir = lconfig.remotes[remote]
        backup_dir = gconfig.backup_dir / lconfig.project / remote / "remote_backup"
        borg_repo = gconfig.backup_dir / lconfig.project / "borg_repo"
        borg_repo = cast(BorgRepo, borg_repo)
        # First run for this project/remote: create the backup dir and repo.
        if not backup_dir.is_dir():
            backup_dir.mkdir(0o700, parents=True, exist_ok=True)
        if not borg_repo.is_dir():
            borg_init(borg_repo)
        # Snapshot local state, pull the remote into a backup and snapshot
        # that too, so the rsync push below is always reversible.
        now = datetime.now().replace(microsecond=0).isoformat()
        borg_create(f"local-{now}", project_dir, borg_repo)
        rsync_pull(remote_dir, backup_dir)
        borg_create(f"{remote}-{now}", backup_dir, borg_repo)
        borg_prune(borg_repo)
        rsync_push(remote_dir, project_dir)
        click.secho("Code push finished completed successfully", fg="green")
    except (RuntimeError, FileNotFoundError, ValidationError) as e:
        click.secho(e, fg="red")
        sys.exit(1)
def refresh():
    """Re-query mstpctl and rebuild the per-port/bridge STP detail map."""
    import json5

    def _query(subcommand):
        # json5 tolerates mstpctl's not-quite-strict JSON output.
        return json5.loads(
            self._getter.execute(f"mstpctl --format json {subcommand} {self}"))

    for entry in _query("showportdetail") + _query("showbridge"):
        # Port entries are keyed by port name, bridge entries by bridge name.
        key = entry.get('port') if 'port' in entry else entry.get('bridge')
        self._stpDetails[key] = dict(entry)
def order():
    """Render the order page, persisting any submitted order to the database.

    Requires a logged-in session; otherwise redirects to the login page.
    The submitted 'sender' field is a "[{...}, {...}]"-style string whose
    last element carries the customer id and transport name.
    """
    if logged != 1:
        return redirect(url_for('login'))
    cur.execute('Select * from Customer')
    names = cur.fetchall()
    li = jsoncompilercust(names)
    cur.execute(
        'Select RSGID, Name,Brand, HSN_CODE, Price ,Size, Unit_of_Size, Quantity from Inventory'
    )
    goods = cur.fetchall()
    gli = jsoncompilergood(goods)
    got = request.form.get('sender')
    print('this is', got)
    order_list = []
    try:
        # Strip the outer brackets and re-split into per-object JSON strings.
        got = got[1:-3]
        for part in got.split('}, {'):
            order_list.append('{' + part + '}')
    except Exception as exc:
        # FIX: the original did print(Exception), which prints the exception
        # *class* rather than the actual error that occurred.
        print(exc)
    if order_list:
        bilno = bill_number_generator()
        # as the last one contains the customer id and transport name
        billdts = json.loads(order_list[-1])
        insert('billinfo',
               ['bill_no', 'RSCID', 'Transport_name', 'additional_cost'],
               [bilno, billdts['customer_id'], billdts['transport'],
                billdts['additional_cost']])
        for item in order_list[:-1]:
            # inserting order into database.
            record = json.loads(item)
            keys = list(record.keys())
            keys.append('bill_no')
            values = list(record.values())
            values.append(bilno)
            insert('bill', keys, values)
    return render_template('order.html', li=li, good=gli)
def is_json_str(self, str_to_check):
    """Very primitive test whether a passed-in string is (legal) JSON or not.

    :param str_to_check: string to examine
    :type str_to_check: str
    :return True/False
    :rtype bool
    """
    try:
        json5.loads(str_to_check)
        return True
    except JSONError:
        return False
def _get_settings(settings_dir, schema_name, schema):
    """
    Returns a tuple containing the raw user settings, the parsed user
    settings, and a validation warning for a schema.
    """
    parse_error = 'Failed loading settings (%s): %s'
    validation_warning = 'Failed validating settings (%s): %s'
    raw = '{}'
    settings = dict()
    warning = ''
    path = _path(settings_dir, schema_name, False, SETTINGS_EXTENSION)
    if os.path.exists(path):
        with open(path) as fid:
            try:  # to load and parse the settings file.
                raw = fid.read() or raw
                settings = json5.loads(raw)
            except Exception as e:
                raise web.HTTPError(500, parse_error % (schema_name, str(e)))
    # Validate the parsed data against the schema; a failure surfaces as a
    # warning and resets the raw text, rather than raising.
    if settings:
        validator = Validator(schema)
        try:
            validator.validate(settings)
        except ValidationError as e:
            warning = validation_warning % (schema_name, str(e))
            raw = '{}'
    return (raw, settings, warning)
def put(self, schema_name):
    """Handle a settings PUT request.

    The body must be JSON of the form {"raw": "<json5 text>"}; the inner
    text (which may contain comments) is validated against the schema for
    *schema_name* and then persisted verbatim.
    """
    overrides = self.overrides
    schemas_dir = self.schemas_dir
    settings_dir = self.settings_dir
    settings_error = 'No current settings directory'
    invalid_json_error = 'Failed parsing JSON payload: %s'
    invalid_payload_format_error = 'Invalid format for JSON payload. Must be in the form {\'raw\': ...}'
    validation_error = 'Failed validating input: %s'
    if not settings_dir:
        raise web.HTTPError(500, settings_error)
    raw_payload = self.request.body.strip().decode(u'utf-8')
    try:
        # The outer envelope is strict JSON; the user settings inside it
        # are json5 so comments/trailing commas survive.
        raw_settings = json.loads(raw_payload)['raw']
        payload = json5.loads(raw_settings)
    except json.decoder.JSONDecodeError as e:
        raise web.HTTPError(400, invalid_json_error % str(e))
    except KeyError as e:
        raise web.HTTPError(400, invalid_payload_format_error)
    # Validate the data against the schema.
    schema = _get_schema(schemas_dir, schema_name, overrides)
    validator = Validator(schema)
    try:
        validator.validate(payload)
    except ValidationError as e:
        raise web.HTTPError(400, validation_error % str(e))
    # Write the raw data (comments included) to a file.
    path = _path(settings_dir, schema_name, True, SETTINGS_EXTENSION)
    with open(path, 'w') as fid:
        fid.write(raw_settings)
    self.set_status(204)
def get_hdl_db(path: str) -> Dict:
    """Load the HDL database at *path*, falling back to the default DB."""
    db_file = Path(path)
    if not db_file.exists():
        print("warn")
        return get_default_hdl_db()
    return json.loads(db_file.read_text())
def check_sdist(pid: PackageID, dirpath: Path) -> None:
    """Validate the generated source-distribution directory for *pid*.

    Ensures a package JSON manifest exists, declares the expected name and a
    matching semantic version, and that the package ships either a src/ or
    include/ directory.  Raises RuntimeError on any violation.
    """
    manifest = None
    for fname in ('package.json', 'package.json5', 'package.jsonc'):
        candidate = dirpath / fname
        if candidate.is_file():
            manifest = candidate
            break
    if manifest is None:
        raise RuntimeError(
            f'Port for {pid} did not produce a package JSON manifest file')
    content = json5.loads(manifest.read_text())
    if not isinstance(content, dict) or 'name' not in content \
            or 'version' not in content:
        raise RuntimeError(
            f'Package manifest for {pid} is invalid (Got: {content})')
    try:
        manver = VersionInfo.parse(content['version'])
    except ValueError as e:
        raise RuntimeError(
            f'"version" for {pid} is not a valid semantic version (Got {content["version"]})'
        ) from e
    if content['name'] != pid.name:
        raise RuntimeError(
            f'Package manifest for {pid} declares different name "{content["name"]}'
        )
    if manver != pid.version:
        raise RuntimeError(
            f'Package manifest for {pid} declares a different version [{manver}]'
        )
    has_src = dirpath.joinpath('src').is_dir()
    has_include = dirpath.joinpath('include').is_dir()
    if not has_src and not has_include:
        raise RuntimeError(
            f'Package {pid} does not contain either a src/ or include/ directory'
        )
    print(f'Package {pid} is OK')
def launch(launch_json: pathlib.Path, index: int = 0, open_file: str = '') -> None:
    """Run the configuration at *index* from a VS Code launch.json file.

    ${...} variables are substituted first: env:NAME from the environment,
    workspaceFolder / workspaceRoot as the parent of the .vscode directory,
    and file as *open_file*.
    """
    src = launch_json.read_text(encoding='utf-8')
    workspace = str(launch_json.parent.parent).replace('\\', '/')

    def substitute(match):
        name = match.group(1)
        if name.startswith('env:'):
            # NOTE(review): os.getenv may return None for an unset variable,
            # which would make .replace raise — presumably the variables
            # referenced are always set; confirm with real launch files.
            return os.getenv(name[4:]).replace('\\', '/')
        if name in ('workspaceFolder', 'workspaceRoot'):
            return workspace
        if name == 'file':
            return open_file
        raise Exception(f'unknown variable ${{{name}}}')

    expanded = re.sub(r'\$\{([^}]+)\}', substitute, src)
    parsed = json5.loads(expanded)
    conf = Configuration(**parsed['configurations'][index])
    conf.launch()
async def api_profile() -> Union[str, Response]:
    """Read or write profile JSON directly"""
    assert core is not None
    # "layers" selects which view of the settings a GET returns.
    layers = request.args.get("layers", "all")
    if request.method == "POST":
        # Ensure that JSON is valid
        profile_json = json5.loads(await request.data)
        # NOTE(review): presumably strips entries already present in the
        # system defaults before persisting — confirm recursive_remove's
        # contract.
        recursive_remove(core.profile.system_json, profile_json)
        profile_path = Path(core.profile.write_path("profile.json"))
        with open(profile_path, "w") as profile_file:
            json.dump(profile_json, profile_file, indent=4)
        msg = f"Wrote profile to {profile_path}"
        logger.debug(msg)
        return msg
    if layers == "defaults":
        # Read default settings
        return jsonify(core.defaults)
    if layers == "profile":
        # Local settings only
        profile_path = Path(core.profile.read_path("profile.json"))
        return await send_file(profile_path)
    # Any other value (including the default "all"): full profile JSON.
    return jsonify(core.profile.json)
def put(self, schema_name):
    """Persist raw JSON5 settings for *schema_name* after validating them
    against the extension's schema.  Responds 204 on success.
    """
    overrides = self.overrides
    schemas_dir = self.schemas_dir
    settings_dir = self.settings_dir
    settings_error = 'No current settings directory'
    validation_error = 'Failed validating input: %s'
    if not settings_dir:
        raise web.HTTPError(500, settings_error)
    # The body is the user's json5 text, stored verbatim below so that
    # comments survive the round trip.
    raw = self.request.body.strip().decode('utf-8')
    # Validate the data against the schema.
    schema, _ = _get_schema(schemas_dir, schema_name, overrides, labextensions_path=self.labextensions_path)
    validator = Validator(schema)
    try:
        validator.validate(json5.loads(raw))
    except ValidationError as e:
        raise web.HTTPError(400, validation_error % str(e))
    # Write the raw data (comments included) to a file.
    path = _path(settings_dir, schema_name, True, SETTINGS_EXTENSION)
    with open(path, 'w', encoding='utf-8') as fid:
        fid.write(raw)
    self.set_status(204)
def AddJSONToModel(self, json_string, in_file=None):
    '''Adds a |json_string| with variable definitions to the model.

    See *test.json5 files for a defacto format reference.

    |in_file| is used to populate a file-to-context map.
    '''
    # TODO(calamity): Add allow_duplicate_keys=False once pyjson5 is
    # rolled.
    data = json5.loads(json_string,
                       object_pairs_hook=collections.OrderedDict)
    # Use the generator's name to get the generator-specific context from
    # the input.
    generator_context = data.get('options', {}).get(self.GetName(), None)
    self.in_file_to_context[in_file] = generator_context
    # Compile the name pattern once instead of on every loop iteration.
    name_re = re.compile('^[a-z0-9_]+$')
    for name, value in data['colors'].items():
        if not name_re.match(name):
            raise ValueError(
                '%s is not a valid variable name (lower case, 0-9, _)' % name)
        self.AddColor(name, value, generator_context)
    return generator_context
def _get_user_settings(settings_dir, schema_name, schema):
    """
    Returns a dictionary containing the raw user settings, the parsed user
    settings, and a validation warning for a schema.
    """
    path = _path(settings_dir, schema_name, False, SETTINGS_EXTENSION)
    raw = '{}'
    settings = {}
    warning = ''
    validation_warning = 'Failed validating settings (%s): %s'
    parse_error = 'Failed loading settings (%s): %s'
    # FIX: removed the unused os.stat call and the dead last_modified /
    # created locals — nothing in the return value used them (the docstring
    # previously promised "file times" that were never returned).
    if os.path.exists(path):
        with open(path) as fid:
            try:  # to load and parse the settings file.
                raw = fid.read() or raw
                settings = json5.loads(raw)
            except Exception as e:
                raise web.HTTPError(500, parse_error % (schema_name, str(e)))
        # Validate the parsed data against the schema.  A failure is
        # reported as a warning (with raw reset), not an error.
        if len(settings):
            validator = Validator(schema)
            try:
                validator.validate(settings)
            except ValidationError as e:
                warning = validation_warning % (schema_name, str(e))
                raw = '{}'
    return dict(raw=raw, settings=settings, warning=warning)
def main():
    """Entry point: load the config named on the command line, run the bot."""
    config_path = sys.argv[1]
    with open(config_path, mode='r', encoding="utf-8") as file:
        raw = file.read()
    bot = Bot(json5.loads(raw))
    bot.start()
def get_lean_config(self) -> Dict[str, Any]:
    """Reads the Lean config into a dict.

    :return: a dict containing the contents of the Lean config file
    """
    config_text = self.get_lean_config_path().read_text(encoding="utf-8")
    return json5.loads(config_text)
def read_config(instance_file, schema_type):
    """Read a configuration file and return its parsed contents.

    :param instance_file: path of the configuration file
    :param schema_type: 'js' to evaluate the file as JavaScript exposing a
        __config__ variable; anything else is parsed as JSON5
    :return: the parsed configuration object
    """
    # FIX: use a context manager so the file handle is closed — the
    # original leaked it via open(...).read().
    with open(instance_file, 'r') as fid:
        content = fid.read()
    print(content)
    if 'js' == schema_type:
        ctx = execjs.compile(content)
        return ctx.eval('__config__')
    return json5.loads(content)
def _fetch_provenance(self):
    """Fetch and parse the provenance file from the layer's storage.

    For the 'boss' protocol the cached provenance is returned unchanged.
    Otherwise the 'provenance' file is read and parsed as JSON5; when the
    file does not exist an empty provenance skeleton is used instead.

    :raises ValueError: when the provenance file exists but is not
        JSON5-decodable
    """
    if self.path.protocol == 'boss':
        return self.provenance
    with self._storage as stor:
        if stor.exists('provenance'):
            provfile = stor.get_file('provenance')
            provfile = provfile.decode('utf-8')
            try:
                # json5 tolerates comments/trailing commas in the file.
                provfile = json5.loads(provfile)
            except ValueError:
                raise ValueError(
                    red("""The provenance file could not be JSON decoded. Please reformat the provenance file before continuing. Contents: {}""".format(provfile)))
        else:
            # No file yet: start from an empty provenance skeleton.
            provfile = {
                "sources": [],
                "owners": [],
                "processing": [],
                "description": "",
            }
    return self._cast_provenance(provfile)
def test_encoding(self):
    """An explicitly-encoded latin-1 input decodes to the expected text."""
    # On Python 2 a plain str literal is already bytes.
    encoded = b'"\xf6"' if sys.version_info[0] >= 3 else '"\xf6"'
    self.assertEqual(json5.loads(encoded, encoding='iso-8859-1'), u'\xf6')
def render(json_data, saltenv='base', sls='', **kws):
    '''
    Accepts JSON as a string or as a file object and runs it through the
    JSON parser.

    :rtype: A Python data structure
    '''
    if not isinstance(json_data, string_types):
        json_data = json_data.read()
    # Drop a leading shebang line if present.
    if json_data.startswith('#!'):
        newline = json_data.find('\n')
        json_data = json_data[newline + 1:]
    return json.loads(json_data) if json_data.strip() else {}
def check(self, s, obj):
    """Assert that json5 parses *s* to exactly *obj*."""
    parsed = json5.loads(s)
    self.assertEqual(parsed, obj)