def settings_from_file(config_path):
    """Loads and parses a parameter file.

    Parameters
    ===========
    config_path : str
        The path to the parameter file, formatted as json.

    Returns
    ===========
    settings : dict, A dictionary with the parameters
    """
    try:
        with open(config_path, 'r') as f:
            # json_minify strips comments, so the config may be commented JSON
            p = json.loads(json_minify(f.read()))
    except Exception as e:
        raise ValueError("Couldn't read config file: %s. Exception: %s" % (config_path, str(e)))

    # add default values for missing settings:
    with open(DEFAULTPATH, 'r') as f:
        defaults = json.loads(json_minify(f.read()))
    # settings = Settings(flat_merge(p, defaults))
    settings = Settings(recursive_merge(p, defaults))

    # ultimately overwrite settings from environ vars of the form PIE_{var}
    checked = []
    for k in settings:
        env_k = 'PIE_{}'.format(k.upper())
        if env_k in os.environ:
            # transform to target type and overwrite settings
            # NOTE(review): assumes every overridable key also exists in the
            # defaults file; a key present only in the user config would raise
            # KeyError here -- confirm that is intended
            settings[k] = type(defaults[k])(os.environ[env_k])
            checked.append(env_k)
    # any PIE_* variable that matched no known setting is treated as a typo
    # and aborts loading
    for env_k in os.environ:
        if env_k.startswith('PIE_') and env_k not in checked:
            raise ValueError(
                "Environment variable '{}' didn't match. Aborting!".format(
                    env_k))

    # store the config path too:
    settings.config_path = config_path

    if settings.verbose:
        print("\n::: Loaded Config :::\n")
        print(yaml.dump(dict(settings)))

    return merge_task_defaults(settings)
def __init__(self, confPath):
    """Initialize settings from one or more JSON config files.

    :param confPath: either a single path or a list of paths; with a list,
        later files overwrite keys from earlier ones via dict.update
    :type confPath: str or list
    """
    # normalize to a list so the single-path and multi-path cases share one loop
    paths = confPath if isinstance(confPath, list) else [confPath]
    for cp in paths:
        # load and store the configuration and update the object's dictionary;
        # `with` fixes the file-handle leak of the original open(...).read()
        with open(cp) as f:
            conf = json.loads(json_minify(f.read()))
        self.__dict__.update(conf)
def read_json_config(self):
    """Read the JSON config file named on the command line and return it parsed.

    Comments in the file are tolerated because the text is minified first.
    """
    with open(self.command_line_args.fname_json) as fh:
        raw_text = fh.read()
    return json.loads(json_minify(raw_text))
def main():
    # Seed provider payloads into the local payload API via HTTP POST.
    # NOTE(review): Python 2 code (print statements); the endpoint, auth
    # header and tokens are hard-coded below.
    import urllib3
    import json
    http = urllib3.PoolManager()
    # lets seed some provider payloads
    payloads = get_oos_payloads()
    print payloads
    for i in payloads:
        data = payloads[i]
        # strip comments/whitespace from the embedded payload string first
        data["payload"] = json_minify(data["payload"])
        data = json.dumps(payloads[i])
        print 'minified: {}'.format(data)
        encoded_data = data
        url = "http://localhost:8080/api/payload/seed"
        headers = {
            "Authorization": "Basic b3JkZXJtYXJrOm1vbmV5cHJpbnRpbmdtYWNoaW5l",
            "Cache-Control": "no-cache",
            "Content-Type": "application/json",
            "Postman-Token": "c9f67ec7-1c41-56eb-be8e-b3c5b57e03a3",
            "X-Appengine-Inbound-Appid": "ordermark-app",
            "ordermark-appid": "ordermark-app"
        }
        print data
        resp = http.request('POST', url, body=encoded_data, headers=headers)
        print resp.data
def getPermissions(permissionsFilePath): """ Return a dictionary of permissions from the given permissions.json file. Permissions that are not specified are set to their default values.""" # read permissions.json file with open(permissionsFilePath, 'r') as file_f: try: permissions = json.loads(json_minify.json_minify(file_f.read()), object_pairs_hook=collections.OrderedDict) except ValueError: sys.exit(permissionsFilePath + " is not valid json. " + allImagesMustHavePermissions) # Validate that the permissions are supported by this version of subuser. for permission in permissions.keys(): if not permission in permissionDefaults: sys.exit( "Error: the permission \"" + permission + "\" is not supported by your version of subuser. Try updating first." ) # Set permission defaults for permissions that are not explicitly specified in the permissions.json file if "basic-common-permissions" in permissions and permissions[ "basic-common-permissions"]: for basicCommonPermission in basicCommonPermissions: if not basicCommonPermission in permissions: permissions[basicCommonPermission] = True for permission, defaultValue in permissionDefaults.items(): if not permission in permissions: permissions[permission] = defaultValue return permissions
def __init__(self, confPath):
    """Load the JSON configuration at *confPath* and merge it into this
    object's attribute dictionary (each top-level key becomes an attribute).

    :param confPath: path to a (possibly commented) JSON config file
    """
    # `with` fixes the file-handle leak of the original open(...).read();
    # json_minify strips comments so commented JSON still parses
    with open(confPath) as f:
        conf = json.loads(json_minify(f.read()))
    self.__dict__.update(conf)
def load_json(self, f_name):
    """Read *f_name* and parse it as (possibly commented) JSON.

    :param f_name: path to the JSON file
    :returns: the decoded Python object
    """
    # minify is used so that we can place comments/documentation in the JSON
    # config file (which is normally invalid in JSON)
    with open(f_name, 'r') as f:
        raw = f.read()
    # FIX: dropped the explicit f.close() the original issued inside the
    # `with` block -- the context manager already closes the handle
    return json.loads(json_minify(raw))
def main(args):
    """Entry point: build a Splitter from parsed CLI arguments and run it.

    :param args: argparse-style namespace with .verbose, .settings (an
        already-open file object), .files, .table and .output attributes
    """
    # Parse the arguments
    if args.verbose:
        logging.getLogger(__name__).setLevel(logging.INFO)
    # args.settings is read as comment-tolerant JSON and wrapped in Settings
    spl = Splitter(settings=Settings(
        json.loads(json_minify.json_minify(args.settings.read()))),
                   files=args.files)
    spl.scan(table=args.table)
    # dispatch results only when an output target was given
    if args.output:
        spl.dispatch(args.output)
def create_from_config_file(cls, filename, single_root=True):
    """Build a Window from a JSON layout config file.

    :param filename: path to a (possibly commented) JSON config
    :param single_root: when True, refuse configs with more than one root region
    :returns: a Window wrapping the first root region, or None when
        single_root is True and several roots were found
    """
    # `with` fixes the file-handle leak of the original open(...).read()
    with open(filename) as f:
        conf = json.loads(json_minify(f.read()))
    R, S = build_instances(conf)
    root_region_dict = id_root_regions(R, S)
    if len(root_region_dict) > 1 and single_root:
        print(
            f"[WARNING] Too many root elements- please recheck your config file"
        )
        return None
    # take the first root region (dict preserves insertion order)
    return Window([x for x in root_region_dict.values()][0])
def handle(self, *args, **options):
    # Django management command: merge every installed app's package.json
    # into a single package.json written at the project root.
    # NOTE: changes the process working directory as a side effect.
    shutil.os.chdir(PROJECT_PATH)
    package = {}
    configs = django_apps.get_app_configs()
    for config in configs:
        package_path = os.path.join(config.path, 'package.json')
        try:
            with open(package_path) as data_file:
                # json_minify tolerates comments inside package.json
                data = json_decode(json_minify(data_file.read()))
        except IOError:
            # apps without a package.json are simply skipped
            continue
        deep_merge_dicts(package, data)
    with open('package.json', 'w') as outfile:
        json.dump(package, outfile)
def parse(self) -> Dict:
    """Parse the component JSON file into an {item: neighbors} mapping.

    An entry with a single 'NORMAL' state maps directly to that state's
    'COMMON' neighbors; otherwise the item maps to a dict keyed by the
    lower-cased state name.
    """
    with open(self.file_path, 'r') as fh:
        minified = json_minify(fh.read())

    result = {}
    for record in json.loads(minified):
        states = record['STATES']
        if len(states) == 1 and states[0]['STATE'] == 'NORMAL':
            mapping = states[0]['COMMON']
        else:
            mapping = {st['STATE'].lower(): st['COMMON'] for st in states}
        result[record['ITEM']] = mapping
    return result
def _load_config_file(self, config_file: str) -> None:
    """Load config key/value pairs from a file

    :param config_file: name of the config file. When only the filename is
        passed, this method searches the file inside the root folder where
        the app was started; if not found there, it falls back to the folder
        where this class file is stored. Alternatively, it can be a full path.
    :type config_file: str
    :raises ValueError: when the file cannot be found or parses to an empty dict
    :returns:
    """
    self._config = {}
    if not os.path.isfile(config_file):
        # Folder where this file is, can work also without the abspath,
        # but better for debug so full path is traced in the error
        self._logger.info(
            "Exact config file was not found, falling back to this class folder"
        )
        base_folder = os.path.abspath(os.path.dirname(__file__))
        full_config_path = os.path.join(
            base_folder, config_file)  # combine with the config file name
    else:
        full_config_path = config_file
    # Now it has the file and full path with configurations
    self._logger.info(
        "Loading configurating from {}".format(full_config_path))
    if os.path.isfile(full_config_path):
        with open(full_config_path, 'r') as f:
            # BUG FIX: the original re-opened the same file here with
            # open(full_config_path).read(), leaking a second handle;
            # read from the handle the `with` block already manages.
            # A json.JSONDecodeError is deliberately left unhandled so a
            # malformed config produces a clear server-side traceback.
            json_with_comment = f.read()
        self._config = json.loads(json_minify(json_with_comment))
    else:
        raise ValueError(
            "Cannot find configuration file {}".format(full_config_path))
    # Checks if the config files has real values
    if len(self._config.keys()) == 0:
        raise ValueError(
            "Empty configuration file {}".format(full_config_path))
def handle(self, *args, **options):
    """Merge each installed app's package.json into one package.json
    written to the transpile cache directory."""
    merged = {}
    for app_config in django_apps.get_app_configs():
        candidate = os.path.join(app_config.path, "package.json")
        try:
            with open(candidate) as fh:
                app_package = json.loads(json_minify(fh.read()))
        except IOError:
            # app ships no package.json -- skip it
            continue
        deep_merge_dicts(merged, app_package)
    if not os.path.exists(TRANSPILE_CACHE_PATH):
        os.makedirs(TRANSPILE_CACHE_PATH)
    target = os.path.join(TRANSPILE_CACHE_PATH, "package.json")
    with open(target, "w") as fh:
        json.dump(merged, fh)
def train_detectors(filename, force=True):
    """Train a dlib simple object detector for each object in the config.

    :param filename: path to a (possibly commented) JSON config whose
        "Objects" list names image/annotation paths and the output
        detector_filepath for each detectable
    :param force: when False, objects whose detector file already exists
        are skipped instead of retrained
    :returns: True on completion
    """
    # `with` fixes the file-handle leak of the original open(...).read()
    with open(filename) as f:
        conf = json.loads(json_minify(f.read()))
    train_options = dlib.simple_object_detector_training_options()
    validate_detectable_conf(conf)
    for detectable in conf.get("Objects", []):
        if not force:
            detector_path = detectable['detector_filepath']
            if exists(detector_path):
                # detector already trained; leave it untouched
                continue
        images, box_dict = parse_labelbox_boxes(
            detectable["image_path"], detectable["annotation_filepath"],
            [detectable["name"]])
        detector = dlib.train_simple_object_detector(
            images, box_dict.get(detectable.get("name")), train_options)
        detector.save(detectable['detector_filepath'])
    return True
def json_remove_comments(json_file):
    '''
    Remove comments from a json file

    :param json_file: JSON text (a string, or any iterable of string chunks)
    :return: comment-free JSON string
    '''
    try:
        # preferred path: the third-party json_minify package
        from json_minify import json_minify
        return json_minify(json_file)
    except ImportError:
        # Fallback when json_minify is not installed: join the input in one
        # pass (the original `json_out += line` loop was quadratic), then
        # strip comments and trailing commas with the local helpers.
        almost_json = remove_comments(''.join(json_file))
        proper_json = remove_trailing_commas(almost_json)
        return proper_json
def _load_tasks(self, tasks_file: str) -> None:
    """
    Load scheduler tasks from a JSON file.

    :param tasks_file: name of the file. Can be full path or, otherwise,
        same folder of this class is considered
    :type tasks_file: str
    :raises ValueError: when the tasks file cannot be found
    :return:
    """
    self._tasks = []
    if not os.path.isfile(tasks_file):
        # Folder where this file is, can work also without the abspath,
        # but better for debug so full path is traced in the error
        base_folder = os.path.abspath(os.path.dirname(__file__))
        full_tasks_path = os.path.join(
            base_folder, tasks_file)  # combine with the config file name
    else:
        full_tasks_path = tasks_file
    # Now if has the file and full path with configurations
    if os.path.isfile(full_tasks_path):
        with open(full_tasks_path, 'r') as f:
            # BUG FIX: the original re-opened the file here with
            # open(full_tasks_path).read(), leaking a second handle;
            # read from the handle the `with` block already manages
            json_with_comment = f.read()
        tasks = json.loads(json_minify(json_with_comment))
        # Converts into object; optional keys default to None
        for task_dict in tasks:
            task = SchedulerTask(
                task_dict['name'],
                #time.strptime(task_dict['when'], "%H:%M %Z"),
                task_dict['when'],
                task_dict['timezone'] if 'timezone' in task_dict else None,
                task_dict['intent'],
                task_dict['params'] if 'params' in task_dict else None,
                task_dict['surface_id'] if 'surface_id' in task_dict else None,
                task_dict['surface_channel_id']
                if 'surface_channel_id' in task_dict else None,
                task_dict['default_message']
                if 'default_message' in task_dict else None,
            )
            self._tasks.append(task)
    else:
        raise ValueError(
            "Cannot find tasks file {}".format(full_tasks_path))
def read_config_json(name, path_or_url):
    # Read a named JSON configuration from a local path or URL and return
    # the parsed value.
    # NOTE(review): Python 2 syntax (`except MongoctlException,e`).
    try:
        log_verbose("Reading %s configuration"
                    " from '%s'..." % (name, path_or_url))
        json_str = read_json_string(path_or_url)
        # minify the json/remove comments and sh*t
        json_str = json_minify(json_str)
        # json_util.object_hook revives BSON-style extended-JSON values
        json_val = json.loads(json_str, object_hook=json_util.object_hook)

        # an empty/None result means the file held nothing useful; an empty
        # list is still a valid config
        if not json_val and not isinstance(json_val, list):  # b/c [] is not True
            raise MongoctlException("Unable to load %s "
                                    "config file: %s" % (name, path_or_url))
        else:
            return json_val
    except MongoctlException, e:
        raise e
def read_config_json(name, path_or_url):
    # Read a named JSON configuration from a local path or URL and return
    # the parsed value.
    # NOTE(review): Python 2 syntax (`except MongoctlException, e`).
    try:
        log_verbose("Reading %s configuration"
                    " from '%s'..." % (name, path_or_url))
        json_str = read_json_string(path_or_url)
        # minify the json/remove comments and sh*t
        json_str = json_minify(json_str)
        # json_util.object_hook revives BSON-style extended-JSON values
        json_val = json.loads(json_str, object_hook=json_util.object_hook)

        # an empty/None result means the file held nothing useful; an empty
        # list is still a valid config
        if not json_val and not isinstance(json_val, list):  # b/c [] is not True
            raise MongoctlException("Unable to load %s "
                                    "config file: %s" % (name, path_or_url))
        else:
            return json_val
    except MongoctlException, e:
        raise e
def load_files():
    """Populate the module-level caches: stat objectives, player uuids,
    profile names fetched from Mojang's session server, and the known
    server locations list."""
    #caching stat objectives
    global uuids, names, skins, stat_list
    uuids = []
    names = []
    stat_list = []
    # FIX: use a context manager instead of rebinding the leaked handle
    # (the original did `f = open(...); f = f.read()...`)
    with open('statslist.txt', 'r') as fh:
        # strip all double quotes before splitting into objective names
        raw = fh.read().translate({ord(c): None for c in '"'})
    for item in raw.split(','):
        stat_list.append(item)
    #caching usernames to uuid
    files = glob.glob(str(os.path.join(STAT_FOLDER, '*.json')))
    for item in files:
        # last 41 chars of the path are "<36-char uuid>.json"
        filename = item[-41:]
        uuids.append(convert_uuid(filename.split('.json', 1)[0]))
    for item in uuids:
        try:
            url = "https://sessionserver.mojang.com/session/minecraft/profile/" + item
            response = requests.get(url)
            # BUG FIX: raise_for_status was referenced but never called
            response.raise_for_status()
            response = json.loads(response.text)
            # profile properties carry a base64-encoded JSON blob with the name
            response = json.loads(
                base64.b64decode(response['properties'][0]['value']))
            names.append(response['profileName'])
        except Exception:
            # best-effort: skip profiles that fail to resolve
            pass
    #load list of known locations on the server
    file = os.path.join(FOLDER, 'known_locations.json')
    with open(file, 'r') as f:
        json_data = json.loads(json_minify(f.read(None)))
    for entry in json_data:
        known_location = listcmd.KnownLocation.parse(entry)
        if known_location != None:
            known_locations[known_location.dimension].append(
                known_location)
def load_default_settings():
    """ Load built-in default settings """
    with open(DEFAULTPATH) as fh:
        raw = fh.read()
    defaults = Settings(json.loads(json_minify(raw)))
    return merge_task_defaults(defaults)
def template(self, index):
    """Run test case *index* (1-based): minify the input string, parse both
    it and the dedented expected text as JSON, and compare the objects."""
    raw_in, raw_expected = self.tests[index - 1]
    parsed = json.loads(json_minify(raw_in))
    wanted = json.loads(textwrap.dedent(raw_expected))
    self.assertEqual(parsed, wanted)
def _dict_list_to_option_str(dict_list):
    """Serialize *dict_list* to pretty JSON, then minify the result so it
    fits on a single command-line option string."""
    pretty = document_pretty_string(dict_list)
    return json_minify(pretty)
import serial
import time
from data import Data
from datetime import datetime, timezone
from random import randint
from json import load
from pathlib import Path
from json import loads
from json_minify import json_minify
from pathlib import Path
import re

# serial read timeout fallback (seconds)
timeout = 1

# Module-level config parsed from ../config.json relative to this file
# (json_minify allows comments in the file).
config = loads(json_minify(open(Path(__file__).absolute().parent.parent / 'config.json', 'r').read()))


# docs: http://www.priority1design.com.au/rfidrw-e-ttl.pdf
class sensor:
    # RFID reader wrapper around the serial device /dev/ttyAMA0.
    def __init__(self, rfid_record_time):
        # rfid_record_time doubles as the serial read timeout, i.e. how long
        # one read waits for a tag
        self.ser = serial.Serial(
            port="/dev/ttyAMA0",
            baudrate=9600,
            parity=serial.PARITY_NONE,
            stopbits=serial.STOPBITS_ONE,
            bytesize=serial.EIGHTBITS,
            timeout=rfid_record_time,
        )
        if config["rfid_tag_type"] == "EM4100":
            # EM4100 tag ids are 10 uppercase hex digits
            self.matcher = re.compile("[0-9A-F]{10}")
def __init__(self, confPath):
    """Load the (possibly commented) JSON config at *confPath* and expose
    each top-level key as an attribute of this object."""
    # `with` fixes the file-handle leak of the original open(...).read()
    with open(confPath) as f:
        conf = json.loads(json_minify(f.read()))
    self.__dict__.update(conf)
from pathlib import Path
from json import loads
from json_minify import json_minify

# Project layout anchors, resolved relative to this file.
ROOT_DIR = Path(__file__).absolute().parent.parent.parent
DATA_DIR = ROOT_DIR / 'data'
SRC_DIR = ROOT_DIR / 'src'

# Parsed, comment-tolerant project configuration.
# Path.read_text() fixes the file-handle leak of the original open(...).read().
CONFIG = loads(json_minify((DATA_DIR / 'config.json').read_text()))
def __init__(self, conf_path):
    """Load the (possibly commented) JSON config at *conf_path* and merge
    it into this object's attribute dictionary."""
    # load and store the configuration and update the object's dictionary;
    # `with` fixes the file-handle leak of the original open(...).read()
    with open(conf_path) as f:
        conf = json.loads(json_minify(f.read()))
    self.__dict__.update(conf)
region.add_state(state) #check sizes match if region.width is None or region.height is None: region.width, region.height = state.width, state.height elif region.width != state.width or region.height != state.height: print( f"[WARNING] Size Doesn't match for Region: {region.name} and related State:{state.name}\n" f"Region '{region.name}' has size ({region.width},{region.height}) and State '{state.name}' has size ({state.width},{state.height})" ) print("DONE") return Regions, States def id_root_regions(Regions, States): non_root_regions = [] [non_root_regions.extend(x.regions) for x in States.values()] root_regions = { name: obj for name, obj in Regions.items() if obj not in non_root_regions } return root_regions if __name__ == '__main__': filename = 'Window.json' conf = json.loads(json_minify(open(filename).read())) pprint(conf) R, S = build_instances(conf) print(id_root_regions(R, S)) #R, S = check_sizes(R,S)
# http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import json import pygame as pg from json_minify import json_minify from controller import Controller if __name__ == '__main__': with open('config.json') as json_file: config = json.loads(json_minify(json_file.read())) pg.init() screen = pg.display.set_mode( config['size'], pg.FULLSCREEN ) pg.display.set_caption(config['title']) clock = pg.time.Clock() controller = Controller(config, screen, clock) controller.run()
#! /usr/bin/env python import json import ConfigParser import os.path from json_minify import json_minify configpath = raw_input("Where is your cjdroute.conf file? ") parser=ConfigParser.SafeConfigParser() parser.add_section("options") parser.add_section("cjdns") try: config = json.loads(json_minify(open(configpath).read())) parser.set("cjdns","adminpassword", config["admin"]["password"]) parser.set("cjdns","adminport",config["admin"]["bind"].split(":")[1]) parser.set("cjdns","cjdnsip",config["ipv6"]) parser.set("cjdns","publicKey",config["publicKey"]) parser.set("cjdns","peeringport",config["interfaces"]["UDPInterface"]["bind"].split(":")[1]) print "CJDNS setting detected! Congrats, you just skipped most of the boring parts." except: print "Failed to parse " + configpath + ". Are you sure it's proper JSON?" print "Hit ctrl-c to quit if you wanna try and re-type that. If it's just invalid JSON, edit the config file yourself." raw_input("Hit enter if you know it's bad JSON and just wanna config it manually (this will create a blankish config file)") parser.set("cjdns","adminpassword","") parser.set("cjdns","adminport","11234") parser.set("cjdns","cjdnsip","Your CJDNS IP address") parser.set("cjdns","publicKey","Your public key") parser.set("cjdns","peeringport","Port that CJDNS listens for connections from peers on") importpath = raw_input("Where are the cjdns python libraries stored (hint: <cjdns git>/contrib/python): ") name = raw_input("What is your machine's name? Displayed to people nearby: ")
def load_json(path):
    """Parse the comment-tolerant JSON file at *path* and return the result.

    The second argument to json_minify is False -- presumably strip_space,
    so only comments are removed, not whitespace (TODO confirm against the
    json_minify version in use).
    """
    import json
    from json_minify import json_minify

    with open(path) as fh:
        text = fh.read()
    return json.loads(json_minify(text, False))
res = requests.get(url_yml) doc_type = "yaml" # Skip any repo that has no manifest if res.status_code != 200: continue # Attempt to parse yaml/json manifests print("Parsing %s for %s" % (doc_type, r.full_name)) manifest_data = None if doc_type == "json": import json try: manifest_data = json.loads(res.content) except json.decoder.JSONDecodeError: from json_minify import json_minify try: manifest_data = json.loads(json_minify(res.text)) except json.decoder.JSONDecodeError as e: error_apps[r.name] = e continue if doc_type == "yaml": import yaml manifest_data = yaml.load(res.text, Loader=yaml.FullLoader) # Check if openjdk extension is used if 'sdk-extensions' in manifest_data.keys(): exts = manifest_data['sdk-extensions'] for ext in exts: if ext.startswith('org.freedesktop.Sdk.Extension.openjdk'): java_apps[r.name] = r.clone_url break java_apps_sorted = dict(sorted(java_apps.items()))
def load_config():
    """Build a Config from the JSON file named by $CONFIG_PATH (defaulting
    to 'config.json'); comments in the file are tolerated."""
    path = os.environ.get('CONFIG_PATH', 'config.json')
    with open(path) as fh:
        raw = fh.read()
    return Config(**json.loads(json_minify(raw)))
def get_json_data(self, input_file):
    """Read *input_file* and parse it as comment-tolerant JSON.

    :param input_file: path (or path-like object) to the JSON file
    :returns: the decoded Python object
    """
    with open(str(input_file), 'r') as f:
        # f.read() replaces the needless ''.join(f.readlines()) round-trip
        json_string = json_minify(f.read())
    return json.loads(json_string)
Returns: A Python object representing the data encoded in the file. Raises: Exception: If the file could not be read or its contents could not be parsed as JSON data. """ try: json_file = open(path, 'r') except IOError, msg: raise Exception("Failed to read the file at %s: %s" % (path, msg)) try: json_str = json_file.read() json_obj = json.loads(minify.json_minify(json_str), encoding) except ValueError, msg: raise Exception("Failed to parse JSON out of file %s: %s" % (path, msg)) finally: json_file.close() return json_obj class ApiManifest(object): """ Represents the list of API methods contained in the extension API JSON """ def __init__(self, manifest_paths): """ Read the supplied manifest file and parse its contents. Args: manifest_paths: Array of paths to API schemas.
import logging
from typing import Dict, List
from numpy import ndarray, sort, array, float, isnan
import json
from collections import deque
from pathlib import Path
from datetime import datetime
import time
from json_minify import json_minify
from .hx711 import HX711
from . import data

# Module-level config: the 'weight' section of ~/data/config.json
# (json_minify allows comments in the file).
# NOTE(review): `from numpy import float` is removed in modern NumPy --
# confirm the pinned numpy version still exposes it.
config: Dict = json.loads(
    json_minify(open(Path.home() / 'data' / 'config.json', 'r+').read()))['weight']

logger = logging.getLogger(__name__)


class sensor:
    # Weight sensor backed by an HX711 load-cell amplifier on GPIO pins 5/6.
    def __init__(self):
        # Configure and initialise HX711
        self.hx: HX711 = HX711(5, 6)
        self.hx.set_reading_format("LSB", "MSB")
        # calibration factor comes from config for now
        self.hx.set_reference_unit(
            config['reference_unit'])  # TODO: Make Calibration Script
        self.hx.reset()
        # timestamp of the last reading; 0 means never ran
        self.last_ran = 0
def load_json(json_file_name):
    """Load json file with C-style comments"""
    with open(json_file_name) as fh:
        commented_text = fh.read()
    return json.loads(json_minify(commented_text))