def __load_or_generate_config(config_file: Optional[str]) -> dict:
    # Set up the players using the config file
    if config_file:
        # A custom config file location was given:
        try:
            with open(config_file) as f:
                config_data = jsonplus.loads(f.read())
        except Exception:
            # Failed to load the custom config; nothing sensible to fall back to
            logger.error(f"config file '{config_file}' could not be loaded")
            raise
    else:
        # Default config file expected:
        config_dir = user_config_dir(APP_NAME)
        config_file = os.path.join(config_dir, DEFAULT_CONFIG_FILE)
        try:
            with open(config_file) as f:
                config_data = jsonplus.loads(f.read())
        except FileNotFoundError:
            # No default config yet: generate one with default values
            logger.warning("No default config file found, generating...")
            config_data = {
                "headless": False,
                "interactive": False,
                "start_paused": True,
                "wait_end": 5,
                "max_iterations": ITERATION_LIMIT,
                "tick_step": TICK_STEP
            }
            os.makedirs(config_dir, exist_ok=True)
            logger.warning(f"Writing default config into: {config_file}")
            with open(config_file, "w") as f:
                f.write(jsonplus.pretty(config_data))

    config_data.setdefault('start_paused', False)
    config_data.setdefault('wait_end', 10)
    config_data.setdefault('assets', ASSET_DIRECTORY)
    config_data.setdefault('interactive', False)
    config_data.setdefault('tick_step', TICK_STEP)
    config_data.setdefault('no_text', False)  # A workaround for a Pillow (Python Imaging Library) bug
    config_data.setdefault('single_step', False)
    config_data.setdefault('endless', False)
    config_data.setdefault('rows', Game.ROW_COUNT)
    config_data.setdefault('columns', Game.COLUMN_COUNT)
    config_data.setdefault('max_iterations', ITERATION_LIMIT)

    return config_data

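# A hedged usage sketch (not part of the original module): passing None makes the
# function fall back to the per-user default config file, generating it on first
# run, while any keys the file omits are filled by the setdefault() calls above.
config = __load_or_generate_config(None)
print(config["tick_step"], config["rows"], config["columns"])
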
def __init__(self, issue_no, chart_id, run_mode: RunMode):
    self.issue_no = issue_no
    self.chart_id = chart_id
    self.run_mode = run_mode

    with open(f"issues/{issue_no}/data/pl-{chart_id}.json") as f:
        self.data = json.loads(f.read())

    with open("charting/persisted_data_config.json") as f:
        self.config = json.loads(f.read())

    self.persist_html = self.config[run_mode]["html"]
    self.persist_json = self.config[run_mode]["json"]
    self.persist_path = os.path.realpath(
        self.replace_issue_no_placeholder_for_path(
            self.config[run_mode]["path"], self.issue_no))

    super().__init__()

def automatic_tests2():
    with open('tests_cfg.json', 'r') as read_file:
        cfg = json.loads(read_file.read())

    snr = cfg['snr']
    signal_len = cfg['signal_len']
    signal = generate_random_signal(signal_len)
    signal_power = cfg['signal_power']
    number_of_tests = cfg['number_of_tests']
    print(
        f"SNR={snr} Signal_len={signal_len} Signal_Power={signal_power} Number_of_tests={number_of_tests}"
    )

    channel = komm.AWGNChannel(snr=snr, signal_power=signal_power)

    for config in cfg['configs']:
        # Run the tests here
        print(
            f"{config['amplitudes']}; {config['phase_offsets']}; {config['orders']};",
            end=" ")
        modulation = komm.APSKModulation(
            orders=config['orders'],
            amplitudes=config['amplitudes'],
            phase_offsets=config['phase_offsets'])
        print(modulation.bits_per_symbol, end="; # ;")
        for i in range(cfg['number_of_tests']):
            print(single_test2(channel=channel,
                               modulation=modulation,
                               sygnal_wejsciowy=signal),
                  end="; ")
        print("")
    input("Waiting for ENTER...")

def _get_encoded(self, key: str) -> object:
    encoded_value = super().get(key)[0]
    if encoded_value is None:
        return None
    return jsonplus.loads(encoded_value.decode('utf-8'))

def automatic_tests():
    with open('test.json', 'r') as read_file:
        cfg = json.loads(read_file.read())

    snr = cfg['snr']
    signal_len = cfg['signal_len']
    signal = generate_random_signal(signal_len)
    signal_power = cfg['signal_power']
    number_of_tests = cfg['number_of_tests']
    print(
        f"SNR={snr} Signal_len={signal_len} Signal_Power={signal_power} Number_of_tests={number_of_tests}"
    )

    for config in cfg['configs']:
        # Run the tests here
        print(
            f"{config['amplitudes']}; {config['phase_offsets']}; {config['orders']};",
            end=" ")
        for i in range(cfg['number_of_tests']):
            results = single_test(amplitudes=config['amplitudes'],
                                  phase_offsets=config['phase_offsets'],
                                  orders=config['orders'],
                                  snr=snr,
                                  signal_power=signal_power,
                                  signal_len=signal_len,
                                  sygnal_wejsciowy=signal)
            if i == 0:
                print(results[1], end="; # ;")
            print(results[0], end="; ")
        print("")
    input("Waiting for ENTER...")

def load(self):
    path_file = '%s/data.json' % self.path
    if not path.exists(path_file):
        raise NotFound
    with open(path_file, 'r') as f:
        data = json.loads(f.read())
    self.data = data
    return self.data

def test_django_money(self):
    m = Money(313, 'USD')
    dm = DjangoMoney(313, 'USD')
    obj = jsonplus.loads(jsonplus.dumps(dm))
    self.assertEqual(obj, dm)
    self.assertTrue(hasattr(obj, 'is_localized'))
    self.assertTrue(hasattr(dm, 'is_localized'))
    self.assertFalse(hasattr(m, 'is_localized'))

def get_prefix(self, basekey: str) -> list:
    encoded_values = super().get_prefix(basekey)
    flat_dict = {}
    for encoded_value in encoded_values:
        decoded_value = jsonplus.loads(encoded_value[0].decode('utf-8'))
        flat_dict[os.path.relpath(encoded_value[1].key.decode('utf-8'),
                                  start=basekey)] = decoded_value
    return unflatten(flat_dict, separator='/')

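# A minimal sketch of the unflatten step (an illustration only, not the
# unflatten() implementation used above): '/'-separated relative keys such as
# 'player/name' are rebuilt into nested dicts by splitting on the separator.
def _unflatten_sketch(flat: dict) -> dict:
    nested = {}
    for compound_key, value in flat.items():
        node = nested
        *parents, leaf = compound_key.split('/')
        for part in parents:
            node = node.setdefault(part, {})
        node[leaf] = value
    return nested

# _unflatten_sketch({'player/name': 'a', 'player/score': 3})
# -> {'player': {'name': 'a', 'score': 3}}
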
def clean_params(self):
    value = self.cleaned_data["params"]
    try:
        value = json.loads(value)
    except ValueError:
        raise ValidationError("Is not valid JSON")
    else:
        return (
            [dict_items_to_str(i) if isinstance(i, dict) else i for i in value]
            if isinstance(value, (list, tuple))
            else value
        )

def get_disks(self):
    '''
    Get disks info data
    '''
    try:
        disk_partitions = psutil.disk_partitions()
        # print(str(disk_partitions))
        # print(json.dumps(disk_partitions, default=lambda obj: obj.__dict__))
        print(json.dumps(disk_partitions))
        temp_str = json.dumps(disk_partitions)
        temp_obj = json.loads(temp_str)
        print(str(temp_obj))
        self.__save_data_to_json(temp_str)
    except Exception as e:
        self.__logger.exception(e)

def list_accounts_from_parent():
    # Creates a new file with account info pulled from AWS.
    with open('accounts_from_parent.txt', 'w'):
        pass  # makes the file empty for each run

    account = boto3.session.Session(
        profile_name='default',
        region_name='eu-west-1')  # Session = Default (Organization access)
    client = account.client('organizations')

    with open('parentids.txt', 'r') as parents:  # OU file to get accounts from
        for id in parents:
            id = id.strip("\n")
            # Weak part - hardcoded: each request is limited to 20 accounts,
            # so the five chained requests support up to 100 accounts overall.
            response = client.list_accounts_for_parent(ParentId=id)
            response1 = client.list_accounts_for_parent(
                ParentId=id, NextToken=response['NextToken'])
            response2 = client.list_accounts_for_parent(
                ParentId=id, NextToken=response1['NextToken'])
            response3 = client.list_accounts_for_parent(
                ParentId=id, NextToken=response2['NextToken'])
            response4 = client.list_accounts_for_parent(
                ParentId=id, NextToken=response3['NextToken'])
            for res in [response, response1, response2, response3, response4]:
                accounts = res['Accounts']
                dict_len = len(accounts)
                y = jplus.dumps(accounts, indent=4)
                j = jplus.loads(y)
                counter = 0
                while counter < dict_len:
                    name = j[counter]['Name']
                    email = j[counter]['Email']
                    id = j[counter]['Id']
                    # Appends id,name,email to the (initially empty) file
                    with open('accounts_from_parent.txt', mode='a') as file:
                        writer = csv.writer(file, delimiter=',')
                        writer.writerow([id, name, email])
                    counter += 1

def encode_and_decode(self, val, **kwargs):
    return json.loads(simplejson.dumps(val, cls=json.JSONEncoder, **kwargs),
                      cls=json.JSONDecoder)

def dump_and_load(self, val, **kwargs):
    return jsonplus.loads(
        jsonplus.json.dumps(val, cls=jsonplus.JSONEncoder, **kwargs))

hash_index = dict()
print("Now starting processing. This may take some time.")
for file in os.listdir(target_dir):
    # Skip files that are not .db.json reports
    if not file.endswith('.db.json'):
        continue
    # Grab the records, add to master
    with open(file) as infile:
        logging.info('Now processing file ' + file)
        incontent = infile.read()
        try:
            data = json.loads(incontent)
        except ValueError:
            logging.warning('Unable to parse file %s as JSON' % (file))
            continue
    # Make a list of hashes for keeping track of configurations later
    hashes = list()
    hash_index[file] = list()
    # Record configuration
    logging.debug(str(len(data)) + ' reports found.')
    for property in data:
        # Add a hash to the property so we can compare them later.
        # Only hash the relevant subset of the property's location datum.
        property['location'] = {
            k: v
            for k, v in property['location'].items() if k not in {'stmt_uid'}
        }

print(string3)

with open("d:/hello.txt", 'a') as f:
    flag = f.writable()
    string3 = f.write("\n刘颖慧,你好!!")
    print(string3)
    print(flag)

# os.mkdir('d:/liuyinghui')
# os.rmdir('d:/liuyinghui')
print(os.name, os.environ['OS'])

jsonDict = dict(name='liuyinghui', age=35, datetime=datetime.datetime.today())
jsonstr = json.dumps(jsonDict)
print(jsonstr)
print(jsonDict)
jsonDict2 = json.loads(jsonstr)
print(jsonDict2)

print(json.dumps(std.__dict__))
print(type(std))


def jsonHanle(d):
    return St(d['name'], d['score'])


std1 = json.loads(json.dumps(std.__dict__), object_hook=jsonHanle)
print(std1)
print(type(std1))

'''
print('Process (%s) start ...' % os.getpid())
pid = os.fork()

def loads(*pa, **kw):
    return jsonplus.loads(*pa, **kw)

if args.format == 'cbmc':
    data = xmltodict.parse(f.read())
    # Skip files with no bugs
    if 'property' not in data['cprover']:
        logging.warning('File %s has no bugs' % (f))
        continue
    # Record the number of bugs found
    logging.debug('%s reports found' % len(data['cprover']['property']))
    property_list = data['cprover']['property']
elif args.format == 'infer':
    try:
        content = f.read()
        property_list = json.loads(content)
    except ValueError:
        continue
elif args.format == 'clang' or args.format == 'clang7':
    clang_data = plistlib.readPlist(f)
    property_list = clang_data['diagnostics']
    for property in property_list:
        property['source_file'] = clang_data['files'][0]
        # Remove path info, because it's very long and makes JSON reports hard to read
        del property['path']
elif args.format == 'ikos':
    try:
        property_list = json.loads(f.read())
    except ValueError:
        logging.warning('File %s has no entries' % (entry))
        continue

async def receive(reader):
    size = struct.unpack('>I', await reader.readexactly(4))[0]
    payload = await reader.readexactly(size)
    data = jsonplus.loads(payload.decode())
    return data

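# A hedged sketch of the matching sender (assumed, not shown in the original):
# the framing read by receive() above is a 4-byte big-endian length prefix
# followed by a jsonplus-encoded payload, so a writer side could look like this.
async def send(writer, data):
    payload = jsonplus.dumps(data).encode()
    writer.write(struct.pack('>I', len(payload)) + payload)
    await writer.drain()
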
def test_plus_loads(self):
    self.assertEqual(json.loads(self.plus_dumps), self.plus)

def test_basic_loads_dumps(self):
    basic = json.loads(self.basic_dumps)
    self.assertEqual(json.dumps(basic, sort_keys=True), self.basic_dumps)

def to_python(self, value):
    if not isinstance(value, six.string_types):
        return value
    return jsonplus.loads(value)

def from_db_value(self, value, expression, connection, context):
    if value is None:
        return value
    return jsonplus.loads(value)

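# A minimal sketch of how from_db_value/to_python usually pair with
# get_prep_value on a custom Django field. Assumptions: the field name
# JSONPlusField and the TextField base are hypothetical, and the `context`
# argument in the method above reflects the pre-Django-2.0 signature, which
# newer Django versions drop.
from django.db import models
import jsonplus


class JSONPlusField(models.TextField):
    def from_db_value(self, value, expression, connection):
        # Database -> Python: decode the stored JSON text
        return value if value is None else jsonplus.loads(value)

    def to_python(self, value):
        # Deserialization and form input -> Python: plain strings still need decoding
        return jsonplus.loads(value) if isinstance(value, str) else value

    def get_prep_value(self, value):
        # Python -> database: encode back to JSON text
        return value if value is None else jsonplus.dumps(value)
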
def test_basic_loads(self):
    self.assertEqual(json.loads(self.basic_dumps), self.basic)

def dump_and_load(self, val, **kwargs):
    return json.loads(json.dumps(val, **kwargs))
