def generateDefaultOpts(): print("generated opts") # BuildHrsRqd : 54 os.chdir(os.path.dirname(__file__)) with open("opts.json", "w") as optsFile: rapidjson.dump(defaultOptions, optsFile, indent=2)
def file_dump_json(filename: Path, data: Any, is_zip: bool = False, log: bool = True) -> None:
    """
    Dump JSON data into a file
    :param filename: file to create
    :param data: JSON Data to save
    :param is_zip: if True, write a gzip-compressed file (a '.gz' suffix is enforced)
    :param log: if True, log the dump at info level
    :return: None
    """
    if is_zip:
        if filename.suffix != '.gz':
            filename = filename.with_suffix('.gz')
        if log:
            logger.info(f'dumping json to "{filename}"')
        with gzip.open(filename, 'w') as fpz:
            rapidjson.dump(data, fpz, default=str, number_mode=rapidjson.NM_NATIVE)
    else:
        if log:
            logger.info(f'dumping json to "{filename}"')
        with open(filename, 'w') as fp:
            rapidjson.dump(data, fp, default=str, number_mode=rapidjson.NM_NATIVE)
    logger.debug(f'done json to "{filename}"')
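# A minimal usage sketch for file_dump_json above, assuming the module-level
# logger and the imports it relies on (pathlib.Path, gzip, rapidjson) are in
# place; the file names and payload are hypothetical.
from pathlib import Path

file_dump_json(Path("results.json"), {"strategy": "Sample", "profit": 0.05})
# With is_zip=True the ".gz" suffix is enforced and the data goes through gzip:
file_dump_json(Path("results.json"), {"strategy": "Sample"}, is_zip=True)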
def test_file_object():
    for stream in tempfile.TemporaryFile(), tempfile.TemporaryFile('w+', encoding='utf-8'):
        with stream:
            datum = ['1234567890', 1234, 3.14, '~𓆙~']
            rj.dump(datum, stream)
            stream.seek(0)
            assert rj.load(stream) == datum
def write_version_migrator_status(migrator, mctx):
    """write the status of the version migrator"""
    out = {
        "queued": [],
        "errored": [],
        "errors": {},
    }

    mmctx = MigratorContext(session=mctx, migrator=migrator)
    migrator.bind_to_ctx(mmctx)

    for node in mmctx.effective_graph.nodes:
        attrs = mmctx.effective_graph.nodes[node]["payload"]
        new_version = attrs.get("new_version", None)
        if new_version is None:
            continue

        attempts = attrs.get("new_version_attempts", {}).get(new_version, 0)
        if attempts == 0:
            out["queued"].append(node)
        else:
            out["errored"].append(node)
            out["errors"][node] = attrs.get("new_version_errors", {}).get(
                new_version,
                "No error information available for version '%s'." % new_version,
            )

    with open("./status/version_status.json", "w") as f:
        json.dump(out, f, sort_keys=True, indent=2)
def _store_publisher_info(uri_pub_map, no_uri_pubs, image_dir):
    """Stores a given pair of (uri_pub_map, no_uri_pubs) objects to a
    configuration cache file beneath image_dir."""
    cache_path = os.path.join(
        image_dir, pkg.client.global_settings.sysrepo_pub_cache_path)
    cache_dir = os.path.dirname(cache_path)
    try:
        if not os.path.exists(cache_dir):
            os.makedirs(cache_dir, 0o700)
        try:
            # if the cache exists, it must be a file
            st_cache = os.lstat(cache_path)
            if not stat.S_ISREG(st_cache.st_mode):
                raise IOError("not a regular file")
        except IOError as e:
            # IOError has been merged into OSError in Python 3.4,
            # so we need a special case here.
            if str(e) == "not a regular file":
                raise
        except OSError:
            pass
        with open(cache_path, "w") as cache_file:
            json.dump((uri_pub_map, no_uri_pubs), cache_file, indent=True)
        os.chmod(cache_path, 0o600)
    except IOError as e:
        error(
            _("Unable to store config to {cache_path}: {e}").format(
                **locals()))
def save(self, path_single_file, object):
    logging.debug('The path for save is %s', path_single_file)
    with open(path_single_file, "a") as file:
        dump(object, file)
def populate_config_values(config_name, screener_whitelist):
    print(config_name)
    test_config_path = get_full_path(
        ['freqtrade', 'user_data', 'configs', config_name])
    config_values = get_config_values(['configs', config_name])
    bots_config = get_config_values(['bots_config.json'])
    bot_data = get_bot_data(bots_config)

    config_values['dry_run'] = bool(
        os.environ.get('DRY_RUN', bot_data['dry_run']))
    config_values['initial_state'] = bot_data['initial_state']
    config_values['exchange']['key'] = bot_data['exchange_key']
    config_values['exchange']['secret'] = bot_data['exchange_secret']
    config_values['telegram']['chat_id'] = bot_data['telegram_chat_id']
    config_values['telegram']['token'] = os.environ.get(
        'TELEGRAM_TOKEN', bot_data['telegram_token'])
    config_values['api_server']['username'] = bot_data['api_server_username']
    config_values['api_server']['password'] = bot_data['api_server_password']

    if screener_whitelist:
        config_values['exchange']['pair_whitelist'] = screener_whitelist

    # create the configs folder if it doesn't exist
    configs_folder = os.path.dirname(test_config_path)
    print(configs_folder)
    if not os.path.exists(configs_folder):
        os.mkdir(configs_folder)

    # write the populated config file to disk
    with open(test_config_path, 'w') as file:
        rapidjson.dump(config_values, file, indent=2)
def save_json(json_data, path_to_json, verbose=True):
    """
    :param json_data: any object that could be dumped by the 'json' package
    :param path_to_json: [str] local file path
    :param verbose: [bool] (default: True)
    :return: whether the data has been successfully saved
    """
    json_filename = os.path.basename(path_to_json)
    json_dir = os.path.basename(os.path.dirname(path_to_json))
    json_dir_parent = os.path.basename(
        os.path.dirname(os.path.dirname(path_to_json)))

    if verbose:
        print("{} \"{}\" ... ".format(
            "Updating" if os.path.isfile(path_to_json) else "Saving",
            " - ".join([json_dir_parent, json_dir, json_filename])), end="")

    try:
        os.makedirs(os.path.dirname(os.path.abspath(path_to_json)), exist_ok=True)
        with open(path_to_json, 'w') as json_out:
            rapidjson.dump(json_data, json_out)
        if verbose:
            print("Successfully.")
    except Exception as e:
        print("Failed. {}.".format(e))
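# A brief usage sketch for save_json above; rapidjson is assumed importable
# and the path is hypothetical (save_json creates missing parent directories).
save_json({"name": "London", "population": 8982000}, "./data/cities/london.json")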
def generate_sf2(filename: str, time_series: List, is_missing: bool, num_missing: int) -> None:
    # This function generates the test and train json files which will be
    # converted to csv format
    if not os.path.exists(os.path.dirname(filename)):
        os.makedirs(os.path.dirname(filename))
    with open(filename, "w") as json_file:
        for ts in time_series:
            if is_missing:
                target = []  # type: List
                # For Forecast don't output feat_static_cat and feat_static_real
                for j, val in enumerate(ts[FieldName.TARGET]):
                    # only add ones that are not missing
                    if j != 0 and j % num_missing == 0:
                        target.append(None)
                    else:
                        target.append(val)
                ts[FieldName.TARGET] = target
            ts.pop(FieldName.FEAT_STATIC_CAT, None)
            ts.pop(FieldName.FEAT_STATIC_REAL, None)
            # Chop features in training set
            if FieldName.FEAT_DYNAMIC_REAL in ts.keys() and "train" in filename:
                # TODO: Fix for missing values
                for i, feat_dynamic_real in enumerate(ts[FieldName.FEAT_DYNAMIC_REAL]):
                    ts[FieldName.FEAT_DYNAMIC_REAL][i] = \
                        feat_dynamic_real[:len(ts[FieldName.TARGET])]
            json.dump(ts, json_file)
            json_file.write("\n")
def _save_result(self, epoch: Dict) -> None:
    """
    Save hyperopt results to file
    Store one line per epoch.
    While not a valid json object - this allows appending easily.
    :param epoch: result dictionary for this epoch.
    """
    epoch[FTHYPT_FILEVERSION] = 2
    with self.results_file.open('a') as f:
        rapidjson.dump(epoch, f, default=hyperopt_serializer,
                       number_mode=rapidjson.NM_NATIVE | rapidjson.NM_NAN)
        f.write("\n")

    self.num_epochs_saved += 1
    logger.debug(
        f"{self.num_epochs_saved} {plural(self.num_epochs_saved, 'epoch')} "
        f"saved to '{self.results_file}'.")
    # Store hyperopt filename
    latest_filename = Path.joinpath(self.results_file.parent, LAST_BT_RESULT_FN)
    file_dump_json(latest_filename,
                   {'latest_hyperopt': str(self.results_file.name)}, log=False)
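# Because _save_result appends one JSON object per line (newline-delimited
# JSON rather than a single valid JSON document), the file can be read back
# line by line. A minimal sketch, assuming rapidjson; load_epochs is a
# hypothetical helper, not part of the original code.
def load_epochs(results_file):
    with open(results_file) as f:
        return [rapidjson.loads(line) for line in f if line.strip()]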
def zip_file(self, request, *args, **kwargs):
    socket = self.get_object()
    real_file_list = {
        f_key: request.build_absolute_uri(Socket.get_storage().url(f_val['file']))
        for f_key, f_val in socket.file_list.items()
        if not f_key.startswith('<')
    }

    # File list with full urls can get quite big so we pass it through tempfile
    with tempfile.NamedTemporaryFile(delete=False, suffix='.file_list', mode="w") as list_file:
        json.dump(real_file_list, list_file)

    try:
        propagate_uwsgi_params(get_tracing_attrs())

        uwsgi.add_var('OFFLOAD_HANDLER', 'apps.sockets.handlers.SocketZipHandler')
        uwsgi.add_var('LIST_FILE', list_file.name)
        uwsgi.add_var(
            'FILE_NAME',
            get_valid_filename('{}_{}'.format(socket.name, socket.version)))
    except ValueError:
        os.unlink(list_file.name)
        raise UwsgiValueError()
    return HttpResponse()
def remove_images_from_reconstruction(sfm_data, images_to_remove):
    with open(sfm_data, "r") as infile:
        data = json.load(infile)

    # Get all valid views (not in images to remove) and remove poses for
    # images to remove.
    invalid_pose_ids = [
        view["value"]["ptr_wrapper"]["data"]["id_pose"]
        for view in data["views"]
        if view["value"]["ptr_wrapper"]["data"]["filename"] in images_to_remove
    ]
    data["extrinsics"] = [
        pose for pose in data["extrinsics"] if pose["key"] not in invalid_pose_ids
    ]
    valid_view_ids = [
        view["value"]["ptr_wrapper"]["data"]["id_view"] for view in data["views"]
    ]

    # Remove observations referencing removed views. Building a new list
    # avoids mutating the list while iterating over it, which skips elements.
    for structure in data["structure"]:
        structure["value"]["observations"] = [
            observation
            for observation in structure["value"]["observations"]
            if observation["key"] in valid_view_ids
        ]

    # Drop points observed by fewer than 2 views.
    data["structure"][:] = [
        structure
        for structure in data["structure"]
        if len(structure["value"]["observations"]) >= 2
    ]

    with open(sfm_data, "w+") as outfile:
        json.dump(data, outfile, indent=4)
def getRateData():
    rateDICT = None
    dateSTR = datetime.utcnow().strftime("%Y%m%d")
    filePath = os.path.join(BASE_PATH, "rate_{}.json".format(dateSTR))

    # If today's exchange-rate data already exists locally, read it directly
    if os.path.exists(filePath):
        with open(filePath, encoding="utf-8") as f:
            rateDICT = json.load(f)
    # Otherwise fetch the latest rates from the real-time exchange-rate API
    else:
        # Fetch the latest exchange-rate data
        result = get("https://tw.rter.info/capi.php")
        if result.status_code == codes.ok:
            rateDICT = result.json()
            # Cache the data so it can be read directly next time
            with open(filePath, "w", encoding="utf-8") as f:
                json.dump(rateDICT, f, ensure_ascii=False)
        else:
            print("[ERROR] getRateData() status_code => {}".format(
                result.status_code))
    return rateDICT
def dump(obj, stream, **kw):
    if "json" in DebugValues:
        _start()
        ret = _json.dump(obj, stream, **kw)
        _end('dump', _file(stream), ret)
        return ret
    else:
        return _json.dump(obj, stream, **kw)
def save(self, outputfile):
    """Save the dataset to a file in JSON format.

    Arguments:
        outputfile {str} -- save path
    """
    with open(outputfile, "w") as f:
        json.dump(self.data, f)
def thread(players, queue_subproc, mode):
    """Handles running of the match loop"""
    uuid_ = unique_uuid("matches")
    base_agent = pommerman.agents.BaseAgent
    env = pommerman.make(
        mode, [base_agent(), base_agent(), base_agent(), base_agent()])
    net, net_end = multiprocessing.Pipe()
    queue_subproc.put([net_end, players, uuid_])
    obs = env.reset()
    record = {
        "board": numpy.array(env._board, copy=True).tolist(),
        "actions": [],
        "mode": str(mode)
    }
    done = False
    while not done:
        obs_res = resolve_classes(obs.copy())
        turn_id = str(uuid.uuid4())[:5]
        try:
            obs_bytes = []
            for key, value in enumerate(obs_res):
                if 10 + key in obs[0]["alive"]:
                    obs_bytes.append(
                        gzip.compress(
                            bytes(
                                rapidjson.dumps({
                                    "o": value,  # o = obs
                                    "i": turn_id,  # i = Turn ID
                                    "d": False  # d = Dead
                                }), "utf8")))
                else:
                    obs_bytes.append(
                        gzip.compress(
                            bytes(
                                rapidjson.dumps({
                                    "d": True  # d = Dead
                                }), "utf8")))
            net.send([
                constants.SubprocessCommands.match_next.value, turn_id,
                obs_bytes, len(obs[0]["alive"])
            ])
            act = net.recv()
        except Exception:
            act = [0, 0, 0, 0]
        record["actions"].append(numpy.array(act, copy=True).tolist())
        obs, rew, done = env.step(act)[:3]
    record["reward"] = rew
    env.close()
    with open("./matches/" + uuid_ + ".json", "w") as file:
        rapidjson.dump(record, file)
    net.send([constants.SubprocessCommands.match_end.value, rew])
    net.recv()
    exit(0)
def saverapid(self):
    stime = time.time()
    if len(self.sequel) > 0:
        with open(self.root, 'a') as sf:
            for snst in self.sequel:
                rapidjson.dump(snst, sf)
                sf.write('\n')
        print('rapidjson :', time.time() - stime)
        self.sequel = []
def test_dump_with_unicode(self):
    jsonobj = {"test": [1, u"こんにちは"]}
    fp = NamedTemporaryFile(mode='w', delete=False)
    rapidjson.dump(jsonobj, fp)
    fp.close()
    check_fp = open(fp.name)
    ret = json.load(check_fp)
    self.assertEqual(jsonobj[u"test"][0], ret[u"test"][0])
    check_fp.close()
    os.remove(fp.name)

def test_dump_with_utf8(self):
    jsonobj = {"test": [1, "こんにちは"]}
    fp = NamedTemporaryFile(mode='w', delete=False)
    rapidjson.dump(jsonobj, fp)
    fp.close()
    check_fp = open(fp.name)
    ret = json.load(check_fp)
    self.assertEqual(jsonobj[u"test"][0], ret[u"test"][0])
    check_fp.close()
    os.remove(fp.name)

def test_dump(self):
    jsonobj = {"test": [1, "hello"]}
    fp = NamedTemporaryFile(mode='w', delete=False)
    rapidjson.dump(jsonobj, fp)
    fp.close()
    check_fp = open(fp.name)
    ret = json.load(check_fp)
    self.assertEqual(jsonobj, ret)
    check_fp.close()
    os.remove(fp.name)
def generateDefaultOpts():
    print("generated opts")
    os.chdir(os.path.dirname(__file__))
    optsDir = os.path.split(jsonFile)[0]
    if not os.path.exists(optsDir):
        os.mkdir(optsDir)
    with open(jsonFile, "w") as optsFile:
        rapidjson.dump(defaultOptions, optsFile, indent=2)
def connection_logging(self):
    """
    connection_logging
    synapse-based [[0's pre, 0's post], [1's pre, 1's post], ... ]
    """
    connections = []
    for s in self.s_list:
        connections.append(s.get_connection())
    with open(os.path.join(LOG_path, LOG_connection_name), 'w') as logfile:
        rapidjson.dump(connections, logfile)
def print_schedule_graph(self, fpath: str):
    obj_list = {"nodes": [], "edges": []}
    for node in self._dfg_nodes:
        obj_list["nodes"].append(node.to_json())
    for edge in self._dfg_edges:
        obj_list["edges"].append(edge.to_json())
    with open(fpath, 'w') as outfile:
        rapidjson.dump(obj_list, outfile, indent=2)
def _dump(database: Mapping[str, Any], databasePath: str, schemaPath: str) -> None:
    """
    Saves a database to disk.

    Args:
        database: The database to be saved.
        databasePath: The location where the database should be saved.
        schemaPath: The location of the schema.
    """
    # Validate before opening the file for writing, so a validation failure
    # does not leave behind a truncated database file.
    _validate(database, schemaPath)
    with open(databasePath, "w", encoding="utf-8") as fileObj:
        rapidjson.dump(database, fileObj, sort_keys=True, indent=2, chunk_size=2 ** 16)
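# A minimal usage sketch for _dump above; the paths and database shape are
# hypothetical, and _validate is assumed to raise on a schema mismatch.
_dump({"entries": {}}, "database.json", "schema.json")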
def closeEvent(self, event: QCloseEvent):
    self.writeCurrentImageData()
    # TODO: write to temp file then rename once complete
    with open("test.json", "w") as pfile:
        json.dump(self.project, pfile, ensure_ascii=False, indent=4)
def to_geojson_file(self, filename, crs=None):
    """
    Export this query to a GeoJSON FeatureCollection file.

    Parameters
    ----------
    filename : str
        File to save the resulting geojson as.
    crs : int or str
        Optionally give an integer srid, or valid proj4 string to
        transform output to.
    """
    with open(filename, "w") as fout:
        json.dump(self.to_geojson(crs=crs), fout)
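# A short usage sketch, assuming `query` is an object exposing the
# to_geojson_file method above; the file name and srid are hypothetical.
query.to_geojson_file("regions.geojson", crs=4326)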
def save(path, result):
    try:
        validate = json.Validator(json.dumps(REPORT_SCHEMA))
        validate(json.dumps(result))
    except ValueError as error:
        print(error.args)
        exit(-1)
    os.makedirs(os.path.dirname(path), exist_ok=True)
    print("Writing to {}".format(path))
    with open(path, 'w') as f:
        json.dump(result, f, indent=4, sort_keys=True)
def save(self, path):
    """
    Save wallet to the given path.

    The wallet will be saved with a temporary name, and that file is
    renamed to (potentially) replace the existing file.

    :param str path: Path to the wallet file
    """
    logger.info("Saving wallet to %s", path)
    algorithm = self.encryption.algorithm
    result = self.to_dict()

    if self.encryption.wallet_encrypted:
        # If we are encrypting the entire wallet, store everything in
        # encrypted format except the encryption settings
        wallet_key = self.wallet_key
        result_bytes = bytes(rapidjson.dumps(result), "utf-8")
        encrypted_wallet_secret = Secret(
            val=result_bytes, secret_key=wallet_key, algorithm=algorithm).json()
        result = self.encryption.to_dict()

        # We don't need to store information about whether the secrets
        # are encrypted as well
        if self.encryption.secrets_encrypted:
            del result["secrets_encrypted"]
            del result["secret_checksum"]

        result["wallet_data"] = encrypted_wallet_secret

    path, file_name = os.path.split(path)
    tmp_name = "{}.tmp{}".format(file_name, secrets.token_hex(4))

    # To avoid data loss, save the wallet to a temporary file and
    # then rename that file to replace the original wallet
    tmp_path = os.path.join(path, tmp_name)
    final_path = os.path.join(path, file_name)

    logger.debug("Saving wallet to temp file %s", tmp_path)
    with open(tmp_path, "w") as f:
        rapidjson.dump(result, f, indent=2)

    logger.debug("Replacing wallet with new copy")
    os.rename(tmp_path, final_path)

    logger.info("Finished saving wallet to %s", path)
    return True
def _save(self, fobj, reset_volatiles=False):
    """Save a json encoded representation of this plan description
    object into the specified file object."""
    state = PlanDescription.getstate(self, reset_volatiles=reset_volatiles)
    try:
        fobj.truncate()
        json.dump(state, fobj)
        fobj.flush()
    except OSError as e:
        # Access to protected member; pylint: disable=W0212
        raise apx._convert_error(e)
    del state
def dump(self, debug=False):
    if debug:
        self.debug_dump()
        return
    with open(self._file_name, "w") as file:
        serialized = dict(**self)
        for key, value in serialized.items():
            serialized[key] = (self._serialize_map[key](value)
                               if key in self._serialize_map else value)
        rapidjson.dump(serialized, file, indent=2)
    return serialized
def encode_json(obj, fd=None, indent=None, sort_keys=True, **kwargs):
    r"""Encode a Python object in JSON format.

    Args:
        obj (object): Python object to encode.
        fd (file, optional): File descriptor for file that encoded object
            should be written to. Defaults to None and string is returned.
        indent (int, str, optional): Indentation for new lines in encoded
            string. Defaults to None.
        sort_keys (bool, optional): If True, the keys will be output in
            sorted order. Defaults to True.
        **kwargs: Additional keyword arguments are passed to json.dumps.

    Returns:
        str, bytes: Encoded object.
    """
    if (indent is None) and (fd is not None):
        indent = '\t'
    if backwards.PY2 or _use_rapidjson:  # pragma: Python 2
        # Character indents not allowed in Python 2 json
        indent = indent_char2int(indent)
    kwargs['indent'] = indent
    kwargs['sort_keys'] = sort_keys
    if fd is None:
        return backwards.as_bytes(json.dumps(obj, **kwargs))
    else:
        return json.dump(obj, fd, **kwargs)
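# A short usage sketch for encode_json above, assuming the module's helpers
# (backwards, indent_char2int, _use_rapidjson) are available. With fd=None the
# encoded bytes are returned; with a file object the JSON is written to it
# (tab-indented by default in that case).
raw = encode_json({"a": 1, "b": 2})
with open("out.json", "w") as fd:
    encode_json({"a": 1, "b": 2}, fd=fd)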
def test_dump_with_io_stringio(self):
    jsonobj = {"test": [1, "hello"]}
    stream = StringIO()
    rapidjson.dump(jsonobj, stream)
    stream.seek(0)
    self.assertEqual("{\"test\":[1,\"hello\"]}", stream.read())