def save_temp_config(self, camera_serial_number, width, height):
    """Write a temporary calibration TOML file for this camera.

    Parameters
    ----------
    camera_serial_number : str or int
        Serial number used in the output filename and recorded in the file.
    width, height : int
        Frame dimensions to record.

    Returns
    -------
    str
        Human-readable status message.
    """
    save_path = os.path.join(
        self.root_config_path,
        'config_%s_%s_temp.toml' % (self.type, camera_serial_number))
    # Fields written for every calibration type (was duplicated in both
    # branches of the original).
    stuff = {
        'camera_serial_number': camera_serial_number,
        'width': width,
        'height': height,
        'type': self.type,
        'date': time.strftime("%Y-%m-%d-%H:%M:%S", time.localtime())
    }
    if self.type in ('intrinsic', 'alignment'):
        # These calibration types additionally require detected board
        # corners; bail out if none were collected.
        if len(self.allCorners) == 0:
            return "Didn't detect any markers or corners"
        stuff['corners'] = self.allCorners
        stuff['ids'] = self.allIds
    with open(save_path, 'w') as f:
        toml.dump(stuff, f, encoder=toml.TomlNumpyEncoder())
    return "temp calibration file saved!"
def save(self, filename, metadata=None):
    """
    Saves the model as a set of parameters into a TOML file

    Parameters
    ----------
    filename : str or path-like
        The TOML filename to save to.
    metadata : dict, optional
        If provided, will save the provided dictionary under a 'metadata' key.
        This will not be read in when loading models but provides a way of
        providing information in the actual TOML files. Default is None.
    """
    output = {"parameters": self.params.as_dict(), "frozen": self.frozen}
    # Assemble the metadata table; optional keys are added only when set.
    meta = {"name": self.name, "data": self.data_name}
    if self.emulator.name is not None:
        meta["emulator"] = self.emulator.name
    if metadata is not None:
        meta.update(metadata)
    output["metadata"] = meta
    with open(filename, "w") as handler:
        encoder = toml.TomlNumpyEncoder(output.__class__)
        toml.dump(output, handler, encoder=encoder)
    # Fix: the log message was an f-string with no placeholder and never
    # reported where the state was saved.
    self.log.info(f"Saved current state at {filename}")
def header_txt(request):
    """Return every session model object serialized to TOML, bundled in a zip.

    Redirects to "/" when the session holds no objects (e.g. the user
    navigated here directly without submitting the input form).
    """
    # Import all the input form data so it can be written to file
    try:
        objects = request.session["objects"]
    except KeyError:
        return HttpResponseRedirect("/")

    # Open up file-like objects for response
    response = HttpResponse(content_type="application/zip")
    response["Content-Disposition"] = "attachment; filename=THM-parameters.zip"
    buff = io.BytesIO()
    archive = zipfile.ZipFile(buff, "w", zipfile.ZIP_DEFLATED)
    # Fix: the enumerate() index was never used, and each entry was routed
    # through a throwaway BytesIO just to call getvalue() on it.
    for label, o in objects.items():
        data = toml.dumps(framework_to_dict(o),
                          encoder=toml.TomlNumpyEncoder()).encode()
        archive.writestr(f"{label}.toml", data)
    archive.close()
    buff.flush()
    ret_zip = buff.getvalue()
    buff.close()
    response.write(ret_zip)
    return response
def save_processed_config(self, temp_path):
    """Turn a temporary calibration file into a processed config TOML.

    Reads the temp file at ``temp_path`` (silently does nothing if it is
    missing) and, depending on ``self.type``, writes
    ``config_<type>_<serial>.toml`` under ``self.root_config_path``.
    """
    if os.path.exists(temp_path):
        with open(temp_path, 'r') as f:
            stuff = toml.load(f)
        save_path = os.path.join(
            self.root_config_path,
            'config_%s_%s.toml' % (self.type, stuff['camera_serial_number']))
        if self.type == "intrinsic":
            if str(stuff['camera_serial_number']) == TOP_CAM:
                # regular intrinsic calibration for top camera
                param = quick_calibrate(stuff['corners'], stuff['ids'],
                                        self.board, stuff['width'],
                                        stuff['height'])
            else:
                # fisheye calibration for side cameras
                param = quick_calibrate_fisheye(stuff['corners'],
                                                stuff['width'],
                                                stuff['height'])
            # add camera serial number and timestamp carried over from temp file
            param['camera_serial_number'] = stuff['camera_serial_number']
            param['date'] = stuff['date']
            with open(save_path, 'w') as f:
                toml.dump(param, f)
            #with open(archive_path,'w') as f:
            #    toml.dump(param,f)
        elif self.type == 'alignment':
            # get the top camera's intrinsic file to undistort the corners
            top_intrinsic = 'config_intrinsic_%s.toml' % TOP_CAM
            intrinsic_path = os.path.join(self.root_config_path, top_intrinsic)
            with open(intrinsic_path, 'r') as f:
                intrinsic = toml.load(f)
            camera_mat = np.array(intrinsic['camera_mat'])
            dist = np.array(intrinsic['dist_coeff'])
            # NOTE(review): `markers` is computed but never used — the dict
            # below deliberately stores the *distorted* corners instead (see
            # the TODO). Confirm whether undistortion should be applied.
            markers = undistort_markers(stuff['corners'], camera_mat, dist)
            param = {
                'undistorted_corners': stuff['corners'],  # TODO: changed to distorted
                'ids': np.array(stuff['ids']),
                'camera_serial_number': stuff['camera_serial_number'],
                'date': stuff['date']
            }
            with open(save_path, 'w') as f:
                toml.dump(param, f, encoder=toml.TomlNumpyEncoder())
            #with open(archive_path,'w') as f:
            #    toml.dump(param,f,encoder=toml.TomlNumpyEncoder())
        elif self.type == 'extrinsic':
            # separate process in Processing Group
            pass
def test_numpy_ints():
    """TOML round-trip via TomlNumpyEncoder is stable for numpy int arrays."""
    import numpy as np
    encoder = toml.TomlNumpyEncoder()
    # Same check for each integer width (the body was triplicated inline).
    for dtype in (np.int64, np.int32, np.int16):
        d = {'a': np.array([1, 3], dtype=dtype)}
        o = toml.loads(toml.dumps(d, encoder=encoder))
        assert o == toml.loads(toml.dumps(o, encoder=encoder))
def test_numpy_ints():
    """TOML round-trip via TomlNumpyEncoder is stable for numpy int arrays."""
    np = pytest.importorskip('numpy')
    encoder = toml.TomlNumpyEncoder()
    # Same check for each integer width (the body was triplicated inline).
    for dtype in (np.int64, np.int32, np.int16):
        d = {'a': np.array([1, 3], dtype=dtype)}
        o = toml.loads(toml.dumps(d, encoder=encoder))
        assert o == toml.loads(toml.dumps(o, encoder=encoder))
def export_options(
        self, config_path: Optional[Union[str, os.PathLike]] = None) -> None:
    """Export the full configuration — the options of the robot (including
    controller) and of the engine — to a TOML file.

    .. note::
        The exported configuration can be restored later with the
        `import_options` method.

    :param config_path: Destination file. When omitted, a path is derived
        from the robot's URDF file by replacing its suffix with
        '_options.toml'.
    """
    if config_path is None:
        # Derive the default destination next to the URDF file, preferring
        # the original (pre-patched) URDF path when available.
        robot = self.robot
        if isinstance(robot, BaseJiminyRobot):
            urdf_path = robot.urdf_path_orig
        else:
            urdf_path = robot.urdf_path
        stem = pathlib.Path(urdf_path).with_suffix('')
        config_path = str(stem) + '_options.toml'
    with open(config_path, 'w') as f:
        toml.dump(self.get_options(), f, encoder=toml.TomlNumpyEncoder())
def find_windows(self):
    """Compute board center/windows from the alignment corners and append
    the results to the alignment config file.

    Does nothing when the alignment file already contains a
    'recorded_center' entry.
    """
    alignment = 'config_alignment_%s.toml' % TOP_CAM
    rig = 'config_behavior_rig.toml'
    alignment_path = os.path.join(self.global_config_path, alignment)
    with open(alignment_path, 'r') as f:
        config = toml.load(f)
    if 'recorded_center' not in config:
        # Prefer undistorted corners; fall back to the raw detections.
        try:
            new_corners = np.array(config['undistorted_corners'])
        except KeyError:  # fix: was a bare `except:` hiding real errors
            new_corners = np.array(config['corners'])
        # Marker ids come nested as [[id]]; flatten and sort corners by id.
        ids = [int(i[0][0]) for i in config['ids']]
        new_corners = np.array(
            [corner for _, corner in sorted(zip(ids, new_corners))])
        # Six markers, each with 4 corner (x, y) pairs, are required.
        if new_corners.shape != (6, 1, 4, 2):
            raise Exception("can't proceed! missing corners")
        with open(os.path.join(self.global_config_path, rig), 'r') as f:
            rig = toml.load(f)
        results = find_board_center_and_windows(new_corners, rig)
        # Append (mode 'a') so the existing alignment data is preserved.
        with open(alignment_path, 'a') as f:
            toml.dump(results, f, encoder=toml.TomlNumpyEncoder())
def run_cli(config, pkg_name, args, outdir, label, pkgs, default_framework):
    """Drive a full CLI run: merge config with CLI args, compute the
    requested quantities via `get_hmf`, and write results + config to disk.

    Parameters are presumed to come from a click-style CLI entry point
    (``args`` being the raw CLI context) — TODO confirm against the caller.
    """
    console.print(
        Panel(f"Welcome to {pkg_name}!", box=box.DOUBLE_EDGE),
        style="bold",
        justify="center",
    )
    console.print()
    for pkg in pkgs:
        console.print(
            f"Using {pkg.__name__} version [blue]{pkg.__version__}[/blue]",
            style="strong",
        )
    cfg = _get_config(config)
    # Update the file-based config with options given on the CLI.
    if "params" not in cfg:
        cfg["params"] = {}
    if args:
        cfg["params"].update(_ctx_to_dct(args))
    cfg["params"] = _process_dct(cfg["params"])
    console.print()
    console.print("You set the following parameters explicitly:", style="bold")
    for k, v in cfg.get("params", {}).items():
        console.print(f" {k}: {v}")
    quantities = cfg.get("quantities", ["m", "dndm"])
    out = get_hmf(
        quantities,
        framework=cfg.get("framework", default_framework),
        get_label=True,
        label_kind="filename",
        **cfg.get("params", {}),
    )
    outdir = Path(outdir)
    console.print()
    console.print("Quantities to be obtained: ", style="bold")
    for q in quantities:
        console.print(f" - {q}", style="dim grey53")
    console.print()
    console.print(Rule("Starting Calculations", style="grey53"))
    t = time()
    # `out` yields one (quantities, framework object, label) triple per model.
    for quants, obj, lab in out:
        # Fall back to the user-supplied label when get_hmf gave none.
        lab = lab or label
        # Two-column timing row: description left, elapsed seconds right.
        table = Table.grid()
        table.expand = True
        table.add_column(style="bold", justify="left")
        table.add_column(style="blue", justify="right")
        table.add_row(f"Calculated {lab}:", f"[[{time() - t:.2f} sec]]")
        console.print(table)
        t = time()
        # Write out quantities
        for qname, q in zip(quantities, quants):
            np.savetxt(outdir / f"{lab}_{qname}.txt", q)
        console.print(
            f" Writing quantities to [cyan]{outdir}/{lab}_<quantity>.txt[/cyan]."
        )
        # Write out parameters
        dct = framework_to_dict(obj)
        dct["quantities"] = quantities
        with open(outdir / f"{lab}_cfg.toml", "w") as fl:
            toml.dump(dct, fl, encoder=toml.TomlNumpyEncoder())
        console.print(
            f" Writing full config to [cyan]{outdir}/{lab}_cfg.toml[/cyan]."
        )
    console.print()
    console.print(Rule("Finished!", style="grey53"), style="bold green")
# Demo: the default toml encoder stringifies numpy floats, whereas
# TomlNumpyEncoder emits them as proper TOML floats.
import numpy as np
import toml

a = np.arange(0, 10, dtype=np.double)
output = {"a": a}
print(f"{output = }")

# Default encoder: numpy doubles come out as quoted strings.
# 'a = [ "0.0", "1.0", "2.0", "3.0", "4.0", "5.0", "6.0", "7.0", "8.0", "9.0",]\n'
toml_str = toml.dumps(output)
print(f"{toml_str = }")

# Numpy-aware encoder: same data serialized as bare floats.
toml_str2 = toml.dumps(output, encoder=toml.TomlNumpyEncoder())
print(f"{toml_str2 = }")
# 'a = [ 0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0,]\n'
def write_to_file(self, dump_to_file=False):
    """Build the RustBCA ``particle_parameters`` dictionary and optionally
    dump it to ``Particles.toml``.

    Parameters
    ----------
    dump_to_file : bool
        When True, also write the dictionary to ``Particles.toml``
        (overwriting any existing file).

    Returns
    -------
    dict
        The ``particle_parameters`` mapping.
    """
    import decimal

    # Precision for the Decimal formatting helper below; relies on the
    # module-level `number_of_decimals` — TODO confirm it is always defined.
    decimal.getcontext().prec = int(number_of_decimals) + 2

    def have_ending_zeros(lis):
        """Convert numeric entries (up to 3 nesting levels) to Decimal with
        an explicit trailing zero so TOML emits them as floats.

        NOTE(review): this helper is never called in this method, and the
        trailing ``or True`` at the second level makes that branch
        unconditional — flagged for the author, behavior left unchanged.
        """
        if type(lis[0]) == tuple:
            for i in range(len(lis)):
                lis[i] = list(lis[i])
        for i in range(len(lis)):
            if type(lis[i]) == np.float64 or type(lis[i]) == np.float32 \
                    or type(lis[i]) == int or type(lis[i]) == float:
                lis[i] = decimal.Decimal(
                    ("{0:0." + str(len(str(lis[i]))) + "f}").format(lis[i]) + "0")
            else:
                for j in range(len(lis[i])):
                    # NOTE(review): `or True` makes this always taken.
                    if type(lis[i][j]) == np.float64 or type(lis[i][j]) == np.float32 \
                            or type(lis[i][j]) == int or type(lis[i][j]) == float or True:
                        lis[i][j] = decimal.Decimal(
                            ("{0:0." + str(len(str(lis[i][j]))) + "f}").format(lis[i][j]) + "0")
                    else:
                        for k in range(len(lis[i][j])):
                            if type(lis[i][j][k]) == np.float64 or type(lis[i][j][k]) == np.float32 \
                                    or type(lis[i][j][k]) == int or type(lis[i][j][k]) == float:
                                lis[i][j][k] = decimal.Decimal(
                                    ("{0:0." + str(len(str(lis[i][j][k]))) + "f}").format(lis[i][j][k]) + "0")
        return lis

    temp_dict = {
        "particle_parameters": {
            "length_unit": "MICRON",
            "energy_unit": "EV",
            "mass_unit": "AMU",
            "N": [int(i) for i in self.N],
            "m": self.masses,
            "Z": self.Z,
            "E": self.E,
            "Ec": self.Ec,
            "Es": self.Es,
            "pos": self.positions,
            "dir": self.directions,
            "interaction_index": [int(i) for i in self.interaction_index],
            "particle_input_filename": ""
        }
    }
    if dump_to_file:
        # Fix: the file was previously opened (truncating any existing
        # Particles.toml) even when dump_to_file was False, and the handle
        # was never closed.
        import toml
        with open("Particles.toml", "w") as f:
            toml.dump(temp_dict, f, encoder=toml.TomlNumpyEncoder())
    return temp_dict
def dumper(data: dict,
           path: Union[Path, str],
           overwrite: bool = False,
           compress: Optional[bool] = None):
    """Write dict to a TOML/YAML/JSON file.

    Dump data to a TOML/YAML/JSON format file, optionally compressing the
    contents with gzip and optionally overwriting the file.

    Args:
        data (dict): The dictionary to be written.
        path (str): The file to write. Valid extensions are .toml, .json,
            .yaml with optional extension .gz.
        overwrite (bool): If True, overwrite the file if it exists. If False,
            then existing files will cause an exception.
        compress (bool): If True, compress the data with gzip on write.
            If None, dispatch by its extension.

    Returns:
        None

    Raises:
        FileExistsError: If ``path`` exists and ``overwrite`` is False, or
            exists and is not a regular file.
        ValueError: If the extension is inconsistent with ``compress`` or
            not one of the supported formats.
    """
    path = Path(path)
    if path.exists():
        if overwrite:
            if path.is_file():
                path.unlink()
            else:
                raise FileExistsError(
                    f'{path} exists and is not a file, abort overwriting...')
        else:
            raise FileExistsError(
                f"{path} exists. Consider using `overwrite` option.")
    ext = path.suffix.lower()
    # Infer / validate compression from the outermost extension.
    if compress is None:
        if ext == '.gz':
            logger.info(
                f'Setting compression to on according to extension {ext}')
            compress = True
    elif compress is True:
        if ext != '.gz':
            raise ValueError(
                f'Compression is on but the extension {ext} is not .gz. Consider changing that to .gz.'
            )
    if compress:
        # ext must be .gz now; the format is the next-inner suffix.
        exts = path.suffixes
        if len(exts) != 2:
            raise ValueError(
                f'Extension {exts} not understood. Expect for example .toml.gz'
            )
        ext = exts[0].lower()
    dumper_dict = {
        '.json': partial(json.dumps, indent=4, sort_keys=True),
        '.toml': partial(toml.dumps, encoder=toml.TomlNumpyEncoder()),
        '.yaml': yaml.dump if yamlloader is None else partial(
            yaml.dump, Dumper=yamlloader.ordereddict.CDumper),
    }
    # Fix: an unknown extension raises KeyError (not AttributeError) from
    # the dict lookup, and the message was missing its f-string prefix.
    # The try body is narrowed to the lookup so serialization errors are
    # not mislabeled as bad extensions.
    try:
        dump = dumper_dict[ext]
    except KeyError:
        raise ValueError(
            f'Do not understand extension {ext}. Consider choosing .json, .toml, or .yaml.'
        )
    data_str = dump(data)
    if compress:
        with gzip.open(path, "wb") as f:
            f.write(data_str.encode())
    else:
        with open(path, "w") as f:
            f.write(data_str)
    return
def write_to_file(self, dump_to_file=False):
    '''
    Returns a dictionary. Turn the required mesh2D stuff into a format that
    TOML and RustBCA like. Optionally write it to ``Mesh2D.toml`` instead of
    only returning the dictionary.

    Parameters
    ----------
    dump_to_file : bool
        When True, also write the dictionary to ``Mesh2D.toml``
        (overwriting any existing file).

    Returns
    -------
    dict
        The ``mesh_2d_input`` mapping.
    '''
    triangle_list, material_densities = self.return_Triangles()
    Simulation_Boundaries = self.Simulation_boundaries
    # Collect every exterior vertex of every material shape.
    material_boundary_points = []
    for shape, _ in self.shapes:
        material_boundary_points += shape.exterior.coords
    # Drop points that lie strictly inside any shape. Iterate in reverse so
    # pops do not shift indices of not-yet-visited entries.
    for i, point in reversed(list(enumerate(material_boundary_points))):
        for shape, _ in self.shapes:
            pointPoint = Point(point)
            if pointPoint.within(shape):
                material_boundary_points.pop(i)
                # Fix: without this break, a point inside two shapes popped
                # index i twice, removing an unrelated shifted element.
                break
    material_boundary_points.sort()
    # Deduplicate (points are sorted, so groupby collapses all duplicates).
    material_boundary_points = list(
        material_boundary_points
        for material_boundary_points, _ in itertools.groupby(
            material_boundary_points))
    electronic_stopping_correction_factors = self.electronic_stopping_corrections
    temp_dict = {
        "mesh_2d_input": {
            "length_unit": self.length_unit,
            "energy_barrier_thickness": self.energy_barrier_thickness,
            "triangles": triangle_list,
            "densities": material_densities,
            "material_boundary_points": material_boundary_points,
            "simulation_boundary_points": Simulation_Boundaries,
            "electronic_stopping_correction_factors": electronic_stopping_correction_factors
        }
    }
    if dump_to_file:
        # Fix: the file was previously opened (truncating any existing
        # Mesh2D.toml) even when dump_to_file was False, and the handle was
        # never closed. Large blocks of dead commented-out Decimal-formatting
        # code were also removed.
        import toml
        with open("Mesh2D.toml", "w") as f:
            toml.dump(temp_dict, f, encoder=toml.TomlNumpyEncoder())
    return temp_dict
def save_config_file(out_dir, config):
    """Dump *config* as ``urbafoam.toml`` inside *out_dir*."""
    target = out_dir / 'urbafoam.toml'
    encoder = toml.TomlNumpyEncoder()
    with open(target, 'w') as dst:
        toml.dump(config, dst, encoder=encoder)