def read(self, obj, frame=0):
    """Read one array per tensor of `obj` from this scene at `frame`, or the 'unnamed' array if `obj` is not a struct."""
    if struct.isstruct(obj):
        obj = _transform_for_writing(obj)
        names = struct.flatten(obj)
        if not np.all([isinstance(n, six.string_types) for n in names]):
            # Leaves are data tensors rather than filenames; derive names from the struct paths instead.
            names = struct.names(obj)
        data = struct.map(lambda name: self.read_array(self._filename(name), frame), names)
        return data
    else:
        return self.read_array('unnamed', frame)
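# Usage sketch for `read` (hedged: `scene` and `smoke_state` are hypothetical and assume the
# corresponding arrays were previously written to this scene at frame 10):
#
#     state = scene.read(smoke_state, frame=10)  # reads one file per flattened tensor in the struct
#     # any non-struct argument falls back to reading the 'unnamed' array of that frame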
def load_state(state):
    """Create placeholders for all tensors in the given state.

    Returns the validated placeholder state and a dict mapping the placeholders
    to their default names (using struct.names).
    """
    if isinstance(state, StateProxy):
        state = state.state
    assert isinstance(state, State)
    state = _transform_for_writing(state)
    names = struct.names(state)
    with _unsafe():
        placeholders = placeholder(state.shape)
    state_in = struct.map(lambda x: x, placeholders)  # validates fields, splits staggered tensors
    return state_in, {placeholders: names}
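# Usage sketch for `load_state` (hedged: `fluid` stands for any State instance and is hypothetical):
#
#     state_in, placeholder_names = load_state(fluid)
#     # `state_in` mirrors `fluid` with placeholders as leaves; `placeholder_names` maps them
#     # to the default names produced by struct.names for matching stored arrays.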
def write(self, obj, names=None, frame=0):
    if struct.isstruct(obj):
        obj = _transform_for_writing(obj)
        if names is None:
            names = struct.names(obj)
        values = struct.flatten(obj)
        names = struct.flatten(names)
        names = [_slugify_filename(name) for name in names]
        self.write_sim_frame(values, names, frame)
    else:
        name = str(names) if names is not None else 'unnamed'
        self.write_sim_frame([obj], [name], frame)
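# Usage sketch for `write` (hedged: `scene`, `smoke_state` and `density_data` are hypothetical):
#
#     scene.write(smoke_state, frame=0)                    # filenames derived via struct.names
#     scene.write(density_data, names='density', frame=0)  # single array under an explicit name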
def build_graph_input(obj, input_type='placeholder', frames=None):
    """
    Create placeholders or dataset nodes for tensors in the supplied state.

    :param obj: struct or StateProxy
    :param input_type: 'placeholder' or 'dataset_handle'
    :param frames: Number of input frames. If not None, returns a list of input structs.
        Only supported with input_type='dataset_handle'.
    :return:
      1. Valid state containing or derived from the created placeholders or dataset handle
      2. For 'placeholder': dict mapping the placeholders to their default names (using struct.names).
         For 'dataset_handle': dict with keys 'names', 'iterator_handle', 'shapes', 'dtypes' and 'frames'.
    """
    if isinstance(obj, StateProxy):
        obj = obj.state
    assert struct.isstruct(obj)
    # --- Shapes and names ---
    writable_obj = _transform_for_writing(obj)
    shape = _writing_staticshape(obj)
    names = struct.names(writable_obj)
    if input_type == 'placeholder':
        if frames is not None:
            raise NotImplementedError()
        with _unsafe():
            placeholders = placeholder(shape)
        graph_in = struct.map(lambda x: x, placeholders)  # validates fields, splits staggered tensors
        return graph_in, {placeholders: names}
    elif input_type == 'dataset_handle':
        with _unsafe():
            dtypes = struct.dtype(writable_obj)
            dataset_nodes, iterator_handle = dataset_handle(shape, dtypes, frames=frames)
        graph_in = struct.map(lambda x: x, dataset_nodes)  # validates fields, splits staggered tensors
        shapes = struct.flatten(struct.staticshape(dataset_nodes), leaf_condition=is_static_shape)
        dtypes = struct.flatten(struct.dtype(dataset_nodes))
        return graph_in, {
            'names': struct.flatten(names),
            'iterator_handle': iterator_handle,
            'shapes': shapes,
            'dtypes': dtypes,
            'frames': frames
        }
    else:
        raise ValueError(input_type)
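# Usage sketch for `build_graph_input` (hedged: `initial_state` is hypothetical; the
# dataset-handle variant assumes a tf.data pipeline whose string handle is fed at run time):
#
#     graph_in, names = build_graph_input(initial_state)  # default: placeholder inputs
#     graph_in, info = build_graph_input(initial_state, input_type='dataset_handle', frames=2)
#     # info holds 'names', 'iterator_handle', 'shapes', 'dtypes' and 'frames' for wiring the iterator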
def write_sim_frame(directory: math.Tensor,
                    fields: Field or tuple or list or dict or struct.Struct,
                    frame: int,
                    names: str or tuple or list or struct.Struct or None = None):
    """
    Write a Field or structure of Fields to files.
    The filenames are created from the provided names and the frame index in accordance with the
    scene format specification at https://tum-pbs.github.io/PhiFlow/Scene_Format_Specification.html .

    This method can be used in batch mode.
    Batch mode is active if a list of directories is given instead of a single directory.
    Then, all fields are unstacked along the batch_dim dimension and matched with the directories list.

    Args:
        directory: directory name or list of directories.
            If a list is provided, all fields are unstacked along batch_dim and matched with their respective directory.
        fields: single Field or structure of Fields to save.
        frame: Number < 1000000, typically the time step index.
        names: (Optional) Structure matching fields, holding the filename for each respective Field.
            If not provided, names are automatically generated based on the structure of fields.
    """
    if names is None:
        names = struct.names(fields)
    if frame >= 1000000:  # frame indices in filenames are limited to six digits
        warnings.warn(f"frame too large: {frame}. Data will be saved but the filename might cause trouble in the future.")

    def single_write(f, name):
        name = _slugify_filename(name)
        files = math.map(lambda dir_: _filename(dir_, name, frame), directory)
        if isinstance(f, SampledField):
            write(f, files)
        elif isinstance(f, math.Tensor):
            raise NotImplementedError()
        elif isinstance(f, Field):
            raise ValueError("write_sim_frame: only SampledField instances can be saved. Resample other Fields before saving them.")
        else:
            raise ValueError(f"write_sim_frame: only SampledField instances can be saved but got {f}")

    struct.foreach(single_write, fields, names)
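# Usage sketch for `write_sim_frame` (hedged: paths and fields are hypothetical; filenames
# follow the linked scene format specification, e.g. density_000000.npz for frame 0):
#
#     write_sim_frame('~/phi/sim_000000', {'density': density, 'velocity': velocity}, frame=0)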