def append_values(self, timestamp):
    """Sample the tracked properties at the current step and store them.

    For every property due for sampling at ``self._step_number``, read its
    current value (caching reads so the same underlying value is not fetched
    twice in one step), append it to its per-property ``ValueContainer``
    (creating the container lazily on first store), and collect the flat
    column -> value mapping for this step.

    Removed in this revision: commented-out merge-conflict residue
    (``=======`` / ``>>>>>>> bug_fixes_merge`` markers and two dead,
    duplicated function bodies) that had been left at the top of the method.

    :param timestamp: timestamp recorded alongside the stored values and
        passed to the per-type value-getter functions.
    :return: dict mapping column name to the value sampled this step.
    """
    curr_data = {}
    cached_values = {}
    for prop in self._properties:
        if not prop.should_sample_value(self._step_number):
            continue
        prop_key = self._prop_key(prop)
        value_key = self._value_key(prop)
        # Don't re-read the same value multiple times.
        if value_key in cached_values:
            value = cached_values[value_key]
        else:
            value = self._get_value_func_by_type[prop.store_values_type](
                prop, prop_key, timestamp)
            if value is not None:
                cached_values[value_key] = value
        # NOTE(review): the should-store predicate is invoked even when
        # value is None — presumably it returns False (or tracks state)
        # in that case; confirm before adding an early-out guard.
        if not self._should_store_by_type[prop.store_values_type](
                prop, prop_key, value):
            continue
        # Hoisted: the original called value.make_columns() up to three
        # times per iteration.
        columns = value.make_columns()
        if len(columns) > 1:
            # Multi-column value: pair each column with its element.
            for column, val in zip(columns, value.value):
                curr_data[column] = val
        else:
            curr_data[columns[0]] = value.value
        if self._data[prop_key] is None:
            # Lazily create the storage container on the first stored value.
            path = f"Exports/{self._scenario}/{prop.elem_class}/{self._name}/{prop.storage_name}"
            self._data[prop_key] = ValueContainer(
                value,
                self._hdf_store,
                path,
                prop.get_max_size(self._num_steps),
                dataset_property_type=prop.get_dataset_property_type(),
                max_chunk_bytes=self._max_chunk_bytes,
                store_timestamp=prop.should_store_timestamp(),
            )
        self._data[prop_key].append(value, timestamp=timestamp)
    self._step_number += 1
    return curr_data
def make_container(hdf_store, path, prop, num_steps, max_chunk_bytes, values, elem_names):
    """Build and return a ValueContainer configured for *prop*.

    :param hdf_store: open HDF store backing the container.
    :param path: dataset path within the store.
    :param prop: export property supplying sizing/type configuration.
    :param num_steps: total simulation steps, used to size the dataset.
    :param max_chunk_bytes: maximum chunk size for the dataset.
    :param values: initial values used to shape the container.
    :param elem_names: names of the elements covered by the container.
    :return: the newly created ValueContainer.
    """
    max_size = prop.get_max_size(num_steps)
    dataset_type = prop.get_dataset_property_type()
    new_container = ValueContainer(
        values,
        hdf_store,
        path,
        max_size,
        elem_names,
        dataset_type,
        max_chunk_bytes=max_chunk_bytes,
        store_time_step=prop.should_store_time_step(),
    )
    logger.debug("Created storage container path=%s", path)
    return new_container
def export_sums(self):
    """Export properties stored as sums to disk."""
    # Each key is a (element class, property name) pair; the value is the
    # accumulated sum to persist as a single-row NUMBER dataset.
    for (elem_class, prop_name), summed in self._sums.items():
        path = f"Exports/{self._scenario}/{elem_class}/{self._name}/{prop_name}"
        container = ValueContainer(
            summed,
            self._hdf_store,
            path,
            1,
            max_chunk_bytes=self._max_chunk_bytes,
            dataset_property_type=DatasetPropertyType.NUMBER,
        )
        container.append(summed)
        container.flush_data()
def export_change_counts(self):
    """Export properties stored as change counts to disk."""
    # Keys are (element class, property name); the stored tuple's second
    # item holds the accumulated change count for that property.
    for (elem_class, prop_name), count_info in self._change_counts.items():
        number_value = ValueByNumber(self._name, prop_name, count_info[1])
        path = f"Exports/{self._scenario}/{elem_class}/{self._name}/{prop_name}"
        container = ValueContainer(
            number_value,
            self._hdf_store,
            path,
            1,
            max_chunk_bytes=self._max_chunk_bytes,
            dataset_property_type=DatasetPropertyType.NUMBER,
        )
        container.append(number_value)
        container.flush_data()