def __init__(self, data_source, weight_field=None):
    """Attach profile bookkeeping to *data_source*.

    Parameters
    ----------
    data_source : data container
        Source whose fields will be binned; its dataset and communicator
        are cached on the instance.
    weight_field : field name or tuple, optional
        When given, it is resolved through ``_determine_fields`` and a
        variance container is allocated for the weighted profile.
    """
    self.data_source = data_source
    self.ds = data_source.ds
    # Per-field bookkeeping containers.
    self.field_map = {}
    self.field_info = {}
    self.field_data = YTFieldData()
    self.field_units = {}
    if weight_field is None:
        self.weight_field = None
    else:
        # A weighted profile also accumulates per-bin variance.
        self.variance = YTFieldData()
        self.weight_field = self.data_source._determine_fields(weight_field)[0]
    ParallelAnalysisInterface.__init__(self, comm=data_source.comm)
def __init__(self, base_region, data_files, overlap_files=None, domain_id=-1):
    # Container for particles selected by *base_region* across one or more
    # data files; *overlap_files* holds files whose particles may spill
    # into this region.
    if overlap_files is None:
        overlap_files = []
    self.field_data = YTFieldData()
    self.field_parameters = {}
    # Normalize single file or iterable of files to a list.
    self.data_files = list(always_iterable(data_files))
    self.overlap_files = list(always_iterable(overlap_files))
    self.ds = self.data_files[0].ds
    # Selector-result cache, invalidated when the selector changes.
    self._last_mask = None
    self._last_selector_id = None
    self._current_particle_type = "all"
    # self._current_fluid_type = self.ds.default_fluid_type
    if hasattr(base_region, "base_selector"):
        # base_region is itself a derived container; reuse its root
        # region/selector so selection composes from the original source.
        self.base_selector = base_region.base_selector
        self.base_region = base_region.base_region
    else:
        self.base_region = base_region
        self.base_selector = base_region.selector
    self._octree = None
    self._temp_spatial = False
    if isinstance(base_region, ParticleContainer):
        # Inherit the (possibly temporary) spatial octree state from the
        # parent container.
        self._temp_spatial = base_region._temp_spatial
        self._octree = base_region._octree
    # To ensure there are not domains if global octree not used
    # NOTE(review): the *domain_id* parameter is accepted but never
    # stored -- self.domain_id is unconditionally forced to -1 here.
    # Confirm whether this is intentional or should read
    # `self.domain_id = domain_id`.
    self.domain_id = -1
def __init__(self, base_region, data_files, ds, min_ind=0, max_ind=0,
             over_refine_factor=1):
    """Create a particle octree subset spanning indices [min_ind, max_ind).

    A *max_ind* of 0 is a sentinel meaning "no upper bound" and is
    replaced by ``2**63``.
    """
    # The first attempt at this will not work in parallel.
    self._oref = over_refine_factor
    self._num_zones = 1 << over_refine_factor
    self.data_files = data_files
    self.field_data = YTFieldData()
    self.field_parameters = {}
    self.ds = ds
    self._index = self.ds.index
    self.oct_handler = ds.index.oct_handler
    self.min_ind = min_ind
    # 0 means unbounded; use an effectively infinite upper index.
    self.max_ind = max_ind if max_ind != 0 else (1 << 63)
    # Selector-result cache, invalidated when the selector changes.
    self._last_mask = None
    self._last_selector_id = None
    self._current_particle_type = 'all'
    self._current_fluid_type = self.ds.default_fluid_type
    self.base_region = base_region
    self.base_selector = base_region.selector
def __init__(self, base_region, sfc_start, sfc_end, oct_handler, ds):
    """Subset of an SFC-ordered octree covering [sfc_start, sfc_end]."""
    self.field_data = YTFieldData()
    self.field_parameters = {}
    # Space-filling-curve index range owned by this subset.
    self.sfc_start, self.sfc_end = sfc_start, sfc_end
    self.oct_handler = oct_handler
    self.ds = ds
    # Selector-result cache, invalidated when the selector changes.
    self._last_mask = None
    self._last_selector_id = None
    self._current_particle_type = 'all'
    self._current_fluid_type = self.ds.default_fluid_type
    self.base_region = base_region
    self.base_selector = base_region.selector
def __init__(self, id, filename=None, index=None):
    """Initialize a grid patch from its *id*, backing *filename*, and
    the parent *index*."""
    self.field_data = YTFieldData()
    self.field_parameters = {}
    self.id = id
    # Child-cell bookkeeping is filled in lazily elsewhere.
    self._child_mask = None
    self._child_indices = None
    self._child_index_mask = None
    self.ds = index.dataset
    # Weak proxy avoids a reference cycle between the grid and its index.
    self._index = weakref.proxy(index)
    self.start_index = None
    self.filename = filename
    # Selector-result cache, invalidated when the selector changes.
    self._last_mask = None
    self._last_count = -1
    self._last_selector_id = None
    self._current_particle_type = 'all'
    self._current_fluid_type = self.ds.default_fluid_type
def __init__(self, base_region, domain, ds, over_refine_factor=1):
    """Octree subset bound to a single *domain*.

    Note that the dataset is taken from ``domain.ds`` rather than the
    *ds* argument.
    """
    self._oref = over_refine_factor
    self._num_zones = 1 << over_refine_factor
    self.field_data = YTFieldData()
    self.field_parameters = {}
    self.domain = domain
    self.domain_id = domain.domain_id
    self.ds = domain.ds
    self._index = self.ds.index
    self.oct_handler = domain.oct_handler
    # Selector-result cache, invalidated when the selector changes.
    self._last_mask = None
    self._last_selector_id = None
    self._current_particle_type = 'all'
    self._current_fluid_type = self.ds.default_fluid_type
    self.base_region = base_region
    self.base_selector = base_region.selector
def __init__(self, mesh_id, filename, connectivity_indices,
             connectivity_coords, index):
    """Set up an unstructured-mesh object.

    Parameters
    ----------
    mesh_id : int
        Identifier of this mesh within the index.
    filename : str
        File backing the mesh data.
    connectivity_indices : ndarray
        Per-element vertex indices; its second dimension must equal the
        class attribute ``_connectivity_length``.
    connectivity_coords : ndarray
        Coordinates of the mesh vertices.
    index : index object
        The parent index; its ``dataset`` is cached as ``self.ds``.

    Raises
    ------
    RuntimeError
        If the second dimension of *connectivity_indices* does not match
        ``self._connectivity_length``.
    """
    self.field_data = YTFieldData()
    self.filename = filename
    self.field_parameters = {}
    self.mesh_id = mesh_id
    # This is where we set up the connectivity information
    self.connectivity_indices = connectivity_indices
    if connectivity_indices.shape[1] != self._connectivity_length:
        # Fix: the original bare ``raise RuntimeError`` carried no
        # diagnostic; include the mismatched sizes in the message.
        raise RuntimeError(
            "Connectivity array has %d vertices per element, expected %d"
            % (connectivity_indices.shape[1], self._connectivity_length))
    self.connectivity_coords = connectivity_coords
    self.ds = index.dataset
    self._index = index
    # Selector-result cache, invalidated when the selector changes.
    self._last_mask = None
    self._last_count = -1
    self._last_selector_id = None
    self._current_particle_type = 'all'
    self._current_fluid_type = self.ds.default_fluid_type
def __init__(self, outputs, indices, fields=None, suppress_logging=False): indices.sort() # Just in case the caller wasn't careful self.field_data = YTFieldData() if isinstance(outputs, DatasetSeries): self.data_series = outputs else: self.data_series = DatasetSeries(outputs) self.masks = [] self.sorts = [] self.array_indices = [] self.indices = indices self.num_indices = len(indices) self.num_steps = len(outputs) self.times = [] self.suppress_logging = suppress_logging # Default fields if fields is None: fields = [] fields.append("particle_position_x") fields.append("particle_position_y") fields.append("particle_position_z") fields = list(OrderedDict.fromkeys(fields)) if self.suppress_logging: old_level = int(ytcfg.get("yt", "loglevel")) mylog.setLevel(40) my_storage = {} pbar = get_pbar("Constructing trajectory information", len(self.data_series)) for i, (sto, ds) in enumerate(self.data_series.piter(storage=my_storage)): dd = ds.all_data() idx_field = dd._determine_fields("particle_index")[0] newtags = dd[idx_field].ndarray_view().astype("int64") mask = np.in1d(newtags, indices, assume_unique=True) sorts = np.argsort(newtags[mask]) self.array_indices.append( np.where(np.in1d(indices, newtags, assume_unique=True))[0]) self.masks.append(mask) self.sorts.append(sorts) sto.result_id = ds.parameter_filename sto.result = ds.current_time pbar.update(i) pbar.finish() if self.suppress_logging: mylog.setLevel(old_level) times = [] for fn, time in sorted(my_storage.items()): times.append(time) self.times = self.data_series[0].arr([time for time in times], times[0].units) self.particle_fields = [] # Instantiate fields the caller requested for field in fields: self._get_data(field)
def __init__(self, outputs, indices, fields=None, suppress_logging=False):
    # Track a fixed set of particles (identified by particle_index) across
    # every output in *outputs*: pre-compute per-output masks/sort orders,
    # gather particle positions in parallel, then load requested fields.
    indices.sort()  # Just in case the caller wasn't careful
    self.field_data = YTFieldData()
    if isinstance(outputs, DatasetSeries):
        self.data_series = outputs
    else:
        self.data_series = DatasetSeries(outputs)
    self.masks = []
    self.sorts = []
    self.array_indices = []
    self.indices = indices
    self.num_indices = len(indices)
    self.num_steps = len(outputs)
    self.times = []
    self.suppress_logging = suppress_logging
    if fields is None:
        fields = []
    # De-duplicate while preserving the caller's ordering.
    fields = list(OrderedDict.fromkeys(fields))
    if self.suppress_logging:
        # Temporarily raise the log level (40 == ERROR); restored below.
        old_level = int(ytcfg.get("yt", "loglevel"))
        mylog.setLevel(40)
    # Resolve the index and position field names once, on the first output.
    fds = {}
    ds_first = self.data_series[0]
    dd_first = ds_first.all_data()
    idx_field = dd_first._determine_fields("particle_index")[0]
    for field in ("particle_position_%s" % ax for ax in "xyz"):
        fds[field] = dd_first._determine_fields(field)[0]
    my_storage = {}
    pbar = get_pbar("Constructing trajectory information", len(self.data_series))
    # Parallel iteration over outputs; results are gathered in my_storage.
    for i, (sto, ds) in enumerate(self.data_series.piter(storage=my_storage)):
        dd = ds.all_data()
        newtags = dd[idx_field].ndarray_view().astype("int64")
        # Which of this output's particles are in our tracked set.
        mask = np.in1d(newtags, indices, assume_unique=True)
        # Order that sorts the selected particles by their index.
        sort = np.argsort(newtags[mask])
        array_indices = np.where(
            np.in1d(indices, newtags, assume_unique=True))[0]
        self.array_indices.append(array_indices)
        self.masks.append(mask)
        self.sorts.append(sort)
        # Positions for the tracked particles, in tracked-index order.
        pfields = {}
        for field in ("particle_position_%s" % ax for ax in "xyz"):
            pfields[field] = dd[fds[field]].ndarray_view()[mask][sort]
        sto.result_id = ds.parameter_filename
        sto.result = (ds.current_time, array_indices, pfields)
        pbar.update(i)
    pbar.finish()
    if self.suppress_logging:
        mylog.setLevel(old_level)
    # Collect output times in filename-sorted order.
    times = []
    for fn, (time, indices, pfields) in sorted(my_storage.items()):
        times.append(time)
    self.times = self.data_series[0].arr([time for time in times], times[0].units)
    self.particle_fields = []
    # Assemble position trajectories: one (particle, step) matrix per axis,
    # NaN where a particle is absent from an output.
    output_field = np.empty((self.num_indices, self.num_steps))
    output_field.fill(np.nan)
    for field in ("particle_position_%s" % ax for ax in "xyz"):
        for i, (fn, (time, indices, pfields)) in enumerate(
                sorted(my_storage.items())):
            output_field[indices, i] = pfields[field]
        self.field_data[field] = array_like_field(
            dd_first, output_field.copy(), fds[field])
        self.particle_fields.append(field)
    # Instantiate fields the caller requested
    self._get_data(fields)