Example #1
0
    def __init__(self,
                 data_source,
                 x_field,
                 x_n,
                 x_min,
                 x_max,
                 x_log,
                 y_field,
                 y_n,
                 y_min,
                 y_max,
                 y_log,
                 z_field,
                 z_n,
                 z_min,
                 z_max,
                 z_log,
                 weight_field=None):
        """Set up a 3D profile binned along three fields.

        For each axis the field name is resolved, its min/max are
        sanitized to the field's units, and the bin edges are computed
        (linearly or logarithmically, per the ``*_log`` flag).
        """
        super(Profile3D, self).__init__(data_source, weight_field)
        # Configure each binning axis in turn: x first, then y, then z.
        axis_specs = (("x", x_field, x_n, x_min, x_max, x_log),
                      ("y", y_field, y_n, y_min, y_max, y_log),
                      ("z", z_field, z_n, z_min, z_max, z_log))
        for ax, bin_field, n_bins, lo, hi, use_log in axis_specs:
            fname = data_source._determine_fields(bin_field)[0]
            setattr(self, "%s_field" % ax, fname)
            setattr(self, "%s_log" % ax, use_log)
            self.field_info[fname] = self.data_source.ds.field_info[fname]
            lo, hi = _sanitize_min_max_units(lo, hi,
                                             self.field_info[fname],
                                             self.ds.unit_registry)
            edges = array_like_field(
                data_source, self._get_bins(lo, hi, n_bins, use_log), fname)
            setattr(self, "%s_bins" % ax, edges)

        # Number of bins per axis is one less than the number of edges.
        self.size = (self.x_bins.size - 1, self.y_bins.size - 1,
                     self.z_bins.size - 1)

        self.bin_fields = (self.x_field, self.y_field, self.z_field)
        # Bin centers are the midpoints of adjacent bin edges.
        self.x = 0.5 * (self.x_bins[1:] + self.x_bins[:-1])
        self.y = 0.5 * (self.y_bins[1:] + self.y_bins[:-1])
        self.z = 0.5 * (self.z_bins[1:] + self.z_bins[:-1])
Example #2
0
    def __init__(self,
                 data_source,
                 x_field,
                 x_n,
                 x_min,
                 x_max,
                 x_log,
                 weight_field=None,
                 override_bins_x=None):
        """Set up a 1D profile binned along a single field.

        Bin edges are computed from ``x_min``/``x_max``/``x_n``
        (log-spaced when ``x_log`` is true); if ``override_bins_x`` is
        given, those edges replace the computed ones.
        """
        super(Profile1D, self).__init__(data_source, weight_field)
        self.x_field = data_source._determine_fields(x_field)[0]
        self.field_info[self.x_field] = \
            self.data_source.ds.field_info[self.x_field]
        self.x_log = x_log
        # Coerce the requested bounds into the bin field's units.
        x_min, x_max = _sanitize_min_max_units(
            x_min, x_max, self.field_info[self.x_field],
            self.ds.unit_registry)
        self.x_bins = array_like_field(
            data_source,
            self._get_bins(x_min, x_max, x_n, x_log),
            self.x_field)

        # Caller-supplied edges take precedence over the computed ones.
        if override_bins_x is not None:
            self.x_bins = array_like_field(
                data_source, override_bins_x, self.x_field)

        self.size = (self.x_bins.size - 1, )
        self.bin_fields = (self.x_field, )
        # Bin centers: midpoints of adjacent bin edges.
        self.x = 0.5 * (self.x_bins[1:] + self.x_bins[:-1])
 def process_chunk(self, data, field, sample_fields):
     """Scan one chunk for the extremum of *field*, also sampling the
     values of *sample_fields* at that location.

     Returns a tuple ``(extremum, sample_0, sample_1, ...)``; for an
     empty chunk, sentinel values (``self._sign * HUGE`` and -1) are
     returned so they lose to any real result.
     """
     field = data._determine_fields(field)[0]
     extremum = array_like_field(data, self._sign * HUGE, field)
     samples = [array_like_field(data, -1, sf) for sf in sample_fields]
     maxi = -1
     if data[field].size > 0:
         # self._func picks the index of the extremum (e.g. argmin/argmax).
         maxi = self._func(data[field])
         extremum = data[field][maxi]
         samples = [data[sf][maxi] for sf in sample_fields]
     return (extremum, ) + tuple(samples)
Example #4
0
def test_array_like_field_output_units():
    """array_like_field must attach the correct units to a plain scalar."""
    ds = load(ISOGAL)
    ad = ds.all_data()
    # Particle field: units should match the field's declared output_units.
    expected = ad["particle_mass"].units
    actual = array_like_field(ad, 1., ("all", "particle_mass")).units
    assert expected == actual
    assert str(expected) == ds.fields.all.particle_mass.output_units
    # Mesh field: units should match the field's declared units.
    expected = ad['gas', 'x'].units
    actual = array_like_field(ad, 1., ("gas", "x")).units
    assert expected == actual
    assert str(expected) == ds.fields.gas.x.units
Example #5
0
 def process_chunk(self, data, field):
     """Return (min value, argmin, x, y, z) of *field* over one chunk.

     An empty chunk yields sentinels (HUGE / -1) that lose to any
     real minimum during the later reduction.
     """
     field = data._determine_fields(field)[0]
     smallest = array_like_field(data, HUGE, field)
     px, py, pz = (array_like_field(data, -1, ax) for ax in ("x", "y", "z"))
     mini = -1
     if data[field].size > 0:
         mini = np.argmin(data[field])
         smallest = data[field][mini]
         # Record the coordinates at the minimum.
         px, py, pz = (data[ax][mini] for ax in "xyz")
     return (smallest, mini, px, py, pz)
 def process_chunk(self, data, field):
     """Locate the chunk-local minimum of *field* and its x/y/z position.

     Returns ``(value, index, x, y, z)``; sentinel values are returned
     when the chunk contains no data.
     """
     field = data._determine_fields(field)[0]
     minimum = array_like_field(data, HUGE, field)
     position = {ax: array_like_field(data, -1, ax) for ax in "xyz"}
     mini = -1
     if data[field].size > 0:
         mini = np.argmin(data[field])
         minimum = data[field][mini]
         for ax in "xyz":
             position[ax] = data[ax][mini]
     return (minimum, mini, position["x"], position["y"], position["z"])
 def process_chunk(self, data, fields, non_zero):
     """Collect [min, max] of every field over this chunk.

     When *non_zero* is true, only strictly positive values are
     considered.  Fields with no selected data contribute the
     sentinels ``HUGE`` / ``-HUGE`` so they lose in a later reduction.
     """
     extrema = []
     for raw_field in fields:
         fname = data._determine_fields(raw_field)[0]
         arr = data[fname]
         if non_zero:
             arr = arr[arr > 0.0]
         if arr.size > 0:
             extrema += [arr.min(), arr.max()]
         else:
             extrema += [array_like_field(data, HUGE, fname),
                         array_like_field(data, -HUGE, fname)]
     return extrema
 def process_chunk(self, data, fields, non_zero):
     """Gather per-field minima and maxima for a single data chunk.

     Optionally restricts to positive values (*non_zero*); empty
     selections are represented by sentinel extrema.
     """
     results = []
     for field in fields:
         field = data._determine_fields(field)[0]
         fd = data[field]
         if non_zero:
             fd = fd[fd > 0.0]
         if fd.size == 0:
             # No data selected: emit sentinels that any real value beats.
             results.append(array_like_field(data, HUGE, field))
             results.append(array_like_field(data, -HUGE, field))
         else:
             results.append(fd.min())
             results.append(fd.max())
     return results
 def process_chunk(self, data, field):
     """Find the chunk-local maximum of *field* and the coordinates there.

     Coordinate axis names come from the dataset's coordinate handler,
     so this works for non-Cartesian geometries too.  Empty chunks
     return sentinels (-HUGE / -1).
     """
     axes = data.ds.coordinates.axis_name
     field = data._determine_fields(field)[0]
     peak = array_like_field(data, -HUGE, field)
     cx = array_like_field(data, -1, axes[0])
     cy = array_like_field(data, -1, axes[1])
     cz = array_like_field(data, -1, axes[2])
     maxi = -1
     if data[field].size > 0:
         maxi = np.argmax(data[field])
         peak = data[field][maxi]
         cx, cy, cz = (data[axes[d]][maxi] for d in range(3))
     return (peak, maxi, cx, cy, cz)
Example #10
0
 def process_chunk(self, data, field):
     """Return (max value, argmax, coord0, coord1, coord2) for *field*
     over this chunk, using the dataset's own axis names for the
     coordinate fields.  Sentinels are returned for an empty chunk.
     """
     axis_names = data.ds.coordinates.axis_name
     field = data._determine_fields(field)[0]
     result = array_like_field(data, -HUGE, field)
     coords = [array_like_field(data, -1, axis_names[d]) for d in (0, 1, 2)]
     maxi = -1
     if data[field].size > 0:
         maxi = np.argmax(data[field])
         result = data[field][maxi]
         coords = [data[axis_names[d]][maxi] for d in (0, 1, 2)]
     return (result, maxi, coords[0], coords[1], coords[2])
Example #11
0
 def _get_data(self, field):
     """
     Get a field to include in the trajectory collection.

     The trajectory collection itself is a dict of 2D numpy arrays,
     with shape (num_indices, num_steps).  The result is cached in
     ``self.field_data``, so repeated calls are cheap.
     """
     if field not in self.field_data:
         if self.suppress_logging:
             # Temporarily raise the log level (40 == ERROR); the old
             # level is restored at the end of the method.
             old_level = int(ytcfg.get("yt", "loglevel"))
             mylog.setLevel(40)
         ds_first = self.data_series[0]
         dd_first = ds_first.all_data()
         fd = dd_first._determine_fields(field)[0]
         if field not in self.particle_fields:
             # Remember which fields are particle fields so the cheap
             # per-particle path below can be taken for them.
             if self.data_series[0].field_info[fd].particle_type:
                 self.particle_fields.append(field)
         # One row per tracked particle, one column per output; entries
         # that are never written below remain NaN.
         particles = np.empty((self.num_indices, self.num_steps))
         particles[:] = np.nan
         step = int(0)
         pbar = get_pbar("Generating field %s in trajectories." % (field),
                         self.num_steps)
         my_storage = {}
         for i, (sto, ds) in enumerate(
                 self.data_series.piter(storage=my_storage)):
             # Masks/sorts were computed when the collection was built.
             mask = self.masks[i]
             sort = self.sorts[i]
             if field in self.particle_fields:
                 # This is easy... just get the particle fields
                 dd = ds.all_data()
                 pfield = dd[fd].ndarray_view()[mask][sort]
             else:
                 # This is hard... must loop over grids
                 pfield = np.zeros((self.num_indices))
                 x = self["particle_position_x"][:, step].ndarray_view()
                 y = self["particle_position_y"][:, step].ndarray_view()
                 z = self["particle_position_z"][:, step].ndarray_view()
                 # This will fail for non-grid index objects
                 particle_grids, particle_grid_inds = ds.index._find_points(
                     x, y, z)
                 for grid in particle_grids:
                     # One layer of ghost zones so CIC sampling near the
                     # grid edge has valid neighbor cells.
                     cube = grid.retrieve_ghost_zones(1, [fd])
                     CICSample_3(
                         x, y, z, pfield, self.num_indices, cube[fd],
                         np.array(grid.LeftEdge).astype(np.float64),
                         np.array(grid.ActiveDimensions).astype(np.int32),
                         grid.dds[0])
             sto.result_id = ds.parameter_filename
             sto.result = (self.array_indices[i], pfield)
             pbar.update(step)
             step += 1
         pbar.finish()
         # Reassemble per-output results (keyed and therefore sorted by
         # filename) into the (num_indices, num_steps) array.
         for i, (fn, (indices,
                      pfield)) in enumerate(sorted(my_storage.items())):
             particles[indices, i] = pfield
         self.field_data[field] = array_like_field(dd_first, particles, fd)
         if self.suppress_logging:
             mylog.setLevel(old_level)
     return self.field_data[field]
 def _get_data(self, field):
     """
     Get a field to include in the trajectory collection.

     The trajectory collection itself is a dict of 2D numpy arrays,
     with shape (num_indices, num_steps).  The result is cached in
     ``self.field_data``, so repeated calls are cheap.
     """
     if field not in self.field_data:
         if self.suppress_logging:
             # Temporarily raise the log level (40 == ERROR); restored
             # once the field has been generated.
             old_level = int(ytcfg.get("yt","loglevel"))
             mylog.setLevel(40)
         ds_first = self.data_series[0]
         dd_first = ds_first.all_data()
         fd = dd_first._determine_fields(field)[0]
         if field not in self.particle_fields:
             # Track which fields are particle fields so the cheap
             # per-particle path below can be taken for them.
             if self.data_series[0].field_info[fd].particle_type:
                 self.particle_fields.append(field)
         # One row per tracked particle, one column per output; entries
         # never written below remain NaN.
         particles = np.empty((self.num_indices,self.num_steps))
         particles[:] = np.nan
         step = int(0)
         pbar = get_pbar("Generating field %s in trajectories." % (field), self.num_steps)
         my_storage={}
         for i, (sto, ds) in enumerate(self.data_series.piter(storage=my_storage)):
             # Masks/sorts were computed when the collection was built.
             mask = self.masks[i]
             sort = self.sorts[i]
             if field in self.particle_fields:
                 # This is easy... just get the particle fields
                 dd = ds.all_data()
                 pfield = dd[fd].ndarray_view()[mask][sort]
             else:
                 # This is hard... must loop over grids
                 pfield = np.zeros((self.num_indices))
                 x = self["particle_position_x"][:,step].ndarray_view()
                 y = self["particle_position_y"][:,step].ndarray_view()
                 z = self["particle_position_z"][:,step].ndarray_view()
                 # This will fail for non-grid index objects
                 particle_grids, particle_grid_inds = ds.index._find_points(x,y,z)
                 for grid in particle_grids:
                     # One layer of ghost zones so CIC sampling near the
                     # grid edge has valid neighbor cells.
                     cube = grid.retrieve_ghost_zones(1, [fd])
                     CICSample_3(x,y,z,pfield,
                                 self.num_indices,
                                 cube[fd],
                                 np.array(grid.LeftEdge).astype(np.float64),
                                 np.array(grid.ActiveDimensions).astype(np.int32),
                                 grid.dds[0])
             sto.result_id = ds.parameter_filename
             sto.result = (self.array_indices[i], pfield)
             pbar.update(step)
             step += 1
         pbar.finish()
         # Reassemble per-output results (keyed and therefore sorted by
         # filename) into the (num_indices, num_steps) array.
         for i, (fn, (indices, pfield)) in enumerate(sorted(my_storage.items())):
             particles[indices,i] = pfield
         self.field_data[field] = array_like_field(dd_first, particles, fd)
         if self.suppress_logging:
             mylog.setLevel(old_level)
     return self.field_data[field]
Example #13
0
    def _finalize_storage(self, fields, temp_storage):
        """Combine per-processor profile accumulators into final results.

        Gathers every processor's ``temp_storage`` via the main
        communicator, merges the weighted sums/means/variances bin by
        bin, and fills ``self.field_data`` (and, for weighted profiles,
        ``self.standard_deviation``).
        """
        # We use our main comm here
        # This also will fill _field_data

        for i, field in enumerate(fields):
            # q values are returned as q * weight but we want just q
            temp_storage.qvalues[..., i][temp_storage.used] /= \
              temp_storage.weight_values[temp_storage.used]

        # get the profile data from all procs
        all_store = {self.comm.rank: temp_storage}
        all_store = self.comm.par_combine_object(all_store,
                                                 "join",
                                                 datatype="dict")

        # Accumulators for the merged result, matching the per-proc shapes.
        all_val = np.zeros_like(temp_storage.values)
        all_mean = np.zeros_like(temp_storage.mvalues)
        all_std = np.zeros_like(temp_storage.qvalues)
        all_weight = np.zeros_like(temp_storage.weight_values)
        all_used = np.zeros_like(temp_storage.used, dtype="bool")

        # Combine the weighted mean and standard deviation from each processor.
        # For two samples with total weight, mean, and standard deviation
        # given by w, m, and s, their combined mean and variance are:
        # m12 = (m1 * w1 + m2 * w2) / (w1 + w2)
        # s12**2 = (w1 * (s1**2 + (m1 - m12)**2) +
        #           w2 * (s2**2 + (m2 - m12)**2)) / (w1 + w2)
        # Here, the mvalues are m and the qvalues are s**2.
        for p in sorted(all_store.keys()):
            all_used += all_store[p].used
            # Snapshot the running mean/weight before folding in proc p,
            # since both appear on the right-hand side below.
            old_mean = all_mean.copy()
            old_weight = all_weight.copy()
            all_weight[all_store[p].used] += \
              all_store[p].weight_values[all_store[p].used]
            for i, field in enumerate(fields):
                all_val[..., i][all_store[p].used] += \
                  all_store[p].values[..., i][all_store[p].used]

                all_mean[..., i][all_store[p].used] = \
                  (all_mean[..., i] * old_weight +
                   all_store[p].mvalues[..., i] *
                   all_store[p].weight_values)[all_store[p].used] / \
                   all_weight[all_store[p].used]

                all_std[..., i][all_store[p].used] = \
                  (old_weight * (all_std[..., i] +
                                 (old_mean[..., i] - all_mean[..., i])**2) +
                   all_store[p].weight_values *
                   (all_store[p].qvalues[..., i] +
                    (all_store[p].mvalues[..., i] -
                     all_mean[..., i])**2))[all_store[p].used] / \
                    all_weight[all_store[p].used]

        # all_std held variances up to this point; convert to std dev.
        all_std = np.sqrt(all_std)
        del all_store
        self.used = all_used
        blank = ~all_used

        self.weight = all_weight
        self.weight[blank] = 0.0

        for i, field in enumerate(fields):
            if self.weight_field is None:
                # Unweighted profile: report the plain sums.
                self.field_data[field] = \
                  array_like_field(self.data_source,
                                   all_val[...,i], field)
            else:
                # Weighted profile: report means plus standard deviations.
                self.field_data[field] = \
                  array_like_field(self.data_source,
                                   all_mean[...,i], field)
                self.standard_deviation[field] = \
                  array_like_field(self.data_source,
                                   all_std[...,i], field)
                self.standard_deviation[field][blank] = 0.0
            self.field_data[field][blank] = 0.0
            self.field_units[field] = self.field_data[field].units
            if isinstance(field, tuple):
                self.field_map[field[1]] = field
            else:
                self.field_map[field] = field
Example #14
0
    def __init__(self,
                 outputs,
                 indices,
                 fields=None,
                 suppress_logging=False,
                 ptype=None):
        """Build trajectory information for the given particle indices.

        Parameters
        ----------
        outputs : iterable of datasets (supports ``piter``)
            The time series to trace particles through.
        indices : array of int
            Particle indices to follow.  Sorted in place.
        fields : list, optional
            Extra fields to gather for each trajectory.
        suppress_logging : bool, optional
            If True, raise the log level while building trajectories.
        ptype : optional
            Particle type; stored on the instance for later use.
        """
        indices.sort()  # Just in case the caller wasn't careful
        self.field_data = YTFieldData()
        self.data_series = outputs
        self.masks = []
        self.sorts = []
        self.array_indices = []
        self.indices = indices
        self.num_indices = len(indices)
        self.num_steps = len(outputs)
        self.times = []
        self.suppress_logging = suppress_logging
        self.ptype = ptype

        if fields is None:
            fields = []
        # Deduplicate while preserving the caller's ordering.
        fields = list(OrderedDict.fromkeys(fields))

        if self.suppress_logging:
            # Temporarily raise the log level (40 == ERROR); restored below.
            old_level = int(ytcfg.get("yt", "loglevel"))
            mylog.setLevel(40)
        ds_first = self.data_series[0]
        dd_first = ds_first.all_data()

        # Resolve the full (type, name) field names once, up front.
        # NOTE(review): _get_full_field_name is defined elsewhere —
        # presumably it qualifies the name with self.ptype; confirm.
        fds = {}
        for field in (
                "particle_index",
                "particle_position_x",
                "particle_position_y",
                "particle_position_z",
        ):
            fds[field] = self._get_full_field_name(field)[0]

        # First pass over the series: for each output record which tracked
        # particles are present (mask), the permutation that sorts them by
        # id (sort), and their positions.
        my_storage = {}
        pbar = get_pbar("Constructing trajectory information",
                        len(self.data_series))
        for i, (sto,
                ds) in enumerate(self.data_series.piter(storage=my_storage)):
            dd = ds.all_data()
            newtags = dd[fds["particle_index"]].d.astype("int64")
            mask = np.in1d(newtags, indices, assume_unique=True)
            sort = np.argsort(newtags[mask])
            array_indices = np.where(
                np.in1d(indices, newtags, assume_unique=True))[0]
            self.array_indices.append(array_indices)
            self.masks.append(mask)
            self.sorts.append(sort)

            pfields = {}
            for field in (f"particle_position_{ax}" for ax in "xyz"):
                pfields[field] = dd[fds[field]].ndarray_view()[mask][sort]

            sto.result_id = ds.parameter_filename
            sto.result = (ds.current_time, array_indices, pfields)
            pbar.update(i)
        pbar.finish()

        if self.suppress_logging:
            mylog.setLevel(old_level)

        # Storage is keyed by filename; sorting gives chronological order.
        sorted_storage = sorted(my_storage.items())
        times = [time for _fn, (time, *_) in sorted_storage]
        self.times = self.data_series[0].arr(times, times[0].units)

        # Assemble the position trajectories; NaN marks entries that are
        # never written for a given step.
        self.particle_fields = []
        output_field = np.empty((self.num_indices, self.num_steps))
        output_field.fill(np.nan)
        for field in (f"particle_position_{ax}" for ax in "xyz"):
            for i, (_fn, (_time, indices,
                          pfields)) in enumerate(sorted_storage):
                try:
                    # This will fail if particles ids are
                    # duplicate. This is due to the fact that the rhs
                    # would then have a different shape as the lhs
                    output_field[indices, i] = pfields[field]
                except ValueError as e:
                    raise YTIllDefinedParticleData(
                        "This dataset contains duplicate particle indices!"
                    ) from e
            self.field_data[field] = array_like_field(dd_first,
                                                      output_field.copy(),
                                                      fds[field])
            self.particle_fields.append(field)

        # Instantiate fields the caller requested
        self._get_data(fields)
Example #15
0
    def _get_data(self, fields):
        """
        Get a list of fields to include in the trajectory collection.

        The trajectory collection itself is a dict of 2D numpy arrays,
        with shape (num_indices, num_steps).  Fields already present in
        ``self.field_data`` are skipped; missing particle fields are
        read directly, while mesh ("grid") fields are CIC-sampled at
        each particle position.
        """

        missing_fields = [
            field for field in fields if field not in self.field_data
        ]
        if not missing_fields:
            return

        if self.suppress_logging:
            # Temporarily raise the log level (40 == ERROR); restored below.
            old_level = int(ytcfg.get("yt", "loglevel"))
            mylog.setLevel(40)
        ds_first = self.data_series[0]
        dd_first = ds_first.all_data()

        # Resolve full field names and classify each missing field as a
        # particle field or a grid field.
        fds = {}
        new_particle_fields = []
        for field in missing_fields:
            fds[field] = dd_first._determine_fields(field)[0]
            if field not in self.particle_fields:
                if self.data_series[0]._get_field_info(
                        *fds[field]).particle_type:
                    self.particle_fields.append(field)
                    new_particle_fields.append(field)

        grid_fields = [
            field for field in missing_fields
            if field not in self.particle_fields
        ]
        step = int(0)
        pbar = get_pbar(
            f"Generating [{', '.join(missing_fields)}] fields in trajectories",
            self.num_steps,
        )
        my_storage = {}

        for i, (sto,
                ds) in enumerate(self.data_series.piter(storage=my_storage)):
            # Masks/sorts were computed when the collection was built.
            mask = self.masks[i]
            sort = self.sorts[i]
            pfield = {}

            if new_particle_fields:  # there's at least one particle field
                dd = ds.all_data()
                for field in new_particle_fields:
                    # This is easy... just get the particle fields
                    pfield[field] = dd[fds[field]].d[mask][sort]

            if grid_fields:
                # This is hard... must loop over grids
                for field in grid_fields:
                    pfield[field] = np.zeros(self.num_indices)
                x = self["particle_position_x"][:, step].d
                y = self["particle_position_y"][:, step].d
                z = self["particle_position_z"][:, step].d
                particle_grids, particle_grid_inds = ds.index._find_points(
                    x, y, z)

                # This will fail for non-grid index objects
                for grid in particle_grids:
                    # One layer of ghost zones so CIC sampling near the
                    # grid edge has valid neighbor cells.
                    cube = grid.retrieve_ghost_zones(1, grid_fields)
                    for field in grid_fields:
                        CICSample_3(
                            x,
                            y,
                            z,
                            pfield[field],
                            self.num_indices,
                            cube[fds[field]],
                            np.array(grid.LeftEdge).astype(np.float64),
                            np.array(grid.ActiveDimensions).astype(np.int32),
                            grid.dds[0],
                        )
            sto.result_id = ds.parameter_filename
            sto.result = (self.array_indices[i], pfield)
            pbar.update(step)
            step += 1
        pbar.finish()

        # Reassemble per-output results (keyed and therefore sorted by
        # filename) into (num_indices, num_steps) arrays; entries never
        # written remain NaN.
        output_field = np.empty((self.num_indices, self.num_steps))
        output_field.fill(np.nan)
        for field in missing_fields:
            fd = fds[field]
            for i, (_fn, (indices,
                          pfield)) in enumerate(sorted(my_storage.items())):
                output_field[indices, i] = pfield[field]
            self.field_data[field] = array_like_field(dd_first,
                                                      output_field.copy(), fd)

        if self.suppress_logging:
            mylog.setLevel(old_level)
Example #16
0
    def __init__(self, outputs, indices, fields=None, suppress_logging=False):
        """Build trajectory information for the given particle indices.

        Fixes over the previous revision: the ``times`` list is built
        with a comprehension instead of an append loop, the pointless
        ``[time for time in times]`` copy is dropped, and the storage
        loop no longer shadows the ``indices`` parameter.

        Parameters
        ----------
        outputs : iterable of datasets (supports ``piter``)
            The time series to trace particles through.
        indices : array of int
            Particle indices to follow.  Sorted in place.
        fields : list, optional
            Extra fields to gather for each trajectory.
        suppress_logging : bool, optional
            If True, raise the log level while building trajectories.
        """
        indices.sort()  # Just in case the caller wasn't careful
        self.field_data = YTFieldData()
        self.data_series = outputs
        self.masks = []
        self.sorts = []
        self.array_indices = []
        self.indices = indices
        self.num_indices = len(indices)
        self.num_steps = len(outputs)
        self.times = []
        self.suppress_logging = suppress_logging

        if fields is None:
            fields = []
        # Deduplicate while preserving the caller's ordering.
        fields = list(OrderedDict.fromkeys(fields))

        if self.suppress_logging:
            # Temporarily raise the log level (40 == ERROR); restored below.
            old_level = int(ytcfg.get("yt","loglevel"))
            mylog.setLevel(40)

        fds = {}
        ds_first = self.data_series[0]
        dd_first = ds_first.all_data()
        idx_field = dd_first._determine_fields("particle_index")[0]
        for field in ("particle_position_%s" % ax for ax in "xyz"):
            fds[field] = dd_first._determine_fields(field)[0]

        # First pass over the series: for each output record which tracked
        # particles are present (mask), the permutation that sorts them by
        # id (sort), and their positions.
        my_storage = {}
        pbar = get_pbar("Constructing trajectory information",
                        len(self.data_series))
        for i, (sto, ds) in enumerate(
                self.data_series.piter(storage=my_storage)):
            dd = ds.all_data()
            newtags = dd[idx_field].d.astype("int64")
            mask = np.in1d(newtags, indices, assume_unique=True)
            sort = np.argsort(newtags[mask])
            array_indices = np.where(
                np.in1d(indices, newtags, assume_unique=True))[0]
            self.array_indices.append(array_indices)
            self.masks.append(mask)
            self.sorts.append(sort)

            pfields = {}
            for field in ("particle_position_%s" % ax for ax in "xyz"):
                pfields[field] = dd[fds[field]].ndarray_view()[mask][sort]

            sto.result_id = ds.parameter_filename
            sto.result = (ds.current_time, array_indices, pfields)
            pbar.update(i)
        pbar.finish()

        if self.suppress_logging:
            mylog.setLevel(old_level)

        # Storage is keyed by filename; sorting gives chronological order.
        sorted_storage = sorted(my_storage.items())
        times = [time for _fn, (time, _inds, _pf) in sorted_storage]
        self.times = self.data_series[0].arr(times, times[0].units)

        # Assemble the position trajectories; NaN marks entries that are
        # never written for a given step.
        self.particle_fields = []
        output_field = np.empty((self.num_indices, self.num_steps))
        output_field.fill(np.nan)
        for field in ("particle_position_%s" % ax for ax in "xyz"):
            for i, (_fn, (_time, step_indices, pfields)) in \
                    enumerate(sorted_storage):
                output_field[step_indices, i] = pfields[field]
            self.field_data[field] = array_like_field(
                dd_first, output_field.copy(), fds[field])
            self.particle_fields.append(field)

        # Instantiate fields the caller requested
        self._get_data(fields)
Example #17
0
def test_array_like_field():
    """A scalar wrapped by array_like_field carries the field's units."""
    ds = fake_random_ds(4, particles=64)
    ad = ds.all_data()
    expected_units = ad["particle_mass"].units
    wrapped = array_like_field(ad, 1., ("all", "particle_mass"))
    assert expected_units == wrapped.units