Example #1
    def _ortho_pixelize(self, data_source, field, bounds, size, antialias,
                        dimension, periodic):

        period = self.period[:2].copy()
        period[0] = self.period[self.x_axis[dimension]]
        period[1] = self.period[self.y_axis[dimension]]
        if hasattr(period, "in_units"):
            period = period.in_units("code_length").d

        # For a radial axis, px will correspond to longitude and py will
        # correspond to latitude.
        px = data_source["px"]
        pdx = data_source["pdx"]
        py = data_source["py"]
        pdy = data_source["pdy"]
        buff = np.zeros((size[1], size[0]), dtype="f8")
        pixelize_cartesian(
            buff,
            px,
            py,
            pdx,
            pdy,
            data_source[field],
            bounds,
            int(antialias),
            period,
            int(periodic),
        )
        return buff
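
All of these variants hand the actual work to yt's Cython pixelizer. As a rough, self-contained illustration of the call pattern shared by Examples #1, #2, #4 and #6 (the caller allocates a (ny, nx) buffer, the routine fills it in place), here is a minimal sketch with one synthetic cell. It assumes a yt version whose pixelize_cartesian takes the output buffer as its first positional argument, exactly as the snippets above do, and that plain float64 NumPy arrays are accepted; the values are made up.

import numpy as np
from yt.utilities.lib.pixelization_routines import pixelize_cartesian

# one square patch centred at (0.5, 0.5), with 0.25 taken here as the half-width,
# carrying the value 3.0
px = np.array([0.5])
py = np.array([0.5])
pdx = np.array([0.25])
pdy = np.array([0.25])
vals = np.array([3.0])

size = (200, 100)                    # requested image size (nx, ny)
bounds = (0.0, 1.0, 0.0, 1.0)        # xmin, xmax, ymin, ymax
period = np.array([1.0, 1.0])        # per-axis period, already unitless

buff = np.zeros((size[1], size[0]), dtype="f8")   # note the (ny, nx) ordering
pixelize_cartesian(buff, px, py, pdx, pdy, vals, bounds,
                   1,         # antialias
                   period,
                   0)         # check_period: 0, the toy domain is not periodic
print(buff.max())              # ~3.0 on pixels covered by the patch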
Example #2
    def _ortho_pixelize(self, data_source, field, bounds, size, antialias, dim,
                        periodic):
        # We should be using fcoords
        period = self.period[:2].copy()  # dummy here
        period[0] = self.period[self.x_axis[dim]]
        period[1] = self.period[self.y_axis[dim]]
        if hasattr(period, 'in_units'):
            period = period.in_units("code_length").d

        buff = np.zeros((size[1], size[0]), dtype="f8")

        finfo = self.ds._get_field_info(field)
        nodal_flag = finfo.nodal_flag
        if np.any(nodal_flag):
            nodal_data = get_nodal_data(data_source, field)
            coord = data_source.coord.d
            pixelize_cartesian_nodal(buff, data_source['px'],
                                     data_source['py'], data_source['pz'],
                                     data_source['pdx'], data_source['pdy'],
                                     data_source['pdz'], nodal_data, coord,
                                     bounds, int(antialias), period,
                                     int(periodic))
        else:
            pixelize_cartesian(buff, data_source['px'], data_source['py'],
                               data_source['pdx'],
                               data_source['pdy'], data_source[field], bounds,
                               int(antialias), period, int(periodic))
        return buff
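
One detail every variant shares: period is reduced to a bare float64 array in code units before it reaches the compiled routine. A minimal sketch of that conversion using unyt (the unit package modern yt builds on); "code_length" only exists on a dataset's unit registry, so plain "cm" stands in here:

import numpy as np
from unyt import unyt_array

period = unyt_array([1.0, 1.0], "cm")     # stand-in for self.period[:2]
if hasattr(period, "in_units"):           # a plain ndarray would skip this branch
    period = period.in_units("cm").d      # .d strips the units -> float64 ndarray
print(type(period), period.dtype)         # <class 'numpy.ndarray'> float64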
Example #3
    def _ortho_pixelize(self, data_source, field, bounds, size, antialias, dim,
                        periodic):
        period = self.period[:2].copy()  # dummy here
        period[0] = self.period[self.x_axis[dim]]
        period[1] = self.period[self.y_axis[dim]]
        if hasattr(period, 'in_units'):
            period = period.in_units("code_length").d
        buff = np.zeros(size, dtype="f8")
        pixelize_cartesian(buff, data_source['px'], data_source['py'],
                           data_source['pdx'], data_source['pdy'],
                           data_source[field], bounds, int(antialias), period,
                           int(periodic))
        return buff
Example #4
    def _ortho_pixelize(self, data_source, field, bounds, size, antialias, dim,
                        periodic):
        period = self.period[:2].copy()  # dummy here
        period[0] = self.period[self.x_axis[dim]]
        period[1] = self.period[self.y_axis[dim]]
        if hasattr(period, "in_units"):
            period = period.in_units("code_length").d
        buff = np.full(size, np.nan, dtype="float64")
        pixelize_cartesian(
            buff,
            data_source["px"],
            data_source["py"],
            data_source["pdx"],
            data_source["pdy"],
            data_source[field],
            bounds,
            int(antialias),
            period,
            int(periodic),
        )
        return buff
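
Example #4 is the only variant that seeds the buffer with NaN instead of zeros, so pixels that no patch overlaps stay NaN rather than silently reading as 0.0. A minimal sketch of how such a buffer can be masked afterwards (plain NumPy; the array here is illustrative, not yt output):

import numpy as np

buff = np.full((4, 4), np.nan, dtype="float64")
buff[1:3, 1:3] = 2.0                   # pretend the pixelizer filled these pixels
masked = np.ma.masked_invalid(buff)    # NaN pixels become masked entries
print(masked.mean())                   # 2.0 -- untouched pixels are ignored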
Example #5
    def _ortho_pixelize(self, data_source, field, bounds, size, antialias, dim,
                        periodic):
        # We should be using fcoords
        period = self.period[:2].copy()  # dummy here
        period[0] = self.period[self.x_axis[dim]]
        period[1] = self.period[self.y_axis[dim]]
        if hasattr(period, 'in_units'):
            period = period.in_units("code_length").d
        buff = pixelize_cartesian(data_source['px'], data_source['py'],
                                  data_source['pdx'], data_source['pdy'],
                                  data_source[field], size[0], size[1], bounds,
                                  int(antialias), period,
                                  int(periodic)).transpose()
        return buff
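
Example #5 calls a different flavour of pixelize_cartesian: the image dimensions size[0] and size[1] are passed in, the routine allocates and returns the image, and the caller transposes it, whereas the other examples preallocate a (ny, nx) buffer that is filled in place (presumably an older and a newer signature of the same routine). Which snippet works depends on the installed yt version. The trailing .transpose() is only a layout swap, as in this toy NumPy line (assuming, for illustration, that the returned image is (nx, ny)):

import numpy as np

img = np.zeros((200, 100))                    # hypothetical (nx, ny) return value
assert img.transpose().shape == (100, 200)    # the (ny, nx) layout used elsewhere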
Example #6
    def _ortho_pixelize(self, data_source, field, bounds, size, antialias, dim,
                        periodic):
        from yt.data_objects.construction_data_containers import YTParticleProj
        from yt.data_objects.selection_objects.slices import YTSlice
        from yt.frontends.sph.data_structures import ParticleDataset
        from yt.frontends.stream.data_structures import StreamParticlesDataset

        # We should be using fcoords
        field = data_source._determine_fields(field)[0]
        finfo = data_source.ds.field_info[field]
        period = self.period[:2].copy()  # dummy here
        period[0] = self.period[self.x_axis[dim]]
        period[1] = self.period[self.y_axis[dim]]
        if hasattr(period, "in_units"):
            period = period.in_units("code_length").d

        buff = np.zeros((size[1], size[0]), dtype="f8")
        particle_datasets = (ParticleDataset, StreamParticlesDataset)
        is_sph_field = finfo.is_sph_field

        finfo = self.ds._get_field_info(field)
        if np.any(finfo.nodal_flag):
            nodal_data = get_nodal_data(data_source, field)
            coord = data_source.coord.d
            pixelize_cartesian_nodal(
                buff,
                data_source["px"],
                data_source["py"],
                data_source["pz"],
                data_source["pdx"],
                data_source["pdy"],
                data_source["pdz"],
                nodal_data,
                coord,
                bounds,
                int(antialias),
                period,
                int(periodic),
            )
        elif isinstance(data_source.ds, particle_datasets) and is_sph_field:
            ptype = field[0]
            if ptype == "gas":
                ptype = data_source.ds._sph_ptypes[0]
            px_name = self.axis_name[self.x_axis[dim]]
            py_name = self.axis_name[self.y_axis[dim]]
            ounits = data_source.ds.field_info[field].output_units
            bnds = data_source.ds.arr(bounds, "code_length").tolist()
            if isinstance(data_source, YTParticleProj):
                weight = data_source.weight_field
                le, re = data_source.data_source.get_bbox()
                xa = self.x_axis[dim]
                ya = self.y_axis[dim]
                # If we're not periodic, we need to clip to the boundary edges
                # or we get errors about extending off the edge of the region.
                if not self.ds.periodicity[xa]:
                    le[xa] = max(bounds[0], self.ds.domain_left_edge[xa])
                    re[xa] = min(bounds[1], self.ds.domain_right_edge[xa])
                else:
                    le[xa] = bounds[0]
                    re[xa] = bounds[1]
                if not self.ds.periodicity[ya]:
                    le[ya] = max(bounds[2], self.ds.domain_left_edge[ya])
                    re[ya] = min(bounds[3], self.ds.domain_right_edge[ya])
                else:
                    le[ya] = bounds[2]
                    re[ya] = bounds[3]
                # We actually need to clip these
                proj_reg = data_source.ds.region(
                    left_edge=le,
                    right_edge=re,
                    center=data_source.center,
                    data_source=data_source.data_source,
                )
                proj_reg.set_field_parameter("axis", data_source.axis)
                buff = np.zeros(size, dtype="float64")
                if weight is None:
                    for chunk in proj_reg.chunks([], "io"):
                        data_source._initialize_projected_units([field], chunk)
                        pixelize_sph_kernel_projection(
                            buff,
                            chunk[ptype, px_name].to("code_length"),
                            chunk[ptype, py_name].to("code_length"),
                            chunk[ptype, "smoothing_length"].to("code_length"),
                            chunk[ptype, "mass"].to("code_mass"),
                            chunk[ptype, "density"].to("code_density"),
                            chunk[field].in_units(ounits),
                            bnds,
                            check_period=int(periodic),
                            period=period,
                        )
                    # We use code length here, but to get the path length right
                    # we need to multiply by the conversion factor between
                    # code length and the unit system's length unit
                    default_path_length_unit = data_source.ds.unit_system[
                        "length"]
                    dl_conv = data_source.ds.quan(
                        1.0, "code_length").to(default_path_length_unit)
                    buff *= dl_conv.v
                # if there is a weight field, take two projections:
                # one of field*weight, the other of just weight, and divide them
                else:
                    weight_buff = np.zeros(size, dtype="float64")
                    buff = np.zeros(size, dtype="float64")
                    wounits = data_source.ds.field_info[weight].output_units
                    for chunk in proj_reg.chunks([], "io"):
                        data_source._initialize_projected_units([field], chunk)
                        data_source._initialize_projected_units([weight],
                                                                chunk)
                        pixelize_sph_kernel_projection(
                            buff,
                            chunk[ptype, px_name].to("code_length"),
                            chunk[ptype, py_name].to("code_length"),
                            chunk[ptype, "smoothing_length"].to("code_length"),
                            chunk[ptype, "mass"].to("code_mass"),
                            chunk[ptype, "density"].to("code_density"),
                            chunk[field].in_units(ounits),
                            bnds,
                            check_period=int(periodic),
                            period=period,
                            weight_field=chunk[weight].in_units(wounits),
                        )
                    mylog.info(
                        "Making a fixed resolution buffer of (%s) %d by %d",
                        weight,
                        size[0],
                        size[1],
                    )
                    for chunk in proj_reg.chunks([], "io"):
                        data_source._initialize_projected_units([weight],
                                                                chunk)
                        pixelize_sph_kernel_projection(
                            weight_buff,
                            chunk[ptype, px_name].to("code_length"),
                            chunk[ptype, py_name].to("code_length"),
                            chunk[ptype, "smoothing_length"].to("code_length"),
                            chunk[ptype, "mass"].to("code_mass"),
                            chunk[ptype, "density"].to("code_density"),
                            chunk[weight].in_units(wounits),
                            bnds,
                            check_period=int(periodic),
                            period=period,
                        )
                    normalization_2d_utility(buff, weight_buff)
            elif isinstance(data_source, YTSlice):
                smoothing_style = getattr(self.ds, "sph_smoothing_style",
                                          "scatter")
                normalize = getattr(self.ds, "use_sph_normalization", True)

                if smoothing_style == "scatter":
                    buff = np.zeros(size, dtype="float64")
                    if normalize:
                        buff_den = np.zeros(size, dtype="float64")

                    for chunk in data_source.chunks([], "io"):
                        pixelize_sph_kernel_slice(
                            buff,
                            chunk[ptype, px_name].to("code_length"),
                            chunk[ptype, py_name].to("code_length"),
                            chunk[ptype, "smoothing_length"].to("code_length"),
                            chunk[ptype, "mass"].to("code_mass"),
                            chunk[ptype, "density"].to("code_density"),
                            chunk[field].in_units(ounits),
                            bnds,
                            check_period=int(periodic),
                            period=period,
                        )
                        if normalize:
                            pixelize_sph_kernel_slice(
                                buff_den,
                                chunk[ptype, px_name].to("code_length"),
                                chunk[ptype, py_name].to("code_length"),
                                chunk[ptype,
                                      "smoothing_length"].to("code_length"),
                                chunk[ptype, "mass"].to("code_mass"),
                                chunk[ptype, "density"].to("code_density"),
                                np.ones(chunk[ptype, "density"].shape[0]),
                                bnds,
                                check_period=int(periodic),
                                period=period,
                            )

                    if normalize:
                        normalization_2d_utility(buff, buff_den)

                if smoothing_style == "gather":
                    # Here we find out which axis are going to be the "x" and
                    # "y" axis for the actual visualisation and then we set the
                    # buffer size and bounds to match. The z axis of the plot
                    # is the axis we slice over and the buffer will be of size 1
                    # in that dimension
                    x, y, z = self.x_axis[dim], self.y_axis[dim], dim

                    buff_size = np.zeros(3, dtype="int64")
                    buff_size[x] = size[0]
                    buff_size[y] = size[1]
                    buff_size[z] = 1

                    buff_bounds = np.zeros(6, dtype="float64")
                    buff_bounds[2 * x:2 * x + 2] = bounds[0:2]
                    buff_bounds[2 * y:2 * y + 2] = bounds[2:4]
                    buff_bounds[2 * z] = data_source.coord
                    buff_bounds[2 * z + 1] = data_source.coord

                    # then we do the interpolation
                    buff_temp = np.zeros(buff_size, dtype="float64")

                    fields_to_get = [
                        "particle_position",
                        "density",
                        "mass",
                        "smoothing_length",
                        field[1],
                    ]
                    all_fields = all_data(self.ds,
                                          ptype,
                                          fields_to_get,
                                          kdtree=True)

                    num_neighbors = getattr(self.ds, "num_neighbors", 32)
                    interpolate_sph_grid_gather(
                        buff_temp,
                        all_fields["particle_position"].to("code_length"),
                        buff_bounds,
                        all_fields["smoothing_length"].to("code_length"),
                        all_fields["mass"].to("code_mass"),
                        all_fields["density"].to("code_density"),
                        all_fields[field[1]].in_units(ounits),
                        self.ds.index.kdtree,
                        num_neigh=num_neighbors,
                        use_normalization=normalize,
                    )

                    # We swap the axes back so the axis which was sliced over
                    # is the last axis, as this is the "z" axis of the plots.
                    if z != 2:
                        buff_temp = buff_temp.swapaxes(2, z)
                        if x == 2:
                            x = z
                        else:
                            y = z

                    buff = buff_temp[:, :, 0]

                    # Then we just transpose if the buffer x and y are
                    # different than the plot x and y
                    if y < x:
                        buff = buff.transpose()
            else:
                raise NotImplementedError(
                    "A pixelization routine has not been implemented for %s "
                    "data objects" % str(type(data_source)))
            buff = buff.transpose()
        else:
            pixelize_cartesian(
                buff,
                data_source["px"],
                data_source["py"],
                data_source["pdx"],
                data_source["pdy"],
                data_source[field],
                bounds,
                int(antialias),
                period,
                int(periodic),
            )
        return buff
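
The weight-field branch of Example #6 boils down to a ratio of two projected images: per pixel, the projection of field times weight divided by the projection of the weight alone, which is what the two pixelize_sph_kernel_projection passes followed by normalization_2d_utility compute. A minimal NumPy sketch of that final division (illustrative arrays and function name; yt's normalization_2d_utility does this step in compiled code):

import numpy as np

def normalize_weighted(field_buff, weight_buff):
    # divide the weighted-field image by the weight image, pixel by pixel
    out = np.zeros_like(field_buff)
    covered = weight_buff != 0.0                 # skip pixels no particle touched
    out[covered] = field_buff[covered] / weight_buff[covered]
    return out

field_buff = np.array([[6.0, 0.0], [2.0, 4.0]])   # projected field*weight (toy values)
weight_buff = np.array([[2.0, 0.0], [1.0, 4.0]])  # projected weight (toy values)
print(normalize_weighted(field_buff, weight_buff))  # prints [[3. 0.] [2. 1.]] row by row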