def _calculate_simulation_bounds(self):
        """
        Figure out the starting and stopping time and redshift for the simulation.
        """

        if 'StopCycle' in self.parameters:
            self.stop_cycle = self.parameters['StopCycle']

        # Convert initial/final redshifts to times.
        if self.cosmological_simulation:
            self.initial_time = self.cosmology.t_from_z(self.initial_redshift)
            self.initial_time.units.registry = self.unit_registry
            self.final_time = self.cosmology.t_from_z(self.final_redshift)
            self.final_time.units.registry = self.unit_registry

        # If not a cosmology simulation, figure out the stopping criteria.
        else:
            if 'InitialTime' in self.parameters:
                self.initial_time = self.quan(self.parameters['InitialTime'], "code_time")
            else:
                self.initial_time = self.quan(0., "code_time")

            if 'StopTime' in self.parameters:
                self.final_time = self.quan(self.parameters['StopTime'], "code_time")
            else:
                self.final_time = None
            if not ('StopTime' in self.parameters or
                    'StopCycle' in self.parameters):
                raise NoStoppingCondition(self.parameter_filename)
            if self.final_time is None:
                mylog.warn(
                    "Simulation %s has no stop time set, stopping condition " +
                    "will be based only on cycles.",
                    self.parameter_filename)
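
For reference, a minimal sketch of the redshift-to-time conversion used in the cosmological branch above, assuming yt's Cosmology calculator and made-up parameters (the simulation object builds its own Cosmology from the parameter file):

from yt.utilities.cosmology import Cosmology

# Made-up cosmological parameters, for illustration only.
co = Cosmology(hubble_constant=0.7, omega_matter=0.3, omega_lambda=0.7)
initial_time = co.t_from_z(99.0)  # cosmic time at the initial redshift
final_time = co.t_from_z(0.0)     # cosmic time at the final redshift
print(initial_time.in_units("Gyr"), final_time.in_units("Gyr"))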
Example #3
    def __init__(self,
                 base_region,
                 ds,
                 oct_handler,
                 over_refine_factor=1,
                 num_ghost_zones=0):
        self._over_refine_factor = over_refine_factor
        self._num_zones = 1 << over_refine_factor
        self.field_data = YTFieldData()
        self.field_parameters = {}
        self.ds = ds
        self.oct_handler = oct_handler
        self._last_mask = None
        self._last_selector_id = None
        self._current_particle_type = "io"
        self._current_fluid_type = self.ds.default_fluid_type
        self.base_region = base_region
        self.base_selector = base_region.selector

        self._num_ghost_zones = num_ghost_zones

        if num_ghost_zones > 0:
            if not all(ds.periodicity):
                mylog.warn(
                    "Ghost zones will wrongly assume the domain to be periodic."
                )
            base_grid = StreamOctreeSubset(base_region, ds, oct_handler,
                                           over_refine_factor)
            self._base_grid = base_grid
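
The expression 1 << over_refine_factor above gives the number of zones along each edge of an oct; a quick sketch of how it scales:

# Each increment of over_refine_factor doubles the zone count per oct edge.
for over_refine_factor in range(4):
    num_zones = 1 << over_refine_factor
    print(over_refine_factor, num_zones)  # 0 -> 1, 1 -> 2, 2 -> 4, 3 -> 8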
    def _calculate_cycle_outputs(self):
        """
        Calculate cycle outputs.
        """

        mylog.warn(
            'Calculating cycle outputs.  Dataset times will be unavailable.')

        if self.stop_cycle is None or \
           'CycleSkipDataDump' not in self.parameters or \
           self.parameters['CycleSkipDataDump'] <= 0.0:
            return []

        self.all_time_outputs = []
        index = 0
        for cycle in range(0, self.stop_cycle + 1,
                           self.parameters['CycleSkipDataDump']):
            filename = os.path.join(
                self.parameters['GlobalDir'],
                "%s%04d" % (self.parameters['DataDumpDir'], index),
                "%s%04d" % (self.parameters['DataDumpName'], index))

            output = {'index': index, 'filename': filename, 'cycle': cycle}
            self.all_time_outputs.append(output)
            index += 1
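
A standalone sketch of the filenames this loop produces, using hypothetical values for GlobalDir, DataDumpDir, DataDumpName, and CycleSkipDataDump (in a real run these come from the Enzo parameter file):

import os

# Hypothetical parameter values, for illustration only.
parameters = {'GlobalDir': '/data/run', 'DataDumpDir': 'DD',
              'DataDumpName': 'DD', 'CycleSkipDataDump': 10}
stop_cycle = 30

outputs = []
for index, cycle in enumerate(range(0, stop_cycle + 1,
                                    parameters['CycleSkipDataDump'])):
    filename = os.path.join(
        parameters['GlobalDir'],
        "%s%04d" % (parameters['DataDumpDir'], index),
        "%s%04d" % (parameters['DataDumpName'], index))
    outputs.append({'index': index, 'filename': filename, 'cycle': cycle})
# outputs[0]['filename'] == '/data/run/DD0000/DD0000' (cycle 0), and so on.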
Example #5
    def print_key_parameters(self):
        for a in ["current_time", "domain_dimensions", "domain_left_edge",
                  "domain_right_edge", "cosmological_simulation"]:
            v = getattr(self, a)
            if v is not None:
                mylog.info("Parameters: %-25s = %s", a, v)
        if getattr(self, "cosmological_simulation", False):
            for a in ["current_redshift", "omega_lambda", "omega_matter",
                      "hubble_constant"]:
                v = getattr(self, a)
                if v is not None:
                    mylog.info("Parameters: %-25s = %s", a, v)
        mylog.warn(
            "Geometric data selection not available for this dataset type.")
Example #6
def enable_plugins():
    """Forces the plugins file to be parsed.

    This plugin file is a means of creating custom fields, quantities,
    data objects, colormaps, and other code classes and objects to be used
    in yt scripts without modifying the yt source directly.

    The file must be located at ``$HOME/.config/yt/my_plugins.py``.

    Warning: when you use this function, your script will only be reproducible
    if you also provide the ``my_plugins.py`` file.
    """
    import os

    import yt
    from yt.fields.my_plugin_fields import my_plugins_fields
    from yt.config import ytcfg, CONFIG_DIR
    my_plugin_name = ytcfg.get("yt", "pluginfilename")

    # Look for the plugin file in the following order: as an absolute path,
    # in CONFIG_DIR, then in an obsolete config dir.
    _fn = None
    old_config_dir = os.path.join(os.path.expanduser('~'), '.yt')
    for base_prefix in ('', CONFIG_DIR, old_config_dir):
        if os.path.isfile(os.path.join(base_prefix, my_plugin_name)):
            _fn = os.path.join(base_prefix, my_plugin_name)
            break

    if _fn is not None and os.path.isfile(_fn):
        if _fn.startswith(old_config_dir):
            mylog.warn(
                'Your plugin file is located in a deprecated directory. '
                'Please move it from %s to %s',
                os.path.join(old_config_dir, my_plugin_name),
                os.path.join(CONFIG_DIR, my_plugin_name))
        mylog.info("Loading plugins from %s", _fn)
        ytdict = yt.__dict__
        execdict = ytdict.copy()
        execdict['add_field'] = my_plugins_fields.add_field
        localdict = {}
        with open(_fn) as f:
            code = compile(f.read(), _fn, 'exec')
            exec(code, execdict, localdict)
        ytnamespace = list(ytdict.keys())
        for k in localdict.keys():
            if k not in ytnamespace:
                if callable(localdict[k]):
                    setattr(yt, k, localdict[k])
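
A minimal usage sketch: the plugin file below is hypothetical, but note that enable_plugins injects add_field into the plugin's namespace, so the file can call it without an import.

# Hypothetical $HOME/.config/yt/my_plugins.py -- add_field is injected by
# enable_plugins, so it needs no import here.
def _density_squared(field, data):
    return data["gas", "density"]**2

add_field(("gas", "density_squared"), function=_density_squared,
          sampling_type="cell", units="g**2/cm**6")

Then, in a script:

import yt
yt.enable_plugins()  # the new field is now available on loaded datasets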
Example #8
    def make_light_ray(self,
                       seed=None,
                       periodic=True,
                       left_edge=None,
                       right_edge=None,
                       min_level=None,
                       start_position=None,
                       end_position=None,
                       trajectory=None,
                       fields=None,
                       setup_function=None,
                       solution_filename=None,
                       data_filename=None,
                       get_los_velocity=None,
                       use_peculiar_velocity=True,
                       redshift=None,
                       field_parameters=None,
                       njobs=-1):
        """
        Actually generate the LightRay by traversing the desired dataset.

        A light ray consists of a list of field values for cells
        intersected by the ray and the path length of the ray through
        those cells. Light ray data can be written out to an HDF5 file.

        **Parameters**

        :seed: optional, int

            Seed for the random number generator.
            Default: None.

        :periodic: optional, bool

            If True, ray trajectories will make use of periodic
            boundaries.  If False, ray trajectories will not be
            periodic.
            Default : True.

        :left_edge: optional, iterable of floats or YTArray

            The left corner of the region in which rays are to be
            generated.  If None, the left edge will be that of the
            domain.  If specified without units, it is assumed to
            be in code units.
            Default: None.

        :right_edge: optional, iterable of floats or YTArray

            The right corner of the region in which rays are to be
            generated.  If None, the right edge will be that of the
            domain.  If specified without units, it is assumed to
            be in code units.
            Default: None.

        :min_level: optional, int

            The minimum refinement level of the spatial region in which
            the ray passes.  This can be used with zoom-in simulations
            where the high resolution region does not keep a constant
            geometry.
            Default: None.

        :start_position: optional, iterable of floats or YTArray.

            Used only if creating a light ray from a single dataset.
            The coordinates of the starting position of the ray.
            If specified without units, it is assumed to be in code units.
            Default: None.

        :end_position: optional, iterable of floats or YTArray.

            Used only if creating a light ray from a single dataset.
            The coordinates of the ending position of the ray.
            If specified without units, it is assumed to be in code units.
            Default: None.

        :trajectory: optional, list of floats

            Used only if creating a light ray from a single dataset.
            The (r, theta, phi) direction of the light ray.  Use either
            end_position or trajectory, not both.
            Default: None.

        :fields: optional, list

            A list of fields for which to get data.
            Default: None.

        :setup_function: optional, callable, accepts a ds

            This function will be called on each dataset that is loaded
            to create the light ray.  For, example, this can be used to
            add new derived fields.
            Default: None.

        :solution_filename: optional, string

            Path to a text file where the trajectory of each
            subray is written out.
            Default: None.

        :data_filename: optional, string

            Path to output file for ray data.
            Default: None.

        :use_peculiar_velocity: optional, bool

            If True, the peculiar velocity along the ray will be sampled for
            calculating the effective redshift combining the cosmological
            redshift and the doppler redshift.
            Default: True.

        :redshift: optional, float

            Used with light rays made from single datasets to specify a
            starting redshift for the ray.  If not used, the starting
            redshift will be 0 for a non-cosmological dataset and
            the dataset redshift for a cosmological dataset.
            Default: None.

        :field_parameters: optional, dict

            Used to set field parameters in light rays. For example,
            if the 'bulk_velocity' field parameter is set, the relative
            velocities used to calculate peculiar velocity will be adjusted
            accordingly.
            Default: None.

        :njobs: optional, int

            The number of parallel jobs over which the segments will
            be split.  Choose -1 for one processor per segment.
            Default: -1.

        **Examples**

        Make a light ray from multiple datasets:

        >>> import yt
        >>> from trident import LightRay
        >>> my_ray = LightRay("enzo_tiny_cosmology/32Mpc_32.enzo", "Enzo",
        ...                   0., 0.1, time_data=False)
        ...
        >>> my_ray.make_light_ray(seed=12345,
        ...                       solution_filename="solution.txt",
        ...                       data_filename="my_ray.h5",
        ...                       fields=["temperature", "density"],
        ...                       use_peculiar_velocity=True)

        Make a light ray from a single dataset:

        >>> import yt
        >>> from trident import LightRay
        >>> my_ray = LightRay("IsolatedGalaxy/galaxy0030/galaxy0030")
        ...
        >>> my_ray.make_light_ray(start_position=[0., 0., 0.],
        ...                       end_position=[1., 1., 1.],
        ...                       solution_filename="solution.txt",
        ...                       data_filename="my_ray.h5",
        ...                       fields=["temperature", "density"],
        ...                       use_peculiar_velocity=True)

        """

        if self.simulation_type is None:
            domain = self.ds
        else:
            domain = self.simulation

        assumed_units = "code_length"
        if left_edge is None:
            left_edge = domain.domain_left_edge
        elif not hasattr(left_edge, 'units'):
            left_edge = domain.arr(left_edge, assumed_units)
        left_edge.convert_to_units('unitary')

        if right_edge is None:
            right_edge = domain.domain_right_edge
        elif not hasattr(right_edge, 'units'):
            right_edge = domain.arr(right_edge, assumed_units)
        right_edge.convert_to_units('unitary')

        if start_position is not None:
            if not hasattr(start_position, 'units'):
                start_position = self.ds.arr(start_position, assumed_units)
            start_position.convert_to_units('unitary')

        if end_position is not None:
            if not hasattr(end_position, 'units'):
                end_position = self.ds.arr(end_position, assumed_units)
            end_position.convert_to_units('unitary')

        if get_los_velocity is not None:
            use_peculiar_velocity = get_los_velocity
            mylog.warn("'get_los_velocity' kwarg is deprecated. " + \
                       "Use 'use_peculiar_velocity' instead.")

        # Calculate solution.
        self._calculate_light_ray_solution(seed=seed,
                                           left_edge=left_edge,
                                           right_edge=right_edge,
                                           min_level=min_level,
                                           periodic=periodic,
                                           start_position=start_position,
                                           end_position=end_position,
                                           trajectory=trajectory,
                                           filename=solution_filename)

        if field_parameters is None:
            field_parameters = {}

        # Initialize data structures.
        self._data = {}
        # temperature field is automatically added to fields
        if fields is None: fields = []
        if (('gas', 'temperature') not in fields) and \
           ('temperature' not in fields):
            fields.append(('gas', 'temperature'))
        data_fields = fields[:]
        all_fields = fields[:]
        all_fields.extend(['l', 'dl', 'redshift'])
        all_fields.extend(['x', 'y', 'z'])
        data_fields.extend(['x', 'y', 'z'])
        if use_peculiar_velocity:
            all_fields.extend([
                'relative_velocity_x', 'relative_velocity_y',
                'relative_velocity_z', 'velocity_los', 'redshift_eff',
                'redshift_dopp'
            ])
            data_fields.extend([
                'relative_velocity_x', 'relative_velocity_y',
                'relative_velocity_z'
            ])

        all_ray_storage = {}
        for my_storage, my_segment in parallel_objects(self.light_ray_solution,
                                                       storage=all_ray_storage,
                                                       njobs=njobs):

            # In case of simple rays, use the already loaded dataset: self.ds,
            # otherwise, load dataset for segment.
            if self.ds is None:
                ds = load(my_segment['filename'], **self.load_kwargs)
            else:
                ds = self.ds

            if redshift is not None:
                if ds.cosmological_simulation and redshift != ds.current_redshift:
                    mylog.warn(
                        "Generating light ray with different redshift than " +
                        "the dataset itself.")
                my_segment["redshift"] = redshift

            if setup_function is not None:
                setup_function(ds)

            if not ds.cosmological_simulation:
                next_redshift = my_segment["redshift"]
            elif self.near_redshift == self.far_redshift:
                if isinstance(my_segment["traversal_box_fraction"], YTArray) and \
                  not my_segment["traversal_box_fraction"].units.is_dimensionless:
                    segment_length = \
                      my_segment["traversal_box_fraction"].in_units("Mpccm / h")
                else:
                    segment_length = my_segment["traversal_box_fraction"] * \
                      ds.domain_width[0].in_units("Mpccm / h")
                next_redshift = my_segment["redshift"] - \
                  self._deltaz_forward(my_segment["redshift"],
                                       segment_length)
            elif my_segment.get("next", None) is None:
                next_redshift = self.near_redshift
            else:
                next_redshift = my_segment['next']['redshift']

            # Make sure start, end, left, right
            # are using the dataset's unit system.
            my_start = ds.arr(my_segment['start'])
            my_end = ds.arr(my_segment['end'])
            my_left = ds.arr(left_edge)
            my_right = ds.arr(right_edge)
            mylog.info("Getting segment at z = %s: %s to %s." %
                       (my_segment['redshift'], my_start, my_end))

            # Break periodic ray into non-periodic segments.
            sub_segments = periodic_ray(my_start,
                                        my_end,
                                        left=my_left,
                                        right=my_right)

            # Prepare data structure for subsegment.
            sub_data = {}
            # Put supplementary data that we want communicated across
            # processors in here.
            sub_data['extra_data'] = {}
            sub_data['extra_data']['segment_redshift'] = \
              my_segment['redshift']
            sub_data['extra_data']['unique_identifier'] = \
              ds.unique_identifier
            for field in all_fields:
                sub_data[field] = []

            # Get data for all subsegments in segment.
            for sub_segment in sub_segments:
                mylog.info("Getting subsegment: %s to %s." %
                           (list(sub_segment[0]), list(sub_segment[1])))
                sub_ray = ds.ray(sub_segment[0], sub_segment[1])
                for key, val in field_parameters.items():
                    sub_ray.set_field_parameter(key, val)
                asort = np.argsort(sub_ray["t"])
                sub_data['l'].extend(
                    sub_ray['t'][asort] *
                    vector_length(sub_ray.start_point, sub_ray.end_point))
                sub_data['dl'].extend(
                    sub_ray['dts'][asort] *
                    vector_length(sub_ray.start_point, sub_ray.end_point))

                for field in data_fields:
                    sub_data[field].extend(sub_ray[field][asort])

                if use_peculiar_velocity:
                    line_of_sight = sub_segment[0] - sub_segment[1]
                    line_of_sight /= ((line_of_sight**2).sum())**0.5
                    sub_vel = ds.arr([
                        sub_ray['relative_velocity_x'],
                        sub_ray['relative_velocity_y'],
                        sub_ray['relative_velocity_z']
                    ])
                    # Line of sight velocity = vel_los
                    sub_vel_los = (np.rollaxis(sub_vel, 1) * \
                                   line_of_sight).sum(axis=1)
                    sub_data['velocity_los'].extend(sub_vel_los[asort])

                    # doppler redshift:
                    # See https://en.wikipedia.org/wiki/Redshift and
                    # Peebles eqns: 5.48, 5.49

                    # 1 + redshift_dopp = (1 + v*cos(theta)/c) /
                    # sqrt(1 - v**2/c**2)

                    # where v is the peculiar velocity (ie physical velocity
                    # without the hubble flow, but no hubble flow in sim, so
                    # just the physical velocity).

                    # the bulk of the doppler redshift is from line of sight
                    # motion, but there is a small amount from time dilation
                    # of transverse motion, hence the inclusion of theta (the
                    # angle between line of sight and the velocity).
                    # theta is the angle between the ray vector (i.e. line of
                    # sight) and the velocity vectors: a dot b = ab cos(theta)

                    sub_vel_mag = sub_ray['velocity_magnitude']
                    cos_theta = line_of_sight.dot(sub_vel) / sub_vel_mag
                    # Protect against situations where the velocity magnitude
                    # is exactly zero, in which case zero / zero = NaN.
                    cos_theta = np.nan_to_num(cos_theta)
                    redshift_dopp = \
                        (1 + sub_vel_mag * cos_theta / speed_of_light_cgs) / \
                         np.sqrt(1 - sub_vel_mag**2 / speed_of_light_cgs**2) - 1
                    sub_data['redshift_dopp'].extend(redshift_dopp[asort])
                    del sub_vel, sub_vel_los, sub_vel_mag, cos_theta, \
                        redshift_dopp

                sub_ray.clear_data()
                del sub_ray, asort

            for key in sub_data:
                if key == "extra_data":
                    continue
                sub_data[key] = ds.arr(sub_data[key]).in_cgs()

            # Get redshift for each lixel.  Assume a linear relation between
            # l and z, so z = z_start - (l * (z_range / l_range)).
            sub_data['redshift'] = my_segment['redshift'] - \
              (sub_data['l'] * \
              (my_segment['redshift'] - next_redshift) / \
              vector_length(my_start, my_end).in_cgs())

            # When using the peculiar velocity, create effective redshift
            # (redshift_eff) field combining cosmological redshift and
            # doppler redshift.

            # then to add cosmological redshift and doppler redshifts, follow
            # eqn 3.75 in Peacock's Cosmological Physics:
            # 1 + z_eff = (1 + z_cosmo) * (1 + z_doppler)

            if use_peculiar_velocity:
                sub_data['redshift_eff'] = ((1 + sub_data['redshift_dopp']) * \
                                             (1 + sub_data['redshift'])) - 1

            # Remove empty lixels.
            sub_dl_nonzero = sub_data['dl'].nonzero()
            for field in all_fields:
                sub_data[field] = sub_data[field][sub_dl_nonzero]
            del sub_dl_nonzero

            # Add to storage.
            my_storage.result = sub_data

            del ds

        # Reconstruct ray data from parallel_objects storage.
        all_data = [my_data for my_data in all_ray_storage.values()]
        # This is now a list of segments where each one is a dictionary
        # with all the fields.
        all_data.sort(key=lambda a: a['extra_data']['segment_redshift'],
                      reverse=True)

        # Gather segment data to add to the light ray solution.
        for segment_data, my_segment in \
          zip(all_data, self.light_ray_solution):
            my_segment["unique_identifier"] = \
              segment_data["extra_data"]["unique_identifier"]

        # Flatten the list into a single dictionary containing fields
        # for the whole ray.
        all_data = _flatten_dict_list(all_data, exceptions=['extra_data'])

        self._data = all_data

        if data_filename is not None:
            self._write_light_ray(data_filename, all_data)
            ray_ds = load(data_filename)
            return ray_ds
        else:
            return None
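
A self-contained sketch of the redshift bookkeeping above, with made-up numbers: the linear z(l) interpolation along a segment, and the combination of cosmological and Doppler redshifts via 1 + z_eff = (1 + z_cosmo) * (1 + z_doppler):

import numpy as np

# Made-up segment: from z = 0.10 down to z = 0.08 over a unit path length.
z_start, z_end, l_range = 0.10, 0.08, 1.0
l = np.linspace(0.0, l_range, 5)

# Linear relation between path length and redshift, as in the code above.
redshift = z_start - l * (z_start - z_end) / l_range

# Fold in a (made-up) Doppler redshift, following Peacock eqn 3.75.
redshift_dopp = np.full_like(l, 1.0e-4)
redshift_eff = (1 + redshift) * (1 + redshift_dopp) - 1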
    def get_time_series(self, initial_time=None, final_time=None,
                        initial_redshift=None, final_redshift=None,
                        times=None, redshifts=None, tolerance=None,
                        parallel=True, setup_function=None):
        """
        Instantiate a DatasetSeries object for a set of outputs.

        If no additional keywords are given, a DatasetSeries object will be
        created with all potential datasets created by the simulation.

        Outputs can be gathered by specifying a time or redshift range
        (or a combination of time and redshift), a specific list of
        times or redshifts, or by simply searching all subdirectories
        within the simulation directory.

        Parameters
        ----------
        initial_time : tuple of type (float, str)
            The earliest time for outputs to be included.  This should be 
            given as the value and the string representation of the units.
            For example, (5.0, "Gyr").  If None, the initial time of the 
            simulation is used.  This can be used in combination with 
            either final_time or final_redshift.
            Default: None.
        final_time : tuple of type (float, str)
            The latest time for outputs to be included.  This should be 
            given as the value and the string representation of the units.
            For example, (13.7, "Gyr"). If None, the final time of the 
            simulation is used.  This can be used in combination with either 
            initial_time or initial_redshift.
            Default: None.
        times : tuple of type (float array, str)
            A list of times for which outputs will be found and the units 
            of those values.  For example, ([0, 1, 2, 3], "s").
            Default: None.
        initial_redshift : float
            The earliest redshift for outputs to be included.  If None,
            the initial redshift of the simulation is used.  This can be
            used in combination with either final_time or
            final_redshift.
            Default: None.
        final_redshift : float
            The latest redshift for outputs to be included.  If None,
            the final redshift of the simulation is used.  This can be
            used in combination with either initial_time or
            initial_redshift.
            Default: None.
        redshifts : array_like
            A list of redshifts for which outputs will be found.
            Default: None.
        tolerance : float
            Used in combination with "times" or "redshifts" keywords,
            this is the tolerance within which outputs are accepted
            given the requested times or redshifts.  If None, the
            nearest output is always taken.
            Default: None.
        parallel : bool/int
            If True, the generated DatasetSeries will divide the work
            such that a single processor works on each dataset.  If an
            integer is supplied, the work will be divided into that
            number of jobs.
            Default: True.
        setup_function : callable, accepts a ds
            This function will be called whenever a dataset is loaded.

        Examples
        --------

        >>> import yt
        >>> gs = yt.simulation("my_simulation.par", "Gadget")
        
        >>> gs.get_time_series(initial_redshift=10, final_time=(13.7, "Gyr"))

        >>> gs.get_time_series(redshifts=[3, 2, 1, 0])

        >>> # after calling get_time_series
        >>> for ds in gs.piter():
        ...     p = ProjectionPlot(ds, "x", "density")
        ...     p.save()

        >>> # An example using the setup_function keyword
        >>> def print_time(ds):
        ...     print(ds.current_time)
        >>> gs.get_time_series(setup_function=print_time)
        >>> for ds in gs:
        ...     SlicePlot(ds, "x", "Density").save()

        """

        if (initial_redshift is not None or \
            final_redshift is not None) and \
            not self.cosmological_simulation:
            raise InvalidSimulationTimeSeries(
                "An initial or final redshift has been given for a " +
                "noncosmological simulation.")

        my_all_outputs = self.all_outputs
        if not my_all_outputs:
            DatasetSeries.__init__(self, outputs=[], parallel=parallel,
                                   unit_base=self.unit_base)
            mylog.info("0 outputs loaded into time series.")
            return

        # Apply selection criteria to the set.
        if times is not None:
            my_outputs = self._get_outputs_by_key("time", times,
                                                  tolerance=tolerance,
                                                  outputs=my_all_outputs)

        elif redshifts is not None:
            my_outputs = self._get_outputs_by_key("redshift",
                                                  redshifts, tolerance=tolerance,
                                                  outputs=my_all_outputs)

        else:
            if initial_time is not None:
                if isinstance(initial_time, float):
                    initial_time = self.quan(initial_time, "code_time")
                elif isinstance(initial_time, tuple) and len(initial_time) == 2:
                    initial_time = self.quan(*initial_time)
                elif not isinstance(initial_time, YTArray):
                    raise RuntimeError(
                        "Error: initial_time must be given as a float or " +
                        "tuple of (value, units).")
                my_initial_time = initial_time
            elif initial_redshift is not None:
                my_initial_time = self.cosmology.t_from_z(initial_redshift)
            else:
                my_initial_time = self.initial_time

            if final_time is not None:
                if isinstance(final_time, float):
                    final_time = self.quan(final_time, "code_time")
                elif isinstance(final_time, tuple) and len(final_time) == 2:
                    final_time = self.quan(*final_time)
                elif not isinstance(final_time, YTArray):
                    raise RuntimeError(
                        "Error: final_time must be given as a float or " +
                        "tuple of (value, units).")
                my_final_time = final_time.in_units("s")
            elif final_redshift is not None:
                my_final_time = self.cosmology.t_from_z(final_redshift)
            else:
                my_final_time = self.final_time

            my_initial_time.convert_to_units("s")
            my_final_time.convert_to_units("s")
            my_times = np.array([a["time"] for a in my_all_outputs])
            my_indices = np.digitize([my_initial_time, my_final_time], my_times)
            if my_initial_time == my_times[my_indices[0] - 1]:
                my_indices[0] -= 1
            my_outputs = my_all_outputs[my_indices[0]:my_indices[1]]

        init_outputs = []
        for output in my_outputs:
            if os.path.exists(output["filename"]):
                init_outputs.append(output["filename"])
        if len(init_outputs) == 0 and len(my_outputs) > 0:
            mylog.warn("Could not find any datasets.  " +
                       "Check the value of OutputDir in your parameter file.")
            
        DatasetSeries.__init__(self, outputs=init_outputs, parallel=parallel,
                               setup_function=setup_function,
                               unit_base=self.unit_base)
        mylog.info("%d outputs loaded into time series.", len(init_outputs))
Example #10
    def _parse_parameter_file(self):
        """
        Parse the SWIFT "parameter file" -- in practice, this reads the
        relevant info from the main HDF5 file, since everything is
        replicated there and parameter files are usually not shipped
        with snapshots.

        The header information from the HDF5 file is stored in an un-parsed
        format in self.parameters, should users wish to use it.
        """

        from uuid import uuid4
        self.unique_identifier = uuid4()

        # Read from the HDF5 file, this gives us all the info we need. The rest
        # of this function is just parsing.
        header = self._get_info_attributes("Header")
        runtime_parameters = self._get_info_attributes("RuntimePars")

        policy = self._get_info_attributes("Policy")
        # These are the parameterfile parameters from *.yml at runtime
        parameters = self._get_info_attributes("Parameters")

        # Not used in this function, but passed to parameters
        hydro = self._get_info_attributes("HydroScheme")
        subgrid = self._get_info_attributes("SubgridScheme")

        self.domain_right_edge = header["BoxSize"]
        self.domain_left_edge = np.zeros_like(self.domain_right_edge)

        self.dimensionality = int(header["Dimension"])

        # SWIFT is either all periodic, or not periodic at all
        periodic = int(runtime_parameters["PeriodicBoundariesOn"])

        if periodic:
            self.periodicity = [True] * self.dimensionality
        else:
            self.periodicity = [False] * self.dimensionality

        # Units get attached to this
        self.current_time = float(header["Time"])

        # Now cosmology enters the fray, as a runtime parameter.
        self.cosmological_simulation = int(policy["cosmological integration"])

        if self.cosmological_simulation:
            try:
                self.current_redshift = float(header["Redshift"])
                # These won't be present if self.cosmological_simulation is false
                self.omega_lambda = float(parameters["Cosmology:Omega_lambda"])
                self.omega_matter = float(parameters["Cosmology:Omega_m"])
                # This is "little h"
                self.hubble_constant = float(parameters["Cosmology:h"])
            except KeyError:
                mylog.warn(
                    "Could not find cosmology information in Parameters, "
                    "despite having run with -c, signifying a "
                    "cosmological run.")
                mylog.info("Setting up as a non-cosmological run. Check this!")
                self.cosmological_simulation = 0
                self.current_redshift = 0.0
                self.omega_lambda = 0.0
                self.omega_matter = 0.0
                self.hubble_constant = 0.0
        else:
            self.current_redshift = 0.0
            self.omega_lambda = 0.0
            self.omega_matter = 0.0
            self.hubble_constant = 0.0

        # Store the un-parsed information should people want it.
        self.parameters = dict(header=header,
                               runtime_parameters=runtime_parameters,
                               policy=policy,
                               parameters=parameters,
                               hydro=hydro,
                               subgrid=subgrid)

        # SWIFT never has multi file snapshots
        self.file_count = 1
        self.filename_template = self.parameter_filename

        return
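
_get_info_attributes is not shown in this snippet; a plausible stand-in, assuming it simply returns the attributes of one HDF5 group as a dict (group names follow the SWIFT snapshot layout):

import h5py

# Hypothetical helper mirroring what _get_info_attributes presumably does.
def get_info_attributes(filename, group):
    with h5py.File(filename, "r") as f:
        return dict(f[group].attrs)

# header = get_info_attributes("snapshot_0000.hdf5", "Header")
# header["BoxSize"], header["Time"], header["Redshift"]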
Example #12
def add_particle_filters(ds):
    pfilters = ["pop_3", "black_hole"]
    for pfilter in pfilters:
        if not ds.add_particle_filter(pfilter):
            mylog.warn("Failed to add filter: %s." % pfilter)
Example #13
def add_p2p_particle_filters(ds):
    pfilters = ["pop3"]
    for pfilter in pfilters:
        if not ds.add_particle_filter(pfilter):
            mylog.warn("Failed to add filter: %s." % pfilter)