Example 1
    def _check_for_outputs(self, potential_outputs):
        r"""
        Check a list of files to see if they are valid datasets.
        """

        only_on_root(mylog.info, "Checking %d potential outputs.",
                     len(potential_outputs))

        my_outputs = {}
        llevel = mylog.level
        # suppress logging (40 = ERROR) while loading every dataset,
        # unless the user has already set DEBUG (10) or stricter
        if 10 < llevel < 40:
            mylog.setLevel(40)
        for my_storage, output in parallel_objects(potential_outputs,
                                                   storage=my_outputs):
            if os.path.exists(output):
                try:
                    ds = load(output)
                    if ds is not None:
                        my_storage.result = {
                            "filename": output,
                            "time": ds.current_time.in_units("s")
                        }
                        if ds.cosmological_simulation:
                            my_storage.result["redshift"] = ds.current_redshift
                except YTOutputNotIdentified:
                    mylog.error("Failed to load %s", output)
        mylog.setLevel(llevel)
        my_outputs = [my_output for my_output in my_outputs.values()
                      if my_output is not None]
        return my_outputs
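All of these snippets repeat the same pattern: record the current yt log level, raise it to ERROR (40) while doing noisy work, and put it back afterward. Below is a minimal sketch of that pattern as a context manager; suppress_yt_logging is a hypothetical helper, not part of yt or ytree.

import logging
from contextlib import contextmanager

from yt.utilities.logger import ytLogger


@contextmanager
def suppress_yt_logging(quiet_level=logging.ERROR):
    # Temporarily raise yt's log level, restoring it on exit.
    orig = ytLogger.level
    # Leave DEBUG (or stricter) settings alone so diagnostics stay
    # visible, mirroring the "10 < level < 40" guard used above.
    if logging.DEBUG < orig < logging.ERROR:
        ytLogger.setLevel(quiet_level)
    try:
        yield
    finally:
        ytLogger.setLevel(orig)

With such a helper, the load() calls in _check_for_outputs could sit inside a single with-block instead of managing the level by hand.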
Example 2
def yt_load(filename, **kwargs):
    """
    Call yt.load with logging suppressed, then restore the original level.

    This keeps yt.load verbose in user scripts while staying quiet
    inside ytree.
    """
    level = ytLogger.level
    # suppress INFO/WARNING output (40 = ERROR) unless DEBUG is set
    if 10 < level < 40:
        ytLogger.setLevel(40)
    try:
        ds = _yt_load(filename, **kwargs)
    finally:
        # restore the caller's original level even if the load fails
        ytLogger.setLevel(level)
    return ds
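A usage sketch; the path below is a made-up Enzo-style dataset, but anything yt.load accepts will do.

# Loads quietly inside ytree; yt's INFO chatter is suppressed unless
# the logger was already at DEBUG.
ds = yt_load("DD0046/DD0046")  # hypothetical dataset path
print(ds.current_time)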
Example 3
def run_nose(
    verbose=False,
    run_answer_tests=False,
    answer_big_data=False,
    call_pdb=False,
    module=None,
):
    import sys

    from yt.utilities.logger import ytLogger as mylog
    from yt.utilities.on_demand_imports import _nose

    orig_level = mylog.getEffectiveLevel()
    mylog.setLevel(50)  # 50 = CRITICAL: show only critical messages
    nose_argv = sys.argv[:]  # copy, so the appends below do not mutate sys.argv
    nose_argv += ["--exclude=answer_testing", "--detailed-errors", "--exe"]
    if call_pdb:
        nose_argv += ["--pdb", "--pdb-failures"]
    if verbose:
        nose_argv.append("-v")
    if run_answer_tests:
        nose_argv.append("--with-answer-testing")
    if answer_big_data:
        nose_argv.append("--answer-big-data")
    if module:
        nose_argv.append(module)
    initial_dir = os.getcwd()
    yt_file = os.path.abspath(__file__)
    yt_dir = os.path.dirname(yt_file)
    if os.path.samefile(os.path.dirname(yt_dir), initial_dir):
        # Provide a nice error message to work around nose bug
        # see https://github.com/nose-devs/nose/issues/701
        raise RuntimeError(
            """
    The yt.run_nose function does not work correctly when invoked in
    the same directory as the installed yt package. Try starting
    a python session in a different directory before invoking yt.run_nose
    again. Alternatively, you can also run the "nosetests" executable in
    the current directory like so:

        $ nosetests
            """
        )
    os.chdir(yt_dir)
    try:
        _nose.run(argv=nose_argv)
    finally:
        os.chdir(initial_dir)
        mylog.setLevel(orig_level)
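As the error message above indicates, this entry point is exposed as yt.run_nose, so a minimal invocation (from any directory other than the installed package) looks like:

import yt

# Run the standard (non-answer) test suite with verbose output.
yt.run_nose(verbose=True)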
Example 4
    def _check_for_outputs(self, potential_outputs):
        """
        Check a list of files to see if they are valid datasets.
        """

        only_on_root(
            mylog.info, "Checking %d potential outputs.", len(potential_outputs)
        )

        my_outputs = {}
        llevel = mylog.level
        # suppress logging (40 = ERROR) while loading every dataset,
        # unless the user has already set DEBUG (10) or stricter
        if 10 < llevel < 40:
            mylog.setLevel(40)
        for my_storage, output in parallel_objects(
            potential_outputs, storage=my_outputs
        ):
            if self.parameters["DataDumpDir"] in output:
                dir_key = self.parameters["DataDumpDir"]
                output_key = self.parameters["DataDumpName"]
            else:
                dir_key = self.parameters["RedshiftDumpDir"]
                output_key = self.parameters["RedshiftDumpName"]
            index = output[output.find(dir_key) + len(dir_key) :]
            filename = os.path.join(
                self.parameters["GlobalDir"],
                f"{dir_key}{index}",
                f"{output_key}{index}",
            )
            try:
                ds = load(filename)
            except (FileNotFoundError, YTUnidentifiedDataType):
                mylog.error("Failed to load %s", filename)
                continue
            my_storage.result = {
                "filename": filename,
                "time": ds.current_time.in_units("s"),
            }
            if ds.cosmological_simulation:
                my_storage.result["redshift"] = ds.current_redshift
        mylog.setLevel(llevel)
        my_outputs = [
            my_output for my_output in my_outputs.values() if my_output is not None
        ]

        return my_outputs
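A worked sketch of the path reconstruction above, assuming Enzo-style parameter values (DataDumpDir = "DD", DataDumpName = "data"); the /sim prefix is made up.

import os

output = "/sim/DD0046"  # an entry from potential_outputs
dir_key, output_key = "DD", "data"
index = output[output.find(dir_key) + len(dir_key):]  # -> "0046"
filename = os.path.join("/sim", f"{dir_key}{index}", f"{output_key}{index}")
# -> "/sim/DD0046/data0046"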
Example 5
    def _check_for_outputs(self, potential_outputs):
        """
        Check a list of files to see if they are valid datasets.
        """

        only_on_root(mylog.info, "Checking %d potential outputs.",
                     len(potential_outputs))

        my_outputs = {}
        llevel = mylog.level
        # suppress logging (40 = ERROR) while loading every dataset,
        # unless the user has already set DEBUG (10) or stricter
        if 10 < llevel < 40:
            mylog.setLevel(40)
        for my_storage, output in parallel_objects(potential_outputs,
                                                   storage=my_outputs):
            if self.parameters['DataDumpDir'] in output:
                dir_key = self.parameters['DataDumpDir']
                output_key = self.parameters['DataDumpName']
            else:
                dir_key = self.parameters['RedshiftDumpDir']
                output_key = self.parameters['RedshiftDumpName']
            index = output[output.find(dir_key) + len(dir_key):]
            filename = os.path.join(self.parameters['GlobalDir'],
                                    "%s%s" % (dir_key, index),
                                    "%s%s" % (output_key, index))
            if os.path.exists(filename):
                try:
                    ds = load(filename)
                    if ds is not None:
                        my_storage.result = {
                            'filename': filename,
                            'time': ds.current_time.in_units("s")
                        }
                        if ds.cosmological_simulation:
                            my_storage.result['redshift'] = ds.current_redshift
                except YTOutputNotIdentified:
                    mylog.error('Failed to load %s', filename)
        mylog.setLevel(llevel)
        my_outputs = [my_output for my_output in my_outputs.values()
                      if my_output is not None]

        return my_outputs
Example 6
import gc
import numpy as np
import os
import sys
import yt
yt.enable_parallelism()
import ytree

from yt.analysis_modules.level_sets.api import *
from yt.funcs import \
    ensure_dir
from yt.utilities.physical_constants import G

from yt.utilities.logger import \
    ytLogger
ytLogger.setLevel(20)  # 20 = INFO

from yt.extensions.p3bh import *
from yt.extensions.p3bh.merger_tree_analysis import \
    get_existing_datasets


def _bulk_velocity(clump, **kwargs):
    # clump info callbacks return a (format string, value) pair
    bv = clump.quantities.bulk_velocity(**kwargs)
    return "Bulk velocity: %s.", bv


add_clump_info("bulk_velocity", _bulk_velocity)


def _total_volume(clump):
    ...


Example 7
    contained_info = {}

    my_storage = {}
    for sto, (i, fn) in yt.parallel_objects(enumerate(fns),
                                            storage=my_storage):
        my_dmin = []
        contained = 0
        cfns = glob.glob(os.path.join(data_dir, os.path.dirname(fn), "*.h5"))
        pbar = yt.get_pbar(
            "%s (z = %f) - Calculating distances" %
            (os.path.dirname(fn), es.data["redshift"][i]), len(cfns))
        for cfn in cfns:
            clump_dmin = []
            mylog.setLevel(40)
            ds = yt.load(cfn)
            mylog.setLevel(llevel)

            if (not hasattr(ds, "tree")
                    or ds.tree.children is None
                    or len(ds.tree.children) == 0):
                del ds
                pbar.update(1)
                continue

            bhps = ds.tree["black_hole", "particle_position"]
            bhids = ds.tree["black_hole", "particle_index"].d.astype(np.int64)
            if bhps.size == 0:
                del ds
                pbar.update(1)
Example 8
import glob
import h5py
from matplotlib import pyplot, cm
from matplotlib.ticker import \
     FuncFormatter, FixedLocator, StrMethodFormatter, NullFormatter
import matplotlib as mpl
import numpy as np
import os
import yt
from yt.visualization.color_maps import *
from yt.units.yt_array import YTQuantity, YTArray
from yt.utilities.cosmology import Cosmology

from yt.utilities.logger import \
    ytLogger
ytLogger.setLevel(40)  # 40 = ERROR

from grid_figure import GridFigure

mpl.rcParams['axes.unicode_minus'] = False


def _z_from_t(t, pos):
    # matplotlib tick-formatter callback: time in Myr -> redshift label
    co = Cosmology(omega_matter=0.266,
                   omega_lambda=0.734,
                   hubble_constant=0.71)
    return "%d" % np.round(co.z_from_t(co.quan(t, "Myr")))

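_z_from_t has the (value, position) signature matplotlib tick formatters expect, so it can label a time axis with redshifts. A sketch, with my_axes standing in for whichever axes object the figure provides:

# Hypothetical wiring: ticks are times in Myr, labels are redshifts.
my_axes.xaxis.set_major_formatter(FuncFormatter(_z_from_t))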

def plot_profile(my_fig, my_axes, data_dir, title):
    my_files = glob.glob(os.path.join(data_dir, "profiles/*.h5"))