コード例 #1
0
def test_save_load_pickle():
    """Round-trip a connected-set object through cPickle and verify it.

    NOTE: the early ``return`` below deliberately disables the whole body
    until boolean regions are implemented; everything after it is
    unreachable and kept only as a template for the eventual test.
    """
    return # Until boolean regions are implemented we can't test this
    test_ds = fake_random_ds(64)

    # create extracted region from boolean (fairly complex object)
    center = (test_ds.domain_left_edge + test_ds.domain_right_edge) / 2
    sp_outer = test_ds.sphere(center, test_ds.domain_width[0])
    sp_inner = test_ds.sphere(center, test_ds.domain_width[0] / 10.0)
    sp_boolean = test_ds.boolean([sp_outer, "NOT", sp_inner])

    minv, maxv = sp_boolean.quantities["Extrema"]("density")[0]
    contour_threshold = min(minv * 10.0, 0.9 * maxv)

    contours = sp_boolean.extract_connected_sets(
        "density", 1, contour_threshold, maxv + 1, log_space=True, cache=True)

    # save object
    cpklfile = tempfile.NamedTemporaryFile(delete=False)
    cPickle.dump(contours[1][0], cpklfile)
    cpklfile.close()

    # load object
    test_load = cPickle.load(open(cpklfile.name, "rb"))

    assert_equal.description = \
        "%s: File was pickle-loaded successfully" % __name__
    yield assert_equal, test_load is not None, True
    assert_equal.description = \
        "%s: Length of pickle-loaded connected set object" % __name__
    yield assert_equal, len(contours[1][0]), len(test_load)

    os.remove(cpklfile.name)
コード例 #2
0
 def __setitem__(self, test_name, result):
     """Store *result* under *test_name* by pickling it to a per-test file.

     The path comes from ``self._fn(test_name)``.  A ``with`` block
     (the context manager the old comment asked for) guarantees the
     file is closed even if pickling raises.
     """
     # protocol=-1 selects the highest pickle protocol available.
     with open(self._fn(test_name), "wb") as f:
         cPickle.dump(result, f, protocol=-1)
コード例 #3
0
 def __setitem__(self, test_name, result):
     """Store *result* under *test_name* by pickling it to a per-test file.

     The path comes from ``self._fn(test_name)``.  A ``with`` block
     (the context manager the old comment asked for) guarantees the
     file is closed even if pickling raises.
     """
     # protocol=-1 selects the highest pickle protocol available.
     with open(self._fn(test_name), "wb") as f:
         cPickle.dump(result, f, protocol=-1)
コード例 #4
0
ファイル: testing.py プロジェクト: sflarkin/yt
 def _func(*args, **kwargs):
     """Call the wrapped *func* and record summary statistics of its result.

     The stats tuple (min, max, std, sum, size, md5 digest) is pickled
     to ``func_results_ref_<name>.cpkl``; the wrapped function's return
     value is passed through unchanged.
     """
     name = kwargs.pop("result_basename", func.__name__)
     result = func(*args, **kwargs)
     if hasattr(result, "convert_to_base"):
         result.convert_to_base()
         raw = result.ndarray_view()
     else:
         raw = result
     digest = hashlib.md5(raw.tostring()).hexdigest()
     stats = (raw.min(), raw.max(),
              raw.std(dtype="float64"), raw.sum(dtype="float64"),
              raw.size, digest)
     fn = "func_results_ref_%s.cpkl" % (name)
     with open(fn, "wb") as f:
         cPickle.dump(stats, f)
     return result
コード例 #5
0
def test_ytarray_pickle():
    """Round-trip a YTQuantity and a YTArray through pickle and compare."""
    ds = fake_random_ds(64, nprocs=1)
    samples = [ds.quan(12.0, 'code_length'),
               ds.arr([1, 2, 3], 'code_length')]

    for original in samples:
        with tempfile.NamedTemporaryFile(delete=False) as tempf:
            pickle.dump(original, tempf)

        with open(tempf.name, "rb") as fname:
            restored = pickle.load(fname)
        os.unlink(tempf.name)

        assert_array_equal(original, restored)
        assert_equal(original.units, restored.units)
        assert_array_equal(array(original.in_cgs()),
                           array(restored.in_cgs()))
        assert_equal(float(original.units.base_value),
                     float(restored.units.base_value))
コード例 #6
0
 def _func(*args, **kwargs):
     """Call the wrapped *func* and record summary statistics of its result.

     The stats tuple (min, max, std, sum, size, md5 digest) is pickled
     to ``func_results_ref_<name>.cpkl``; the wrapped function's return
     value is passed through unchanged.
     """
     name = kwargs.pop("result_basename", func.__name__)
     result = func(*args, **kwargs)
     if hasattr(result, "convert_to_cgs"):
         result.convert_to_cgs()
         raw = result.ndarray_view()
     else:
         raw = result
     digest = hashlib.md5(raw.tostring()).hexdigest()
     stats = (raw.min(), raw.max(),
              raw.std(dtype="float64"), raw.sum(dtype="float64"),
              raw.size, digest)
     fn = "func_results_ref_%s.cpkl" % (name)
     with open(fn, "wb") as f:
         cPickle.dump(stats, f)
     return result
コード例 #7
0
def test_ytarray_pickle():
    """Yield pickle round-trip checks for a YTQuantity and a YTArray."""
    ds = fake_random_ds(64, nprocs=1)
    samples = [ds.quan(12.0, 'code_length'),
               ds.arr([1, 2, 3], 'code_length')]

    for original in samples:
        tempf = tempfile.NamedTemporaryFile(delete=False)
        pickle.dump(original, tempf)
        tempf.close()

        with open(tempf.name, "rb") as fname:
            restored = pickle.load(fname)
        os.unlink(tempf.name)

        yield assert_array_equal, original, restored
        yield assert_equal, original.units, restored.units
        yield (assert_array_equal, array(original.in_cgs()),
               array(restored.in_cgs()))
        yield (assert_equal, float(original.units.base_value),
               float(restored.units.base_value))
コード例 #8
0
def find_halo_relationships(output1_id,
                            output2_id,
                            output_basename=None,
                            radius=0.10,
                            external_FOF=True,
                            FOF_directory='FOF'):
    r"""Calculate the parentage and child relationships between two EnzoFOF
    halo catalogs.

    This function performs a very simple merger tree calculation between two
    sets of halos.  For every halo in the second halo catalog, it looks to the
    first halo catalog to find the parents by looking at particle IDs.  The
    particle IDs from the child halos are identified in potential parents, and
    then both percent-of-parent and percent-to-child values are recorded.

    Note that this works with catalogs constructed by Enzo's FOF halo
    when used in external_FOF=True mode, whereas it will work with
    catalogs constructed by yt using external_FOF=False mode.

    Parameters
    ----------
    output1_id : int
        This is the integer output id of the (first) halo catalog to parse and
        load.
    output2_id : int
        This is the integer output id of the (second) halo catalog to parse and
        load.
    output_basename : string
        If provided, both .cpkl and .txt files containing the parentage
        relationships will be output.
    radius : float, default to 0.10
        In absolute units, the radius to examine when guessing possible
        parent/child relationships.  If this value is too small, you will miss
        possible relationships.
        NOTE(review): this parameter is not referenced in the body below —
        confirm whether it should be forwarded to the catalog machinery.
    external_FOF : bool, optional
        If True (default), parse catalogs written by Enzo's external FOF
        halo finder; if False, parse catalogs written by yt.
    FOF_directory : str, optional
        Directory where FOF files are located

    Returns
    -------
    pfrac : dict
        This is a dict of dicts.  The first key is the parent halo id, the
        second is the child halo id.  The values are the percent contributed
        from parent to child and the percent of a child that came from the
        parent.
    """
    mylog.info("Parsing Halo Catalog %04i", output1_id)
    HC1 = HaloCatalog(output1_id, False, external_FOF=external_FOF,
                      FOF_directory=FOF_directory)
    mylog.info("Parsing Halo Catalog %04i", output2_id)
    HC2 = HaloCatalog(output2_id, True, external_FOF=external_FOF,
                      FOF_directory=FOF_directory)
    mylog.info("Calculating fractions")
    pfrac = HC1.calculate_parentage_fractions(HC2)

    if output_basename is not None and pfrac != {}:
        # Human-readable summary; the context manager guarantees the
        # handle is closed even if a write fails.
        with open("%s.txt" % (output_basename), "w") as f:
            for hid1 in sorted(pfrac):
                for hid2 in sorted(pfrac[hid1]):
                    if not str(hid2).isdigit():
                        continue
                    p1, p2, npart = pfrac[hid1][hid2]
                    if p1 == 0.0:
                        continue
                    f.write(
                        "Halo %s (%s) contributed %0.3e of its particles to %s (%s), which makes up %0.3e of that halo\n"
                        % (hid1, output1_id, p2, hid2, output2_id, p1))

        # Binary pickle of the full fraction dict; previously the handle
        # returned by open() here was never closed (resource leak).
        with open("%s.cpkl" % (output_basename), "wb") as f:
            cPickle.dump(pfrac, f)

    return HC1.redshift, HC2.redshift, pfrac
コード例 #9
0
 def save_tree(self, filename):
     """Pickle this tree's ``(redshifts, relationships)`` pair to *filename*.

     Uses a context manager so the output file is closed deterministically;
     the previous bare ``open()`` handle was never explicitly closed.
     """
     with open(filename, "wb") as f:
         cPickle.dump((self.redshifts, self.relationships), f)
コード例 #10
0
 def dump(self, storage):
     """Pickle this object to the file path *storage*.

     Opens the file in binary mode ('wb'): pickle emits bytes, and the
     old text-mode ('w') handle raises TypeError on Python 3.
     """
     with open(storage, 'wb') as fh:
         pickle.dump(self, fh)
コード例 #11
0
 def dump(self, storage):
     """Pickle this object to the file path *storage*.

     Opens the file in binary mode ('wb'): pickle emits bytes, and the
     old text-mode ('w') handle raises TypeError on Python 3.
     """
     with open(storage, 'wb') as fh:
         pickle.dump(self, fh)
コード例 #12
0
def find_halo_relationships(output1_id, output2_id, output_basename = None,
                            radius = 0.10, external_FOF=True,
                            FOF_directory='FOF'):
    r"""Calculate the parentage and child relationships between two EnzoFOF
    halo catalogs.

    This function performs a very simple merger tree calculation between two
    sets of halos.  For every halo in the second halo catalog, it looks to the
    first halo catalog to find the parents by looking at particle IDs.  The
    particle IDs from the child halos are identified in potential parents, and
    then both percent-of-parent and percent-to-child values are recorded.

    Note that this works with catalogs constructed by Enzo's FOF halo
    when used in external_FOF=True mode, whereas it will work with
    catalogs constructed by yt using external_FOF=False mode.

    Parameters
    ----------
    output1_id : int
        This is the integer output id of the (first) halo catalog to parse and
        load.
    output2_id : int
        This is the integer output id of the (second) halo catalog to parse and
        load.
    output_basename : string
        If provided, both .cpkl and .txt files containing the parentage
        relationships will be output.
    radius : float, default to 0.10
        In absolute units, the radius to examine when guessing possible
        parent/child relationships.  If this value is too small, you will miss
        possible relationships.
        NOTE(review): this parameter is not referenced in the body below —
        confirm whether it should be forwarded to the catalog machinery.
    external_FOF : bool, optional
        If True (default), parse catalogs written by Enzo's external FOF
        halo finder; if False, parse catalogs written by yt.
    FOF_directory : str, optional
        Directory where FOF files are located

    Returns
    -------
    pfrac : dict
        This is a dict of dicts.  The first key is the parent halo id, the
        second is the child halo id.  The values are the percent contributed
        from parent to child and the percent of a child that came from the
        parent.
    """
    mylog.info("Parsing Halo Catalog %04i", output1_id)
    HC1 = HaloCatalog(output1_id, False, external_FOF=external_FOF,
                      FOF_directory=FOF_directory)
    mylog.info("Parsing Halo Catalog %04i", output2_id)
    HC2 = HaloCatalog(output2_id, True, external_FOF=external_FOF,
                      FOF_directory=FOF_directory)
    mylog.info("Calculating fractions")
    pfrac = HC1.calculate_parentage_fractions(HC2)

    if output_basename is not None and pfrac != {}:
        # Human-readable summary; the context manager guarantees the
        # handle is closed even if a write fails.
        with open("%s.txt" % (output_basename), "w") as f:
            for hid1 in sorted(pfrac):
                for hid2 in sorted(pfrac[hid1]):
                    if not str(hid2).isdigit():
                        continue
                    p1, p2, npart = pfrac[hid1][hid2]
                    if p1 == 0.0:
                        continue
                    f.write( "Halo %s (%s) contributed %0.3e of its particles to %s (%s), which makes up %0.3e of that halo\n" % (
                                hid1, output1_id, p2, hid2, output2_id, p1))

        # Binary pickle of the full fraction dict; previously the handle
        # returned by open() here was never closed (resource leak).
        with open("%s.cpkl" % (output_basename), "wb") as f:
            cPickle.dump(pfrac, f)

    return HC1.redshift, HC2.redshift, pfrac
コード例 #13
0
 def save_tree(self, filename):
     """Pickle this tree's ``(redshifts, relationships)`` pair to *filename*.

     Uses a context manager so the output file is closed deterministically;
     the previous bare ``open()`` handle was never explicitly closed.
     """
     with open(filename, "wb") as f:
         cPickle.dump((self.redshifts, self.relationships), f)