Code example #1
def preloop(self, f, db_timestep):
    # Attach the appropriate BH log reader for this timestep's output file:
    # prefer the full BlackHolesLog, fall back to the shortened orbit log.
    if BlackHolesLog.can_load(db_timestep.filename):
        self.log = BlackHolesLog.get_existing_or_new(db_timestep.filename)
    elif ShortenedOrbitLog.can_load(db_timestep.filename):
        self.log = ShortenedOrbitLog.get_existing_or_new(db_timestep.filename)
    else:
        raise RuntimeError("cannot find recognizable log file")
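The pattern above (try each log class in order of preference and stop at the first whose can_load accepts the file) can be factored into a small standalone helper. The sketch below is illustrative only: open_bh_log is a hypothetical name, and it assumes nothing beyond the can_load and get_existing_or_new classmethods already used in the example.

def open_bh_log(filename):
    # Hypothetical helper (not part of the original code): return a reader
    # from the first log class that recognizes the file.
    for log_class in (BlackHolesLog, ShortenedOrbitLog):
        if log_class.can_load(filename):
            return log_class.get_existing_or_new(filename)
    raise RuntimeError("cannot find recognizable log file")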
Code example #2
def test_bhlog():
    sim = pynbody.load(_sim_path)
    # Conversion factors between the simulation's native units and the
    # physical units quoted below; the comoving velocity and position units
    # additionally require a value for the scale factor a.
    mass_unit_conv = sim.infer_original_units('Msol').in_units('Msol')
    mdot_unit_conv = sim.infer_original_units('Msol yr**-1').in_units(
        'Msol yr**-1')
    vunit_conv = sim.infer_original_units('km s**-1').in_units('km s**-1',
                                                               a=0.666)
    posunit_conv = sim.infer_original_units('kpc').in_units('kpc', a=0.666)

    assert (BlackHolesLog.can_load(_sim_path))
    bhlog = BlackHolesLog(_sim_path)
    assert (np.abs(
        bhlog.get_at_stepnum_for_id(2.0, 12345)['mass'] / mass_unit_conv - 400)
            < 1e-6)
    assert (np.abs(
        bhlog.get_at_stepnum_for_id(1.0, 12345)['mass'] / mass_unit_conv - 200)
            < 1e-6)
    assert (np.abs(
        bhlog.get_at_stepnum_for_id(1.0, 12345)['mdot'] / mdot_unit_conv - 100)
            < 1e-6)
    assert (np.abs(
        bhlog.get_at_stepnum_for_id(2.0, 12346)['mdot'] / mdot_unit_conv - 25)
            < 1e-6)

    assert (np.abs(
        bhlog.get_at_stepnum_for_id(2.0, 12345)['mdotmean'] / mdot_unit_conv -
        200) < 1e-6)
    assert (np.abs(
        bhlog.get_at_stepnum_for_id(1.0, 12346)['mdotmean'] / mdot_unit_conv -
        50) < 1e-6)

    assert (np.abs(
        bhlog.get_at_stepnum_for_id(1.0, 12345)['x'] / posunit_conv - 1.0) <
            1e-6)
    assert (np.abs(
        bhlog.get_at_stepnum_for_id(1.0, 12345)['vx'] / vunit_conv - 1.0) <
            1e-6)

    assert (bhlog.get_last_entry_for_id(12345)['step'] == 2.0)
    assert (bhlog.get_last_entry_for_id(12346)['step'] == 2.0)
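Beyond the spot checks in the test, the same get_at_stepnum_for_id call can be used to assemble a time series for a single black hole. A minimal sketch, assuming only the API exercised above; the helper name mass_history and the list of step numbers are illustrative:

def mass_history(bhlog, bh_id, stepnums, mass_unit_conv):
    # Return the logged BH mass at each requested step number, scaled by the
    # same conversion factor the test uses.
    return [bhlog.get_at_stepnum_for_id(step, bh_id)['mass'] / mass_unit_conv
            for step in stepnums]

# For the fixture data asserted above, mass_history(bhlog, 12345, [1.0, 2.0],
# mass_unit_conv) would return approximately [200, 400].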
Code example #3
def generate_halolinks(session, fname, pairs):
    """Generate BH tracker links and merger links for each (ts1, ts2) timestep pair."""
    for ts1, ts2 in parallel_tasks.distributed(pairs):
        # Prefer the full BlackHolesLog; fall back to the shortened orbit log.
        bh_log = None
        if BlackHolesLog.can_load(ts2.filename):
            bh_log = BlackHolesLog(ts2.filename)
        elif ShortenedOrbitLog.can_load(ts2.filename):
            bh_log = ShortenedOrbitLog(ts2.filename)
        if bh_log is None:
            logger.error("Warning! No orbit file found!")
        links = []
        mergers_links = []
        bh_map = {}
        logger.info("Gathering BH tracking information for steps %r and %r",
                    ts1, ts2)
        # Link-type dictionary entries: "tracker" follows the same BH from one
        # step to the next; "BH_merger_next"/"BH_merger_prev" record mergers.
        with parallel_tasks.ExclusiveLock("bh"):
            dict_obj = db.core.get_or_create_dictionary_item(
                session, "tracker")
            dict_obj_next = db.core.get_or_create_dictionary_item(
                session, "BH_merger_next")
            dict_obj_prev = db.core.get_or_create_dictionary_item(
                session, "BH_merger_prev")

        track_links_n, idf_n, idt_n = db.tracking.get_tracker_links(
            session, dict_obj_next)
        bh_objects_1, nums1, id1 = get_bh_objs_numbers_and_dbids(ts1)
        bh_objects_2, nums2, id2 = get_bh_objs_numbers_and_dbids(ts2)
        tracker_links, idf, idt = db.tracking.get_tracker_links(
            session, dict_obj)

        idf_n = np.array(idf_n)
        idt_n = np.array(idt_n)

        if len(nums1) == 0 or len(nums2) == 0:
            logger.info("No BHs found in either step %r or %r... moving on",
                        ts1, ts2)
            continue

        logger.info("Generating BH tracker links between steps %r and %r", ts1,
                    ts2)
        # Indices of BHs whose particle numbers appear in both steps
        o1 = np.where(np.in1d(nums1, nums2))[0]
        o2 = np.where(np.in1d(nums2, nums1))[0]
        if len(o1) == 0 or len(o2) == 0:
            continue
        with session.no_autoflush:
            for ii, jj in zip(o1, o2):
                if nums1[ii] != nums2[jj]:
                    raise RuntimeError("BH iords are mismatched")
                exists = np.where((idf == id1[ii]) & (idt == id2[jj]))[0]
                if len(exists) == 0:
                    links.append(
                        tangos.core.halo_data.HaloLink(bh_objects_1[ii],
                                                       bh_objects_2[jj],
                                                       dict_obj, 1.0))
                    links.append(
                        tangos.core.halo_data.HaloLink(bh_objects_2[jj],
                                                       bh_objects_1[ii],
                                                       dict_obj, 1.0))
        logger.info("Generated %d tracker links between steps %r and %r",
                    len(links), ts1, ts2)

        logger.info("Generating BH Merger information for steps %r and %r",
                    ts1, ts2)
        for line in open(fname[0]):
            l_split = line.split()
            t = float(l_split[6])
            bh_dest_id = int(l_split[0])
            bh_src_id = int(l_split[1])
            ratio = float(l_split[4])

            # ratios in merger file are ambiguous (since major progenitor may be "source" rather than "destination")
            # re-establish using the log file:
            try:
                ratio = bh_log.determine_merger_ratio(bh_src_id, bh_dest_id)
            except (ValueError, AttributeError):
                logger.debug(
                    "Could not calculate merger ratio for %d->%d from the BH log; assuming the .BHmergers-asserted value is accurate",
                    bh_src_id, bh_dest_id)

            if t > ts1.time_gyr and t <= ts2.time_gyr:
                bh_map[bh_src_id] = (bh_dest_id, ratio)

        resolve_multiple_mergers(bh_map)
        logger.info("Gathering BH merger links for steps %r and %r", ts1, ts2)
        with session.no_autoflush:
            for src, (dest, ratio) in bh_map.items():
                if src not in nums1 or dest not in nums2:
                    logger.warning(
                        "Can't link BH %r -> %r; missing BH objects in database",
                        src, dest)
                    continue
                bh_src_before = bh_objects_1[nums1.index(src)]
                bh_dest_after = bh_objects_2[nums2.index(dest)]

                # Only add merger links that are not already in the database
                if ((idf_n == bh_src_before.id) &
                    (idt_n == bh_dest_after.id)).sum() == 0:
                    mergers_links.append(
                        tangos.core.halo_data.HaloLink(bh_src_before,
                                                       bh_dest_after,
                                                       dict_obj_next, 1.0))
                    mergers_links.append(
                        tangos.core.halo_data.HaloLink(bh_dest_after,
                                                       bh_src_before,
                                                       dict_obj_prev, ratio))

        logger.info("Generated %d BH merger links for steps %r and %r",
                    len(mergers_links), ts1, ts2)

        with parallel_tasks.ExclusiveLock("bh"):
            logger.info("Committing total %d BH links for steps %r and %r",
                        len(mergers_links) + len(links), ts1, ts2)
            session.add_all(links)
            session.add_all(mergers_links)
            session.commit()
            logger.info("Finished committing BH links for steps %r and %r",
                        ts1, ts2)
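If the .BHmergers parsing in the example needs to be reused elsewhere, it could be pulled out into its own generator. The sketch below is illustrative only, grounded in the column layout the loop above reads (time in column 6, destination id in column 0, source id in column 1, ratio in column 4); iter_bh_mergers is a hypothetical name.

def iter_bh_mergers(merger_filename):
    # Yield (time, destination_id, source_id, ratio) tuples from a .BHmergers
    # file, using the same columns as the loop in the example above.
    with open(merger_filename) as f:
        for line in f:
            cols = line.split()
            yield float(cols[6]), int(cols[0]), int(cols[1]), float(cols[4])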