def _add_two_properties_different_ranges():
    """Write two test properties whose halo ranges differ in length.

    Each write is committed immediately so other ranks see it promptly.
    """
    for halo_number in pt.distributed(list(range(1, 10))):
        tangos.get_halo(halo_number)['my_test_property_2'] = halo_number
        tangos.core.get_default_session().commit()

    for halo_number in pt.distributed(list(range(1, 8))):
        tangos.get_halo(halo_number)['my_test_property_3'] = halo_number
        tangos.core.get_default_session().commit()
def generate_tracker_halo_links(sim, session):
    """Create bidirectional 'tracker' HaloLinks between matching tracker halos
    in consecutive timesteps of *sim*, then commit them to *session*.

    Halos are matched by tracker number; a halo_number mismatch between matched
    entries indicates inconsistent tracking data and raises RuntimeError.
    Links that already exist in the database are not duplicated.
    """
    dict_obj = get_or_create_dictionary_item(session, "tracker")
    links = []
    # Pair each timestep with its predecessor; pairs are distributed over ranks.
    for ts1, ts2 in parallel_tasks.distributed(
            list(zip(sim.timesteps[1:], sim.timesteps[:-1]))):
        print("generating links for", ts1, ts2)
        # Third return value was bound to `id`, shadowing the builtin and never
        # used before being overwritten -- discard it instead.
        halos_1, nums1, _ = get_tracker_halos(ts1)
        halos_2, nums2, _ = get_tracker_halos(ts2)
        tracker_links, idf, idt = get_tracker_links(session, dict_obj)

        if len(nums1) == 0 or len(nums2) == 0:
            continue

        # Indices of tracker numbers present in both timesteps
        o1 = np.where(np.in1d(nums1, nums2))[0]
        o2 = np.where(np.in1d(nums2, nums1))[0]
        if len(o1) == 0 or len(o2) == 0:
            continue

        for ii, jj in zip(o1, o2):
            if halos_1[ii].halo_number != halos_2[jj].halo_number:
                raise RuntimeError("ERROR mismatch of BH iords")
            # Only create links that are not already recorded in the database
            exists = np.where((idf == halos_1[ii].id) & (idt == halos_2[jj].id))[0]
            if len(exists) == 0:
                links.append(HaloLink(halos_1[ii], halos_2[jj], dict_obj, 1.0))
                links.append(HaloLink(halos_2[jj], halos_1[ii], dict_obj, 1.0))

    session.add_all(links)
    session.commit()
def _test_not_run_twice():
    """Increment a shared counter once per distributed task.

    Rank start times are deliberately staggered so that any double-execution
    of a task would be exposed by the final counter value.
    """
    import time

    # Staggered start: each rank sleeps proportionally to its rank number.
    time.sleep(pt.backend.rank() * 0.05)

    for _ in pt.distributed(list(range(3))):
        with pt.ExclusiveLock("lock"):
            tangos.get_halo(1)['test_count'] += 1
            tangos.get_default_session().commit()
def test_function():
    """Run a distributed loop of ten tasks, serialising output with a lock."""
    lock = pt.ExclusiveLock("hello")
    print("Hello from rank", pt.backend.rank())

    for task_number in pt.distributed(range(10)):
        with lock:
            print("Task", task_number)
            time.sleep(0.1)

    # Only rank 1 reports overall success
    if pt.backend.rank() == 1:
        print()
        print("OK")
def _get_array():
    """Fetch a filtered 'pos' array from the remote pynbody server and check it
    against a locally loaded copy of the same snapshot."""
    test_filter = pynbody.filt.Sphere('5000 kpc')

    for fname in pt.distributed(["tiny.000640", "tiny.000832"]):
        # Ask rank 0 to load the snapshot, and wait for confirmation
        ps.RequestLoadPynbodySnapshot((handler, fname)).send(0)
        ps.ConfirmLoadPynbodySnapshot.receive(0)

        # Request the filtered position array from the remote snapshot
        ps.RequestPynbodyArray(test_filter, "pos").send(0)

        # Load the same snapshot locally, in physical units, for comparison
        f_local = pynbody.load(tangos.config.base + "/test_simulations/test_tipsy/" + fname)
        f_local.physical_units()

        remote_result = ps.ReturnPynbodyArray.receive(0).contents
        assert (f_local[test_filter]['pos'] == remote_result).all()

        ps.ReleasePynbodySnapshot().send(0)
def run_calculation_loop(self):
    """Distribute timestep pairs across processors and crosslink each pair.

    Pairs come from self._generate_timestep_pairs(); linking is skipped for a
    pair unless --force is given or need_crosslink_ts reports it is required.
    """
    parallel_tasks.database.synchronize_creator_object()

    pair_list = self._generate_timestep_pairs()
    if len(pair_list) == 0:
        logger.error("No timesteps found to link")
        return

    pair_list = parallel_tasks.distributed(pair_list)
    object_type = core.halo.Halo.object_typecode_from_tag(self.args.type_)

    for ts_source, ts_dest in pair_list:
        logger.info("Linking %r and %r", ts_source, ts_dest)
        must_link = self.args.force or self.need_crosslink_ts(ts_source, ts_dest, object_type)
        if must_link:
            self.crosslink_ts(ts_source, ts_dest, 0, self.args.hmax,
                              self.args.dmonly, object_typecode=object_type)
def _test_empty_then_non_empty_loop():
    """An empty distributed loop must not break a following non-empty one."""
    for _item in pt.distributed([]):
        pass

    for _item in pt.distributed([1, 2, 3]):
        pass
def _test_empty_loop():
    """Distributing an empty iterable must yield nothing on any rank."""
    for _item in pt.distributed([]):
        # The loop body must never execute
        assert False
def _add_property():
    """Write a test property to halos 1-9, serialising inserts with a lock."""
    for halo_number in pt.distributed(list(range(1, 10))):
        # 'insert' lock with a 0.05s polling interval serialises the writes
        with pt.ExclusiveLock('insert', 0.05):
            tangos.get_halo(halo_number)['my_test_property'] = halo_number
            tangos.core.get_default_session().commit()
def scan_for_BHs(files, session):
    """Scan each timestep in *files* for black holes and update the database.

    For every timestep (distributed across ranks): load the particle data,
    identify BH particles, create any missing trackdata/BH objects, and link
    BHs to their (central) halos. Timesteps that fail to load or contain no
    stars are skipped with a warning.
    """
    for timestep in parallel_tasks.distributed(files):
        logger.info("Processing %s", timestep)
        try:
            timestep_particle_data = timestep.load()
        except Exception:
            # Best-effort: skip timesteps whose raw data cannot be loaded.
            # (Was a bare `except:`, which also swallowed KeyboardInterrupt
            # and SystemExit; narrowed so those now propagate.)
            logger.warning("File not found - continuing")
            continue

        if len(timestep_particle_data.star) < 1:
            logger.warning("No stars - continuing")
            continue

        timestep_particle_data.physical_units()

        logger.info("Gathering existing BH halo information from database for step %r",
                    timestep)
        bhobjs = timestep.bhs.all()
        existing_bh_nums = [x.halo_number for x in bhobjs]
        logger.info("...found %d existing BHs", len(existing_bh_nums))

        logger.info("Gathering BH info from simulation for step %r", timestep)
        # BHs are star particles with negative formation time; compute the
        # selection once instead of twice.
        bh_indices = np.where(timestep_particle_data.star['tform'] < 0)[0]
        bh_iord_this_timestep = timestep_particle_data.star['iord'][bh_indices]
        bh_mass_this_timestep = timestep_particle_data.star['mass'][bh_indices]
        logger.info("Found %d black holes for %r", len(bh_iord_this_timestep), timestep)

        logger.info("Updating BH trackdata and BH objects using on-disk information from %r",
                    timestep)
        add_missing_trackdata_and_BH_objects(timestep, bh_iord_this_timestep,
                                             existing_bh_nums, session)
        session.expire_all()

        logger.info("Calculating halo associations for BHs in timestep %r", timestep)
        bh_cen_halos, bh_halos = bh_halo_assign(timestep_particle_data)

        # re-order our information so that links refer to BHs in descending order of mass
        bh_order_by_mass = np.argsort(bh_mass_this_timestep)[::-1]
        bh_iord_this_timestep = bh_iord_this_timestep[bh_order_by_mass]
        if bh_halos is not None:
            bh_halos = bh_halos[bh_order_by_mass]
        if bh_cen_halos is not None:
            bh_cen_halos = bh_cen_halos[bh_order_by_mass]

        # Release the (large) particle data before writing links to the database
        logger.info("Freeing the timestep particle data")
        with check_deleted(timestep_particle_data):
            del timestep_particle_data

        if bh_halos is not None:
            assign_bh_to_halos(bh_halos, bh_iord_this_timestep, timestep, "BH")
        if bh_cen_halos is not None:
            assign_bh_to_halos(bh_cen_halos, bh_iord_this_timestep, timestep,
                               "BH_central", "host_halo")
def generate_halolinks(session, fname, pairs):
    """Generate BH tracker links and BH merger links for each timestep pair.

    For every (ts1, ts2) pair (distributed across ranks): match BHs present in
    both steps and create bidirectional 'tracker' HaloLinks, then parse the
    merger file *fname[0]* to create 'BH_merger_next'/'BH_merger_prev' links,
    committing everything under an exclusive "bh" lock.

    NOTE(review): *fname* is indexed as fname[0], so presumably a sequence of
    merger-file paths — confirm against callers.
    """
    for ts1, ts2 in parallel_tasks.distributed(pairs):
        # Prefer the black-hole log; fall back to the shortened orbit log.
        bh_log = None
        if BlackHolesLog.can_load(ts2.filename):
            bh_log = BlackHolesLog(ts2.filename)
        elif ShortenedOrbitLog.can_load(ts2.filename):
            bh_log = ShortenedOrbitLog(ts2.filename)
        if bh_log is None:
            # Non-fatal: merger ratios will then come from the merger file itself
            logger.error("Warning! No orbit file found!")

        links = []          # tracker links accumulated for this pair
        mergers_links = []  # merger links accumulated for this pair
        bh_map = {}         # src BH id -> (dest BH id, merger ratio)

        logger.info("Gathering BH tracking information for steps %r and %r", ts1, ts2)
        # Serialise dictionary-item creation across ranks
        with parallel_tasks.ExclusiveLock("bh"):
            dict_obj = db.core.get_or_create_dictionary_item(
                session, "tracker")
            dict_obj_next = db.core.get_or_create_dictionary_item(
                session, "BH_merger_next")
            dict_obj_prev = db.core.get_or_create_dictionary_item(
                session, "BH_merger_prev")

        track_links_n, idf_n, idt_n = db.tracking.get_tracker_links(
            session, dict_obj_next)
        bh_objects_1, nums1, id1 = get_bh_objs_numbers_and_dbids(ts1)
        bh_objects_2, nums2, id2 = get_bh_objs_numbers_and_dbids(ts2)
        tracker_links, idf, idt = db.tracking.get_tracker_links(
            session, dict_obj)

        idf_n = np.array(idf_n)
        idt_n = np.array(idt_n)

        if len(nums1) == 0 or len(nums2) == 0:
            logger.info("No BHs found in either step %r or %r... moving on", ts1, ts2)
            continue

        logger.info("Generating BH tracker links between steps %r and %r", ts1, ts2)
        # Indices of BH numbers present in both steps
        o1 = np.where(np.in1d(nums1, nums2))[0]
        o2 = np.where(np.in1d(nums2, nums1))[0]
        if len(o1) == 0 or len(o2) == 0:
            continue

        with session.no_autoflush:
            for ii, jj in zip(o1, o2):
                if nums1[ii] != nums2[jj]:
                    raise RuntimeError("BH iords are mismatched")
                # Only add links not already present in the database
                exists = np.where((idf == id1[ii]) & (idt == id2[jj]))[0]
                if len(exists) == 0:
                    links.append(
                        tangos.core.halo_data.HaloLink(bh_objects_1[ii],
                                                       bh_objects_2[jj],
                                                       dict_obj, 1.0))
                    links.append(
                        tangos.core.halo_data.HaloLink(bh_objects_2[jj],
                                                       bh_objects_1[ii],
                                                       dict_obj, 1.0))
        logger.info("Generated %d tracker links between steps %r and %r",
                    len(links), ts1, ts2)

        logger.info("Generating BH Merger information for steps %r and %r", ts1, ts2)
        for l in open(fname[0]):
            l_split = l.split()
            t = float(l_split[6])
            bh_dest_id = int(l_split[0])
            bh_src_id = int(l_split[1])
            ratio = float(l_split[4])

            # ratios in merger file are ambiguous (since major progenitor may be
            # "source" rather than "destination")
            # re-establish using the log file:
            try:
                ratio = bh_log.determine_merger_ratio(bh_src_id, bh_dest_id)
            except (ValueError, AttributeError) as e:
                # AttributeError also covers bh_log being None (no orbit file)
                logger.debug(
                    "Could not calculate merger ratio for %d->%d from the BH log; assuming the .BHmergers-asserted value is accurate",
                    bh_src_id, bh_dest_id)

            # Keep only mergers that happened between the two timesteps
            if t > ts1.time_gyr and t <= ts2.time_gyr:
                bh_map[bh_src_id] = (bh_dest_id, ratio)

        resolve_multiple_mergers(bh_map)

        logger.info("Gathering BH merger links for steps %r and %r", ts1, ts2)
        with session.no_autoflush:
            for src, (dest, ratio) in bh_map.items():
                if src not in nums1 or dest not in nums2:
                    logger.warning(
                        "Can't link BH %r -> %r; missing BH objects in database",
                        src, dest)
                    continue
                bh_src_before = bh_objects_1[nums1.index(src)]
                bh_dest_after = bh_objects_2[nums2.index(dest)]
                # Skip pairs for which a 'next' link already exists
                if ((idf_n == bh_src_before.id) & (idt_n == bh_dest_after.id)).sum() == 0:
                    mergers_links.append(
                        tangos.core.halo_data.HaloLink(bh_src_before,
                                                       bh_dest_after,
                                                       dict_obj_next, 1.0))
                    mergers_links.append(
                        tangos.core.halo_data.HaloLink(bh_dest_after,
                                                       bh_src_before,
                                                       dict_obj_prev, ratio))
        logger.info("Generated %d BH merger links for steps %r and %r",
                    len(mergers_links), ts1, ts2)

        # Commit everything for this pair under the shared "bh" lock
        with parallel_tasks.ExclusiveLock("bh"):
            logger.info("Committing total %d BH links for steps %r and %r",
                        len(mergers_links) + len(links), ts1, ts2)
            session.add_all(links)
            session.add_all(mergers_links)
            session.commit()
        logger.info("Finished committing BH links for steps %r and %r", ts1, ts2)
def _add_property():
    """Write a test property to halos 1-9, committing after each write."""
    for halo_number in pt.distributed(list(range(1, 10))):
        tangos.get_halo(halo_number)['my_test_property'] = halo_number
        tangos.core.get_default_session().commit()