def test_ambiguous_link_made_explicit():
    """Disambiguating link() with an ordering property and direction selects a specific target."""
    halo = db.get_halo("sim/ts1/1")
    cases = [
        ('link(testlink).halo_number()', 2),
        ('link(testlink,testval).halo_number()', 4),
        ('link(testlink,testval,"min").halo_number()', 5),
    ]
    for expression, expected in cases:
        assert halo.calculate(expression) == expected
def test_summed_reconstruction_across_simulations():
    """Histogram reassembly in 'major_across_simulations' mode follows the merger
    tree into a timestep that now belongs to a different simulation, whereas
    'major' mode does not. The timestep is temporarily reassigned to a new
    simulation and restored in the finally block."""
    ts2_h1 = db.get_halo("sim/ts2/1")
    session = db.core.get_default_session()
    sim2 = tangos.core.simulation.Simulation("sim2")
    ts2 = db.query.get_timestep("sim/ts2")
    session.add(sim2)
    try:
        # assign timestep 2 to another simulation
        ts2.simulation = sim2
        session.commit()
        # in default construction mode, the earlier branch will not be found,
        # so 'major' reassembly matches the plain 'place' reassembly
        manual_reconstruction = ts2_h1.get_objects("dummy_histogram")[0].get_data_with_reassembly_options('place')
        reconstructed = ts2_h1.get_objects("dummy_histogram")[0].get_data_with_reassembly_options('major')
        npt.assert_almost_equal(reconstructed, manual_reconstruction)
        # when reassembling across simulations, the timestep should be found:
        # the early part of the histogram is overwritten by the ts1 progenitor's data
        reconstructed = ts2_h1.get_objects("dummy_histogram")[0].get_data_with_reassembly_options('major_across_simulations')
        manual_reconstruction = ts2_h1['dummy_histogram']
        added_bit = tangos.get_halo("sim/ts1/1")['dummy_histogram']
        manual_reconstruction[:len(added_bit)]=added_bit
        npt.assert_almost_equal(reconstructed, manual_reconstruction)
    finally:
        # restore original simulation membership so later tests are unaffected
        ts2.simulation = db.query.get_simulation("sim")
def Tangos_load(sim, redshift):
    """Open the tangos database for *sim* and locate the progenitor halo whose
    redshift is closest to *redshift*.

    Returns (halo, session, output_name, halo_number, actual_redshift)."""
    global path
    print('Loading %s at z=%.2f' % (sim, redshift))
    tangos.core.init_db('{}/{}.db'.format(path, sim.split('_')[0]))
    session = tangos.core.get_default_session()

    # Trace the main progenitor branch back from the final output
    times = tangos.get_simulation('%s' % sim, session).timesteps
    final_time = times[-1].__dict__['extension']
    h = tangos.get_halo('{}/{}/halo_{:d}'.format(sim, final_time, 1), session)
    halo, z = h.calculate_for_progenitors('halo_number()', 'z()')

    # Restrict the timestep arrays to the span covered by the progenitor branch
    z_ = np.array([t.__dict__['redshift'] for t in times])[-len(halo):]
    output = np.array([t.__dict__['extension'] for t in times])[-len(halo):]

    # Pick the output closest to the requested redshift
    nearest = abs(z_ - redshift).argmin()
    output = output[nearest]
    redshift = z_[nearest]

    # Find the progenitor halo number at that redshift
    halo = halo[abs(z - redshift).argmin()]
    h = tangos.get_halo('{}/{}/halo_{:d}'.format(sim, output, halo))
    return h, session, output, halo, redshift
def test_BH_redirection_function():
    """The BH() redirection picks the black hole with max/min BH_mass in a halo."""
    halo = tangos.get_halo("sim/ts1/1")

    bh_dbid = halo.calculate("BH('BH_mass','max','BH').dbid()")
    assert bh_dbid == tangos.get_halo("sim/ts1/1.1").id

    bh_dbid = halo.calculate("BH('BH_mass','min','BH').dbid()")
    assert bh_dbid == tangos.get_halo("sim/ts1/1.2").id
def test_for_loop_is_not_run_twice():
    """Regression test: when the number of CPUs exceeds the number of jobs for a
    task, the task must not be executed a second time (the counter would then
    exceed 3)."""
    tangos.get_halo(1)['test_count'] = 0
    tangos.get_default_session().commit()

    pt.launch(_test_not_run_twice, 5)

    final_count = tangos.get_halo(1)['test_count']
    assert final_count == 3
def get_tstart(self):
    """For each pair in self.data, find the last output at which the two halos
    were still separated (distance > sum of virial radii) and record the time
    and halo properties at that output. Entries stay -1 when no such output exists.

    Fixes: Python 2 print statements converted to print() calls; repetitive
    column initialization collapsed into a loop (same keys, same values)."""
    import tangos as db
    n = len(self.data['time'])
    # Initialize all output columns to the -1 sentinel
    for key in ('tstart', 'Mvir1_start', 'Mvir2_start', 'Mstar1_start',
                'Mstar2_start', 'Mgas1_start', 'Mgas2_start'):
        self.data[key] = np.ones(n) * -1
    for ii in range(n):
        if ii % 100 == 0:
            print(float(ii) / float(n) * 100, "% done")
        h1 = db.get_halo(self.data['dbstep'][ii] + '/' + str(self.data['N1'][ii]))
        h2 = db.get_halo(self.data['dbstep'][ii] + '/' + str(self.data['N2'][ii]))
        # Trace both halos back in time, collecting position/size/mass histories
        pos1, Rvir1, time1, Mvir1, Mstar1, Mgas1 = h1.reverse_property_cascade(
            'SSC', 'Rvir', 't()', 'Mvir', 'Mstar', 'Mgas')
        pos2, Rvir2, time2, Mvir2, Mstar2, Mgas2 = h2.reverse_property_cascade(
            'SSC', 'Rvir', 't()', 'Mvir', 'Mstar', 'Mgas')
        # Only compare over the overlap of the two histories
        ll = min(len(time1), len(time2))
        dist = np.sqrt(np.sum((pos1[0:ll] - pos2[0:ll])**2, axis=1))
        # Outputs where the halos are farther apart than the sum of their virial radii
        sep = np.where(dist > Rvir1[0:ll] + Rvir2[0:ll])[0]
        if len(sep) == 0:
            # cannot find an output when the objects were not close; leave sentinels
            continue
        first = sep[0]
        self.data['tstart'][ii] = time1[first]
        self.data['Mvir1_start'][ii] = Mvir1[first]
        self.data['Mvir2_start'][ii] = Mvir2[first]
        self.data['Mstar1_start'][ii] = Mstar1[first]
        self.data['Mstar2_start'][ii] = Mstar2[first]
        self.data['Mgas1_start'][ii] = Mgas1[first]
        self.data['Mgas2_start'][ii] = Mgas2[first]
def test_ahf_merger_tree_import():
    """Importing an AHF merger tree assigns finder offsets and links halos across timesteps."""
    importer = tools.ahf_merger_tree_importer.AHFTreeImporter()
    importer.parse_command_line("--for test_ahf_merger_tree".split())
    with log.LogCapturer():
        importer.run_calculation_loop()

    offsets = [x.finder_offset
               for x in tangos.get_timestep("test_ahf_merger_tree/tiny_000832").halos.all()]
    assert (np.array(offsets) == [1, 2, 3, 4, 5, 6, 7, 8, 9]).all()

    expected_progenitors = ["test_ahf_merger_tree/tiny.000640/halo_%d" % i
                            for i in (1, 2, 3, 4, 5, 6, 1, 2)]
    testing.assert_halolists_equal(
        tangos.get_timestep("test_ahf_merger_tree/tiny.000832").calculate_all(
            "earlier(1)", object_typetag='halo')[0],
        expected_progenitors)

    assert tangos.get_halo("%/%640/halo_7").next == tangos.get_halo("%/%832/halo_1")
    assert tangos.get_halo("%/%832/halo_1").previous == tangos.get_halo("%/%640/halo_1")
def get_halo_info(self, dbsim, halo_props=None):
    """Attach host-halo information to every BH pair in self.rawdat.

    For each pair, locates the black hole in the timestep just after the pair's
    redshift (falling back to the preceding step or to a remapped ID), records
    the bracketing snapshot names, flags satellites (sat_flag: 0 central,
    1 satellite, -1 no host), and evaluates *halo_props* on the host halo.

    Fixes applied: mutable default argument replaced by None sentinel;
    Python 2 print statement converted; ``type(...) == list`` replaced by
    isinstance; bare ``except:`` narrowed to ``except Exception`` (the
    best-effort skip behavior is preserved)."""
    import tangos as db
    if halo_props is None:
        halo_props = ['halo_number()', 'Mvir', 'Mstar', 'Mgas']
    n = len(self.rawdat['ID1'])
    # NOTE(review): S100 columns hold numpy bytes; under Python 3, reading them
    # back gives bytes, not str — verify downstream consumers expect that.
    self.rawdat['snap_after'] = np.zeros(n).astype('S100')
    self.rawdat['snap_before'] = np.zeros(n).astype('S100')
    self.rawdat['sat_flag'] = np.zeros(n)
    zsteps = []
    for step in dbsim.timesteps:
        zsteps.append(step.redshift)
    zsteps = np.array(zsteps)
    for p in halo_props:
        self.rawdat[p] = np.ones(n) * -1
    for i in range(n):
        if i % 100 == 0:
            print(float(i) / n * 100, '% done')
        id1 = self.rawdat['ID1'][i]
        id2 = self.rawdat['ID2'][i]
        if self.rawdat['redshift'][i] < zsteps.min():
            continue
        # First timestep at or below the pair's redshift
        ind = np.where(self.rawdat['redshift'][i] >= zsteps)[0][0]
        self.rawdat['snap_after'][i] = dbsim.timesteps[ind].extension
        if ind > 0:
            self.rawdat['snap_before'][i] = dbsim.timesteps[ind - 1].extension
        bh = db.get_halo(str(dbsim.timesteps[ind].path) + '/1.' + str(id1))
        if bh is None:
            # Fall back: look for id1 in the previous step and follow it forward
            if ind > 0:
                bh = db.get_halo(str(dbsim.timesteps[ind - 1].path) + '/1.' + str(id1))
                if bh is not None:
                    bh = bh.next
        if bh is None:
            # Fall back: try the partner id2 in the previous step
            if ind > 0:
                bh = db.get_halo(str(dbsim.timesteps[ind - 1].path) + '/1.' + str(id2))
                if bh is not None:
                    bh = bh.next
        if bh is None:
            # Last resort: id1 may itself have been absorbed — use the surviving ID
            ii = np.where(self.rawdat['ID2'] == self.rawdat['ID1'][i])[0]
            if len(ii) > 0:
                id3 = self.rawdat['ID1'][ii[0]]
                bh = db.get_halo(str(dbsim.timesteps[ind].path) + '/1.' + str(id3))
        if bh is None:
            continue
        if 'host_halo' in bh.keys():
            hosth = bh['host_halo']
        else:
            hosth = None
        if hosth is not None:
            if "BH_central" in hosth.keys():
                if isinstance(hosth['BH_central'], list):
                    cenbhs = np.array([halobh.halo_number for halobh in hosth['BH_central']])
                else:
                    cenbhs = np.array([hosth['BH_central']])
                if bh.halo_number not in cenbhs:
                    self.rawdat['sat_flag'][i] = 1
            else:
                # host has no recorded central BH — treat this BH as a satellite
                self.rawdat['sat_flag'][i] = 1
        else:
            self.rawdat['sat_flag'][i] = -1
        for p in halo_props:
            try:
                self.rawdat[p][i] = bh.calculate('host_halo.' + p)
            except Exception:
                # best-effort: property may be missing for this halo
                continue
def test_load_tracker_iord_halo():
    """Two tracker halos built from the same particles load identical iord arrays."""
    add_test_simulation_to_db()
    tracker_paths = ("test_tipsy/tiny.000640/tracker_1",
                     "test_tipsy/tiny.000640/tracker_2")
    h_direct, h_iord = (db.get_halo(p).load(mode='partial') for p in tracker_paths)
    assert (h_direct['iord'] == h_iord['iord']).all()
def test_default_tree_has_correct_structure():
    """Default merger-tree construction produces the expected branch summaries."""
    expectations = (
        ("%/ts6/1", "1(1(1(1(1(1),2(2))),6(6(7(7)))))"),
        ("%/ts6/2", "2(2(2(2(3(3)))))"),
    )
    for halo_path, summary in expectations:
        mt = tree.MergerTree(tangos.get_halo(halo_path))
        mt.construct()
        assert mt.summarise() == summary
def test_calculate_for_progenitors():
    """calculate_for_progenitors walks the main branch back through ts3 -> ts2 -> ts1."""
    h = tangos.get_halo("sim/ts3/1")
    objs, = h.calculate_for_progenitors("dbid()")
    assert len(objs) == 3
    expected_paths = ("sim/ts3/1", "sim/ts2/1", "sim/ts1/1")
    for dbid, path in zip(objs, expected_paths):
        assert dbid == tangos.get_halo(path).id
def test_blocking_avoids_exception():
    """Parallel writers using blocking mode create the halo without raising."""
    assert tangos.get_halo("sim/ts1/6") is None
    db.core.get_default_session().commit()
    launch_args = (True,)
    with log.LogCapturer():
        pt.launch(_perform_test, 3, launch_args)
    assert tangos.get_halo("sim/ts1/6") is not None
def _add_two_properties_different_ranges():
    """Worker: write my_test_property_2 to halos 1-9 and my_test_property_3 to halos 1-7,
    distributing the work across parallel tasks."""
    for halo_id in pt.distributed(list(range(1, 10))):
        tangos.get_halo(halo_id)['my_test_property_2'] = halo_id
        tangos.core.get_default_session().commit()
    for halo_id in pt.distributed(list(range(1, 8))):
        tangos.get_halo(halo_id)['my_test_property_3'] = halo_id
        tangos.core.get_default_session().commit()
def test_synchronize_db_creator():
    """Parallel workers share a single Creator record for properties they write."""
    pt.launch(_test_synchronize_db_creator, 3)
    for halo_id in (1, 2):
        assert tangos.get_halo(halo_id)['db_creator_test_property'] == 1.0
    creator_1 = tangos.get_halo(1).get_objects('db_creator_test_property')[0].creator
    creator_2 = tangos.get_halo(2).get_objects('db_creator_test_property')[0].creator
    assert creator_1 == creator_2
def test_link_repr():
    """HaloLink repr includes the link name, endpoints, and the weight (or None)."""
    h1 = db.get_halo('dummy_sim_1/step.1/1')
    h2 = db.get_halo('dummy_sim_1/step.1/2')
    d_test = db.core.get_or_create_dictionary_item(db.get_default_session(), "test")
    cases = [
        (1.0, "<HaloLink test dummy_sim_1/step.1/halo_1 to dummy_sim_1/step.1/halo_2 weight=1.00>"),
        (None, "<HaloLink test dummy_sim_1/step.1/halo_1 to dummy_sim_1/step.1/halo_2 weight=None>"),
    ]
    for weight, expected_repr in cases:
        l_obj = link.HaloLink(h1, h2, d_test, weight)
        assert repr(l_obj) == expected_repr
def test_timestep_linking():
    """The TimeLinker connects matching halos in consecutive timesteps."""
    linker = crosslink.TimeLinker()
    linker.parse_command_line([])
    with log.LogCapturer():
        linker.run_calculation_loop()
    assert db.get_halo("dummy_sim_1/step.1/1").next == db.get_halo("dummy_sim_1/step.2/1")
    assert db.get_halo("dummy_sim_1/step.2/2").previous == db.get_halo("dummy_sim_1/step.1/2")
    assert db.get_halo("dummy_sim_1/step.1/1").links.count() == 2
def test_link_can_be_used_within_calculation():
    """Properties of a link()-selected halo can be chained inside a live calculation."""
    halo = db.get_halo("sim/ts1/1")
    cases = [
        ('link(testlink).testval2', 10.0),
        ('link(testlink,testval).testval2', 30.0),
        ('link(testlink,testval,"min").testval2', 40.0),
        ('link(testlink,testval,"max",testval2>10,testval2<40).testval2', 30),
    ]
    for expression, expected in cases:
        assert halo.calculate(expression) == expected
def test_add_two_properties_different_ranges():
    """After the parallel write, property_2 covers halos 1-9 but property_3 only 1-7."""
    pt.launch(_add_two_properties_different_ranges, 3)
    for i in range(1, 10):
        halo = tangos.get_halo(i)
        assert halo['my_test_property_2'] == i
        if i < 8:
            assert 'my_test_property_3' in halo
            assert halo['my_test_property_3'] == i
        else:
            assert 'my_test_property_3' not in halo
def test_parse_raw_psuedofunction():
    """raw() parses to a raw-value extraction pattern and returns the stored data.

    NOTE: 'psuedofunction' typo is kept in the name to avoid changing the public
    interface of this test."""
    parsed = tangos.live_calculation.parser.parse_property_name("raw(dummy_property_1)")
    extraction = parsed._inputs[0]._extraction_pattern
    assert isinstance(extraction, extraction_patterns.HaloPropertyRawValueGetter)
    halo = tangos.get_halo("sim/ts1/1")
    assert all(halo.calculate(parsed) == halo['dummy_property_1'])
def test_summed_reconstruction():
    """'sum' reassembly adds the progenitor's histogram contribution to the stored one."""
    ts2_h1 = db.get_halo("sim/ts2/1")
    reconstructed = ts2_h1.get_objects("dummy_histogram")[0].get_data_with_reassembly_options('sum')
    expected = ts2_h1['dummy_histogram']
    contribution = tangos.get_halo("sim/ts1/2")['dummy_histogram']
    expected[:len(contribution)] += contribution
    npt.assert_almost_equal(reconstructed, expected)
def test_error_ignoring(fresh_database):
    """A property that raises is logged and skipped; other properties are still written."""
    log = run_writer_with_args("dummy_property", "dummy_property_with_exception")
    assert "Uncaught exception during property calculation" in log
    for path, value in (("dummy_sim_1/step.1/1", 1.0),
                        ("dummy_sim_1/step.1/2", 2.0),
                        ("dummy_sim_1/step.2/1", 2.0)):
        assert db.get_halo(path)['dummy_property'] == value
    stored_keys = list(db.get_halo("dummy_sim_1/step.1/1").keys())
    assert 'dummy_property' in stored_keys
    assert 'dummy_property_with_exception' not in stored_keys
def test_blocking_avoids_exception():
    """Blocking-session mode lets parallel sqlite writers create the halo without errors."""
    if testing_db_backend != "sqlite":
        skip("This test is only relevant for sqlite databases")
    assert tangos.get_halo("sim/ts1/6") is None
    db.core.get_default_session().commit()
    launch_args = (True,)
    with log.LogCapturer():
        pt.launch(_perform_test, 3, launch_args)
    assert tangos.get_halo("sim/ts1/6") is not None
def test_renumbering(fresh_database_no_contents):
    """With renumbering enabled, halos are re-ordered by NDM: halo_number differs from finder_id."""
    handler = output_testing.TestInputHandlerReverseHaloNDM("dummy_sim_2")
    manager = add_simulation.SimulationAdderUpdater(handler)
    assert not manager.simulation_exists()
    with log.LogCapturer():
        manager.scan_simulation_and_add_all_descendants()
    halo = db.get_halo("dummy_sim_2/step.1/halo_2")
    assert halo.halo_number == 2
    assert halo.finder_id == 7
    assert halo.NDM == 2006
def _test_not_run_twice():
    """Worker: increment the shared counter once per distributed job, under a lock."""
    import time
    # Stagger the start of each process so they contend realistically
    time.sleep(pt.backend.rank() * 0.05)
    for _ in pt.distributed(list(range(3))):
        with pt.ExclusiveLock("lock"):
            tangos.get_halo(1)['test_count'] += 1
            tangos.get_default_session().commit()
def test_link_returned_halo_is_usable():
    """A halo returned by link() supports normal property access."""
    halo = db.get_halo("sim/ts1/1")
    cases = [
        ('link(testlink)', 1.0),
        ('link(testlink,testval)', 3.0),
        ('link(testlink,testval,"min")', 0.0),
        ('link(testlink,testval,"max",testval2>10,testval2<40)', 3.0),
    ]
    for expression, expected in cases:
        assert halo.calculate(expression)["testval"] == expected
def test_writer_sees_raw_properties(fresh_database):
    """Regression test for issue #121: the writer must not reassemble stored data
    when re-running over existing properties."""
    run_writer_with_args("dummy_property_with_reconstruction")
    halo = db.get_halo(2)
    assert halo['dummy_property_with_reconstruction'] == 2.0
    assert halo.calculate('raw(dummy_property_with_reconstruction)') == 1.0

    def raise_exception(obj):
        raise RuntimeError("reconstruct has been called")

    DummyPropertyWithReconstruction.callback = raise_exception
    # A second run should not trigger reconstruction of the existing data stream
    run_writer_with_args("dummy_property_with_reconstruction")
def test_historical_value_finding():
    """find_progenitor/find_descendant locate the timestep where a value is extremal.

    Fix: removed the unused local ``vals`` (a dead calculate_for_progenitors
    query whose result was never read)."""
    halo = db.get_halo("sim/ts3/1")
    assert_halolists_equal([halo.calculate("find_progenitor(testval, 'max')")],
                           ["sim/ts2/1"])
    assert_halolists_equal([halo.calculate("find_progenitor(testval, 'min')")],
                           ["sim/ts3/1"])
    timestep = db.get_timestep("sim/ts3")
    assert_halolists_equal(timestep.calculate_all("find_progenitor(testval, 'max')")[0],
                           ["sim/ts2/1", "sim/ts3/2"])
    assert_halolists_equal(timestep.calculate_all("find_progenitor(testval, 'min')")[0],
                           ["sim/ts3/1", "sim/ts1/3"])
    assert_halolists_equal(
        db.get_timestep("sim/ts1").calculate_all("find_descendant(testval, 'min')")[0],
        ["sim/ts3/1", "sim/ts1/3", "sim/ts1/4", "sim/ts1/5"])
def test_renumbering_disabled():
    """With renumber=False, halo_number tracks finder_id and no NDM re-ordering occurs."""
    testing.init_blank_db_for_testing()
    handler = output_testing.TestInputHandlerReverseHaloNDM("dummy_sim_2")
    manager = add_simulation.SimulationAdderUpdater(handler, renumber=False)
    assert not manager.simulation_exists()
    with log.LogCapturer():
        manager.scan_simulation_and_add_all_descendants()
    halo = db.get_halo("dummy_sim_2/step.1/halo_2")
    assert halo.halo_number == 2
    assert halo.finder_id == 2
    assert halo.NDM == 2001
def _setup_dummy_histogram_data(ts1, ts2):
    """Populate three halos with slices of a shared global histogram for reassembly tests."""
    global test_histogram
    test_histogram = np.arange(0.0, 1000.0, 1.0)
    histogram_property = DummyHistogramProperty(db.get_simulation("sim"))
    ts1_h1 = db.get_halo("sim/ts1/1")
    ts1_h2 = db.get_halo("sim/ts1/2")
    ts2_h1 = db.get_halo("sim/ts2/1")
    ts1_h1['dummy_histogram'] = test_histogram[histogram_property.store_slice(ts1.time_gyr)]
    ts1_h2['dummy_histogram'] = test_histogram[histogram_property.store_slice(ts1.time_gyr)] * 0.5
    ts2_h1['dummy_histogram'] = test_histogram[histogram_property.store_slice(ts2.time_gyr)]
    db.core.get_default_session().commit()
def test_single_match_from_multiple_routes():
    """Even when several candidate matches exist in another simulation, only the
    highest-weight result is returned."""
    # Multiple routes within time-tracing (didn't previously fail)
    h = tangos.get_halo("sim3/ts3/1")
    testing.assert_halolists_equal([h.calculate("match('sim3/ts1')")],
                                   [tangos.get_halo("sim3/ts1/1")])
    # Multiple routes into another simulation (did previously fail);
    # routes here are sim3/ts1/1 -> sim2/ts1/1, sim2/ts1/2 -> sim/ts1/1
    h = tangos.get_halo("sim3/ts1/1")
    testing.assert_halolists_equal([h.calculate("match('sim')")],
                                   [tangos.get_halo("sim/ts1/1")])
def test_delete_property_one_simulation(fresh_database):
    """PropertyDeleter removes a property everywhere within the targeted simulation only."""
    assert 'dummy_property' in db.get_halo("dummy_sim_1/step.1/halo_1")
    tool = property_deleter.PropertyDeleter()
    tool.parse_command_line("dummy_property --for dummy_sim_1 -f".split())
    tool.run_calculation_loop()
    # Deleted throughout dummy_sim_1:
    for path in ("dummy_sim_1/step.1/halo_1",
                 "dummy_sim_1/step.1/halo_2",
                 "dummy_sim_1/step.2/halo_1"):
        assert 'dummy_property' not in db.get_halo(path)
    # Other simulations are unaffected:
    assert 'dummy_property' in db.get_halo("dummy_sim_2/step.1/halo_1")
def test_crosslinking():
    """CrossLinker links halos between two simulations; unlinked steps raise NoResultsError."""
    linker = crosslink.CrossLinker()
    linker.parse_command_line(["dummy_sim_2", "dummy_sim_1"])
    with log.LogCapturer():
        assert linker.need_crosslink_ts(db.get_timestep("dummy_sim_1/step.1"),
                                        db.get_timestep("dummy_sim_2/step.1"))
        linker.run_calculation_loop()
    assert not linker.need_crosslink_ts(db.get_timestep("dummy_sim_1/step.1"),
                                        db.get_timestep("dummy_sim_2/step.1"))
    matched = db.get_halo('dummy_sim_1/step.1/1').calculate('match("dummy_sim_2").dbid()')
    assert matched == db.get_halo('dummy_sim_2/step.1/1').id
    matched = db.get_halo('dummy_sim_2/step.2/3').calculate('match("dummy_sim_1").dbid()')
    assert matched == db.get_halo('dummy_sim_1/step.2/3').id
    # step.3 has no counterpart in dummy_sim_1, so matching must fail
    with assert_raises(live_calculation.NoResultsError):
        db.get_halo('dummy_sim_2/step.3/1').calculate('match("dummy_sim_1").dbid()')
def dtBHmerge(self):
    """For each BH pair, compute the time until the two black holes merge
    (their halo_number histories coincide), storing -1 when no merger is found.

    Fix: Python 2 print statements converted to print() calls."""
    import tangos as db
    n = len(self.data['time'])
    self.data['dtBHmerge'] = np.ones(n) * -1
    self.data['dtBHmerge_min'] = np.ones(n) * -1
    if 'time_next' not in self.data.keys():
        # NOTE(review): self.get_tnext(self) passes self twice if get_tnext is a
        # normal bound method — verify the intended signature.
        self.get_tnext(self)
    for ii in range(n):
        if ii % 100 == 0:
            print(float(ii) / float(n) * 100, "% done")
        bh1 = db.get_halo(self.data['dbstep'][ii] + '/1.' + str(self.data['bh().halo_number()1'][ii]))
        bh2 = db.get_halo(self.data['dbstep'][ii] + '/1.' + str(self.data['bh().halo_number()2'][ii]))
        # Forward histories of both black holes
        bhc1, time1 = bh1.property_cascade('halo_number()', 't()')
        bhc2, time2 = bh2.property_cascade('halo_number()', 't()')
        # First output at which the two BH tracks coincide
        im = np.where(bhc1 == bhc2)[0]
        if len(im) > 0:
            tmerge = time1[im[0]]
            if tmerge != time2[im[0]]:
                print("WEIRD ONE")
            self.data['dtBHmerge'][ii] = tmerge - self.data['time'][ii]
            self.data['dtBHmerge_min'][ii] = self.data['dtBHmerge'][ii] - self.data['time_next'][ii]
def get_halo_merger(self, dbsim, overwrite=False, detail=False):
    """For each BH pair, determine when their host halos merged by tracing both
    black holes backwards and finding the first output where the host halo
    numbers agree. Records dt_hmerger (time since merger) and dt_hmerger_min
    (a lower bound), plus host properties at the merger output when
    detail=True (NDM counts otherwise).

    Fixes: Python 2 print statements converted; ``== True``/``== False``
    comparisons replaced with truthiness; bare ``except:`` narrowed to
    ``except Exception`` (the best-effort skip behavior is preserved)."""
    import tangos as db
    n = len(self.rawdat['ID1'])
    if 'dt_hmerger' not in self.rawdat.keys() or overwrite:
        self.rawdat['dt_hmerger'] = np.ones(n) * -1
        self.rawdat['dt_hmerger_min'] = np.ones(n) * -1
        if detail:
            for key in ('hmerger_mvir_1', 'hmerger_mvir_2', 'hmerger_mgas_1',
                        'hmerger_mgas_2', 'hmerger_mstar_1', 'hmerger_mstar_2',
                        'hmerger_mbh_1', 'hmerger_mbh_2',
                        'hmerger_dbh_1', 'hmerger_dbh_2'):
                self.rawdat[key] = np.ones(n) * -1
        else:
            self.rawdat['hmerger_ndm_1'] = np.ones(n) * -1
            self.rawdat['hmerger_ndm_2'] = np.ones(n) * -1
    nodiff = 0
    badmatch = 0
    for i in range(n):
        if i % 30 == 0:
            print(float(i) / float(n) * 100, '% done')
        if self.rawdat['dt_hmerger'][i] >= 0 and not overwrite:
            continue
        try:
            # NOTE(review): snap_before entries are numpy bytes (S100); under
            # Python 3, str() yields "b'...'" — may need .decode() here.
            bh1 = db.get_halo(str(dbsim.path) + '/%' + str(self.rawdat['snap_before'][i])
                              + '/1.' + str(self.rawdat['ID1'][i]))
            bh2 = db.get_halo(str(dbsim.path) + '/%' + str(self.rawdat['snap_before'][i])
                              + '/1.' + str(self.rawdat['ID2'][i]))
        except Exception:
            continue
        if bh1 is None or bh2 is None:
            # One BH untraceable: bound the merger time by the earlier formation time
            self.rawdat['dt_hmerger'][i] = self.rawdat['time'][i] - min(
                self.rawdat['tform1'][i], self.rawdat['tform2'][i])
            self.rawdat['dt_hmerger_min'][i] = 0
            continue
        try:
            if detail:
                time1, hn1, mv1, mg1, ms1, mbh1, dbh1 = bh1.reverse_property_cascade(
                    't()', 'host_halo.halo_number()', 'host_halo.Mvir',
                    'host_halo.Mgas', 'host_halo.Mstar', 'BH_mass', 'BH_central_distance')
                time2, hn2, mv2, mg2, ms2, mbh2, dbh2 = bh2.reverse_property_cascade(
                    't()', 'host_halo.halo_number()', 'host_halo.Mvir',
                    'host_halo.Mgas', 'host_halo.Mstar', 'BH_mass', 'BH_central_distance')
            else:
                time1, hn1, ndm1 = bh1.reverse_property_cascade(
                    't()', 'host_halo.halo_number()', 'host_halo.NDM()')
                time2, hn2, ndm2 = bh2.reverse_property_cascade(
                    't()', 'host_halo.halo_number()', 'host_halo.NDM()')
        except Exception:
            badmatch += 1
            continue
        # Align the two histories on their common output times
        match1 = np.where(np.in1d(time1, time2))[0]
        match2 = np.where(np.in1d(time2, time1))[0]
        if not np.array_equal(time1[match1], time2[match2]):
            print("WARNING time arrays don't match!")
        if len(match1) == 0 or len(match2) == 0:
            badmatch += 1
            continue
        # Outputs (scanning back in time) where the two BHs sit in different halos
        diff = np.where(hn1[match1] != hn2[match2])[0]
        if len(diff) == 0:
            # The BHs were never in different halos: bound by formation times
            nodiff += 1
            self.rawdat['dt_hmerger'][i] = self.rawdat['time'][i] - max(
                self.rawdat['tform1'][i], self.rawdat['tform2'][i])
            self.rawdat['dt_hmerger_min'][i] = self.rawdat['time'][i] - max(
                time1.min(), time2.min())
            if detail:
                self.rawdat['hmerger_mvir_1'][i] = mv1[-1]
                self.rawdat['hmerger_mgas_1'][i] = mg1[-1]
                self.rawdat['hmerger_mstar_1'][i] = ms1[-1]
                self.rawdat['hmerger_mbh_1'][i] = mbh1[-1]
                self.rawdat['hmerger_mbh_2'][i] = mbh2[-1]
                self.rawdat['hmerger_dbh_1'][i] = dbh1[-1]
                self.rawdat['hmerger_dbh_2'][i] = dbh2[-1]
            else:
                self.rawdat['hmerger_ndm_1'][i] = ndm1[-1]
            continue
        # First (most recent) output at which the hosts differed = merger epoch
        th1 = time1[match1[diff[0]]]
        th2 = time2[match2[diff[0]]]
        if diff[0] != 0:
            th1p = time1[match1[diff[0] - 1]]
        else:
            th1p = self.rawdat['time'][i]
        if th1 != th2:
            print("WARNING halo merge times not correct")
        self.rawdat['dt_hmerger'][i] = self.rawdat['time'][i] - th1
        if self.rawdat['time'][i] - th1p > 0:
            self.rawdat['dt_hmerger_min'][i] = self.rawdat['time'][i] - th1p
        else:
            self.rawdat['dt_hmerger_min'][i] = 0
        if detail:
            self.rawdat['hmerger_mvir_1'][i] = mv1[match1[diff[0]]]
            self.rawdat['hmerger_mvir_2'][i] = mv2[match2[diff[0]]]
            self.rawdat['hmerger_mgas_1'][i] = mg1[match1[diff[0]]]
            self.rawdat['hmerger_mgas_2'][i] = mg2[match2[diff[0]]]
            self.rawdat['hmerger_mstar_1'][i] = ms1[match1[diff[0]]]
            self.rawdat['hmerger_mstar_2'][i] = ms2[match2[diff[0]]]
            self.rawdat['hmerger_mbh_1'][i] = mbh1[match1[diff[0]]]
            self.rawdat['hmerger_mbh_2'][i] = mbh2[match2[diff[0]]]
            self.rawdat['hmerger_dbh_1'][i] = dbh1[match1[diff[0]] - 1]
            self.rawdat['hmerger_dbh_2'][i] = dbh2[match2[diff[0]] - 1]
        else:
            self.rawdat['hmerger_ndm_1'][i] = ndm1[match1[diff[0]]]
            self.rawdat['hmerger_ndm_2'][i] = ndm2[match2[diff[0]]]
    print("finished with ", nodiff, "BHs having never been in different halos and ",
          badmatch, "bad matches")
plt.clf()
del sm
del cbar
# Plotting the raw star formation rate and calculating the average star
# formation rate over the last 100 Myr for each halo.
# Fix: Python 2 print statements converted to print() calls.
SFR = []
for i in numbers:
    hn_SFR = step1.halos[i-1]['SFR_histogram']
    plt.hist(hn_SFR)
    plt.savefig('/oasis/scratch/comet/drenniks/temp_project/final_IRX/z_0/IRX_b/test_IRX/raw_SFR/SFR_' + str(i) + '.png')
    plt.clf()
    string = '"cosmo25%/%8192/' + str(i-1) + '"'
    print('string is ', string)
    halo_call = string[1:-1]
    halo_SFR = db.get_halo(halo_call)
    raw_SFR = halo_SFR.calculate("raw(SFR_histogram)")/(1e9)  # Msun/yr
    # Taking SFR for the last 100 Myrs (from the raw data) (in bins of 10 Myr)
    SFR_100 = np.sum(raw_SFR[-10:])
    avg_SFR = SFR_100 / 10
    print('avg_SFR = ', avg_SFR)
    SFR.append(avg_SFR)
    plt.hist(raw_SFR, bins=range(11))
    plt.savefig('/oasis/scratch/comet/drenniks/temp_project/final_IRX/z_0/IRX_b/test_IRX/raw_SFR/SFR_hist_' + str(i) + '.png')
    plt.clf()
SFR = np.array(SFR)
print('SFR = ', SFR)
print('np.min(SFR) = ', np.min(SFR))
print('np.max(SFR) = ', np.max(SFR))
print('np.min(np.log10(SFR)) = ', np.min(np.log10(SFR)))
print('np.max(np.log10(SFR)) = ', np.max(np.log10(SFR)))