def get_data():
    """Get survey populations for each telescope in TELESCOPES.

    Returns:
        list: SurveyPopulation objects, one per telescope, either
            unpickled from disk (REMAKE falsy) or freshly generated.
    """
    # Don't always regenerate a population
    if not REMAKE:
        surv_pops = []
        for telescope in TELESCOPES:
            # Saved populations use the fly's-eye survey name for askap
            name = 'askap-fly' if telescope == 'askap' else telescope
            path = paths.populations() + f'complex_{name}.p'
            surv_pops.append(unpickle(path))
        return surv_pops

    cosmic_pop = CosmicPopulation.complex(SIZE, generate=False)

    surveys = []
    for telescope in TELESCOPES:
        # Parkes has its own gain pattern; all others use an Airy disk
        pattern = telescope if telescope == 'parkes' else 'airy'
        name = 'askap-fly' if telescope == 'askap' else telescope
        surveys.append(Survey(name, gain_pattern=pattern, n_sidelobes=1))

    return LargePopulation(cosmic_pop, *surveys).pops
def iter_alpha(i, surveys=surveys, parallel=None):
    """Generate populations for ALPHAS[i] and record their detection rates.

    Args:
        i (int): Index into the module-level ALPHAS grid.
        surveys (list): Surveys to observe with (bound at definition time
            from the module-level ``surveys``).
        parallel: Optional shared 2D array. When given, rates are written
            to it at (df-row, survey-column); otherwise they are written
            directly into the module-level ``df``.
    """
    alpha = ALPHAS[i]
    pop = CosmicPopulation.complex(SIZE)
    pop.set_dist(model='vol_co', z_max=1.0, alpha=alpha)
    pop.set_lum(model='powerlaw', low=1e40, high=1e45, power=-1)
    pop.generate()

    for li in LIS:
        # Only the luminosities need regenerating per luminosity index
        pop.set_lum(model='powerlaw', low=1e40, high=1e45, power=li)
        pop.gen_lum()
        for si in SIS:
            # Likewise only the spectral indices per si value
            pop.set_si(model='constant', value=si)
            pop.gen_si()
            pop.name = f'complex_alpha_{alpha}_lum_{li}_si_{si}'
            for survey in surveys:
                surv_pop = SurveyPopulation(pop, survey)
                print(surv_pop.name)
                surv_pop.save()

                sr = surv_pop.source_rate
                rate = sr.det / sr.days

                # Rows of df matching this (alpha, li, si) combination
                mask = (df.alpha == alpha) & (df.li == li) & (df.si == si)
                if parallel is not None:
                    # Use a dedicated name here: the original reassigned
                    # the function argument `i` to a pandas Index
                    row_ix = df[mask].index
                    col_ix = SURVEY_NAMES.index(survey.name)
                    parallel[row_ix, col_ix] = rate
                else:
                    df.loc[mask, survey.name] = rate
def complex_rates(remake=REMAKE, alphas=ALPHAS, size=SIZE, surveys=SURVEYS):
    """Calculate expected rates for a complex populations.

    Args:
        remake (bool): Regenerate populations when True, otherwise load
            pickled ones from disk.
        alphas (iterable): Source-count slopes to evaluate.
        size (float): Baseline cosmic population size.
        surveys (sequence): Survey names; all rates are normalised to the
            first survey in this sequence.

    Returns:
        dict: Survey name -> list of rates (one per alpha), scaled so the
            first survey's rates are all 1.
    """
    rates = defaultdict(list)

    # Don't always regenerate a population
    if remake is False:
        for alpha in alphas:
            for s in surveys:
                surv_rates = unpickle(f'complex_alpha_{alpha}_{s}').source_rate
                pprint(f'Alpha:{alpha:.2}, Survey: {s}, Det: {surv_rates.det}')
                rate = (surv_rates.det / surv_rates.days)
                rates[s].append(rate)
    else:
        pops = []
        for alpha in alphas:
            # Steeper alphas detect fewer sources, so scale the population
            # up. Use a per-iteration value: the original reassigned the
            # `size` parameter itself, so one alpha's enlarged size leaked
            # into every subsequent (possibly shallower) alpha.
            n_srcs = size
            if alpha <= -1.0 and ADAPTATIVE_SCALING:
                n_srcs = 1e7
            if alpha <= -1.5 and ADAPTATIVE_SCALING:
                n_srcs = 1e8

            pop = CosmicPopulation.complex(n_srcs)
            pop.set_dist(model='vol_co', z_max=2.5, alpha=alpha,
                         H_0=67.74, W_m=0.3089, W_v=0.6911)
            pop.set_lum(model='powerlaw', low=1e40, high=1e45, power=-1)
            pop.name = f'complex_alpha_{alpha}'
            pops.append(pop)

            # Set up surveys
            ss = []
            for s in surveys:
                survey = Survey(name=s)
                survey.set_beam(model='airy', n_sidelobes=1)
                ss.append(survey)

            surv_pops = LargePopulation(pop, *ss).pops
            for i, s in enumerate(surveys):
                surv_rates = surv_pops[i].source_rate
                pprint(f'Alpha:{alpha:.2}, Survey: {s}, Det: {surv_rates.det}')
                rate = (surv_rates.det / surv_rates.days)
                rates[s].append(rate)

    # Scale rates to first survey in list
    for s in surveys:
        if s != surveys[0]:
            norm = []
            for i, r in enumerate(rates[s]):
                norm.append(r / rates[surveys[0]][i])
            rates[s] = norm
    rates[surveys[0]] = [r / r for r in rates[surveys[0]]]

    return rates
def iter_alpha(i):
    """Generate and survey one population for ``alphas[i]``.

    Closure over the enclosing scope: reads ``self``, ``alphas``, ``sis``,
    ``lis``, ``run`` and the fixed parameters (``w_mean``, ``w_std``,
    ``dm_igm_slope``, ``dm_host``, ``lum_min``, ``lum_max``). For every
    (si, li) grid point it regenerates only the affected burst properties,
    then saves one SurveyPopulation per survey under the uuid recorded for
    that parameter combination in ``self.so.df``.
    """
    alpha = alphas[i]
    pop = CosmicPopulation.complex(self.pop_size)
    pop.set_dist(model='vol_co', z_max=1.0, alpha=alpha)
    # Placeholder luminosity model; overwritten per-li inside the loop below
    pop.set_lum(model='constant', value=1)
    # NaN-valued parameters leave the population's default models in place
    if not np.isnan(w_mean):
        pop.set_w(model='lognormal', mean=w_mean, std=w_std)
    if not np.isnan(dm_igm_slope):
        pop.set_dm_igm(model='ioka', slope=dm_igm_slope)
    pop.set_dm_host(model='constant', value=dm_host)
    pop.generate()

    for si in sis:
        pop.set_si(model='constant', value=si)
        pop.gen_si()  # regenerate only the spectral indices
        for li in lis:
            pop.set_lum(model='powerlaw', low=1e40, high=1e45, power=li)
            if not np.isnan(lum_min):
                # NOTE(review): this override uses `index=li` where the line
                # above uses `power=li` — confirm set_lum accepts both
                # keywords for the powerlaw model
                pop.set_lum(model='powerlaw', low=lum_min, high=lum_max,
                            index=li)
            pop.gen_lum()  # regenerate only the luminosities
            for survey in self.surveys:
                surv_pop = SurveyPopulation(pop, survey)
                # Get unique identifier
                mask = (self.so.df.par_set == 1)
                mask &= (self.so.df.run == run)
                mask &= (self.so.df.alpha == alpha)
                mask &= (self.so.df.si == si)
                mask &= (self.so.df.li == li)
                mask &= (self.so.df.survey == survey.name)
                uuid = self.so.df[mask].uuid.iloc[0]
                surv_pop.name = f'mc/run_{run}/{uuid}'
                surv_pop.save()
def complex_rates(remake=REMAKE, alphas=ALPHAS, size=SIZE, surveys=SURVEYS):
    """Calculate expected detection rates for complex populations.

    Rates are computed per alpha for every survey and then normalised to
    the 'htru' survey, whose rates become 1.
    """
    rates = defaultdict(list)

    # Reuse cached populations unless a remake was requested
    if remake is False:
        for alpha in alphas:
            for s in surveys:
                surv_rates = unpickle(f'complex_alpha_{alpha}_{s}').rates()
                pprint(f'Alpha:{alpha:.2}, Survey: {s}, Det: {surv_rates.det}')
                rates[s].append(surv_rates.det / surv_rates.days)
    else:
        pops = []
        for alpha in alphas:
            pop = CosmicPopulation.complex(size)
            pop.alpha = alpha
            pop.name = f'complex_alpha_{alpha}'
            pops.append(pop)

            # Set up one survey object per requested survey name
            ss = [Survey(name=s, gain_pattern='airy', n_sidelobes=1)
                  for s in surveys]

            surv_pops = LargePopulation(pop, *ss).pops
            for i, s in enumerate(surveys):
                surv_rates = surv_pops[i].rates()
                pprint(f'Alpha:{alpha:.2}, Survey: {s}, Det: {surv_rates.det}')
                rates[s].append(surv_rates.det / surv_rates.days)

    # Scale every survey's rates relative to HTRU
    htru_rates = rates['htru']
    for s in surveys:
        if s != 'htru':
            rates[s] = [r / htru_rates[i] for i, r in enumerate(rates[s])]
    rates['htru'] = [r / r for r in rates['htru']]

    return rates
"""Calculate the expected detection rates for apertif.""" import numpy as np import matplotlib.pyplot as plt from tqdm import tqdm from frbpoppy import CosmicPopulation, Survey, SurveyPopulation, hist from tests.convenience import plot_aa_style, rel_path from alpha_real import EXPECTED, poisson_interval N_DAYS = 1 # Not used in eventual result SCALE_TO = 'parkes-htru' pop = CosmicPopulation.complex(n_srcs=1e5, n_days=N_DAYS) pop.generate() apertif = Survey('wsrt-apertif', n_days=N_DAYS) apertif.set_beam(model='apertif_real') if SCALE_TO == 'parkes-htru': htru = Survey('parkes-htru', n_days=N_DAYS) htru.set_beam(model='parkes') if SCALE_TO == 'askap': askap = Survey('askap-fly', n_days=N_DAYS) askap.set_beam(model='gaussian', n_sidelobes=0.5) days_per_frbs = [] for i in tqdm(range(2000), desc='Survey Run'): apertif_pop = SurveyPopulation(pop, apertif, mute=True) if SCALE_TO == 'parkes-htru':
"""Check the log N log F slope for future surveys.""" import numpy as np import matplotlib.pyplot as plt from frbpoppy import CosmicPopulation, Survey, SurveyPopulation, hist from frbpoppy import unpickle, pprint from tests.convenience import plot_aa_style, rel_path MAKE = True SURVEYS = ('parkes-htru', 'fast-crafts', 'puma-full', 'chord', 'ska1-low', 'ska1-mid') if MAKE: surv_pops = [] pop = CosmicPopulation.complex(1e5, generate=False) pop.generate() for name in SURVEYS: survey = Survey(name) surv_pop = SurveyPopulation(pop, survey) surv_pop.save() surv_pops.append(surv_pop) else: surv_pops = [] for name in SURVEYS: surv_pops.append(unpickle(f'complex_{name}')) # Start plot plot_aa_style() fig, ax1 = plt.subplots(1, 1)
# Dispersion measure properties pop.set_dm_host(model='gauss', mean=100, std=200) pop.set_dm_igm(model='ioka', slope=1000, std=None) pop.set_dm_mw(model='ne2001') # Emission range of FRB sources pop.set_emission_range(low=100e6, high=10e9) # Luminsity of FRBs # See the per_source argument? That allows you to give different properties # to different bursts from the same source. You can do that for the luminosity, # or any of the following parameters pop.set_lum(model='powerlaw', low=1e38, high=1e38, power=0, per_source='different') # Pulse width pop.set_w(model='uniform', low=10, high=10) # Spectral index pop.set_si(model='gauss', mean=0, std=0) # If repeaters, how they repeat pop.set_time(model='regular', rate=2) # And then generate the population! pop.generate() # Or simply use some predefined models pop_simple = CosmicPopulation.simple(1e4, generate=True) pop_complex = CosmicPopulation.complex(1e4, generate=True)
"""Short example of how frbpoppy works. The first time you run frbpoppy, a series of cosmological databases will be constructed to set up subsequent runs. This first run can take ~2h on a 4 core machine. Subsequent runs will take mere seconds. """ from frbpoppy import CosmicPopulation, Survey, SurveyPopulation, plot # Set up an FRB population of one-offs # Add repeaters=True to turn into an FRB population of repeaters cosmic_pop = CosmicPopulation.complex(1e5, n_days=0.01) # Generate your FRB population cosmic_pop.generate() # Setup a survey survey = Survey('parkes-htru') survey.set_beam(model='parkes-htru') # Observe the FRB population survey_pop = SurveyPopulation(cosmic_pop, survey) # Check the detection rates print(survey_pop.source_rate) # Plot populations in a browser plot(cosmic_pop, survey_pop, tns='parkes')
"""How to access frb population parameters.""" from frbpoppy import CosmicPopulation, Survey, SurveyPopulation cosmic_pop = CosmicPopulation.complex(1e5, generate=True) dm = cosmic_pop.frbs.dm # Get dispersion measure values survey_pop = SurveyPopulation(cosmic_pop, Survey('apertif')) survey_dm = survey_pop.frbs.dm # Also works for SurveyPopulations
def gen_par_set_4(self, parallel=True, alpha=-1.5, si=0, li=-1,
                  lum_min=1e40, lum_max=1e40, w_mean=np.nan, w_std=np.nan,
                  run=np.nan):
    """Run Monte Carlo parameter set 4: a grid over DM_igm slope and DM_host.

    Builds one base cosmic population with the given fixed parameters,
    then for every (dm_igm_slope, dm_host) grid point regenerates only the
    dispersion-measure components, observes the result with each survey in
    ``self.surveys`` and saves it under a uuid recorded in ``self.so.df``.

    Args:
        parallel (bool): Process the grid with joblib workers when True.
        alpha, si, li, lum_min, lum_max, w_mean, w_std: Fixed population
            parameters; NaN values leave the corresponding model defaults.
        run: Identifier grouping this run's dataframe rows and saved files.
    """
    dm_igm_slopes = np.linspace(800, 1200, 11)
    dm_hosts = np.linspace(0, 500, 11)

    # Put all options into a dataframe
    # Drop any earlier rows from the same run before appending new ones
    self.so.df = self.so.df[self.so.df.run != run]
    opt = np.meshgrid(dm_igm_slopes, dm_hosts, self.survey_ix)
    options = np.array(opt).T.reshape(-1, 3)
    cols = ('dm_igm_slope', 'dm_host', 'survey')
    df = pd.DataFrame(options, columns=cols)
    df['run'] = run
    df['par_set'] = 4
    # One uuid per row; reused below as the saved population's filename
    df['uuid'] = [uuid.uuid4() for _ in range(len(df.index))]
    df['date'] = datetime.today()
    self.so.append(df)
    self.so.map_surveys(self.survey_ix, self.survey_names)
    self.so.save()

    # Remove previous par_set of the same number
    if not self.set_up_dirs(run=run):
        fs = f'{frbpoppy.paths.populations()}mc/run_{run}/*'
        for f in glob(fs):
            os.remove(f)

    # Base population; DM components are regenerated per grid point below
    pop = CosmicPopulation.complex(self.pop_size)
    # NaN-valued parameters keep the population's default models
    if not np.isnan(alpha):
        pop.set_dist(model='vol_co', z_max=1.0, alpha=alpha)
    pop.set_si(model='constant', value=si)
    if not np.isnan(lum_min):
        pop.set_lum(model='powerlaw', low=lum_min, high=lum_max, index=li)
    if not np.isnan(w_mean):
        pop.set_w(model='lognormal', mean=w_mean, std=w_std)
    pop.generate()

    def adapt_pop(e):
        # Rebuild only the DM components for one (dm_igm_slope, dm_host)
        # grid point, then survey and save the resulting population.
        dm_igm_slope, dm_host = e
        t_pop = deepcopy(pop)  # keep the shared base population untouched
        t_pop.set_dm_igm(model='ioka', slope=dm_igm_slope)
        t_pop.gen_dm_igm()
        t_pop.set_dm_host(model='constant', value=dm_host)
        t_pop.gen_dm_host()
        # Total DM = Milky Way + IGM + host contributions
        t_pop.frbs.dm = t_pop.frbs.dm_mw + t_pop.frbs.dm_igm
        t_pop.frbs.dm += t_pop.frbs.dm_host

        for survey in self.surveys:
            surv_pop = SurveyPopulation(t_pop, survey)
            # Get unique identifier
            mask = (self.so.df.par_set == 4)
            mask &= (self.so.df.run == run)
            mask &= (self.so.df.dm_igm_slope == dm_igm_slope)
            mask &= (self.so.df.dm_host == dm_host)
            mask &= (self.so.df.survey == survey.name)
            # NOTE(review): this local shadows the `uuid` module within
            # adapt_pop — confirm nothing later here needs uuid.uuid4()
            uuid = self.so.df[mask].uuid.iloc[0]
            surv_pop.name = f'mc/run_{run}/{uuid}'
            surv_pop.save()

    # Cap the worker count at 4, always leaving one CPU free
    n_cpu = min([4, os.cpu_count() - 1])
    pprint(f'{os.cpu_count()} CPUs available')
    mg = np.meshgrid(dm_igm_slopes, dm_hosts)
    loop = np.array(mg).T.reshape(-1, 2)
    if parallel:
        Parallel(n_jobs=n_cpu)(delayed(adapt_pop)(e) for e in tqdm(loop))
    else:
        [adapt_pop(e) for e in tqdm(loop)]