Example #1
0
def pdf_for_model(sink, str_beta,mdot_low,mdot_hi,nsinks=64,corr=True):
	'''returns mdot pdf of 64*(tend-tbeg)/dt points following Peter's idea
	Returns: array of mdots for pdf, list of non nan indices of that arraydict of models, each model is a dict of the models it was ttest compared to
	'''
	#time steady state begins from KS 2 samp stat
	tbeg= steady_state_begins(str_beta,units_tBH=True)
	tend= tbeg+3.
	if corr: sample= steady_rates(sink.mdot_vcorr,sink.tnorm(),tend-tbeg,tend,units_tBH=True,fill_nan=True)
	else: sample= steady_rates(sink.mdot,sink.tnorm(),tend-tbeg,tend,units_tBH=True,fill_nan=True)
	#check data is good
	assert( np.all(np.isfinite(sample)) ) #robust, either data exist in above range or not, will not have good data and nans in this sample
	#is desired, take subset of sinks
	sink_ids=np.arange(64)
	np.random.shuffle(sink_ids)
	sample= sample[ sink_ids[:nsinks] ]
	#can safely take log10 of rates now
	sample= np.log10(sample)
	#equal log spaced bins covering entire range of mdots
	bins= np.linspace(np.log10(mdot_low/2),np.log10(mdot_hi*2),num=41) # N bins = num-1
	pdf,cdf= get_pdf_cdf(sample,bins)
	print 'sample= ',sample
	print 'bins= ',bins
	print 'pdf= ',pdf
	print 'cdf= ',cdf
	return sample,bins,pdf,cdf
Example #2
0
def pdf_for_model_64_x_N(sink, str_beta,mdot_low,mdot_hi,nsinks=64,corr=True,last_2tBH=True,dt=2.,median=True):
	'''Build a log10 mdot PDF/CDF from 64 x N steady-state rates for one model.

	default: compute 64 point PDF taking median each sink over last 2 tBH
	can change dt = 1 to take 64x2 point PDF over last 2tBH
	or even dt=1 and last_2tBH=False to take 64xN point pdf between tsteady and tend of every model

	sink -- object exposing mdot, mdot_vcorr and tnorm() for the 64 sinks
	str_beta -- model name, forwarded to steady_rates/steady_state_begins
	mdot_low, mdot_hi -- global rate range; bin edges span [mdot_low/2, mdot_hi*2]
	corr -- use velocity-corrected rates (mdot_vcorr) instead of raw mdot
	last_2tBH -- restrict to the final 2 tBH of the run; else start at t_steady
	dt -- width in tBH of each averaging interval
	median -- per sink per interval, take the median rate (else the mean)
	NOTE(review): nsinks is accepted but never used in this function.
	Returns: (log10 sample array, bin edges, pdf, cdf)
	'''
	if last_2tBH: 
		# start 2 tBH before the end of the run (run end capped at t=19.8 tBH)
		tbeg= -2.+ min(19.8,sink.tnorm()[-1])
		# the last-2tBH window must lie entirely inside steady state
		assert(tbeg >= steady_state_begins(str_beta,units_tBH=True))
	else: tbeg= steady_state_begins(str_beta,units_tBH=True) #time steady state begins from KS 2 samp stat
	sample=[]
	# accumulate 64 rates per dt-wide interval until past the data (or t=20 tBH)
	for cnt in range(1,100):
		tend= tbeg+cnt*dt
		if tend > min(20.,sink.tnorm()[-1]): break
		if median:
			# Take median mdot over steady state time range for each sink
			if corr: sample+= list( steady_rates(sink.mdot_vcorr,sink.tnorm(),dt,tend,units_tBH=True,fill_nan=True,str_beta=str_beta,median=median) )  
			else: sample+= list( steady_rates(sink.mdot,sink.tnorm(),dt,tend,units_tBH=True,fill_nan=True,str_beta=str_beta,median=median) )  
		else:
			# Take mean
			if corr: sample+= list( steady_rates(sink.mdot_vcorr,sink.tnorm(),dt,tend,units_tBH=True,fill_nan=True,str_beta=str_beta,median=False) )  
			else: sample+= list( steady_rates(sink.mdot,sink.tnorm(),dt,tend,units_tBH=True,fill_nan=True,str_beta=str_beta,median=False) )  
		print 'inside loop pdf_for_model_64_x_N, dt= ',dt,'tend= ',tend
	sample= np.array(sample)
	#sanity check: expected sample counts for the supported configurations
	print 'finished pdf_for_model_64_x_N, model=%s, len(sample)=%d' % (str_beta,len(sample))
	if last_2tBH and dt > 1.01: assert(len(sample) == 64) # one interval x 64 sinks
	elif last_2tBH and dt < 1.01: assert(len(sample) == 128) # two intervals x 64 sinks
	else: assert(len(sample) >= 128)
	#check data is good
	assert( np.all(np.isfinite(sample)) ) #robust, either data exist in above range or not
	#can safely take log10 of rates now
	sample= np.log10(sample)
	#equal log spaced bins covering entire range of mdots
	bins= np.linspace(np.log10(mdot_low/2),np.log10(mdot_hi*2),num=41) # N bins = num-1
	pdf,cdf= get_pdf_cdf(sample,bins)
	print 'bins[[0,-1]]=  ',bins[[0,-1]]
	print 'pdf= ',pdf
	print 'cdf= ',cdf
	return sample,bins,pdf,cdf
Example #3
0
def pdf_for_model_mean_of_many_pdfs(sink, str_beta,mdot_low,mdot_hi, tend=20.,dt=0.1):
	'''Average many per-interval 64-sink mdot histograms into one PDF/CDF.

	Splits [t_steady, tend] into dt-wide intervals, builds a 64-sink log10
	rate histogram for each fully-populated interval, averages the bin counts
	over intervals, and converts the averaged histogram back into a sample.
	Returns: (array of bin-center values realizing the mean histogram, bins, pdf, cdf)
	'''
	#corr mdots (rm_mdot_systematics presumably detrends the rates -- confirm against its definition)
	end_t,end_mach= t_rms_mach_near_end(str_beta)
	sink.rm_mdot_systematics(end_t,end_mach)
	#time steady state begins from KS 2 samp stat
	tbeg= steady_state_begins(str_beta,units_tBH=True)
	#median accretion rate in 0.1 bins between t=2tBH and t=end
	n_intervals= get_num_intervals(sink,tbeg,dt, tend=tend)
	pdfs= np.empty((n_intervals,64))*np.nan # (interval, sink) rates, NaN-initialized
	for ibin in range(n_intervals):
		#get 64 steady mdots for this time range
		pdfs[ibin,:]= steady_rates(sink.mdot_vcorr,sink.tnorm(),dt,tbeg+(ibin+1)*dt,units_tBH=True,fill_nan=True)
	#mask the array where have nans (intervals with missing data)
	pdfs= ma.array(pdfs)
	pdfs.mask= np.isfinite(pdfs) == False
	#can safely take log10 of rates now
	pdfs= np.log10(pdfs)
	#equal log spaced bins covering entire range of mdots
	bins= np.linspace(np.log10(mdot_low/2),np.log10(mdot_hi*2),num=41) # N bins = num-1
	#bins= bins[::-1] #start with smallest number first (most negative)
	print 'bins= ',bins
	#bin up data: one histogram per interval; plt.hist is used only for its binning
	hist_vals= np.empty((n_intervals,len(bins)-1))*np.nan
	for i in range(n_intervals):	
		if np.any(pdfs.mask[i,:]): continue # skip intervals with any missing sink
		hist_vals[i,:],b,c=plt.hist(pdfs[i,:],bins=bins)
	plt.close() # discard the throwaway histogram figure
	hist_vals= ma.array(hist_vals)
	hist_vals.mask= np.isfinite(hist_vals) == False
	#compute averaged histogram
	final_hist= np.round(np.mean(hist_vals,axis=0)).astype(int) #average each bin count over n_intervals, round to nearest count, save as integer
	#convert histogram into array of values: repeat each bin center by its mean count
	bin_c= (bins[1:]+bins[:-1])/2
	final_arr=[]
	for cnt,c in enumerate(bin_c): final_arr+= [c]*final_hist[cnt]
	assert(len(final_arr) == final_hist.sum())
	# NOTE(review): normed= was removed in matplotlib >= 3.1 (density= replaces it)
	pdf,junk1,junk2= plt.hist(final_arr,bins=bins,normed=True)
	cdf,j1,j2=plt.hist(final_arr,bins=bins,cumulative=True,normed=True)
	plt.close()
	# rounding the mean counts can make the pdf integral differ slightly from 1;
	# the exact float == comparison is intentionally non-fatal (warn only)
	try: assert( ((bins[1:]-bins[:-1])*pdf).sum() == 1. )
	except AssertionError: 
		print '---- WARNING  sum not 1, what is it? ------------ '
		print '((bins[1:]-bins[:-1])*pdf).sum()= ',((bins[1:]-bins[:-1])*pdf).sum()
		#raise AssertionError
	#return array of values, bins, pdf and cdf
	return np.array(final_arr),bins,pdf,cdf
Example #4
0
#raw_t= master_sink.old_t[-master_sink.tnorm().size:].copy()
# Debug dump: print raw vs normalized times for every sample, then bail out.
print "%s" % args.str_beta
print "mdotTime, tnorm()"
for raw_t,norm in zip(master_sink.mdotTime,master_sink.tnorm()):
	print raw_t,norm
print 'exiting early'
sys.exit()
# NOTE(review): sys.exit() above is unconditional -- everything below this
# point is dead code until the debug dump is removed.

#tend
print 'model: %s, last t/tBH= %.5f' % (args.str_beta,master_sink.tnorm()[-1])
if False: # print machrms at end (disabled diagnostic)
	t_end= min(20.,master_sink.tnorm()[-1])
	i_end= index_before_tBH(master_sink,t_end,units_tBH=True)
	mach_end= master_sink.f_rms_mach(master_sink.mdotTime[i_end])
	print 'model= %s, tsteady/tBH= %.2f, tend/tBH= %.2f, rms Mach end= %.2f' % \
			(args.str_beta, steady_state_begins(args.str_beta,True),t_end,mach_end) 
#corr mdots	
end_t,end_mach= t_rms_mach_near_end(args.str_beta)
master_sink.rm_mdot_systematics(end_t,end_mach)
#final PDF
final_pdf= dict(arr={},bins={},pdf={},cdf={})  #PDF USING, 64 pdf, over last 2tBH
low,hi= low_hi_mdot_all_models()
# one entry per model key in each of arr/bins/pdf/cdf
final_pdf['arr'][args.str_beta],final_pdf['bins'][args.str_beta],final_pdf['pdf'][args.str_beta],final_pdf['cdf'][args.str_beta]= pdf_for_model_64_x_N(master_sink,args.str_beta, low,hi,nsinks=64,corr=True,last_2tBH=True,dt=2.)
#make plots
just_median(master_sink,args)
median_and_when_equil_begins(master_sink,args)
steady_pdf_cdf(master_sink,args)
all_rates(master_sink,args,corr=True)  
bootstrap_sigma_vs_nsink(final_pdf,args.str_beta)
print 'finished'
    "-str_beta",
    nargs=7,  # exactly seven model names required
    choices=["bInf", "bInfsd3", "b100", "b10", "b1", "b1e-1", "b1e-2"],
    action="store",
    # NOTE(review): help text looks copy-pasted from another argument --
    # these are model names, not a directory
    help="directory containing parsed mdot files",
    required=True,
)
parser.add_argument("-outdir", action="store", help="directory to save plots to", required=True)
parser.add_argument("-ylim", nargs=2, type=float, action="store", help="", required=False)
parser.add_argument("-xlim", nargs=2, type=float, action="store", help="", required=False)
args = parser.parse_args()


# Report the precomputed steady-state start time for each requested model.
print "BEFORE CALC, fixed t_steady: "
for str_beta in args.str_beta:
    tsteady = steady_state_begins(str_beta, True)
    print "STEADY STATE: model=%s, t/tBH=%.2f, t/tABH=%.2f" % (str_beta, tsteady, tBH_to_tBHA(tsteady, str_beta))

# Load each sink object (one file per model) and detrend its rates up front.
sinks = {}
for fn, str_beta in zip(args.sinks, args.str_beta):
    fin = open(fn, "r")
    sinks[str_beta] = load(fin)  # presumably pickle/cPickle load -- confirm import
    fin.close()
    # corr mdots
    end_t, end_mach = stats.t_rms_mach_near_end(str_beta)
    sinks[str_beta].rm_mdot_systematics(end_t, end_mach)
# determine min and max mdot of all models
low, hi = 100, 1.0e-10
for key in sinks.keys():
    istart = stats.index_before_tBH(sinks[key], steady_state_begins(key, True))
    iend = stats.index_before_tBH(sinks[key], 20.0)