def lucy_replicator(iters, phi):
    """Richardson-Lucy deconvolution of the observed sample `phi`.

    Runs `iters` RL iterations starting from the Lucy (1974) initial guess,
    tracking two chi-squared series per iteration:
      * chisq_psi_zdist  — estimate z vs. the true density `norm_vals`
        (only computable in simulation, where the truth is known);
      * chisq_c_phitilde — reconvolved estimate c vs. the observed
        histogram phi_tilde (the practical stopping diagnostic).

    Parameters
    ----------
    iters : int
        Number of RL iterations to run after the initial one.
    phi : array_like
        Observed sample, histogrammed against the module-level `bins`.

    Returns
    -------
    (z_dist, ideal_chisq_stop, min_chisq) :
        final density estimate, last iteration index whose
        chisq_c_phitilde exceeds the critical value (Lucy's stopping
        rule), and the iteration minimising chisq_psi_zdist.

    Relies on module-level globals: np, zm, bins, df, lucy_conditional_dists,
    norm_vals, num_obs, chisquare.
    """
    bin_width = .025   # grid spacing implied by the original normalisations
    chisq_crit = 233   # chi-squared critical value from Lucy (1974) stopping rule

    def _step(current):
        # One RL iteration: update z, reconvolve to get c = phi^r,
        # normalising both to unit area over the bin grid.
        z = df.integral_calc(current, lucy_conditional_dists, phi_tilde)
        z = z / sum(z) / bin_width
        c = np.dot(z, lucy_conditional_dists)
        c = c / sum(c) / bin_width
        return z, c

    def _counts(density):
        # Convert a density to rounded expected/observed bin counts, the
        # form scipy.stats.chisquare is applied to throughout.
        return np.round(num_obs * density * bin_width)

    chisq_psi_zdist = np.zeros(iters + 1)
    chisq_c_phitilde = np.zeros(iters + 1)

    # Same initial guess as in Lucy (1974).
    initial_guess = (np.sqrt(2) / np.pi) / (1 + zm**4)

    # Turn the observed data into a normalised histogram.
    phi_tilde = np.histogram(phi, bins=bins, density=True)[0]

    # These two count vectors never change across iterations — compute once.
    expected_psi = _counts(norm_vals)
    obs_phi = _counts(phi_tilde)

    # First RL iteration plus its goodness-of-fit statistics.
    z_dist, c = _step(initial_guess)
    chisq_psi_zdist[0] = chisquare(_counts(z_dist), expected_psi)[0]
    chisq_c_phitilde[0] = chisquare(obs_phi, _counts(c))[0]

    for i in range(iters):
        z_dist, c = _step(z_dist)
        # FIX: the original loop skipped np.round on observed_zdist and
        # expected_c (unlike iteration 0), mixing rounded and unrounded
        # counts in the same chi-squared series; counts are now rounded
        # consistently everywhere.
        chisq_psi_zdist[i + 1] = chisquare(_counts(z_dist), expected_psi)[0]
        chisq_c_phitilde[i + 1] = chisquare(obs_phi, _counts(c))[0]

    # Lucy (1974) stopping rule: last iteration still above the critical
    # value.  Raises ValueError if no iteration exceeds it (as the original
    # np.max over an empty index array would).
    ideal_chisq_stop = np.max(np.where(chisq_c_phitilde > chisq_crit)[0])
    # Oracle stopping point: iteration closest to the known truth.
    min_chisq = np.argmin(chisq_psi_zdist)
    return z_dist, ideal_chisq_stop, min_chisq
del best_cells ################### #for chi2 expected = sample_size*phi_tilde_pdf*.01 ################ z_dist = df.max_finder(psf_base_single,phi_tilde_point) base_point_dist.append(z_dist) ################ z_dist, c_val = df.integral_calc(initial_guess,psf_base_t,phi_tilde_pdf) z_temp = z_dist for j in xrange(max_iters): #expected = sample_size*phi_tilde_pdf*.01 observed = sample_size*c_val*.01 chisq_stat = chisquare(observed,expected)[0] if chisq_stat < 233: base_pdf_dist.append(z_temp) num_cut[0]+= 1 break else: z_temp = z_dist z_dist, c_val = df.integral_calc(z_dist,psf_base_t,phi_tilde_pdf)