def main():
    star_list = reading_in_star_population.read_star_pop(
        STAR_POP_FILEPATH, STAR_POP_STARTING_FIELDNAME)
    print
    print "Star count: %s" % len(star_list)

    element_count = 15
    print "First %s elements in star list:" % (element_count)
    for star in star_list[:element_count]:
        dist = star["Dist"] * units.kpc
        mass = star["Mass"] * units.solMass
        print "Dist: %s Mass: %s" % (dist, mass)
    print

    lens_index = 5200
    source_index = 5470
    lens = star_list[lens_index]
    source = star_list[source_index]
    theta_E = get_angular_Einstein_radius(lens=lens, source=source)
    omega = get_relative_angular_velocity(lens=lens, source=source)

    print "For lens %s:\n%s" % (lens_index, lens)
    print "Distance: %s Mass: %s" % (lens["Dist"] * units.kpc,
                                     lens["Mass"] * units.solMass)
    print "mu_l: %s mu_b: %s" % (lens["mul"] * units.mas / units.yr,
                                 lens["mub"] * units.mas / units.yr)
    print "mu_l: %s mu_b: %s" % (convert_angular_velocity_units(lens["mul"]),
                                 convert_angular_velocity_units(lens["mub"]))
    print
    print "And for source %s:\n%s" % (source_index, source)
    print "Distance: %s Mass: %s" % (source["Dist"] * units.kpc,
                                     source["Mass"] * units.solMass)
    print "mu_l: %s mu_b: %s" % (source["mul"] * units.mas / units.yr,
                                 source["mub"] * units.mas / units.yr)
    print "mu_l: %s mu_b: %s" % (convert_angular_velocity_units(source["mul"]),
                                 convert_angular_velocity_units(source["mub"]))
    print
    print "Angular Einstein radius is: %s" % theta_E
def testing_lightcurve_functions():
    star_catalogue = read_star_pop(STAR_POP_FILEPATH, is_csv=True)
    star_pop = star_catalogue["star_pop"]
    star_pop_segment = star_pop[0:10]

    # Estimate the revisit period from the survey geometry
    # (18000 deg^2 / 9.5 deg^2 ~ 1895 pointings of 34 s each, roughly 17.9 h),
    # then override it with the fixed 17.7 h cadence used for this test run.
    field_of_view = 9.5 * units.deg**2
    survey_area = 18000 * units.deg**2
    grid_space_num = survey_area / field_of_view
    visit_duration = 34 * units.s
    period = (grid_space_num * visit_duration).to(units.h)

    duration = 20 * 24 * units.h
    period = 17.7 * units.h
    night_duration = 10 * units.h
    for star in star_pop_segment:
        baseline_lightcurve_dict = make_baseline_lightcurve(
            star, duration, period=period, night_duration=night_duration,
            old_err_omission=False)
        logger.debug(
            "Magnitude error threshold: {}".format(MAG_ERROR_THRESHOLD))
        logger.debug("GAUSSIAN_MAG_DEBUG: {}".format(GAUSSIAN_MAG_DEBUG))
        plot_lightcurve(baseline_lightcurve_dict, connect_all=False,
                        show_error_bars=True, true_times=True,
                        convert_to_days=True)
def get_example_catalogue_lists_2():
    """Set up the example source and lens catalogue lists.

    For now each catalogue list consists of a single catalogue.
    """
    star_catalogue_example_lens = reading_in_star_population.read_star_pop(
        STAR_POP_FILEPATH, is_csv=True)
    star_catalogue_example_lens["solid_angle"] = SOLID_ANGLE_DEFAULT
    star_catalogue_example_lens["star_pop"] = star_catalogue_example_lens["star_pop"]
    star_catalogue_lens_list = [star_catalogue_example_lens]

    star_example_source = {
        "Dist": str(DIST_SOURCE_DEFAULT.value),
        "V": str(24.5)
    }
    # This is intentionally unitless because we are effectively
    # removing the solid_angle_source factor from the summation.
    solid_angle_example_source = 1
    star_catalogue_example_source = {
        "star_pop": [star_example_source],
        "solid_angle": solid_angle_example_source
    }

    star_catalogue_lens_list = [star_catalogue_example_lens]
    star_catalogue_source_list = [star_catalogue_example_source]
    star_catalogue_list_dict = {
        "lens": star_catalogue_lens_list,
        "source": star_catalogue_source_list
    }
    return star_catalogue_list_dict
def calculate_tau_alt_test():
    star_info_dict = reading_in_star_population.read_star_pop(
        STAR_POP_FILEPATH, is_csv=True)
    tau_info_dict = calculate_tau_alt(star_info_dict)
    tau_sum = tau_info_dict["tau"]
    logger.info("tau_sum: {}".format(tau_sum))
    plot_tau_info_alt(tau_info_dict)
def get_example_catalogue_lists():
    """Set up the example source and lens catalogue lists.

    For now each catalogue list consists of a single catalogue.
    """
    star_catalogue_example = reading_in_star_population.read_star_pop(
        STAR_POP_FILEPATH, is_csv=True)
    star_catalogue_example["solid_angle"] = SOLID_ANGLE_DEFAULT
    star_catalogue_lens_list = [star_catalogue_example]
    star_catalogue_source_list = [star_catalogue_example]
    star_catalogue_list_dict = {
        "lens": star_catalogue_lens_list,
        "source": star_catalogue_source_list
    }
    return star_catalogue_list_dict
def calculate_optical_depth_alt():
    star_info_dict = reading_in_star_population.read_star_pop(
        STAR_POP_FILEPATH, is_csv=True)
    star_pop = star_info_dict["star_pop"]
    if "coordinates_gal" in star_info_dict and star_info_dict["coordinates_gal"] is not None:
        coord_gal = float(star_info_dict["coordinates_gal"]) * units.deg
    else:
        coord_gal = None

    tau_sum = 0
    dist_source = get_dist_source(coord_gal)
    dist_lens_list = []
    tau_sum_list = []
    tau_addition_term_list = []
    for star in star_pop:
        dist_lens = float(star["Dist"]) * units.kpc
        mass = float(star["Mass"]) * units.solMass
        dist_rel = 1 / ((1 / dist_lens) - (1 / dist_source))
        solid_angle_dimensionless = SOLID_ANGLE_DEFAULT.to(
            units.dimensionless_unscaled,
            equivalencies=units.dimensionless_angles())
        tau_addition_term = (4 * np.pi * G * mass / c**2 /
                             dist_rel) / solid_angle_dimensionless
        tau_addition_term = tau_addition_term.decompose()
        tau_sum += tau_addition_term
        dist_lens_list.append(dist_lens.copy())
        tau_sum_list.append(tau_sum.copy())
        tau_addition_term_list.append(tau_addition_term.copy())

    print tau_sum

    dist_lens_list = units.Quantity(dist_lens_list)
    tau_sum_list = units.Quantity(tau_sum_list)
    tau_addition_term_list = units.Quantity(tau_addition_term_list)

    plt.plot(dist_lens_list, tau_sum_list, "ro")
    plt.xlabel("lens distance (%s)" % dist_lens_list.unit)
    plt.ylabel("tau sum value after addition of term at this lens distance (%s)"
               % tau_sum_list.unit)
    plt.show()

    plt.plot(dist_lens_list, tau_addition_term_list, "ro")
    plt.xlabel("lens distance (%s)" % dist_lens_list.unit)
    plt.ylabel("term added to tau value at this lens distance (%s)"
               % tau_addition_term_list.unit)
    plt.show()

    return tau_sum
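# Illustrative check (not part of the module): the per-lens term above,
# 4*pi*G*M / (c**2 * D_rel), is just pi * theta_E**2 written out explicitly,
# with theta_E the angular Einstein radius used elsewhere in these scripts.
# This sketch assumes astropy's G and c; the example numbers are arbitrary.
#
#   import numpy as np
#   from astropy import units
#   from astropy.constants import G, c
#
#   mass = 0.5 * units.solMass
#   dist_lens = 4.0 * units.kpc
#   dist_source = 8.0 * units.kpc
#
#   dist_rel = 1 / ((1 / dist_lens) - (1 / dist_source))
#   term_direct = (4 * np.pi * G * mass / c**2 / dist_rel).decompose()
#
#   theta_E_squared = ((4 * G * mass / c**2)
#                      * (1 / dist_lens - 1 / dist_source)).decompose()
#   term_from_theta_E = np.pi * theta_E_squared
#
#   print term_direct
#   print term_from_theta_E  # the two values should agree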
def testing_band_functions():
    star_catalogue = read_star_pop(STAR_POP_FILEPATH, is_csv=True)
    star_pop = star_catalogue["star_pop"]
    star = star_pop[0]
    mag_V = float(star["V"])
    logger.debug("mag_V: {}".format(mag_V))
    mag_V_error = simulate_mag_error(mag_V)["mag_err"]
    logger.debug("mag_V_error: {}".format(mag_V_error))
    gaussian_mag_V = np.random.normal(mag_V, mag_V_error)
    logger.debug("gaussian_mag_V: {}".format(gaussian_mag_V))
    logger.debug("")

    gaussian_star = get_gaussian_star(star)
    mag_dict = get_mags(star)
    #print gaussian_star
    #print get_mags(gaussian_star)
    gaussian_mag_dict = get_mags(gaussian_star)
    #print mag_dict
    #print gaussian_mag_dict
    gaussian_mag_dict_2 = get_gaussian_mags(mag_dict)
    gaussian_mag_dict_alt = get_gaussian_mags_alt(mag_dict)

    for band in mag_dict:
        mag = mag_dict[band]
        logger.debug("mag_{}: {}".format(band, mag))

    # Log gaussian mags computed by acquiring magnitudes from a star
    # whose V mag has been gaussian randomized.
    for band in gaussian_mag_dict:
        gaussian_mag = gaussian_mag_dict[band]
        logger.debug("gaussian_mag_{}: {}".format(band, gaussian_mag))

    # Log gaussian mags computed by gaussian randomizing the V mag of a mag
    # dict taken from an unmodified star, and then adding the same sigma to
    # each mag from the other bands.
    for band in gaussian_mag_dict_2:
        gaussian_mag_2 = gaussian_mag_dict_2[band]
        logger.debug("gaussian_mag_{}_2: {}".format(band, gaussian_mag_2))

    # Log gaussian mags computed by gaussian randomizing the mag from each
    # band in a mag dict taken from an unmodified star.
    for band in gaussian_mag_dict_alt:
        gaussian_mag_alt = gaussian_mag_dict_alt[band]
        logger.debug("gaussian_mag_{}_alt: {}".format(band, gaussian_mag_alt))

    plot_gaussian_histogram(star)
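# Hypothetical sketch of what get_gaussian_mags is described as doing in the
# comments above: draw one gaussian offset from the simulated V-band error and
# apply that same offset to every band in the mag dict. This is an assumption
# for illustration, not the module's actual implementation.
import numpy as np

def get_gaussian_mags_sketch(mag_dict):
    mag_V = mag_dict["V"]
    # simulate_mag_error is the existing helper used in testing_band_functions
    mag_V_error = simulate_mag_error(mag_V)["mag_err"]
    offset = np.random.normal(0, mag_V_error)
    return {band: mag + offset for band, mag in mag_dict.items()}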
def make_csv(filepath):
    if os.path.isfile(filepath):
        star_info = reading_in_star_population.read_star_pop(filepath)
        star_pop = star_info["star_pop"]
        fieldnames = star_info["fieldnames"]

        filename_no_extension = os.path.splitext(os.path.basename(filepath))[0]
        output_filename = filename_no_extension + ".csv"
        output_filepath = os.path.join(OUTPUT_DIR, output_filename)

        with open(output_filepath, "w") as output_file:
            writer = csv.DictWriter(output_file, fieldnames=fieldnames)
            writer.writeheader()
            for star_dict in star_pop:
                writer.writerow(star_dict)
    else:
        print "File does not exist at path %s" % filepath
def make_csv_sample_alt(filepath, sample_fraction=0.01):
    if os.path.isfile(filepath):
        star_info = reading_in_star_population.read_star_pop(filepath)
        star_pop = star_info["star_pop"]
        fieldnames = star_info["fieldnames"]

        # Draw a random sample containing sample_fraction of the population.
        sample_size = int(len(star_pop) * sample_fraction)
        star_pop_sample = random.sample(star_pop, sample_size)

        filename_no_extension = os.path.splitext(os.path.basename(filepath))[0]
        output_filename = (filename_no_extension + "_sample_alt"
                           + str(sample_fraction) + ".csv")
        output_filepath = os.path.join(OUTPUT_DIR, output_filename)

        with open(output_filepath, "w") as output_file:
            writer = csv.DictWriter(output_file, fieldnames=fieldnames)
            writer.writeheader()
            for star_dict in star_pop_sample:
                writer.writerow(star_dict)
    else:
        print "File does not exist at path %s" % filepath
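# Example usage (the path below is hypothetical): convert a raw star
# population file to CSV in OUTPUT_DIR and also write a 1% random sample.
#
#   make_csv("data/star_population_example.dat")
#   make_csv_sample_alt("data/star_population_example.dat", sample_fraction=0.01)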
def calculate_optical_depth_alt_with_impact_param():
    # Set up the example source and lens catalogue lists.
    # For now each catalogue list consists of a single catalogue.
    star_catalogue_example = reading_in_star_population.read_star_pop(
        STAR_POP_FILEPATH, is_csv=True)
    star_catalogue_example["solid_angle"] = SOLID_ANGLE_DEFAULT
    star_catalogue_lens_list = [star_catalogue_example]
    star_catalogue_source_list = [star_catalogue_example]

    # Iterate over each source catalogue
    #tau_sum_list = []
    tau_addition_term_list = []
    tau_sum_catalogue_source = 0
    for star_catalogue_source in star_catalogue_source_list:
        star_pop_source = star_catalogue_source["star_pop"]
        solid_angle_source = star_catalogue_source["solid_angle"]

        # Iterate over each source in the catalogue
        tau_sum_source = 0
        for star_source in star_pop_source:
            mag_V_source = float(star_source["V"])
            dist_source = float(star_source["Dist"]) * units.kpc

            # Turning the debug flag on always returns a weight of 1,
            # for testing in case something is wrong with the simulated weight.
            impact_param_weight = \
                calculating_impact_param.simulate_impact_param_weight(
                    mag_V_source, precision_model=PRECISION_MODEL,
                    debug=IMPACT_PARAM_WEIGHT_DEBUG)
            if impact_param_weight != 1:
                print "Impact parameter weight != 1"
                print "Impact parameter weight: %s" % impact_param_weight
                print "mag: %s" % mag_V_source
            #print impact_param_weight

            # Iterate over each lens catalogue
            tau_sum_catalogue_lens = 0
            for star_catalogue_lens in star_catalogue_lens_list:
                star_pop_lens = star_catalogue_lens["star_pop"]
                solid_angle_lens = star_catalogue_lens["solid_angle"]

                # Iterate over each lens in the catalogue
                tau_sum_lens = 0
                for star_lens in star_pop_lens:
                    mass_lens = float(star_lens["Mass"]) * units.solMass
                    dist_lens = float(star_lens["Dist"]) * units.kpc
                    #print "dist_lens: %s dist_source: %s" % (dist_lens, dist_source)
                    #print "mass_lens: %s" % mass_lens

                    # Get tau addition term if lens is closer than source,
                    # using source properties and lens catalogue's solid angle
                    if dist_lens < dist_source:
                        angular_einstein_radius = get_angular_einstein_radius(
                            mass_lens, dist_lens, dist_source)
                        #print "angular Einstein radius: %s" % angular_einstein_radius
                        tau_addition_term_lens = (
                            np.pi * angular_einstein_radius
                            * angular_einstein_radius / solid_angle_lens)
                        tau_sum_lens += tau_addition_term_lens
                        tau_addition_term_list.append(
                            tau_addition_term_lens.decompose())
                        #print "tau_addition_term_lens: %s" % tau_addition_term_lens
                    else:
                        pass
                        #print "no Einstein radius"
                        #print "tau_addition_term_lens: 0"

                # Add sum over lenses to sum over lens catalogues
                tau_sum_catalogue_lens += tau_sum_lens
                #print "tau_addition_term_catalogue_lens: %s" % tau_sum_lens

            # Multiply sum over lens catalogues by impact param weight (U),
            # and add to sum over sources
            tau_sum_source += tau_sum_catalogue_lens * impact_param_weight
            #print "tau_addition_term_source: %s" % (tau_sum_catalogue_lens * impact_param_weight)

        # Divide the sum over sources by the source catalogue's solid angle,
        # and add to sum over source catalogues
        tau_sum_catalogue_source += tau_sum_source / solid_angle_source
        #print "tau_addition_term_catalogue_source: %s" % (tau_sum_source / solid_angle_source)

    # Multiply sum over source catalogues by the square of the maximum impact
    # parameter for a microlensing event, and store as the tau sum
    tau_sum = tau_sum_catalogue_source * u_MAX * u_MAX

    # Get inverse weight, which iterates over source catalogues,
    # and multiply the tau sum by it
    tau_sum_inverse_weight = get_inverse_weight(star_catalogue_source_list)
    tau_sum *= tau_sum_inverse_weight

    tau_addition_term_list = units.Quantity(tau_addition_term_list).value
    print "tau: %s" % tau_sum
    #print tau_addition_term_list
    plt.plot(tau_addition_term_list, "ro")
    plt.xlabel("index")
    plt.ylabel("term added to tau")
    plt.show()

    return tau_sum
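# Minimal sketch of the get_angular_einstein_radius helper called above,
# consistent with the formula written out in the pseudocode of main() below:
# theta_E = sqrt((4*G*M/c**2) * (1/D_lens - 1/D_source)). The name suffix and
# the astropy constants are assumptions for illustration, not necessarily the
# module's own implementation.
import numpy as np
from astropy import units
from astropy.constants import G, c

def get_angular_einstein_radius_sketch(mass_lens, dist_lens, dist_source):
    # theta_E**2 is dimensionless once decomposed to SI, so attach radians
    # to read the result as an angle.
    theta_E_squared = ((4 * G * mass_lens / c**2)
                       * (1 / dist_lens - 1 / dist_source)).decompose()
    return np.sqrt(theta_E_squared) * units.rad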
def calculate_optical_depth():
    #star_info_dict = reading_in_star_population.read_star_pop(STAR_POP_FILEPATH, is_csv=False)
    star_info_dict = reading_in_star_population.read_star_pop(
        STAR_POP_FILEPATH, is_csv=True)
    star_pop = star_info_dict["star_pop"]
    if "coordinates_gal" in star_info_dict and star_info_dict["coordinates_gal"] is not None:
        coord_gal = float(star_info_dict["coordinates_gal"]) * units.deg
    else:
        coord_gal = None

    last_bin_dist = 0 * units.kpc
    mass_density_bin = []
    star_bins = []
    tau_sum = 0
    dist_source = get_dist_source(coord_gal)
    logger.info("dist_source set to default value: %s" % DIST_SOURCE_DEFAULT)
    if not CALCULATE_SOLID_ANGLE:
        logger.info("solid_angle set to default value: %s" % SOLID_ANGLE_DEFAULT)

    #error_counter = 0
    for i in xrange(len(star_pop)):
        star = star_pop[i]
        dist = float(star["Dist"]) * units.kpc
        mass = float(star["Mass"]) * units.solMass
        logger.debug("dist: %s mass: %s" % (dist, mass))

        # If this is the first iteration, the previous distance is set to 0
        if i > 0:
            last_dist = float(star_pop[i - 1]["Dist"]) * units.kpc
        else:
            last_dist = 0 * units.kpc
        logger.debug("last_dist: %s" % last_dist)
        logger.debug("last_bin_dist: %s" % last_bin_dist)
        logger.debug("Comparing dist to last_dist...")

        """
        If the current and previous distances don't match, we've moved on to
        another bin of stars.

        Average the mass density values from the bin of stars that was just
        completed; calculate a tau term from this density, the source
        distance, and the distances of the completed star bin and the
        previous star bin; and add the term to the tau sum.

        Finally, move on to the next bin by updating the last bin distance
        and emptying the current mass density bin.
        """
        if dist != last_dist:
            if len(mass_density_bin) > 0:
                bin_size = len(mass_density_bin)
                logger.debug("Final mass bin size: %s" % bin_size)
                ro_average = units.Quantity(mass_density_bin).mean()
                logger.debug("Averaged ro: %s" % ro_average)
                logger.debug("dist_source: %s" % dist_source)
                delta_dist = last_dist - last_bin_dist
                logger.debug("delta_dist: %s" % delta_dist)
                tau_addition_term = get_tau_addition_term(
                    ro_average, last_dist, dist_source, delta_dist)
                logger.debug("Adding to tau: %s" % tau_addition_term)
                tau_sum += tau_addition_term

                bin_dict = {"dist": last_dist,
                            "mass_density_average": ro_average,
                            "delta_dist": delta_dist,
                            "tau_addition_term": tau_addition_term.copy(),
                            "tau_value_after_addition": tau_sum.copy(),
                            "size": bin_size}
                star_bins.append(bin_dict)
                #print "star bin added, tau value: %s" % star_bins[-1]["tau_value_after_addition"]
                #print "tau sum: %s" % tau_sum
                #print "tau value after addition: %s" % bin_dict["tau_value_after_addition"]

            last_bin_dist = last_dist
            mass_density_bin = []

            """
            logger.debug("last_bin_dist: %s" % last_bin_dist)
            if len(star_bins) > 0 and i > len(star_pop)/2:
                latest_star_bin = star_bins[-1]
                latest_tau = latest_star_bin["tau_value_after_addition"]
                #print "latest star bin tau: %s error count: %s" % (latest_tau, error_counter)
                #if latest_tau <= 3.68105603883e-14:
                    #error_counter += 1
                    #print "!!!"
                    #print latest_star_bin
                #if error_counter >= 0:
                    #sys.exit()
            """

        # Calculate the mass density from the star's mass, the last bin
        # distance, and the current distance, and append it to the mass
        # density bin.
        mass_density = get_mass_density(mass, last_bin_dist, dist)
        logger.debug("mass_density: %s" % mass_density)
        mass_density_bin.append(mass_density)
        logger.debug("updated mass_density_bin: %s" % mass_density_bin)
        logger.debug("tau_sum: %s" % tau_sum)
        logger.info("")
        logger.info("")

    #if len(star_bins) > 0:
        #print "First star bin tau value: %s" % star_bins[0]["tau_value_after_addition"]

    logger.info("Final tau_sum: %s" % tau_sum)
    logger.info("Number of bins: %s" % len(star_bins))
    print "Final tau_sum: %s" % tau_sum
    print "Number of bins: %s" % len(star_bins)

    with open(STAR_BIN_FILEPATH, "w") as star_bin_file:
        writer = csv.DictWriter(star_bin_file, fieldnames=STAR_BIN_FIELDNAMES)
        writer.writeheader()
        for bin_dict in star_bins:
            writer.writerow(bin_dict)

    if len(star_bins) > 0:
        plot_star_bins(star_bins)

    return tau_sum
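# Illustrative sketch (an assumption, not necessarily this module's own
# get_tau_addition_term) of a per-bin tau term, based on the standard
# microlensing optical depth integrand
# tau = (4*pi*G/c**2) * integral( rho(D_l) * D_l * (D_s - D_l) / D_s ) dD_l,
# discretized over a bin of width delta_dist at lens distance dist_lens.
import numpy as np
from astropy import units
from astropy.constants import G, c

def get_tau_addition_term_sketch(ro_average, dist_lens, dist_source, delta_dist):
    # Geometric factor D_l * (D_s - D_l) / D_s for a lens inside the bin.
    geometry = dist_lens * (dist_source - dist_lens) / dist_source
    term = (4 * np.pi * G / c**2) * ro_average * geometry * delta_dist
    return term.decompose()  # dimensionless optical depth contribution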
def calculate_tau_test():
    star_info_dict = reading_in_star_population.read_star_pop(
        STAR_POP_FILEPATH, is_csv=True)
    calculate_tau(star_info_dict)
def main():
    pass
    """
    Pseudocode:

    -read in star pop data
    -time advance star pop data for a given period of time with a given
     time step size
    -from this, get a set of advanced star populations for each step from
     start-time to end-time
    -calculate whether any source stars passed within a corresponding lens's
     Einstein ring
        -for each time step:
            -for each pair of lens and source:
                -get Einstein ring radius
                -get angular separation between source and lens:
                 magnitude of ((l_source, b_source) - (l_lens, b_lens))
                -if angular separation is less than the Einstein ring radius,
                 this counts as an event...
                -...but somehow don't count the same event occurring over
                 multiple steps?

    -Or, alternatively:
        -calculate only the start and end points for time advancement of a
         star_pop: initial and final star_pop
        -have a function that determines both the angular separation and
         Einstein ring radius between star pops at any given time between
         start and end point
        -also have the function deliver the minimum of the angular separation
         minus Einstein ring difference
        -for each pair of lens and source:
            -call the angular separation / Einstein radius minimum difference
             function and check whether it is less than 0 (if so it is a
             lensing event)

    -Say for lens star AB and source star XY we have initial positions
     ab_i = (a_i, b_i) and xy_i = (x_i, y_i) and final positions
     ab_f = (a_f, b_f) and xy_f = (x_f, y_f)
    -final position calculated by
     p_ab(u_ab, ab_i, t_f) = (a_f, b_f) = (v_ab, w_ab) * t_f + (a_i, b_i)
     where (v_ab, w_ab) is AB velocity u_ab and t_f is the user-specified
     time period
    -equivalent done for XY
    -angular separation at time t is
     ang_separation(u_ab, ab_i, u_xy, xy_i, t) = p_ab(u_ab, ab_i, t) - p_xy(u_xy, xy_i, t)
    -Einstein ring radius at time t is
     get_einstein_rad(dist_ab(v_r_ab, ab_i, t), dist_xy(v_r_xy, xy_i, t), m_ab)
        -dist_ab(t) ~= dist_ab_i (basically constant?)
        -OR actually we have radial velocity v_r, so we can time advance
         distance as well: dist_ab(v_r_ab, ab_i, t) = v_r * t + ab_i
    -get_einstein_rad(d_ab, d_xy, m_ab) = sqrt( (4*g*m_ab/c**2) * (1/d_ab - 1/d_xy) )
    -difference_function(u_ab, v_r_ab, ab_i, u_xy, v_r_xy, xy_i, t)
     = ang_separation(u_ab, ab_i, u_xy, xy_i, t)
       - get_einstein_rad(dist_ab(v_r_ab, ab_i, t), dist_xy(v_r_xy, xy_i, t), m_ab)
    -if min(difference_function(~)) <= 0:
        -event_counter += 1
    ...

    Ok, so actually getting the minimum by solving
    d(difference_function)/dt = 0 for t and plugging the t value(s) back into
    the original difference function (and picking whichever result is smaller
    if there are multiple t solutions, to avoid getting a maximum)... is going
    to be a pain mathematically, and might need Mathematica; then we can
    compare this to a computational method later.
    """
    star_pop = reading_in_star_population.read_star_pop(STAR_POP_FILEPATH)
    print
    #advanced_star_pop = time_advancing.time_advance(star_pop)
    lens_index = 5200
    source_index = 5470
    lens = star_pop[lens_index]
    source = star_pop[source_index]
    microlensing_happened = do_they_microlens(lens, source)
    print microlensing_happened
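# A minimal numerical sketch of the second approach in the pseudocode above:
# sample the (angular separation - Einstein radius) difference over the time
# window and flag an event if its minimum drops to or below zero. The helper
# name, arguments, and the constant Einstein radius are illustrative
# assumptions, not the module's do_they_microlens API.
import numpy as np

def do_they_microlens_sketch(lens_pos_i, lens_vel, source_pos_i, source_vel,
                             einstein_radius, t_final, num_steps=1000):
    """Positions and velocities are 2-element (l, b) arrays in consistent
    angular units; einstein_radius is treated as constant over the window
    for simplicity."""
    times = np.linspace(0.0, t_final, num_steps)
    differences = []
    for t in times:
        lens_pos = np.asarray(lens_pos_i) + np.asarray(lens_vel) * t
        source_pos = np.asarray(source_pos_i) + np.asarray(source_vel) * t
        separation = np.linalg.norm(source_pos - lens_pos)
        differences.append(separation - einstein_radius)
    # An event occurs if the source ever falls inside the Einstein ring.
    return min(differences) <= 0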