Example #1
def write_star(x, y, z, data):
    insertion_value_str = ",".join(data)
    insertion_str = "INSERT INTO gaia (" + insert_into_table_columns + ") VALUES (%s)" % insertion_value_str

    # Cell indices determine the names of the database folders
    x_idx = int(x/cell_size_pc)
    y_idx = int(y/cell_size_pc)
    z_idx = int(z/cell_size_pc)
    cell_dir = utils_path.append(dest_dir, "%+d/%+d/%+d" % (x_idx, y_idx, z_idx))

    if not os.path.isdir(cell_dir):
        os.makedirs(cell_dir)

    cell_db_filename = utils_path.append(cell_dir, "cell.db")
    new_db = not os.path.isfile(cell_db_filename)
    c = db_connection_cache.get(cell_db_filename, open_connections)

    if new_db:
        c.execute(create_table_str)

    c.execute(insertion_str)
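
write_star relies on a db_connection_cache helper that is not shown in these examples. A minimal sketch, assuming the cells are SQLite files and open_connections is a plain dict keyed by database filename, could look like the code below; the real module may commit and evict connections differently.

import sqlite3


def get(db_filename, open_connections, max_open=64):
    # Hypothetical sketch: return a cached connection for db_filename,
    # opening it on first use.
    conn = open_connections.get(db_filename)

    if conn is None:
        if len(open_connections) >= max_open:
            # Evict an arbitrary connection to keep the number of open
            # file handles bounded.
            _, evicted_conn = open_connections.popitem()
            evicted_conn.commit()
            evicted_conn.close()

        conn = sqlite3.connect(db_filename)
        open_connections[db_filename] = conn

    return conn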
Example #2
def get_neighbouring_cell_databases(cur_x_idx, cur_y_idx, cur_z_idx, min_x,
                                    max_x, min_y, max_y, min_z, max_z):
    min_x_idx = int(min_x / cell_size_pc)
    max_x_idx = int(max_x / cell_size_pc)
    min_y_idx = int(min_y / cell_size_pc)
    max_y_idx = int(max_y / cell_size_pc)
    min_z_idx = int(min_z / cell_size_pc)
    max_z_idx = int(max_z / cell_size_pc)
    to_add = set()

    for x_idx in range(min_x_idx, max_x_idx + 1):
        for y_idx in range(min_y_idx, max_y_idx + 1):
            for z_idx in range(min_z_idx, max_z_idx + 1):
                if cur_x_idx == x_idx and cur_y_idx == y_idx and cur_z_idx == z_idx:
                    continue  # Do not include self!!

                db_name = utils_path.append(
                    db_folder, "%+d/%+d/%+d/cell.db" % (x_idx, y_idx, z_idx))

                if os.path.isfile(db_name):
                    to_add.add(db_name)

    return to_add
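
Both examples also use utils_path.append to build the per-cell paths. A minimal sketch, assuming it simply delegates to os.path.join, is shown below; the real helper may normalise separators differently.

import os


def append(base_path, *parts):
    # Hypothetical sketch: join path components, so the "%+d/%+d/%+d" cell
    # indices become nested folder names under base_path.
    return os.path.join(base_path, *parts)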
Example #3
def find_comoving_to_star(star, in_group_sids):
    if star_sid_to_comoving_group_index.get(star[i_sid]) is not None:
        return []  # Star is already in another group!

    x = star[i_x]
    y = star[i_y]
    z = star[i_z]
    x_idx = int(x / cell_size_pc)
    y_idx = int(y / cell_size_pc)
    z_idx = int(z / cell_size_pc)

    conn_filename = utils_path.append(
        db_folder, "%+d/%+d/%+d/cell.db" % (x_idx, y_idx, z_idx))
    conn = db_connection_cache.get(conn_filename, open_db_connections)

    min_x = max(x - maximum_broad_separation_pc, -max_distance_pc)
    max_x = min(x + maximum_broad_separation_pc, max_distance_pc)
    min_y = max(y - maximum_broad_separation_pc, -max_distance_pc)
    max_y = min(y + maximum_broad_separation_pc, max_distance_pc)
    min_z = max(z - maximum_broad_separation_pc, -max_distance_pc)
    max_z = min(z + maximum_broad_separation_pc, max_distance_pc)

    vx = star[i_vx]
    vy = star[i_vy]
    vz = star[i_vz]
    min_vx = vx - maximum_broad_velocity_diff_km_per_s
    max_vx = vx + maximum_broad_velocity_diff_km_per_s
    min_vy = vy - maximum_broad_velocity_diff_km_per_s
    max_vy = vy + maximum_broad_velocity_diff_km_per_s
    min_vz = vz - maximum_broad_velocity_diff_km_per_s
    max_vz = vz + maximum_broad_velocity_diff_km_per_s

    # Coarse cut: box in everything inside an oversized box
    # (compare maximum_broad_separation_pc with maximum_final_separation_pc).
    # Stars already in the group are excluded via their source_ids, so a star
    # is never compared with itself.
    find_nearby_query = '''
        SELECT %s
        FROM gaia
        WHERE source_id NOT IN (%s)
        AND (x > %f AND x < %f)
        AND (y > %f AND y < %f)
        AND (z > %f AND z < %f)
        AND (vx > %f AND vx < %f)
        AND (vy > %f AND vy < %f)
        AND (vz > %f AND vz < %f)''' % (
        columns_to_fetch, ",".join(map(str, in_group_sids)),
        min_x, max_x, min_y, max_y, min_z, max_z,
        min_vx, max_vx, min_vy, max_vy, min_vz, max_vz)

    maybe_comoving_to_star = conn.execute(find_nearby_query).fetchall()
    neighbour_cells_to_include = get_neighbouring_cell_databases(
        x_idx, y_idx, z_idx, min_x, max_x, min_y, max_y, min_z, max_z)

    for neighbour_cell_db_name in neighbour_cells_to_include:
        neighbour_conn = db_connection_cache.get(neighbour_cell_db_name,
                                                 open_db_connections)
        maybe_comoving_to_star.extend(
            neighbour_conn.execute(find_nearby_query).fetchall())

    if len(maybe_comoving_to_star) == 0:
        return []

    comoving_to_star = []

    pos = [star[i_x], star[i_y], star[i_z]]
    vel_km_per_s = [star[i_vx], star[i_vy], star[i_vz]]

    ra = star[i_ra] * conv.deg_to_rad
    dec = star[i_dec] * conv.deg_to_rad
    dist = star[i_dist]
    ra_error = star[i_ra_error] * conv.deg_to_rad
    dec_error = star[i_dec_error] * conv.deg_to_rad
    dist_error = star[i_dist_error]
    pmra = star[i_pmra] * conv.mas_per_yr_to_rad_per_s
    pmdec = star[i_pmdec] * conv.mas_per_yr_to_rad_per_s
    rv = star[i_rv]
    pmra_error = star[i_pmra_error] * conv.mas_per_yr_to_rad_per_s
    pmdec_error = star[i_pmdec_error] * conv.mas_per_yr_to_rad_per_s
    error_rv = star[i_rv_error]

    for mcs in maybe_comoving_to_star:
        if star_sid_to_comoving_group_index.get(mcs[i_sid]) is not None:
            continue

        if mcs[i_parallax_over_error] < cut_parallax_over_error:
            continue

        mcs_pos = [mcs[i_x], mcs[i_y], mcs[i_z]]
        pos_diff_len = vec3.mag(vec3.sub(mcs_pos, pos))

        mcs_ra = mcs[i_ra] * conv.deg_to_rad
        mcs_dec = mcs[i_dec] * conv.deg_to_rad
        mcs_dist = mcs[i_dist]
        mcs_ra_error = mcs[i_ra_error] * conv.deg_to_rad
        mcs_dec_error = mcs[i_dec_error] * conv.deg_to_rad
        mcs_dist_error = mcs[i_dist_error]

        pos_diff_len_error = vec3.celestial_magnitude_of_position_difference_error(
            ra, dec, dist, ra_error, dec_error, dist_error, mcs_ra, mcs_dec,
            mcs_dist, mcs_ra_error, mcs_dec_error, mcs_dist_error)

        # Position cut, with added 3*error
        if pos_diff_len > maximum_final_separation_pc + 3 * pos_diff_len_error:
            continue

        mcs_pmra = mcs[i_pmra] * conv.mas_per_yr_to_rad_per_s
        mcs_pmdec = mcs[i_pmdec] * conv.mas_per_yr_to_rad_per_s
        mcs_rv = mcs[i_rv]
        mcs_pmra_error = mcs[i_pmra_error] * conv.mas_per_yr_to_rad_per_s
        mcs_pmdec_error = mcs[i_pmdec_error] * conv.mas_per_yr_to_rad_per_s
        mcs_error_rv = mcs[i_rv_error]

        mcs_vel_km_per_s = [mcs[i_vx], mcs[i_vy], mcs[i_vz]]
        speed_diff = vec3.mag(vec3.sub(mcs_vel_km_per_s, vel_km_per_s))

        speed_diff_error = vec3.celestial_magnitude_of_velocity_difference_error(
            mcs_ra, mcs_dec, mcs_dist * conv.pc_to_km, mcs_ra_error,
            mcs_dec_error, mcs_dist_error * conv.pc_to_km, mcs_pmra, mcs_pmdec,
            mcs_rv, mcs_pmra_error, mcs_pmdec_error, mcs_error_rv, ra, dec,
            dist * conv.pc_to_km, ra_error, dec_error,
            dist_error * conv.pc_to_km, pmra, pmdec, rv, pmra_error,
            pmdec_error, error_rv)

        # Velocity cut, with added 3*error
        if speed_diff > maximum_final_velocity_diff_km_per_s + 3 * speed_diff_error:
            continue

        comoving_to_star.append(mcs)

    if len(comoving_to_star) == 0:
        return []

    # Important! Ensures the recursive call below does not re-use a star that
    # is already part of the group.
    in_group_sids.update(s[i_sid] for s in comoving_to_star)
    resulting_stars = comoving_to_star.copy()

    # Recursively look for stars comoving with the newly found stars, building
    # up a network of comoving stars.
    for cm_star in comoving_to_star:
        resulting_stars.extend(find_comoving_to_star(cm_star, in_group_sids))

    return resulting_stars
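
find_comoving_to_star only returns the newly found members and expects in_group_sids to already contain the seed star's source_id. A hypothetical driver loop, assuming stars holds the candidate seed rows and star_sid_to_comoving_group_index is the shared bookkeeping dict used above, might look like this:

comoving_groups = []

for seed_star in stars:
    # Seed each group with the star's own source_id so the query excludes it.
    group_members = find_comoving_to_star(seed_star, {seed_star[i_sid]})

    if not group_members:
        continue  # No comoving companions found for this seed.

    group_members.append(seed_star)
    group_index = len(comoving_groups)
    comoving_groups.append(group_members)

    # Mark every member as used so later seeds skip them.
    for member in group_members:
        star_sid_to_comoving_group_index[member[i_sid]] = group_index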
Example #4
        return False

    if os.path.isfile(sys.argv[2]) or os.path.isdir(sys.argv[2]):
        return False

    return True


assert verify_arguments(), "Usage: find_comoving_cut1.py db_folder output.cms"

db_folder = sys.argv[1]
output_filename = sys.argv[2]
start_time = time.time()

# Get metadata that was written alongside db
metadata_filename = utils_path.append(db_folder, "metadata")
assert os.path.isfile(
    metadata_filename), "metadata mising: %s" % metadata_filename
with open(metadata_filename, "r") as metadata_fh:
    metadata_lines = metadata_fh.readlines()
metadata = {}

for mdl in metadata_lines:
    key_value_pair = mdl.split(":")

    if len(key_value_pair) != 2:
        error("metadata in %s is of incorrect format" % db_folder)

    metadata[key_value_pair[0]] = eval(key_value_pair[1])
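
eval on file contents executes arbitrary Python, so even for trusted metadata a stricter parse can be preferable. A sketch of the same loop using ast.literal_eval, assuming every value is a plain Python literal (number, string or list), is shown below.

import ast

metadata = {}

for mdl in metadata_lines:
    key_value_pair = mdl.split(":")

    if len(key_value_pair) != 2:
        error("metadata in %s is of incorrect format" % db_folder)

    # literal_eval only accepts literals and refuses arbitrary expressions.
    metadata[key_value_pair[0]] = ast.literal_eval(key_value_pair[1].strip())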
Example #5
    plt.clf()
    legend = []
    for metallicity, iso in isos_per_metallicity.items():
        iso_lteff = []
        iso_gaia_G = []
        iso_index_log_Teff = iso["columns"].index("log_Teff")
        iso_index_gaia_G = iso["columns"].index("Gaia_G_DR2Rev")

        for d in iso["data"]:
            iso_lteff.append(d[iso_index_log_Teff])
            iso_gaia_G.append(d[iso_index_gaia_G])

        plt.plot(iso_lteff, iso_gaia_G)
        plt.xlabel('log $T_{eff}$')
        plt.ylabel('Gaia G absolute magnitude')
        legend.append(str(metallicity))

    for i in range(len(teff)):
        plt.plot(teff[i], abs_mags[i], '.', markersize=5)

    legend.extend(
        ["5921751752101964416", "5921751408511009024", "5921376578117781760"])
    plt.legend(legend)
    plt.gca().invert_xaxis()
    plt.gca().invert_yaxis()
    plt.savefig(utils_path.append(output_dir,
                                  "%d_%s.eps" % (file_index, str(age))),
                bbox_inches='tight',
                pad_inches=0.02)
    file_index = file_index + 1
    isos_per_metallicity = iba["isos_per_metallicity"]

    plt.clf()
    legend = []
    for metallicity, iso in isos_per_metallicity.items():
        iso_lteff = []
        iso_gaia_G = []
        iso_index_log_Teff = iso["columns"].index("log_Teff")
        iso_index_gaia_G = iso["columns"].index("Gaia_G_DR2Rev")

        for d in iso["data"]:
            iso_lteff.append(d[iso_index_log_Teff])
            iso_gaia_G.append(d[iso_index_gaia_G])

        plt.plot(iso_lteff, iso_gaia_G)
        plt.xlabel('log $T_{eff}$')
        plt.ylabel('Gaia G absolute magnitude')
        legend.append(str(metallicity))

    for i in range(len(teff)):
        plt.plot(teff[i], abs_mags[i], '.', markersize=5)

    legend.extend(["5281825062636445696", "5213358473574080896"])
    plt.title("Age: 10^%f (%s) years" % (age, str(int(10**age))))
    plt.legend(legend)
    plt.gca().invert_xaxis()
    plt.gca().invert_yaxis()
    plt.savefig(
        utils_path.append(output_dir, "%d_%s.png" % (file_index, str(age))))
    file_index = file_index + 1
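
The abs_mags values plotted against the isochrones are Gaia G absolute magnitudes. A sketch of how they could be derived from the apparent G magnitude and the distance, assuming the standard distance modulus and neglecting extinction, is given below.

import math


def absolute_g_magnitude(apparent_g_mag, dist_pc):
    # Distance modulus: M = m - 5 * (log10(d_pc) - 1), with d_pc in parsecs.
    return apparent_g_mag - 5.0 * (math.log10(dist_pc) - 1.0)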
Example #7
        os.makedirs(cell_dir)

    cell_db_filename = utils_path.append(cell_dir, "cell.db")
    new_db = not os.path.isfile(cell_db_filename)
    c = db_connection_cache.get(cell_db_filename, open_connections)

    if new_db:
        c.execute(create_table_str)

    c.execute(insertion_str)

for file in os.listdir(source_dir):
    if not file.endswith(".csv"):
        continue

    with open(utils_path.append(source_dir, file), "r") as csv_fh:
        csv_lines = csv_fh.readlines()

    for i in range(1, len(csv_lines)): # skip header line
        csv_line = csv_lines[i]
        source_values = csv_line.split(",")

        if not is_valid(source_values):
            continue

        dest_values = [None] * len(all_columns)

        # Copy all columns that are taken from Gaia as-is (derived quantities
        # such as distance are calculated further below)
        for idx, val in enumerate(source_values):
            dt = gaia_columns.data_types[idx]