Example #1
from multiprocessing import Process
from pandas import concat, read_csv


def create_catalog():
    """Extracts the stars catalogue in parallel and merges the partial results.

    :return: pandas DataFrame with the merged stars catalogue.
    """
    cats_d = extract_cats_d()  # extracts dataframes from catalogues
    full_d = create_full_cats(cats_d)  # creates dataframe from CCDs catalogues
    inputs_d = extract_inputs_d()
    save = True

    unique_sources = inputs_d['stars']['IDX']
    total_stars = inputs_d['stars']['IDX'].size

    # Integer division so the result can be used as a slice index
    sub_list_size = total_stars // 18

    # Splits the sources into 18 sub-lists; the last one takes the remainder
    sub_list_l = []
    for idx_sub_list in range(18):
        if idx_sub_list != (18 - 1):
            idx_down = sub_list_size * idx_sub_list
            idx_up = sub_list_size * (idx_sub_list + 1)
            sub_list_l.append(unique_sources[idx_down:idx_up])
        else:
            idx_down = sub_list_size * idx_sub_list
            sub_list_l.append(unique_sources[idx_down:])

    # Launches one worker process per sub-list
    areas_j = []
    for idx_l in range(18):
        areas_p = Process(target=create_stars_catalog_thread,
                          args=(idx_l, sub_list_l[idx_l], inputs_d, full_d))
        areas_j.append(areas_p)
        areas_p.start()

    # Waits until every worker process has finished
    for areas_p in areas_j:
        areas_p.join()

    # Merges the partial catalogues written by the worker processes
    stars_list = []
    for idx_csv in range(18):
        stars_ = read_csv('tmp_stars/stars_{}.csv'.format(idx_csv),
                          index_col=0)
        stars_list.append(stars_)

    stars_df = concat(stars_list)
    positions_table = concat([stars_df['X_WORLD'], stars_df['Y_WORLD']],
                             axis=1)
    if save:
        positions_table.to_csv('regions_detected/stars.reg',
                               index=False,
                               header=False,
                               sep=" ")

    return stars_df
Example #2
from multiprocessing import Process
from pandas import concat, read_csv


def create_catalog():
    """Extracts the SCAMP stars catalogue in parallel and merges the partial
    results.

    :return: pandas DataFrame with the merged stars catalogue.
    """
    stars_df = extract_stars_df()
    cats_d = extract_cats_d()  # extracts dataframes from catalogues
    full_d = create_full_cats(cats_d)  # creates dataframe from CCDs catalogues

    scamp_df = create_scamp_df()

    unique_sources = stars_df['IDX']
    total_stars = stars_df['IDX'].size

    # Integer division so the result can be used as a slice index
    sub_list_size = total_stars // 18

    # Splits the sources into 18 sub-lists; the last one takes the remainder
    sub_list_l = []
    for idx_sub_list in range(18):
        if idx_sub_list != (18 - 1):
            idx_down = sub_list_size * idx_sub_list
            idx_up = sub_list_size * (idx_sub_list + 1)
            sub_list_l.append(unique_sources[idx_down:idx_up])
        else:
            idx_down = sub_list_size * idx_sub_list
            sub_list_l.append(unique_sources[idx_down:])

    # Launches one worker process per sub-list
    extract_j = []
    for idx_l in range(18):
        extract_p = Process(target=create_stars_catalog_thread,
                            args=(idx_l, sub_list_l[idx_l], stars_df, full_d,
                                  scamp_df))
        extract_j.append(extract_p)
        extract_p.start()

    # Waits until every worker process has finished
    for extract_p in extract_j:
        extract_p.join()

    # Merges the partial catalogues written by the worker processes
    stars_list = []
    for idx_csv in range(18):
        stars_ = read_csv('tmp_stars/stars_{}.csv'.format(idx_csv),
                          index_col=0)
        stars_list.append(stars_)

    stars_df = concat(stars_list)
    stars_df.to_csv('catalogues_detected/scamp_stars.csv')

    return stars_df
Example #3
from multiprocessing import Process
from pandas import concat, read_csv


def create_catalog():
    """Extracts the galaxies catalogue in parallel and merges the partial
    results.

    :return: pandas DataFrame with the merged galaxies catalogue.
    """
    cats_d = extract_cats_d()  # extracts dataframes from catalogues
    full_d = create_full_cats(cats_d)  # creates dataframe from CCDs catalogues
    galaxies_df = extract_galaxies_df()
    save = True

    unique_sources = galaxies_df['IDX']
    total_galaxies = galaxies_df['IDX'].size

    # Integer division so the result can be used as a slice index
    sub_list_size = total_galaxies // 18

    # Splits the sources into 18 sub-lists; the last one takes the remainder
    sub_list_l = []
    for idx_sub_list in range(18):
        if idx_sub_list != (18 - 1):
            idx_down = sub_list_size * idx_sub_list
            idx_up = sub_list_size * (idx_sub_list + 1)
            sub_list_l.append(unique_sources[idx_down:idx_up])
        else:
            idx_down = sub_list_size * idx_sub_list
            sub_list_l.append(unique_sources[idx_down:])

    # Launches one worker process per sub-list
    areas_j = []
    for idx_l in range(18):
        areas_p = Process(target=create_galaxies_catalog_thread,
                          args=(idx_l, sub_list_l[idx_l], galaxies_df, full_d))
        areas_j.append(areas_p)
        areas_p.start()

    # Waits until every worker process has finished
    for areas_p in areas_j:
        areas_p.join()

    # Merges the partial catalogues written by the worker processes
    galaxies_list = []
    for idx_csv in range(18):
        galaxies_ = read_csv('tmp_galaxies/galaxies_{}.csv'.format(idx_csv),
                             index_col=0)
        galaxies_list.append(galaxies_)

    galaxies_df = concat(galaxies_list)

    if save:
        galaxies_df.to_csv('catalogues_detected/galaxies.csv')

    return galaxies_df
Example #4
from multiprocessing import Process
from pandas import concat, read_csv


def create_catalog():
    """Extracts the SSOs catalogue in parallel and merges the partial results.

    :return: pandas DataFrame with the merged SSOs catalogue.
    """
    cats_d = extract_cats_d()  # extracts dataframes from catalogues
    full_d = create_full_cats(cats_d)  # creates dataframe from CCDs catalogues
    ssos_df = propagate_dithers()
    ssos_clean_df = filter_by_position(ssos_df)

    unique_sources = list(set(ssos_clean_df['SOURCE'].tolist()))
    total_ssos = len(unique_sources)

    # Integer division so the result can be used as a slice index
    sub_list_size = total_ssos // 10

    # Splits the sources into 10 sub-lists; the last one takes the remainder
    sub_list_l = []
    for idx_sub_list in range(10):
        if idx_sub_list != (10 - 1):
            idx_down = sub_list_size * idx_sub_list
            idx_up = sub_list_size * (idx_sub_list + 1)
            sub_list_l.append(unique_sources[idx_down:idx_up])
        else:
            idx_down = sub_list_size * idx_sub_list
            sub_list_l.append(unique_sources[idx_down:])

    # Launches one worker process per sub-list
    areas_j = []
    for idx_l in range(10):
        areas_p = Process(target=create_ssos_catalog_thread,
                          args=(idx_l, sub_list_l[idx_l], ssos_clean_df,
                                full_d))
        areas_j.append(areas_p)
        areas_p.start()

    # Waits until every worker process has finished
    for areas_p in areas_j:
        areas_p.join()

    # Merges the partial catalogues written by the worker processes
    ssos_list = []
    for idx_csv in range(10):
        ssos_ = read_csv('tmp_ssos/ssos_{}.csv'.format(idx_csv),
                         index_col=0)
        ssos_list.append(ssos_)

    ssos_df = concat(ssos_list)
    ssos_df.to_csv('catalogues_detected/ssos.csv')

    return ssos_df
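
All four examples follow the same split-process-merge pattern: the list of sources is cut into equal chunks, one multiprocessing Process per chunk runs the extraction and writes a partial CSV file, and the parent merges those files with pandas. The sketch below is a generic, hypothetical helper that captures that pattern; run_in_chunks, its parameters and the temporary file-name pattern are illustrative and not part of the original code.

from multiprocessing import Process

from pandas import concat, read_csv


def run_in_chunks(sources, worker, extra_args, n_chunks, tmp_pattern):
    """Runs `worker` over `sources` split into `n_chunks` processes and merges
    the CSV files each worker is expected to write.

    `worker` must accept (chunk_index, chunk, *extra_args) and save its output
    to `tmp_pattern.format(chunk_index)`.
    """
    chunk_size = len(sources) // n_chunks

    # Builds the sub-lists; the last one takes any remainder
    chunks = []
    for idx in range(n_chunks):
        if idx != n_chunks - 1:
            chunks.append(sources[idx * chunk_size:(idx + 1) * chunk_size])
        else:
            chunks.append(sources[idx * chunk_size:])

    # Launches one process per chunk
    jobs = []
    for idx, chunk in enumerate(chunks):
        job = Process(target=worker, args=(idx, chunk) + tuple(extra_args))
        jobs.append(job)
        job.start()

    # Waits for every worker before reading its output file
    for job in jobs:
        job.join()

    # Merges the per-chunk CSV files into a single DataFrame
    partial = [read_csv(tmp_pattern.format(idx), index_col=0)
               for idx in range(n_chunks)]
    return concat(partial)

With such a helper, the body of each create_catalog() above reduces to a single call, e.g. run_in_chunks(stars_df['IDX'], create_stars_catalog_thread, (stars_df, full_d, scamp_df), 18, 'tmp_stars/stars_{}.csv') for Example #2.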