def create_catalog():
    """Extract the input-stars DataFrame and persist it as a CSV catalogue.

    :return: None
    """
    stars_df = extract_stars_df()
    # Flag kept for parity with the other create_catalog variants; flip to
    # False to skip writing the catalogue to disk.
    save = True
    if save:
        stars_df.to_csv('catalogues_input/stars.csv')
def create_catalog():
    """Build the stars catalogue in parallel and save detected star regions.

    Extracts the input catalogues, splits the unique star indexes into 18
    chunks, runs one worker process per chunk
    (create_stars_catalog_thread), waits for all workers, then merges the
    per-chunk CSV outputs into a single DataFrame and writes the X/Y world
    positions to a regions file.

    :return: merged stars DataFrame
    """
    save = True
    cats_d = extract_cats_d()  # extracts dataframes from catalogues
    full_d = create_full_cats(cats_d)  # creates dataframe from CCDs catalogues
    stars_df = extract_stars_df()

    unique_sources = stars_df['stars']['IDX']
    total_stars = stars_df['stars']['IDX'].size
    # BUG FIX: '/' is float division in Python 3, and float slice indices
    # raise TypeError below; integer division is required.
    sub_list_size = total_stars // 18

    sub_list_l = []
    for idx_sub_list in range(18):
        idx_down = sub_list_size * idx_sub_list
        if idx_sub_list != 18 - 1:
            idx_up = sub_list_size * (idx_sub_list + 1)
            sub_list_l.append(unique_sources[idx_down:idx_up])
        else:
            # Last chunk absorbs the remainder of the integer division.
            sub_list_l.append(unique_sources[idx_down:])

    areas_j = []
    for idx_l in range(18):
        areas_p = Process(target=create_stars_catalog_thread,
                          args=(idx_l, sub_list_l[idx_l], stars_df, full_d))
        areas_j.append(areas_p)
        areas_p.start()

    # join() blocks until each worker exits; replaces the original
    # busy-wait polling loop that spun a full CPU core.
    for job in areas_j:
        job.join()

    # Merges the per-worker catalogues written to tmp_stars/.
    stars_list = []
    for idx_csv in range(18):
        stars_ = read_csv('tmp_stars/stars_{}.csv'.format(idx_csv),
                          index_col=0)
        stars_list.append(stars_)

    stars_df = concat(stars_list)
    positions_table = concat([stars_df['X_WORLD'], stars_df['Y_WORLD']],
                             axis=1)
    if save:
        positions_table.to_csv('regions_detected/stars.reg',
                               index=False, header=False, sep=" ")

    return stars_df
def create_catalog():
    """Build the scamp stars catalogue in parallel and save it to CSV.

    Extracts the input catalogues and the scamp DataFrame, splits the
    unique star indexes into 18 chunks, runs one worker process per chunk
    (create_stars_catalog_thread), waits for all workers, then merges the
    per-chunk CSV outputs and writes the combined catalogue.

    :return: merged stars DataFrame
    """
    stars_df = extract_stars_df()
    cats_d = extract_cats_d()  # extracts dataframes from catalogues
    full_d = create_full_cats(cats_d)  # creates dataframe from CCDs catalogues
    scamp_df = create_scamp_df()

    unique_sources = stars_df['IDX']
    total_stars = stars_df['IDX'].size
    # BUG FIX: '/' is float division in Python 3, and float slice indices
    # raise TypeError below; integer division is required.
    sub_list_size = total_stars // 18

    sub_list_l = []
    for idx_sub_list in range(18):
        idx_down = sub_list_size * idx_sub_list
        if idx_sub_list != 18 - 1:
            idx_up = sub_list_size * (idx_sub_list + 1)
            sub_list_l.append(unique_sources[idx_down:idx_up])
        else:
            # Last chunk absorbs the remainder of the integer division.
            sub_list_l.append(unique_sources[idx_down:])

    extract_j = []
    for idx_l in range(18):
        extract_p = Process(target=create_stars_catalog_thread,
                            args=(idx_l, sub_list_l[idx_l], stars_df,
                                  full_d, scamp_df))
        extract_j.append(extract_p)
        extract_p.start()

    # join() blocks until each worker exits; replaces the original
    # busy-wait polling loop that spun a full CPU core.
    for job in extract_j:
        job.join()

    # Merges the per-worker catalogues written to tmp_stars/.
    stars_list = []
    for idx_csv in range(18):
        stars_ = read_csv('tmp_stars/stars_{}.csv'.format(idx_csv),
                          index_col=0)
        stars_list.append(stars_)

    stars_df = concat(stars_list)
    stars_df.to_csv('catalogues_detected/scamp_stars.csv')

    return stars_df
def create_catalog():
    """Write the Gaia RA/DEC positions of the input stars to a regions file.

    :return: None
    """
    stars_df = extract_stars_df()
    gaia_coords = [stars_df['RA2000(Gaia)'], stars_df['DEC2000(Gaia)']]
    positions_table = concat(gaia_coords, axis=1)
    # Flag kept for parity with the other create_catalog variants; flip to
    # False to skip writing the regions file.
    save = True
    if save:
        positions_table.to_csv('regions_input/stars.reg',
                               index=False, header=False, sep=" ")