def perform_annotation(som: str, save: bool = True, resume: bool = False, results: str = None):
    """A simple driver to perform annotation of a `SOM`.

    Arguments:
        som {str} -- Path to desired SOM for annotation

    Keyword Arguments:
        save {bool} -- Save the annotations as they are being performed (default: {True})
        resume {bool} -- Continue the annotation process from the first
            un-annotated neuron (default: {False})
        results {str} -- Path to an existing results annotation set to load.
            Default lets the Annotator decide where results live. (default: {None})
    """
    annotator = pu.Annotator(som, save=save, results=results)
    annotator.interactive_annotate(resume=resume)
def update_annotation(som: str, key: Tuple[Any, ...], results: str = None):
    """Load an existing annotation set and update a single neuron.

    To do this an annotation set has to be loaded.

    TODO: Build in a custom path for the `results` argument

    Arguments:
        som {str} -- Path to SOM binary file
        key {Tuple[Any, ...]} -- Key of the `results` attribute to update

    Keyword Arguments:
        results {str} -- Path to existing results Annotator set. Default will
            attempt to automatically find one. (default: {None})
    """
    # `True` is the Annotator sentinel for "auto-locate an existing results set".
    results_path = True if results is None else results
    annotator = pu.Annotator(som, results=results_path)
    annotator.annotate_neuron(key, update=True, labeling=True)

    # When auto-located, save back to the Annotator's default location (None);
    # otherwise save to the explicit path the caller provided.
    save_path = None if results is None else results
    annotator.save_annotation_results(save_path)
# /// Select Sample (single tile) \\\ print("Loading the VLASS catalogue, specifying complex components") radio_cat_file = os.path.join(radio_path, radio_component_catalogue) radio_cat = vdl.load_vlass_catalogue(radio_cat_file, complex=True, NN_dist=72, SNR_min=None) print("Creating the tile catalogue") subtile_file = os.path.join(radio_path, vlass_subtiles) subtile_cat = Table.read(subtile_file, format="csv").to_pandas() tile_cat = vdl.vlass_tile(subtile_cat) print("Loading the SOM and annotations") som = pu.SOM(som_file) annotation = pu.Annotator(som.path, results=annotations_file) # sdss = sky_chunk(df, (120, 240), (-10, 50)) tiles = tile_cat.Tile.values ti = int(sys.argv[1]) if len(sys.argv) >= 2 else 0 num_tiles = int(sys.argv[2]) if len(sys.argv) >= 3 else 1 tiles = tiles[ti:ti + num_tiles] for tile_id in tiles: print(f"Processing tile {tile_id}") imbin_file, map_file, trans_file = cxc.binary_names(tile_id, path=out_bin_path) # Load VLASS and unWISE catalogues radio_sample, ir_cat = load_tile_catalogues(out_cat_path, radio_cat,
def run_all(
    catalogues,
    som_file,
    unique_id,
    image_cutout_path,
    bin_path="",
    img_size=(2, 150, 150),
    numthreads=10,
    annotation=None,
    annotations_file=None,
    remove_tile_cutouts=False,
    cpu_only=False,
    sorter_mode="area_ratio",
    pix_scale=0.6,
    som_mask=None,
):
    """Run the preprocess, map, and collate steps for a single sample.

    Args:
        catalogues (tuple): DataFrames of the radio and ir catalogues
        som_file (str): Name of the SOM file
        unique_id (str): Unique identifier for the sample (tile id, ssid, etc)
        image_cutout_path (str): Path to the directory containing the image cutouts
        bin_path (str, optional): Directory in which the binary files are
            created. Defaults to "".
        img_size (tuple, optional): Shape of the preprocessed images.
            Defaults to (2, 150, 150).
        numthreads (int, optional): Number of threads used by the mapping
            stage. Defaults to 10.
        annotation (optional): Pre-loaded annotation set. If None, one is
            loaded from `annotations_file`. Defaults to None.
        annotations_file (str, optional): Name of the SOM annotations file.
            Defaults to a name based on the SOM file name.
        remove_tile_cutouts (bool, optional): Remove the tile cutouts after
            preprocessing. Defaults to False.
        cpu_only (bool, optional): Run the mapping on the CPU only.
            Defaults to False.
        sorter_mode (str, optional): Sorting mode forwarded to `collate`.
            Defaults to "area_ratio".
        pix_scale (float, optional): Pixel scale forwarded to `collate`.
            Defaults to 0.6.
        som_mask (optional): Mask assigned to the SOM's `bmu_mask` attribute
            before collation. Defaults to None.

    Returns:
        tuple: The collated component and source catalogues
            (comp_cat, src_cat).
    """
    radio_cat, ir_cat = catalogues

    # Preprocess: build the image binary for this sample.
    imbin_file, map_file, trans_file = binary_names(unique_id, bin_path)
    print(f"Preprocessing the sample: {imbin_file}")
    imgs = preprocess(
        radio_cat,
        imbin_file,
        img_size=img_size,
        tile_cutout_path=image_cutout_path,
        remove_tile_cutouts=remove_tile_cutouts,
    )
    print("...done")

    # Map: push the preprocessed images through the SOM.
    print("Mapping...")
    som = pu.SOM(som_file)
    w, h, d = som.som_shape
    somset = map(
        imbin_file,
        som_file,
        map_file,
        trans_file,
        w,
        h,
        numthreads=numthreads,
        cpu_only=cpu_only,
    )
    somset.som.bmu_mask = som_mask
    print("...done")

    # Collate: load an annotation set if one was not supplied, then build
    # the component and source catalogues.
    if annotation is None:
        if annotations_file is None:
            annotations_file = f"{som_file}.results.pkl"
        annotation = pu.Annotator(som.path, results=annotations_file)
    print("Collating...")
    comp_cat, src_cat = collate(
        radio_cat,
        ir_cat,
        imgs,
        somset,
        annotation,
        sorter_mode=sorter_mode,
        pix_scale=pix_scale,
    )
    print("...done")

    return comp_cat, src_cat