def format_data(input_dir, output_dir, num_fovs=16):
    """Format ISS cropped-breast data into SpaceTx format.

    Parameters
    ----------
    input_dir : str
        Directory containing the raw input images.
    output_dir : str
        Directory the SpaceTx-formatted experiment is written to.
    num_fovs : int
        Number of fields of view to write. Defaults to 16, the value this
        data set was previously hardcoded to, so existing callers are
        unaffected.
    """
    # Dimensions are fixed for this example data set: 4 rounds x 4 channels,
    # single z-plane.
    primary_image_dimensions = {
        Axes.ROUND: 4,
        Axes.CH: 4,
        Axes.ZPLANE: 1,
    }
    # Both auxiliary views are single-round, single-channel, single-plane.
    aux_name_to_dimensions = {
        'nuclei': {
            Axes.ROUND: 1,
            Axes.CH: 1,
            Axes.ZPLANE: 1,
        },
        'dots': {
            Axes.ROUND: 1,
            Axes.CH: 1,
            Axes.ZPLANE: 1,
        }
    }
    write_experiment_json(
        path=output_dir,
        fov_count=num_fovs,
        tile_format=ImageFormat.TIFF,
        primary_image_dimensions=primary_image_dimensions,
        aux_name_to_dimensions=aux_name_to_dimensions,
        primary_tile_fetcher=ISSCroppedBreastPrimaryTileFetcher(input_dir),
        aux_tile_fetcher={
            'nuclei': ISSCroppedBreastAuxTileFetcher(input_dir, 'nuclei'),
            'dots': ISSCroppedBreastAuxTileFetcher(input_dir, 'dots'),
        },
    )
def format_data(input_dir, output_dir):
    """Write the MERFISH data in *input_dir* to *output_dir* in SpaceTx format."""

    def add_codebook(experiment_json_doc):
        # Point the experiment at the externally supplied codebook file.
        experiment_json_doc['codebook'] = "codebook.json"
        return experiment_json_doc

    num_fovs = 496

    # Primary images: 8 rounds x 2 channels, single z-plane.
    primary_image_dimensions = {Axes.ROUND: 8, Axes.CH: 2, Axes.ZPLANE: 1}

    # A single DAPI-based nuclei auxiliary view.
    aux_name_to_dimensions = {
        'nuclei': {Axes.ROUND: 1, Axes.CH: 1, Axes.ZPLANE: 1},
    }

    write_experiment_json(
        output_dir,
        num_fovs,
        tile_format=ImageFormat.TIFF,
        primary_image_dimensions=primary_image_dimensions,
        aux_name_to_dimensions=aux_name_to_dimensions,
        primary_tile_fetcher=MERFISHTileFetcher(input_dir, is_dapi=False),
        aux_tile_fetcher={'nuclei': MERFISHTileFetcher(input_dir, is_dapi=True)},
        postprocess_func=add_codebook,
        default_shape=SHAPE,
    )
def format_data(image_dir: Path,
                primary_image_dimensions: Mapping[Union[Axes, str], int],
                aux_name_to_dimensions: Mapping[str, Mapping[Union[Axes, str], int]],
                num_fovs):
    """Build SpaceTx experiment metadata in-place over the images in *image_dir*."""

    def add_codebook(experiment_json_doc):
        # Reference the codebook file that lives beside the experiment json.
        experiment_json_doc['codebook'] = "codebook.json"
        return experiment_json_doc

    # Switch the experiment writer into in-place mode before emitting metadata.
    enable_inplace_mode()

    # One in-place fetcher per auxiliary view, keyed by view name.
    aux_fetchers = {
        view_name: InplaceFetcher(image_dir, view_name)
        for view_name in aux_name_to_dimensions
    }

    write_experiment_json(
        path=os.fspath(image_dir),
        fov_count=num_fovs,
        tile_format=ImageFormat.TIFF,
        primary_image_dimensions=primary_image_dimensions,
        aux_name_to_dimensions=aux_name_to_dimensions,
        primary_tile_fetcher=InplaceFetcher(image_dir, FieldOfView.PRIMARY_IMAGES),
        aux_tile_fetcher=aux_fetchers,
        postprocess_func=add_codebook,
        default_shape=SHAPE,
        fov_path_generator=fov_path_generator,
        tile_opener=inplace_tile_opener,
    )
def cli(input_dir: str, output_dir: str, codebook_csv: str) -> int:
    """CLI entrypoint for spaceTx format construction for SeqFISH data

    Parameters
    ----------
    input_dir : str
        directory containing input multi-page TIFF files for a single field of view,
        separated by the imaging round they were acquired in and named
        <1-index round>.tif
    output_dir : str
        directory containing output files. Will be created if it does not exist.
    codebook_csv : str
        name of the codebook csv file containing barcode information for this field
        of view.

    Notes
    -----
    - each round is organized as [z, ch, [x|y], [x|y]] -- the order of x and y are
      not known, but since this script uses dummy coordinates, this distinction is
      not important.
    - The spatial organization of the field of view is not known to the starfish
      developers, so they are filled by dummy coordinates
    - Raw data (input for this tool) for this experiment can be found at:
      s3://spacetx.starfish.data.public/browse/raw/seqfish/
    - Processed data (output of this tool) can be found at:
      s3://spacetx.starfish.data.public/browse/formatted/20181211/seqfish/ and
      accessed in `starfish.data.SeqFISH`

    Returns
    -------
    int :
        Returns 0 if successful
    """
    os.makedirs(output_dir, exist_ok=True)
    primary_tile_fetcher = SeqFISHTileFetcher(os.path.expanduser(input_dir))

    # This is hardcoded for this example data set
    primary_image_dimensions: Mapping[Union[str, Axes], int] = {
        Axes.ROUND: 5,
        Axes.CH: 12,
        Axes.ZPLANE: 29,
    }

    write_experiment_json(
        path=output_dir,
        fov_count=1,
        primary_image_dimensions=primary_image_dimensions,
        aux_name_to_dimensions={},
        primary_tile_fetcher=primary_tile_fetcher,
        tile_format=ImageFormat.TIFF,
        dimension_order=(Axes.ROUND, Axes.CH, Axes.ZPLANE)
    )

    # Note: this must trigger AFTER write_experiment_json, as it will clobber the
    # codebook with a placeholder.
    codebook = parse_codebook(codebook_csv)
    # BUGFIX: write into output_dir so the parsed codebook actually replaces the
    # placeholder emitted there by write_experiment_json; previously the file was
    # written to the current working directory.
    codebook.to_json(os.path.join(output_dir, "codebook.json"))

    return 0
def format_data(input_dir, output_dir) -> None:
    """Format a BaristaSeq Tile

    Parameters
    ----------
    input_dir : str
        Input directory containing data. Example data for a single FoV can be
        downloaded from
        s3://spacetx.starfish.data.public/browse/raw/20181231/barista-seq-mouse-cortex-cropped
    output_dir : str
        Output directory containing formatted data in SpaceTx format. Example output
        data can be downloaded from
        https://d2nhj9g34unfro.cloudfront.net/browse/formatted/20181028/ \
        BaristaSeq/cropped_formatted/experiment.json"
    """
    os.makedirs(output_dir, exist_ok=True)

    # Single field of view; 3 rounds x 4 channels over a 17-plane z-stack.
    fov_total = 1
    primary_image_dimensions: Mapping[Union[str, Axes], int] = {
        Axes.ROUND: 3,
        Axes.CH: 4,
        Axes.ZPLANE: 17,
    }
    # The nuclei stain shares the 17-plane z-stack but has one round and channel.
    aux_name_to_dimensions: Mapping[str, Mapping[Union[str, Axes], int]] = {
        "nuclei": {Axes.ROUND: 1, Axes.CH: 1, Axes.ZPLANE: 17},
    }

    write_experiment_json(
        path=output_dir,
        fov_count=fov_total,
        primary_image_dimensions=primary_image_dimensions,
        aux_name_to_dimensions=aux_name_to_dimensions,
        primary_tile_fetcher=BaristaSeqTileFetcher(input_dir),
        aux_tile_fetcher={"nuclei": BaristaSeqNucleiTileFetcher(input_dir, "nuclei")},
        tile_format=ImageFormat.TIFF,
        default_shape=DEFAULT_TILE_SHAPE,
    )

    # The codebook ships with the raw data; copy it beside the experiment json.
    shutil.copyfile(
        src=os.path.join(input_dir, "codebook.json"),
        dst=os.path.join(output_dir, "codebook.json"),
    )
def format_data(input_dir, output_dir):
    """Format MERFISH data, attaching per-round scale factors as experiment extras."""
    input_dir = os.path.abspath(input_dir)
    output_dir = os.path.abspath(output_dir)

    def add_scale_factors(experiment_json_doc):
        # Load the scale factors shipped alongside the raw data and stash them
        # in the experiment document's "extras" section.
        scale_path = os.path.join(input_dir, "scale_factors.json")
        with open(scale_path, 'r') as fh:
            loaded = json.load(fh)
        experiment_json_doc['extras'] = {"scale_factors": loaded}
        return experiment_json_doc

    num_fovs = 496

    # Primary images: 8 rounds x 2 channels, single z-plane.
    primary_image_dimensions = {Axes.ROUND: 8, Axes.CH: 2, Axes.ZPLANE: 1}

    # A single DAPI-based nuclei auxiliary view.
    aux_name_to_dimensions = {
        'nuclei': {Axes.ROUND: 1, Axes.CH: 1, Axes.ZPLANE: 1},
    }

    write_experiment_json(
        output_dir,
        num_fovs,
        tile_format=ImageFormat.TIFF,
        primary_image_dimensions=primary_image_dimensions,
        aux_name_to_dimensions=aux_name_to_dimensions,
        primary_tile_fetcher=MERFISHTileFetcher(input_dir, is_dapi=False),
        aux_tile_fetcher={'nuclei': MERFISHTileFetcher(input_dir, is_dapi=True)},
        postprocess_func=add_scale_factors,
        default_shape=SHAPE,
    )
def cli(input_dir, output_dir):
    """CLI entrypoint for spaceTx format construction for Imaging Mass Cytometry

    Raw data (input for this tool) for this experiment can be found at:
    s3://spacetx.starfish.data.public/browse/raw/20181015/imaging_cytof/\
    BodenmillerBreastCancerSamples/

    Processed data (output of this tool) can be found at:
    s3://spacetx.starfish.data.public/browse/formatted/20181023/imaging_cytof/\
    BodenmillerBreastCancerSamples/
    """
    os.makedirs(output_dir, exist_ok=True)

    fetcher = ImagingMassCytometryTileFetcher(os.path.expanduser(input_dir))

    # Single round, single z-plane; the channel count is derived from the data.
    primary_image_dimensions = {
        Axes.ROUND: 1,
        Axes.CH: len(fetcher._ch_dict),
        Axes.ZPLANE: 1
    }

    def postprocess_func(experiment_json_doc):
        # Point the experiment at the codebook written below.
        experiment_json_doc["codebook"] = "codebook.json"
        return experiment_json_doc

    # Generate the codebook from the data and write it before the experiment json.
    codebook_path = os.path.join(output_dir, "codebook.json")
    with open(codebook_path, 'w') as f:
        json.dump(fetcher.generate_codebook(), f)

    write_experiment_json(
        path=output_dir,
        fov_count=len(fetcher._fov_map),
        tile_format=ImageFormat.TIFF,
        primary_image_dimensions=primary_image_dimensions,
        aux_name_to_dimensions={},
        primary_tile_fetcher=fetcher,
        postprocess_func=postprocess_func,
    )
def format_data(input_dir, output_dir, num_fovs):
    """Format ISS cropped-breast fields of view into SpaceTx format."""

    def add_codebook(experiment_json_doc):
        # The codebook is supplied separately as codebook.json.
        experiment_json_doc['codebook'] = "codebook.json"
        return experiment_json_doc

    # Primary images: 4 rounds x 4 channels, single z-plane.
    primary_image_dimensions = {Axes.ROUND: 4, Axes.CH: 4, Axes.ZPLANE: 1}

    aux_views = ('nuclei', 'dots')
    # Each auxiliary view is a single-round, single-channel, single-plane image.
    aux_name_to_dimensions = {
        view: {Axes.ROUND: 1, Axes.CH: 1, Axes.ZPLANE: 1}
        for view in aux_views
    }

    write_experiment_json(
        path=output_dir,
        fov_count=num_fovs,
        tile_format=ImageFormat.TIFF,
        primary_image_dimensions=primary_image_dimensions,
        aux_name_to_dimensions=aux_name_to_dimensions,
        primary_tile_fetcher=ISSCroppedBreastPrimaryTileFetcher(input_dir),
        aux_tile_fetcher={
            view: ISSCroppedBreastAuxTileFetcher(input_dir, view)
            for view in aux_views
        },
        postprocess_func=add_codebook,
        default_shape={Axes.Y: 1044, Axes.X: 1390},
    )
def cli(input_dir, metadata_yaml, output_dir):
    """Reads osmFISH images from <input-dir> and experiment metadata from
    <metadata-yaml> and writes spaceTx-formatted data to <output-dir>.

    Raw data (input for this tool) for this experiment can be found at:
    s3://spacetx.starfish.data.upload/simone/

    Processed data (output of this tool) can be found at:
    s3://spacetx.starfish.data.public/20181031/osmFISH/
    """
    os.makedirs(output_dir, exist_ok=True)
    primary_tile_fetcher = osmFISHTileFetcher(os.path.expanduser(input_dir), metadata_yaml)

    # This is hardcoded for this example data set; channel and z-plane counts
    # come from the parsed experiment metadata.
    primary_image_dimensions = {
        Axes.ROUND: 13,
        Axes.CH: len(primary_tile_fetcher.channel_map),
        Axes.ZPLANE: primary_tile_fetcher.num_z
    }

    def postprocess_func(experiment_json_doc):
        # Point the experiment at the codebook written below.
        experiment_json_doc["codebook"] = "codebook.json"
        return experiment_json_doc

    with open(os.path.join(output_dir, "codebook.json"), "w") as f:
        codebook = primary_tile_fetcher.generate_codebook()
        json.dump(codebook, f)

    write_experiment_json(path=output_dir,
                          fov_count=len(primary_tile_fetcher.fov_map),
                          tile_format=ImageFormat.TIFF,
                          primary_image_dimensions=primary_image_dimensions,
                          aux_name_to_dimensions={},
                          primary_tile_fetcher=primary_tile_fetcher,
                          postprocess_func=postprocess_func,
                          dimension_order=(Axes.ROUND, Axes.CH, Axes.ZPLANE))
    # (removed a dead trailing `pass` statement)
def format_data(input_dir: str, output_dir: str, gene_name: str) -> None:
    """Format a single-gene STARmap field of view and write a one-codeword codebook."""
    # Both views are single-round, single-channel, single-plane.
    primary_image_dimensions: Mapping[Axes, int] = {
        Axes.ROUND: 1,
        Axes.CH: 1,
        Axes.ZPLANE: 1,
    }
    aux_name_to_dimensions: Mapping[str, Mapping[Union[str, Axes], int]] = {
        "nissl": {Axes.ROUND: 1, Axes.CH: 1, Axes.ZPLANE: 1},
    }

    write_experiment_json(
        path=output_dir,
        fov_count=1,
        tile_format=ImageFormat.TIFF,
        primary_image_dimensions=primary_image_dimensions,
        aux_name_to_dimensions=aux_name_to_dimensions,
        primary_tile_fetcher=StarMapTileFetcher(input_dir),
        aux_tile_fetcher={"nissl": StarMapTileFetcher(input_dir)},
        dimension_order=(Axes.ROUND, Axes.CH, Axes.ZPLANE))

    # A synthetic one-entry codebook: the single target gene lights up in
    # round 0, channel 0.
    codeword = [{
        Axes.ROUND.value: 0,
        Axes.CH.value: 0,
        Features.CODE_VALUE: 1
    }]
    code_array = [{Features.CODEWORD: codeword, Features.TARGET: gene_name}]
    Codebook.from_code_array(code_array).to_json(
        os.path.join(output_dir, "codebook.json"))
def cli(input_dir, output_dir, metadata_yaml):
    """CLI entrypoint for spaceTx format construction for osmFISH data

    Raw data (input for this tool) for this experiment can be found at:
    s3://spacetx.starfish.data.upload/simone/

    Processed data (output of this tool) can be found at:
    s3://spacetx.starfish.data.public/20181031/osmFISH/
    """
    os.makedirs(output_dir, exist_ok=True)
    primary_tile_fetcher = osmFISHTileFetcher(os.path.expanduser(input_dir), metadata_yaml)

    # This is hardcoded for this example data set; channel and z-plane counts
    # come from the parsed experiment metadata.
    primary_image_dimensions = {
        Indices.ROUND: 13,
        Indices.CH: len(primary_tile_fetcher.channel_map),
        Indices.Z: primary_tile_fetcher.num_z
    }

    def postprocess_func(experiment_json_doc):
        # Point the experiment at the codebook written below.
        experiment_json_doc["codebook"] = "codebook.json"
        return experiment_json_doc

    with open(os.path.join(output_dir, "codebook.json"), "w") as f:
        codebook = primary_tile_fetcher.generate_codebook()
        json.dump(codebook, f)

    write_experiment_json(path=output_dir,
                          fov_count=len(primary_tile_fetcher.fov_map),
                          tile_format=ImageFormat.NUMPY,
                          primary_image_dimensions=primary_image_dimensions,
                          aux_name_to_dimensions={},
                          primary_tile_fetcher=primary_tile_fetcher,
                          postprocess_func=postprocess_func,
                          dimension_order=(Indices.ROUND, Indices.CH, Indices.Z))
    # (removed a dead trailing `pass` statement)
def cli(input_dir, output_dir) -> None:
    """CLI entrypoint for spaceTx format construction for osmFISH data

    Raw data (input for this tool) for this experiment can be found at:
    s3://spacetx.starfish.data.public/browse/raw/20181031/starmap/

    Processed data (output of this tool) can be found at:
    s3://spacetx.starfish.data.public/browse/formatted/20190111/starmap/

    Parameters
    ----------
    input_dir : str
        directory containing input data. See TileFetcher classes for expected
        directory structures.
    output_dir : str
        directory that 2-d images and SpaceTx metadata will be written to.
    """
    abs_output_dir = os.path.expanduser(output_dir)
    abs_input_dir = os.path.expanduser(input_dir)
    os.makedirs(abs_output_dir, exist_ok=True)

    primary_tile_fetcher = StarMapTileFetcher(abs_input_dir)
    dapi_tile_fetcher = StarMapDapiTileFetcher(abs_input_dir)
    nissl_00_tile_fetcher = StarMapNisslTileFetcher(abs_input_dir, channels=(0, 0))
    nissl_01_tile_fetcher = StarMapNisslTileFetcher(abs_input_dir, channels=(0, 1))
    nissl_02_tile_fetcher = StarMapNisslTileFetcher(abs_input_dir, channels=(0, 2))

    # This is hardcoded for this example data set
    primary_image_dimensions: Mapping[Union[str, Axes], int] = {
        Axes.ROUND: 6,
        Axes.CH: 4,
        Axes.ZPLANE: 28,
    }

    aux_images_dimensions: Mapping[str, Mapping[Union[str, Axes], int]] = {
        "nuclei": {
            Axes.ROUND: 1,
            Axes.CH: 1,
            Axes.ZPLANE: 1,
        },
        "nissl_channels_0_0": {
            Axes.ROUND: 1,
            Axes.CH: 1,
            Axes.ZPLANE: 1,
        },
        "nissl_channels_0_1": {
            Axes.ROUND: 1,
            Axes.CH: 1,
            Axes.ZPLANE: 1,
        },
        "nissl_channels_0_2": {
            Axes.ROUND: 1,
            Axes.CH: 1,
            Axes.ZPLANE: 1,
        },
    }

    write_experiment_json(
        # BUGFIX: use the ~-expanded path so the experiment json lands in the
        # same directory created above and written to by generate_codebook.
        path=abs_output_dir,
        fov_count=1,
        tile_format=ImageFormat.TIFF,
        primary_image_dimensions=primary_image_dimensions,
        aux_name_to_dimensions=aux_images_dimensions,
        primary_tile_fetcher=primary_tile_fetcher,
        aux_tile_fetcher={
            "nuclei": dapi_tile_fetcher,
            "nissl_channels_0_0": nissl_00_tile_fetcher,
            "nissl_channels_0_1": nissl_01_tile_fetcher,
            # BUGFIX: key was misspelled "nissl_hcannels_0_2", so this fetcher
            # never matched the "nissl_channels_0_2" entry declared in
            # aux_images_dimensions above.
            "nissl_channels_0_2": nissl_02_tile_fetcher,
        },
        dimension_order=(Axes.ROUND, Axes.CH, Axes.ZPLANE))

    primary_tile_fetcher.generate_codebook(abs_output_dir)
def format_data(input_dir, output_dir, d):
    """Format the ISS example data set, optionally downloading it first.

    Parameters
    ----------
    input_dir : str
        Directory that holds (or will hold) the ExampleInSituSequencing data.
    output_dir : str
        Directory the SpaceTx-formatted experiment and codebook are written to.
    d : bool
        When truthy, download the example zip into input_dir before formatting.
    """
    if not input_dir.endswith("/"):
        input_dir += "/"
    if not output_dir.endswith("/"):
        output_dir += "/"

    if d:
        url = "http://d1zymp9ayga15t.cloudfront.net/content/Examplezips/ExampleInSituSequencing.zip"
        download(input_dir, url)
        input_dir += "ExampleInSituSequencing/"
        print("Data downloaded to: {}".format(input_dir))
    else:
        input_dir += "ExampleInSituSequencing/"
        print("Using data in : {}".format(input_dir))

    def add_codebook(experiment_json_doc):
        experiment_json_doc['codebook'] = "codebook.json"
        return experiment_json_doc

    # the magic numbers here are just for the ISS example data set.
    write_experiment_json(
        output_dir,
        1,
        ImageFormat.TIFF,
        primary_image_dimensions={Axes.ROUND: 4, Axes.CH: 4, Axes.ZPLANE: 1},
        aux_name_to_dimensions={
            'nuclei': {Axes.ROUND: 1, Axes.CH: 1, Axes.ZPLANE: 1},
            'dots': {Axes.ROUND: 1, Axes.CH: 1, Axes.ZPLANE: 1},
        },
        primary_tile_fetcher=ISSPrimaryTileFetcher(input_dir),
        aux_tile_fetcher={
            'nuclei': ISSAuxTileFetcher(os.path.join(input_dir, "DO", "c1.TIF")),
            'dots': ISSAuxTileFetcher(os.path.join(input_dir, "DO", "c2.TIF")),
        },
        postprocess_func=add_codebook,
        default_shape=SHAPE
    )

    def _spot(round_label, channel):
        # One "on" entry of a barcode: the given round and channel.
        return {
            Axes.ROUND.value: round_label,
            Axes.CH.value: channel,
            Features.CODE_VALUE: 1,
        }

    codebook_array = [
        {
            Features.CODEWORD: [_spot(0, 3), _spot(1, 3), _spot(2, 1), _spot(3, 2)],
            Features.TARGET: "ACTB_human",
        },
        {
            Features.CODEWORD: [_spot(0, 3), _spot(1, 1), _spot(2, 1), _spot(3, 2)],
            Features.TARGET: "ACTB_mouse",
        },
    ]
    codebook = Codebook.from_code_array(codebook_array)
    codebook.to_json(os.path.join(output_dir, "codebook.json"))
# Split the stitched z-stack into one 2-D TIFF per z-plane.
# NOTE(review): `stitched_img`, `BASE_NAME`, `channel_num`, `TX_ORIGINAL_DIR`,
# `CHANNEL_NUM`, `FOV_NUM`, `TX_FORMATTED_DIR` and `start_time` are defined
# earlier in the script, outside this excerpt.
Z_NUM = stitched_img.shape[0]
for z in range(Z_NUM):
    # File names are 1-indexed and zero-padded to three digits (Z001, Z002, ...).
    Z_FILE_NAME = f"{BASE_NAME}_CH{channel_num}_Z{z+1:03}.tif"
    Z_FILE = os.path.join(TX_ORIGINAL_DIR, Z_FILE_NAME)
    print(Z_FILE)
    io.imsave(Z_FILE, stitched_img[z, :, :])
print("Time to read and split image:" + str(time.time() - start_time))

# Create spacetx format for starfish
primary_image_dimensions: Mapping[Union[str, Axes], int] = {
    Axes.ROUND: 1,  # only change this if processing rounds together, otherwise, keep at 1
    Axes.CH: CHANNEL_NUM,
    Axes.ZPLANE: Z_NUM,
}
start_time = time.time()
write_experiment_json(
    path=TX_FORMATTED_DIR,
    fov_count=FOV_NUM,
    tile_format=ImageFormat.TIFF,
    primary_image_dimensions=primary_image_dimensions,
    primary_tile_fetcher=ImageTileFetcher(TX_ORIGINAL_DIR),
    aux_name_to_dimensions={},
    dimension_order=(Axes.ROUND, Axes.CH, Axes.ZPLANE))
print("Time to convert all channels to spaceTx:" + str(time.time() - start_time))
def cli(
    input_dir: str,
    output_dir: str,
    file_format: str,
    file_vars: list,
    cache_read_order: List[str],
    counts: dict,
    aux_names: List[str] = None,
    aux_file_formats: List[str] = None,
    aux_file_vars: List[List[str]] = None,
    aux_cache_read_order: List[str] = None,
    aux_channel_count: List[int] = None,
    aux_channel_slope: List[float] = None,
    aux_channel_intercept: List[int] = None,
    locs: List[Mapping[Axes, float]] = None,
    shape: Mapping[Axes, int] = None,
    voxel: Mapping[Axes, float] = None,
) -> int:
    """CLI entrypoint for spaceTx format construction for SeqFISH data

    Parameters
    ----------
    input_dir : str
        Directory containing folders for fovs.
    output_dir : str
        Directory containing output files. Will be created if it does not exist.
    file_format: str
        String format for individual image files of primary view. Appended to
        input_dir. Each "{}" within this string will be replaced the
        tile-specific values, as specified in the order of "file_vars"
    file_vars: list
        Variables to insert in file_format. The following values are accepted:
            - channel
            - offset_channel (channel + channel_offset)
            - round
            - offset_round (round + round_offset)
            - fov
            - offset_fov (fov + fov_offset)
            - zplane
            - offset_zplane (zplane + zplane_offset)
    cache_read_order: list
        Description of the order of the axes of the images. Each item in the
        list is one dimension in the image.
    counts: dict
        Dict with the counts for each dimension of the data. Expects values
        that correspond to keys of ["rounds","channels","zplanes","fovs"]
    aux_names: list
        A list containing the names of any auxilliary tile views.
    aux_file_formats: list
        The same as file_format, but for each individual aux view. Items
        within each list entry are semicolon (;) delimited.
    aux_file_vars: list
        The same as file_vars, but for each individual aux view. Items within
        each list entry are semicolon (;) delimited.
    aux_cache_read_order: list
        The same as cache_read_order, but for each individual aux view. Items
        within each list entry are semicolon (;) delimited.
    aux_channel_count: list
        The total number of channels per aux view.
    aux_channel_slope: list
        The slope for converting 0-indexed channel IDs to the channel ID
        within the image.
    aux_channel_intercept: list
        The intercept for converting 0-index channel IDs to the channel ID
        within the image.
    locs: List[Mapping[Axes, float]]
        Each list item refers to the fov of the same index. The start location
        of the image, mapped to the corresponding Axes object (X, Y, or ZPLANE)
    shape: Mapping[Axes, int]
        The offset for the size of the image, mapped to the corresponding Axes
        object (X, Y, ZPLANE)
    voxel: Mapping[Axes, float]
        The size of each image, mapped to the corresponding Axes object
        (X, Y, ZPLANE)

    Returns
    -------
    int :
        Returns 0 if successful
    """
    # BUGFIX: the aux_* parameters previously used mutable `[]` defaults, which
    # are shared across calls; normalize None -> fresh empty list instead.
    aux_names = [] if aux_names is None else aux_names
    aux_file_formats = [] if aux_file_formats is None else aux_file_formats
    aux_file_vars = [] if aux_file_vars is None else aux_file_vars
    aux_cache_read_order = [] if aux_cache_read_order is None else aux_cache_read_order
    aux_channel_count = [] if aux_channel_count is None else aux_channel_count
    aux_channel_slope = [] if aux_channel_slope is None else aux_channel_slope
    aux_channel_intercept = [] if aux_channel_intercept is None else aux_channel_intercept

    def _format_cache_read_order(items):
        # Translate axis labels to starfish Axes; unrecognized labels become
        # the literal string "other", as the fetchers expect.
        formatted = []
        for item in items:
            if item == "Z":
                formatted.append(Axes.ZPLANE)
            elif item == "CH":
                formatted.append(Axes.CH)
            else:
                formatted.append("other")
        return formatted

    t0 = time()
    os.makedirs(output_dir, exist_ok=True)

    # All progress output goes to a timestamped log file in output_dir.
    reportFile = os.path.join(
        output_dir, datetime.now().strftime("%Y%m%d_%H%M_TXconversion.log"))
    sys.stdout = open(reportFile, "w")
    try:
        image_dimensions: Mapping[Union[str, Axes], int] = {
            Axes.ROUND: counts["rounds"],
            Axes.CH: counts["channels"],
            Axes.ZPLANE: counts["zplanes"],
        }

        primary_tile_fetcher = PrimaryTileFetcher(
            os.path.expanduser(input_dir),
            file_format,
            file_vars,
            _format_cache_read_order(cache_read_order),
            counts["zplane_offset"],
            counts["fov_offset"],
            counts["round_offset"],
            counts["channel_offset"],
            locs,
            shape,
            voxel,
        )

        # Build dimensions and a fetcher for each auxiliary view; the per-view
        # lists are parallel to aux_names.
        aux_name_to_dimensions = {}
        aux_tile_fetcher = {}
        for i, name in enumerate(aux_names):
            aux_name_to_dimensions[name] = {
                Axes.ROUND: counts["rounds"],
                Axes.CH: int(aux_channel_count[i]),
                Axes.ZPLANE: counts["zplanes"],
            }
            aux_tile_fetcher[name] = AuxTileFetcher(
                os.path.expanduser(input_dir),
                aux_file_formats[i],
                aux_file_vars[i],
                _format_cache_read_order(aux_cache_read_order[i].split(";")),
                aux_channel_slope[i],
                aux_channel_intercept[i],
                counts["zplane_offset"],
                counts["fov_offset"],
                counts["round_offset"],
                counts["channel_offset"],
                locs,
                shape,
                voxel,
            )

        t1 = time()
        print("Elapsed time to make experiment", t1 - t0)

        write_experiment_json(
            path=output_dir,
            fov_count=counts["fovs"],
            aux_tile_fetcher=aux_tile_fetcher,
            primary_tile_fetcher=primary_tile_fetcher,
            primary_image_dimensions=image_dimensions,
            aux_name_to_dimensions=aux_name_to_dimensions,
            tile_format=ImageFormat.TIFF,
            dimension_order=(Axes.ROUND, Axes.CH, Axes.ZPLANE),
        )

        # The placeholder codebook emitted by write_experiment_json is unused.
        os.remove(os.path.join(output_dir, "codebook.json"))

        t2 = time()
        print("Elapsed time for .json manipulation", t2 - t1)
        print("Operation complete, total elapsed time", t2 - t0)
        return 0
    finally:
        # BUGFIX: close the log file and restore stdout even on error;
        # previously the file handle leaked and stdout stayed redirected if an
        # exception was raised.
        sys.stdout.close()
        sys.stdout = sys.__stdout__
Axes.CH: 1, Axes.ZPLANE: 3, } aux_images_dimensions: Mapping[str, Mapping[Union[str, Axes], int]] = { "nuclei": { Axes.ROUND: 2, Axes.CH: 1, Axes.ZPLANE: 3, }, } write_experiment_json( path=outputdir.name, fov_count=2, tile_format=ImageFormat.TIFF, primary_image_dimensions=primary_image_dimensions, aux_name_to_dimensions=aux_images_dimensions, primary_tile_fetcher=primary_tile_fetcher, aux_tile_fetcher={"nuclei": nuclei_tile_fetcher}, dimension_order=(Axes.ROUND, Axes.CH, Axes.ZPLANE) ) ################################################################################################### # Don't forget to replace the fake codebook.json # ---------------------------------------------- # There are no starfish tools for creating a codebook. You can write the JSON manually or write a # script to do it for you. Be sure the format matches the examples in # :ref:`SpaceTx Format<sptx_codebook_format>`. # this is the placeholder codebook.json with open(os.path.join(outputdir.name, "codebook.json"), "r") as fh: print(fh.read())
def format_data(input_dir, output_dir, root_dir_name, nameBeforeDot, fileType, seriesName,
                num_zplanes):
    """Format a single RNAScope field of view into SpaceTx format.

    Writes a one-FoV experiment with a single-round, single-channel primary
    view plus 'nuclei', 'channel2' and 'dots' auxiliary views, all sharing the
    same z-stack depth (num_zplanes). The codebook is expected to be provided
    separately as codebook.json.
    """

    def add_codebook(experiment_json_doc):
        # Point the experiment at an externally provided codebook file.
        experiment_json_doc['codebook'] = "codebook.json"
        return experiment_json_doc

    primary_image_dimensions = {
        Axes.ROUND: 1,
        Axes.CH: 1,
        Axes.ZPLANE: num_zplanes,
    }
    # Every auxiliary view mirrors the primary dimensions.
    aux_name_to_dimensions = {
        'nuclei': {
            Axes.ROUND: 1,
            Axes.CH: 1,
            Axes.ZPLANE: num_zplanes,
        },
        'channel2': {
            Axes.ROUND: 1,
            Axes.CH: 1,
            Axes.ZPLANE: num_zplanes,
        },
        'dots': {
            Axes.ROUND: 1,
            Axes.CH: 1,
            Axes.ZPLANE: num_zplanes,
        }
    }
    write_experiment_json(
        path=output_dir,
        fov_count=1,
        tile_format=ImageFormat.TIFF,
        primary_image_dimensions=primary_image_dimensions,
        aux_name_to_dimensions=aux_name_to_dimensions,
        primary_tile_fetcher=RNAScopePrimaryTileFetcher(input_dir, root_dir_name, nameBeforeDot,
                                                        fileType, seriesName, num_zplanes),
        aux_tile_fetcher={
            'nuclei': RNAScopeAuxTileFetcher(input_dir, 'nuclei', root_dir_name, nameBeforeDot,
                                             fileType, seriesName, num_zplanes),
            'channel2': RNAScopeAuxTileFetcher(input_dir, 'channel2', root_dir_name, nameBeforeDot,
                                               fileType, seriesName, num_zplanes),
            'dots': RNAScopeAuxTileFetcher(input_dir, 'dots', root_dir_name, nameBeforeDot,
                                           fileType, seriesName, num_zplanes),
        },
        postprocess_func=add_codebook,
        # NOTE(review): other formatters in this codebase pass default_shape as a
        # Mapping of Axes (e.g. {Axes.Y: ..., Axes.X: ...}); confirm a plain
        # tuple is accepted by write_experiment_json here.
        default_shape=(1024, 1024)
    )
    # mappings_array = [
    #     {
    #         "codeword": [
    #             {"r": 0, "c": 0, "v": 1},
    #         ],
    #         "target": "GFP"
    #     },
    # ]
    # codebook = {
    #     "version": "0.0.0",
    #     "mappings": mappings_array
    # }
    # codebook = Codebook.from_code_array(codebook)
    # codebook_json_filename = "codebook.json"
    # codebook.to_json(os.path.join(output_dir, codebook_json_filename))

    # At some point, somebody might need to make a more complex codebook (with all the
    # target genes, called Features.TARGET here, and what round and channel they're in).
    # When that time comes, model your codebook according to below, based on what ROUND
    # and CH (channel) your target gene was imaged in
    """