def build_parser():
    """Build the top-level starfish CLI argument parser.

    Adds the global ``--profile`` flag, a hidden ``--noop`` command, one
    sub-command per pipeline stage, and the ``show``/``build``/``validate``
    utility commands, then returns the configured parser.
    """
    top = argparse.ArgumentParser()
    top.add_argument("--profile", action="store_true", help="enable profiling")
    # Hidden escape hatch: stores the `noop` callable as the command so the
    # CLI can run without selecting a real sub-command.
    top.add_argument(
        "--noop",
        help=argparse.SUPPRESS,
        dest="starfish_command",
        action="store_const",
        const=noop,
    )

    commands = top.add_subparsers(dest="starfish_command")

    # Each pipeline stage registers its own sub-parser.
    # NOTE(review): spelling mixes `_add_to_parser` and `add_to_parser`
    # across stage classes — presumably both exist upstream; verify.
    Registration._add_to_parser(commands)
    Filter._add_to_parser(commands)
    SpotFinder.add_to_parser(commands)
    Segmentation._add_to_parser(commands)
    TargetAssignment.add_to_parser(commands)
    Decoder.add_to_parser(commands)

    # `show` renders an experiment json; size is in figure units.
    show_cmd = commands.add_parser("show")
    show_cmd.add_argument("in_json", type=FsExistsType())
    show_cmd.add_argument("--sz", default=10, type=int, help="Figure size")
    show_cmd.set_defaults(starfish_command=show)

    build_cmd = commands.add_parser("build")
    BuilderCli.add_to_parser(build_cmd)

    validate_cmd = commands.add_parser("validate")
    ValidateCli.add_to_parser(validate_cmd)

    return top
def iss_pipeline(fov, codebook):
    """Run the in-situ-sequencing pipeline on a single field of view.

    Registers the primary images to the 'dots' reference, filters them,
    detects and decodes spots, segments cells, and assigns decoded spots
    to cells.

    Parameters
    ----------
    fov : field-of-view object exposing ``get_image``.
    codebook : codebook providing ``decode_per_round_max``.

    Returns
    -------
    tuple of (assigned decoded intensities, segmentation label image).
    """
    stack = fov.get_image(starfish.FieldOfView.PRIMARY_IMAGES)

    # Learn a per-round translation against the 'dots' reference image,
    # using a channel/z max projection as the moving image.
    translator = LearnTransform.Translation(
        reference_stack=fov.get_image('dots'),
        axes=Axes.ROUND,
        upsampling=100,
    )
    transforms = translator.run(stack.max_proj(Axes.CH, Axes.ZPLANE))
    aligned = ApplyTransform.Warp().run(
        stack, transforms_list=transforms, in_place=False, verbose=True,
    )

    # White top-hat filtering suppresses background larger than the radius.
    tophat = Filter.WhiteTophat(15, is_volume=False)
    cleaned = tophat.run(aligned, verbose=True, in_place=False)

    # Laplacian-of-Gaussians blob detection for spot calling.
    detector = SpotFinder.BlobDetector(
        min_sigma=1,
        max_sigma=10,
        num_sigma=30,
        threshold=0.01,
        measurement_type='mean',
    )
    spots = detector.run(
        cleaned,
        blobs_image=fov.get_image('dots'),
        blobs_axes=(Axes.ROUND, Axes.ZPLANE),
    )

    # Decode each spot's per-round trace against the codebook.
    decoded = codebook.decode_per_round_max(spots)

    # Watershed segmentation of cells.
    watershed = Segmentation.Watershed(
        nuclei_threshold=.16,
        input_threshold=.22,
        min_distance=57,
    )
    labels = watershed.run(stack, fov.get_image('dots'))

    # Assign each decoded spot to the cell label it falls inside.
    assigned = TargetAssignment.Label().run(labels, decoded)
    return assigned, labels
def iss_pipeline(fov, codebook):
    """Run the in-situ-sequencing pipeline on one field of view.

    Registers the primary images by Fourier shift against the 'dots'
    reference, filters, detects and decodes spots, segments cells with
    the 'nuclei' image, and assigns decoded spots to cells.

    Returns a tuple of (assigned decoded intensities, label image).
    """
    stack = fov[starfish.FieldOfView.PRIMARY_IMAGES]

    # Fourier-shift registration against the 'dots' reference image.
    shifter = Registration.FourierShiftRegistration(
        upsampling=1000,
        reference_stack=fov['dots'],
    )
    aligned = shifter.run(stack, in_place=False)

    # White top-hat filtering removes background above the masking radius.
    tophat = Filter.WhiteTophat(15, is_volume=False)
    cleaned = tophat.run(aligned, verbose=True, in_place=False)

    # Laplacian-of-Gaussians blob detection for spot calling.
    detector = SpotFinder.BlobDetector(
        min_sigma=1,
        max_sigma=10,
        num_sigma=30,
        threshold=0.01,
        measurement_type='mean',
    )
    # Project the dots image over round/z and pull out the raw array.
    # NOTE(review): `_squeezed_numpy` is a private API — confirm upstream.
    dots_projection = fov['dots'].max_proj(Indices.ROUND, Indices.Z)
    dots_array = dots_projection._squeezed_numpy(Indices.ROUND, Indices.Z)
    spots = detector.run(cleaned, blobs_image=dots_array)

    # Decode each spot's per-round trace against the codebook.
    decoded = codebook.decode_per_round_max(spots)

    # Watershed segmentation seeded by the nuclei image.
    watershed = Segmentation.Watershed(
        nuclei_threshold=.16,
        input_threshold=.22,
        min_distance=57,
    )
    labels = watershed.run(stack, fov['nuclei'])

    # Assign each decoded spot to the cell label it falls inside.
    assigned = TargetAssignment.Label().run(labels, decoded)
    return assigned, labels
def iss_pipeline(fov, codebook):
    """Run the in-situ-sequencing pipeline on one field of view.

    Fourier-shift registers the primary images to the 'dots' reference,
    filters, detects spots with a Gaussian detector, decodes them,
    segments cells from the 'nuclei' image, and assigns spots to cell
    regions via point-in-polygon.

    Returns a tuple of (assigned decoded intensities, cell regions).
    """
    stack = fov.primary_image

    # Fourier-shift registration against the 'dots' reference image.
    shifter = Registration.FourierShiftRegistration(
        upsampling=1000,
        reference_stack=fov['dots'],
    )
    aligned = shifter.run(stack, in_place=False)

    # White top-hat filtering removes background above the masking radius.
    tophat = Filter.WhiteTophat(15, is_volume=False)
    cleaned = tophat.run(aligned, verbose=True, in_place=False)

    # Gaussian (Laplacian-of-Gaussians style) spot detection.
    detector = SpotFinder.GaussianSpotDetector(
        min_sigma=1,
        max_sigma=10,
        num_sigma=30,
        threshold=0.01,
        measurement_type='mean',
    )
    reference = fov['dots'].max_proj(Indices.ROUND, Indices.Z)
    spots = detector.run(cleaned, blobs_image=reference)

    # Decode each spot's per-round trace against the codebook.
    decoded = codebook.decode_per_round_max(spots)

    # Watershed segmentation seeded by the nuclei (DAPI) image.
    watershed = Segmentation.Watershed(
        dapi_threshold=.16,
        input_threshold=.22,
        min_distance=57,
    )
    cell_regions = watershed.run(stack, fov['nuclei'])

    # Assign decoded spots to cells by point-in-polygon membership.
    assigned = TargetAssignment.PointInPoly2D().run(decoded, cell_regions)
    return assigned, cell_regions
# EPY: START code from starfish.image import Segmentation dapi_thresh = .16 # binary mask for cell (nuclear) locations stain_thresh = .22 # binary mask for overall cells // binarization of stain min_dist = 57 registered_mp = registered_image.max_proj(Axes.CH, Axes.ZPLANE) registered_mp_numpy = registered_mp._squeezed_numpy(Axes.CH, Axes.ZPLANE) stain = np.mean(registered_mp_numpy, axis=0) stain = stain / stain.max() nuclei = nuclei.max_proj(Axes.ROUND, Axes.CH, Axes.ZPLANE) nuclei_numpy = nuclei._squeezed_numpy(Axes.ROUND, Axes.CH, Axes.ZPLANE) seg = Segmentation.Watershed(nuclei_threshold=dapi_thresh, input_threshold=stain_thresh, min_distance=min_dist) label_image = seg.run(registered_image, nuclei) seg.show() # EPY: END code # EPY: START markdown #### Assign spots to cells and create cell x gene count matrix # EPY: END markdown # EPY: START code from starfish.spots import TargetAssignment al = TargetAssignment.Label() labeled = al.run(label_image, decoded) # EPY: END code