Code Example #1
File: starfish.py  Project: vipulsinghal02/starfish
def build_parser():
    parser = argparse.ArgumentParser()

    parser.add_argument("--profile",
                        action="store_true",
                        help="enable profiling")
    parser.add_argument("--noop",
                        help=argparse.SUPPRESS,
                        dest="starfish_command",
                        action="store_const",
                        const=noop)

    subparsers = parser.add_subparsers(dest="starfish_command")

    Registration._add_to_parser(subparsers)
    Filter._add_to_parser(subparsers)
    SpotFinder.add_to_parser(subparsers)
    Segmentation._add_to_parser(subparsers)
    TargetAssignment.add_to_parser(subparsers)
    Decoder.add_to_parser(subparsers)

    show_group = subparsers.add_parser("show")
    show_group.add_argument("in_json", type=FsExistsType())
    show_group.add_argument("--sz", default=10, type=int, help="Figure size")
    show_group.set_defaults(starfish_command=show)

    build_group = subparsers.add_parser("build")
    BuilderCli.add_to_parser(build_group)

    validate_group = subparsers.add_parser("validate")
    ValidateCli.add_to_parser(validate_group)

    return parser
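A brief usage sketch (not part of the original file): build the parser and parse a hypothetical `show` invocation. The JSON path is illustrative, and `FsExistsType` would require it to exist on disk:

parser = build_parser()
# "experiment.json" is a hypothetical path; FsExistsType checks that it exists
args = parser.parse_args(["show", "experiment.json", "--sz", "12"])
# set_defaults above routed the `show` handler into args.starfish_command
assert args.starfish_command is show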
Code Example #2
File: iss_pipeline.py  Project: ttung/starfish
def iss_pipeline(fov, codebook):
    primary_image = fov.get_image(starfish.FieldOfView.PRIMARY_IMAGES)

    # register the raw image
    learn_translation = LearnTransform.Translation(
        reference_stack=fov.get_image('dots'), axes=Axes.ROUND, upsampling=100)
    transforms_list = learn_translation.run(
        primary_image.max_proj(Axes.CH, Axes.ZPLANE))
    warp = ApplyTransform.Warp()
    registered = warp.run(primary_image,
                          transforms_list=transforms_list,
                          in_place=False,
                          verbose=True)

    # filter raw data
    masking_radius = 15
    filt = Filter.WhiteTophat(masking_radius, is_volume=False)
    filtered = filt.run(registered, verbose=True, in_place=False)

    # detect spots using laplacian of gaussians approach
    p = SpotFinder.BlobDetector(
        min_sigma=1,
        max_sigma=10,
        num_sigma=30,
        threshold=0.01,
        measurement_type='mean',
    )

    intensities = p.run(filtered,
                        blobs_image=fov.get_image('dots'),
                        blobs_axes=(Axes.ROUND, Axes.ZPLANE))

    # decode the pixel traces using the codebook
    decoded = codebook.decode_per_round_max(intensities)

    # segment cells
    seg = Segmentation.Watershed(
        nuclei_threshold=.16,
        input_threshold=.22,
        min_distance=57,
    )
    label_image = seg.run(primary_image, fov.get_image('dots'))

    # assign spots to cells
    ta = TargetAssignment.Label()
    assigned = ta.run(label_image, decoded)

    return assigned, label_image
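A hedged sketch of driving this pipeline over a whole experiment. `starfish.data.ISS` is the demo-data loader used in the starfish tutorials; iterating fields of view with `.items()` is an assumption about the Experiment mapping interface:

import starfish

# load a small demonstration ISS experiment (assumption: demo loader available)
experiment = starfish.data.ISS(use_test_data=True)
results = {
    name: iss_pipeline(fov, experiment.codebook)  # -> (assigned spots, label image)
    for name, fov in experiment.items()
}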
Code Example #3
def iss_pipeline(fov, codebook):
    primary_image = fov[starfish.FieldOfView.PRIMARY_IMAGES]

    # register the raw images
    registration = Registration.FourierShiftRegistration(
        upsampling=1000,
        reference_stack=fov['dots']
    )
    registered = registration.run(primary_image, in_place=False)

    # filter raw data
    masking_radius = 15
    filt = Filter.WhiteTophat(masking_radius, is_volume=False)
    filtered = filt.run(registered, verbose=True, in_place=False)

    # detect spots using laplacian of gaussians approach
    p = SpotFinder.BlobDetector(
        min_sigma=1,
        max_sigma=10,
        num_sigma=30,
        threshold=0.01,
        measurement_type='mean',
    )

    mp = fov['dots'].max_proj(Indices.ROUND, Indices.Z)
    mp_numpy = mp._squeezed_numpy(Indices.ROUND, Indices.Z)
    intensities = p.run(filtered, blobs_image=mp_numpy)

    # decode the pixel traces using the codebook
    decoded = codebook.decode_per_round_max(intensities)

    # segment cells
    seg = Segmentation.Watershed(
        nuclei_threshold=.16,
        input_threshold=.22,
        min_distance=57,
    )
    label_image = seg.run(primary_image, fov['nuclei'])

    # assign spots to cells
    ta = TargetAssignment.Label()
    assigned = ta.run(label_image, decoded)

    return assigned, label_image
Code Example #4
def iss_pipeline(fov, codebook):
    primary_image = fov.primary_image

    # register the raw images
    registration = Registration.FourierShiftRegistration(
        upsampling=1000,
        reference_stack=fov['dots']
    )
    registered = registration.run(primary_image, in_place=False)

    # filter raw data
    masking_radius = 15
    filt = Filter.WhiteTophat(masking_radius, is_volume=False)
    filtered = filt.run(registered, verbose=True, in_place=False)

    # detect spots using laplacian of gaussians approach
    p = SpotFinder.GaussianSpotDetector(
        min_sigma=1,
        max_sigma=10,
        num_sigma=30,
        threshold=0.01,
        measurement_type='mean',
    )
    blobs_image = fov['dots'].max_proj(Indices.ROUND, Indices.Z)
    intensities = p.run(filtered, blobs_image=blobs_image)

    # decode the pixel traces using the codebook
    decoded = codebook.decode_per_round_max(intensities)

    # segment cells
    seg = Segmentation.Watershed(
        dapi_threshold=.16,
        input_threshold=.22,
        min_distance=57,
    )
    regions = seg.run(primary_image, fov['nuclei'])

    # assign spots to cells
    ta = TargetAssignment.PointInPoly2D()
    assigned = ta.run(decoded, regions)

    return assigned, regions
Code Example #5
#Contiguous pixels that decode to the same gene are called as spots via connected-components labeling. The minimum area of these spots is set by this parameter. The intuition is that pixel vectors that pass the distance and magnitude thresholds but form a spot smaller than this area should probably not be trusted as genes, because the corresponding mRNA transcript would be too small to be real. This parameter can be set based on the microscope resolution and the signal amplification strategy.
#
#### Crop size
#The crop size trims the image by a number of pixels large enough to eliminate the parts of the image that suffer from boundary effects, from both signal acquisition (e.g., FOV overlap) and image processing. Here this value is 40.
#
#Given these three thresholds, the decoder picks, for each pixel vector, the closest code (minimum distance) that satisfies the thresholds above, where the distance is calculated between the code and a normalized intensity vector; spots that are too small are then thrown away.
# EPY: END markdown

# EPY: START code
# TODO this crop should be (x, y) = (40, 40) but it was getting eaten by kwargs
from starfish.spots import SpotFinder
psd = SpotFinder.PixelSpotDetector(codebook=experiment.codebook,
                                   metric='euclidean',
                                   distance_threshold=0.5176,
                                   magnitude_threshold=1.77e-5,
                                   min_area=2,
                                   max_area=np.inf,
                                   norm_order=2,
                                   crop_z=0,
                                   crop_y=0,
                                   crop_x=0)

initial_spot_intensities, prop_results = psd.run(scaled_image)

spot_intensities = initial_spot_intensities.loc[initial_spot_intensities[
    Features.PASSES_THRESHOLDS]]
# EPY: END code
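# EPY: START markdown
#To make the distance computation concrete, here is a hedged numpy sketch of what the decoder does for a single pixel vector. It is illustrative only; the actual implementation lives inside starfish, and the vectors here are made up.
# EPY: END markdown

# EPY: START code
import numpy as np

pixel = np.array([0.0, 0.2, 0.0, 0.4])            # hypothetical intensities across rounds/channels
codes = np.array([[0., 1., 0., 1.],               # two hypothetical barcodes
                  [1., 0., 1., 0.]])

# normalize both (norm_order=2 above), then take the euclidean distance to each code
pixel_n = pixel / np.linalg.norm(pixel)
codes_n = codes / np.linalg.norm(codes, axis=1, keepdims=True)
distances = np.linalg.norm(codes_n - pixel_n, axis=1)
best = distances.argmin()                          # kept only if distances[best] passes the threshold
# EPY: END code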

# EPY: START markdown
### Compare to results from paper
#
#The plot below aggregates gene copy number across single cells in the field of view and compares the results to the published intensities in the MERFISH paper.
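#A hedged sketch of such a comparison, assuming `spot_intensities` from the cell above and a hypothetical pandas Series `published_counts` of per-gene counts transcribed from the paper:
# EPY: END markdown

# EPY: START code
import numpy as np
import pandas as pd

# count decoded spots per gene in this field of view
genes, counts = np.unique(spot_intensities[Features.TARGET].values, return_counts=True)
observed = pd.Series(counts, index=genes)

# `published_counts` is a hypothetical Series indexed by gene name
comparison = pd.DataFrame({"observed": observed, "published": published_counts})
comparison.plot.scatter(x="published", y="observed")
# EPY: END code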
Code Example #6
# I've guessed at these parameters from the allen_smFISH code, but you might want to tweak them a bit.
# As you can see, this function takes a while; it would be great to parallelize it. That's also coming,
# although we haven't figured out where it fits in the priority list.
kwargs = dict(
    spot_diameter=3,  # must be odd integer
    min_mass=0.003,
    max_size=3,  # this is max _radius_
    separation=5,
    noise_size=0.65,  # this is not used because preprocess is False
    preprocess=False,
    percentile=10,  # this is irrelevant when min_mass, spot_diameter, and max_size are set properly
    verbose=True,
    is_volume=True,
)
tlmpf = SpotFinder.TrackpyLocalMaxPeakFinder(**kwargs)
spot_attributes = tlmpf.run(primary_image)
# EPY: END code

# EPY: START code
decoded = experiment.codebook.decode_per_round_max(spot_attributes)
# EPY: END code

# EPY: START markdown
#Count the number of spots detected in each channel
# EPY: END markdown

# EPY: START code
spot_attributes.groupby('c').apply(lambda x: np.sum(x > 0))
# EPY: END code
Code Example #7
# EPY: END markdown

# EPY: START code
from starfish.spots import SpotFinder
import warnings

# parameters to define the allowable gaussian sizes (parameter space)
min_sigma = 1
max_sigma = 10
num_sigma = 30
threshold = 0.01

p = SpotFinder.BlobDetector(
    min_sigma=min_sigma,
    max_sigma=max_sigma,
    num_sigma=num_sigma,
    threshold=threshold,
    measurement_type='mean',
)

# spot detection triggers some numpy warnings
with warnings.catch_warnings():
    warnings.simplefilter("ignore")

    # blobs = dots; define the spots in the dots image, but then find them again in the stack.
    intensities = p.run(registered_image,
                        blobs_image=dots,
                        blobs_axes=(Axes.ROUND, Axes.ZPLANE))
# EPY: END code
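# EPY: START markdown
#A hedged follow-up sketch: the intensities found above would typically be decoded against the experiment's codebook, as in the other examples on this page (this assumes an `experiment` object is in scope).
# EPY: END markdown

# EPY: START code
decoded = experiment.codebook.decode_per_round_max(intensities)
# EPY: END code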

# EPY: START markdown
Code Example #8
File: allen_smFISH.py  Project: xchang1/starfish
# I've guessed at these parameters from the allen_smFISH code, but you might want to tweak them a bit.
# As you can see, this function takes a while; it would be great to parallelize it. That's also coming,
# although we haven't figured out where it fits in the priority list.
kwargs = dict(
    spot_diameter=3,  # must be odd integer
    min_mass=0.003,
    max_size=3,  # this is max _radius_
    separation=5,
    noise_size=0.65,  # this is not used because preprocess is False
    preprocess=False,
    percentile=10,  # this is irrelevant when min_mass, spot_diameter, and max_size are set properly
    verbose=True,
    is_volume=True,
)
lmpf = SpotFinder.LocalMaxPeakFinder(**kwargs)
spot_attributes = lmpf.run(primary_image)
# EPY: END code

# EPY: START code
decoded = experiment.codebook.decode_per_round_max(spot_attributes)
# EPY: END code

# EPY: START markdown
#Count the number of spots detected in each channel
# EPY: END markdown

# EPY: START code
spot_attributes.groupby('c').apply(lambda x: np.sum(x > 0))
# EPY: END code

# EPY: START code
# how much magnitude should a barcode have for it to be considered by decoding? This was set by looking at
# the plot above.
magnitude_threshold = 0.5
# how big do we expect our spots to be (min/max size)? This was set to be equivalent to the parameters
# determined by the Zhang lab.
area_threshold = (5, 30)
# how close, in euclidean space, should the pixel barcode be to the nearest barcode it was called to?
# Here, I set this to a large number so that I can inspect the distribution of decoded distances below.
distance_threshold = 3

psd = SpotFinder.PixelSpotDetector(codebook=exp.codebook,
                                   metric='euclidean',
                                   distance_threshold=distance_threshold,
                                   magnitude_threshold=magnitude_threshold,
                                   min_area=area_threshold[0],
                                   max_area=area_threshold[1])

initial_spot_intensities, results = psd.run(zero_norm_stack)
# EPY: END code

# EPY: START code
spots_df = initial_spot_intensities.to_features_dataframe()
spots_df['area'] = np.pi * spots_df['radius']**2
spots_df = spots_df.loc[spots_df[Features.PASSES_THRESHOLDS]]
spots_df.head()
# EPY: END code

# EPY: START markdown
#### QC Plots
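#One informative QC plot inspects the distribution of decoded distances, which is why `distance_threshold` was left loose above. A minimal sketch, assuming matplotlib is available and that the detector recorded a `distance` column in the features dataframe:
# EPY: END markdown

# EPY: START code
import matplotlib.pyplot as plt

# histogram of the distance from each pixel barcode to its nearest codebook entry
plt.hist(spots_df["distance"], bins=50)
plt.xlabel("distance to nearest barcode")
plt.ylabel("number of decoded spots")
plt.show()
# EPY: END code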