Пример #1
0
def test_max_projection_preserves_coordinates():
    """Verify that a "max" reduce leaves data and physical coordinates intact.

    Uses the ISS test data (1 round, 1 ch, 1 zplane), where the projection is a
    no-op on the data, then builds a synthetic stack with known physical
    coordinates and checks the x/y coordinate ranges survive the projection
    while z collapses to the average of its extent.
    """
    e = data.ISS(use_test_data=True)
    nuclei = e.fov().get_image('nuclei')
    nuclei_proj = nuclei.reduce((Axes.CH, Axes.ROUND, Axes.ZPLANE), "max")
    # Since this data already has only 1 round, 1 ch, 1 zplane
    # let's just assert that the max_proj operation didn't change anything
    assert nuclei.xarray.equals(nuclei_proj.xarray)

    stack_shape = OrderedDict([(Axes.ROUND, 3), (Axes.CH, 2), (Axes.ZPLANE, 3),
                               (Axes.Y, 10), (Axes.X, 10)])

    # Create stack with coordinates, verify coords unaffected by max_proj
    physical_coords = OrderedDict([
        (PhysicalCoordinateTypes.X_MIN, X_COORDS[0]),
        (PhysicalCoordinateTypes.X_MAX, X_COORDS[1]),
        (PhysicalCoordinateTypes.Y_MIN, Y_COORDS[0]),
        (PhysicalCoordinateTypes.Y_MAX, Y_COORDS[1]),
        (PhysicalCoordinateTypes.Z_MIN, Z_COORDS[0]),
        (PhysicalCoordinateTypes.Z_MAX, Z_COORDS[1])
    ])

    stack = imagestack_with_coords_factory(stack_shape, physical_coords)

    stack_proj = stack.reduce((Axes.CH, Axes.ROUND, Axes.ZPLANE), "max")
    # x/y ranges are unchanged; z is expected to collapse to its mean.
    expected_z = np.average(Z_COORDS)
    verify_physical_coordinates(stack_proj, X_COORDS, Y_COORDS, expected_z)
Пример #2
0
def test_learn_transforms_throws_error():
    """Translation.run must reject a stack that still has axes other than the
    registration axis (Axes.ROUND) with length > 1.

    The original version silently passed when no exception was raised; the
    ``else`` branch now fails the test explicitly in that case.
    """
    exp = data.ISS(use_test_data=True)
    stack = exp.fov().get_image('primary')
    reference_stack = exp.fov().get_image('dots')
    translation = Translation(reference_stack=reference_stack, axes=Axes.ROUND)
    try:
        translation.run(stack)
    except ValueError as e:
        # Assert value error is thrown when the stack is not max projected across all other axes.
        assert e.args[0] == "Only axes: r can have a length > 1, please use the MaxProj filter."
    else:
        raise AssertionError(
            "Expected Translation.run to raise ValueError for an unprojected stack")
Пример #3
0
def test_learn_transforms_translation():
    """Learn one translation transform per round and check the learned shifts.

    Registers the ISS `primary` rounds against the `dots` reference image and
    compares each learned translation with the precomputed ISS_SHIFTS.
    """
    exp = data.ISS(use_test_data=True)
    stack = exp.fov().get_image('primary')
    reference_stack = exp.fov().get_image('dots')
    translation = Translation(reference_stack=reference_stack, axes=Axes.ROUND)
    # Max-project across CH/Z so only ROUND has length > 1.
    # (reduce(..., "max") is the API used elsewhere in this file in place of
    # the older max_proj helper.)
    stack = stack.reduce((Axes.CH, Axes.ZPLANE), "max")
    transform_list = translation.run(stack)
    # assert there's a transformation object for each round
    assert len(transform_list.transforms) == stack.num_rounds
    for (_, _, transform), shift in zip(transform_list.transforms, ISS_SHIFTS):
        # assert that each TransformationObject has the correct translation shift
        assert np.array_equal(transform.translation, shift)
Пример #4
0
def test_calculate_translation_transforms_and_apply():
    """Learn per-round translations, warp the full stack, and check a known patch.

    The transforms are learned on a CH/Z max projection, then applied to the
    unprojected stack; a fixed 10x10 window of the warped data is compared
    against precomputed expected values.
    """
    exp = data.ISS(use_test_data=True)
    stack = exp.fov().get_image('primary')
    reference_stack = exp.fov().get_image('dots')
    translation = Translation(reference_stack=reference_stack, axes=Axes.ROUND)
    # Max-project across CH/Z before learning transforms.
    # (reduce(..., "max") is the API used elsewhere in this file in place of
    # the older max_proj helper.)
    mp = stack.reduce((Axes.CH, Axes.ZPLANE), "max")
    transform_list = translation.run(mp)
    apply_transform = Warp()
    warped_stack = apply_transform.run(stack=stack,
                                       transforms_list=transform_list)
    # Compare indices [2, 2, 0, 40:50, 40:50] of the warped xarray against the
    # precomputed reference values.
    assert np.allclose(expected_registered_values,
                       warped_stack.xarray[2, 2, 0, 40:50, 40:50])
Пример #5
0
def test_export_import_transforms_object():
    """Round-trip a TransformsList through JSON and verify it survives intact.

    After export/import, each entry must keep its SIMILARITY transform type and
    the translation shifts learned from the ISS data.
    """
    exp = data.ISS(use_test_data=True)
    stack = exp.fov().get_image('primary')
    reference_stack = exp.fov().get_image('dots')
    translation = Translation(reference_stack=reference_stack, axes=Axes.ROUND)
    # Max-project across CH/Z so only ROUND has length > 1.
    # (reduce(..., "max") is the API used elsewhere in this file in place of
    # the older max_proj helper.)
    stack = stack.reduce((Axes.CH, Axes.ZPLANE), "max")
    transform_list = translation.run(stack)
    # mkstemp() leaked its open file descriptor; NamedTemporaryFile closes the
    # handle on exit while keeping the file on disk for the round-trip below.
    with tempfile.NamedTemporaryFile(suffix=".json", delete=False) as tf:
        filename = tf.name
    # save to tempfile and import
    transform_list.to_json(filename)
    imported = TransformsList.from_json(filename)
    for (_, transform_type, transform), shift in zip(imported.transforms, ISS_SHIFTS):
        # assert that each TransformationObject has the correct translation shift
        assert transform_type == TransformType.SIMILARITY
        assert np.array_equal(transform.translation, shift)
Пример #6
0
smFISH, and RNAscope) can be decoded with :py:class:`.PerRoundMaxChannel` by setting
``trace_building_strategy=TraceBuildingStrategies.SEQUENTIAL``. One hot multiplexed assays (e.g.
in situ sequencing, seqFISH, and STARmap) are termed 'one hot' because every round has exactly one
hot channel. They can be decoded with :py:class:`.PerRoundMaxChannel` by setting
``trace_building_strategy=TraceBuildingStrategies.EXACT_MATCH`` or
``trace_building_strategy=TraceBuildingStrategies.NEAREST_NEIGHBORS``. The example below
demonstrates the recommended method for decoding one hot multiplexed
data using :py:class:`.PerRoundMaxChannel`.
"""

# Load in situ sequencing experiment and find spots
from starfish.image import ApplyTransform, LearnTransform, Filter
from starfish.types import Axes, TraceBuildingStrategies
from starfish import data, FieldOfView
from starfish.spots import FindSpots
experiment = data.ISS()
fov = experiment.fov()
imgs = fov.get_image(FieldOfView.PRIMARY_IMAGES)  # primary images
dots = fov.get_image("dots")  # reference round for image registration

# filter raw data
masking_radius = 15
filt = Filter.WhiteTophat(masking_radius, is_volume=False)
filt.run(imgs, in_place=True)
filt.run(dots, in_place=True)

# register primary images to reference round
learn_translation = LearnTransform.Translation(reference_stack=dots,
                                               axes=Axes.ROUND,
                                               upsampling=1000)
transforms_list = learn_translation.run(
Пример #7
0
# EPY: START code
matplotlib.rcParams["figure.dpi"] = 150
# EPY: END code

# EPY: START markdown
### Load Data into Starfish from the Cloud
#
#The primary data from one field of view correspond to 16 images from 4 hybridization rounds (r), 4 color channels (c), and one z plane (z). Each image is 1044 x 1390 (y, x). These data arise from human breast tissue. O(10) transcripts are barcoded for subsequent spatial resolution. Average pixel intensity values for one 'spot' in the image, across all rounds and channels, can be decoded into the nearest barcode, thus resolving each pixel into a particular gene.
# EPY: END markdown

# EPY: START code
use_test_data = os.getenv("USE_TEST_DATA") is not None

# An experiment contains a codebook, primary images, and auxiliary images
experiment = data.ISS(use_test_data=use_test_data)
pp = pprint.PrettyPrinter(indent=2)
pp.pprint(experiment._src_doc)
# EPY: END code

# EPY: START code
fov = experiment.fov()

# note the structure of the 5D tensor containing the raw imaging data
imgs = fov.get_image(FieldOfView.PRIMARY_IMAGES)
print(imgs)
# EPY: END code

# EPY: START markdown
### Visualize Codebook
#
Пример #8
0
pipeline.

3. **Use starfish.** Starfish exposes simple Fourier-domain translational registration to correct
some common registration issues. It also supports the :py:class:`.Warp` functionality to apply any
pre-learned affine transformation.

This tutorial will cover how to use :py:class:`.LearnTransform` and :py:class:`.ApplyTransform` to
register `primary images` in a starfish pipeline. This in situ sequencing (ISS) example includes a
`dots` image, which is one image with all the RNA spots, that is used as the `reference_image` for
registration. The maximum intensity projection of any round from `primary images` can also be
used in lieu of a `dots` image.
"""

from starfish import data

# Load the ISS test experiment: `primary` holds the per-round images to be
# registered; `dots` is the single all-spots image used as the reference.
experiment = data.ISS(use_test_data=True)
imgs = experiment["fov_001"].get_image('primary')
dots = experiment["fov_001"].get_image('dots')

###################################################################################################
# The images used by :py:class:`.LearnTransform` depend on the data available. For example,
# if there are common landmarks present in every `primary image`, such as fiducial markers (e.g.
# fixed fluorescence beads) or autofluorescence cellular structures, then those images can be used
# to learn the transforms. In this example where all RNA spots are present in each round,
# the images from each round are projected and then the RNA spots are used as the landmarks.

from starfish.types import Axes

# Max-project over the channel axis so each round is represented by one image.
projected_imgs = imgs.reduce({Axes.CH}, func="max")
print(projected_imgs)