Example No. 1
def generic_instationary_filter(input_field: pystencils.Field,
                                output_field: pystencils.Field,
                                stencil,
                                weighting_function,
                                normalize_weights=True):
    """Implements a generic instationary filter.

    The filter weight depends on the current stencil offset, the function value there and
    the central function value at stencil center.

    :param input_field:
    :type input_field: pystencils.Field
    :param output_field:
    :type output_field: pystencils.Field
    :param stencil:
    :param weighting_function: A function that takes current offset, offset function value and
                               stencils center function value
    :param normalize_weights: whether or not to normalize weights to a sum of one
    """

    weights = 0
    weighted_sum = 0
    for s in stencil:
        weight = weighting_function(s, input_field[s], input_field.center())
        weights += weight
        weighted_sum += weight * input_field[s]

    assignments = AssignmentCollection(
        {output_field.center(): weighted_sum / weights if normalize_weights else weighted_sum})

    return assignments
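
A minimal usage sketch for the filter above, assuming generic_instationary_filter is importable in the current namespace; the field names, the 3x3 stencil and the Gaussian range weight are illustrative choices, not part of the original code:

import sympy
import pystencils

src, dst = pystencils.fields('src, dst: float64[2D]')
stencil = [(dx, dy) for dx in (-1, 0, 1) for dy in (-1, 0, 1)]

# bilateral-style weight: penalize values that differ strongly from the center value
def range_weight(offset, value, center_value):
    return sympy.exp(-(value - center_value) ** 2)

assignments = generic_instationary_filter(src, dst, stencil, range_weight)
kernel = pystencils.create_kernel(assignments).compile()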
Example No. 2
def apply_wieners(complex_field: Field, wieners: Field, output_weight_field: Field):
    assert complex_field.index_dimensions == 3
    assert wieners.index_dimensions == 2
    assert output_weight_field.index_dimensions == 1

    assignments = []

    for stack_index in range(complex_field.index_shape[0]):
        # collect the Wiener coefficients per stack, so that each output weight
        # reflects only its own group of patches
        wiener_sum = []
        for patch_index in range(complex_field.index_shape[1]):

            wien = wieners(stack_index, patch_index)

            wiener_sum.append(wien**2)

            assignments.extend(
                pystencils.Assignment(complex_field.center(stack_index, patch_index, i),
                                      complex_field.center(stack_index, patch_index, i) * wien)
                for i in (0, 1)
            )

        assignments.append(pystencils.Assignment(
            output_weight_field.center(stack_index), 1 / sympy.Add(*wiener_sum)
        ))

    return AssignmentCollection(assignments)
Example No. 3
def wiener_filtering(complex_field: Field, output_weight_field: Field, sigma):
    assert complex_field.index_dimensions == 3
    assert output_weight_field.index_dimensions == 1

    assignments = []

    norm_factor = complex_field.index_shape[0] * complex_field.index_shape[1]

    for stack_index in range(complex_field.index_shape[0]):
        # collect the Wiener coefficients per stack, so that each output weight
        # reflects only its own group of patches
        wiener_sum = []
        for patch_index in range(complex_field.index_shape[1]):

            magnitude = sum(complex_field.center(stack_index, patch_index, i) ** 2 for i in (0, 1))
            val = magnitude / norm_factor  # implementations differ on whether to apply norm_factor to val or to wien
            wien = val / (val + sigma * sigma)

            wiener_sum.append(wien**2)

            assignments.extend(
                pystencils.Assignment(complex_field.center(stack_index, patch_index, i),
                                      complex_field.center(stack_index, patch_index, i) * wien)
                for i in (0, 1)
            )

        assignments.append(pystencils.Assignment(
            output_weight_field.center(stack_index), 1 / sympy.Add(*wiener_sum)
        ))

    return AssignmentCollection(assignments)
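
A hedged usage sketch for the Wiener filtering step; the field names, the 16x8 group shape and the noise level sigma are illustrative assumptions:

import pystencils

# 16 stacks of 8 patches each, the last index dimension holds (real, imaginary)
complex_field = pystencils.fields('complex_field(16, 8, 2): float32[2D]')
group_weights = pystencils.fields('group_weights(16): float32[2D]')

assignments = wiener_filtering(complex_field, group_weights, sigma=25 / 255)
kernel = pystencils.create_kernel(assignments).compile()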
Example No. 4
def hard_thresholding(complex_field: Field, output_weight_field: Field, threshold):
    assert complex_field.index_dimensions == 3
    assert output_weight_field.index_dimensions == 1

    assignments = []

    for stack_index in range(complex_field.index_shape[0]):
        num_nonzeros = []
        for patch_index in range(complex_field.index_shape[1]):

            magnitude = sum(complex_field.center(stack_index, patch_index, i) ** 2 for i in (0, 1))
            assignments.extend(
                pystencils.Assignment(
                    complex_field.center(stack_index, patch_index, i),
                    sympy.Piecewise((complex_field.center(stack_index, patch_index, i),
                                     magnitude > threshold ** 2),
                                    (0, True)))
                for i in (0, 1)
            )
            num_nonzeros.append(sympy.Piecewise((1, magnitude > threshold ** 2), (0, True)))

        assignments.append(pystencils.Assignment(
            output_weight_field.center(stack_index), sympy.Add(*num_nonzeros)
        ))

    return AssignmentCollection(assignments)
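
A usage sketch for the hard-thresholding step under the same illustrative group shape; the threshold value is an assumption:

import pystencils

complex_field = pystencils.fields('complex_field(16, 8, 2): float32[2D]')
nonzero_counts = pystencils.fields('nonzero_counts(16): float32[2D]')

assignments = hard_thresholding(complex_field, nonzero_counts, threshold=2.7 * 25 / 255)
kernel = pystencils.create_kernel(assignments).compile()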
Example No. 5
def aggregate(block_scores: Field,
              patch_input_field: Field,
              destination_field: Field,
              block_stencil,
              matching_stencil,
              threshold,
              max_selected,
              compilation_target,
              patch_weights: Field = None,
              accumulated_weights: Field = None,
              **compilation_kwargs):

    max_offset = max(max(o) for o in matching_stencil)
    max_offset += max(max(o) for o in block_stencil)

    offset = pystencils_reco.typed_symbols('_o:%i' % patch_input_field.spatial_dimensions, 'int32')
    copies = []

    assert destination_field.index_dimensions == 2
    assert destination_field.index_shape[-1] == len(block_stencil)

    n, nth_hit = pystencils_reco.typed_symbols('_n, nth_hit', 'int32')
    for i, s in enumerate(block_stencil):
        shifted = tuple(o + c for o, c in zip(offset, s))  # matching offset plus block-stencil offset
        weight = patch_weights.center(nth_hit) if patch_weights else 1

        assignment = pystencils.Assignment(_get_dummy_symbol(),
                                           sympy.Function('atomicAdd')(address_of(patch_input_field[shifted]),
                                                                       weight * destination_field.center(nth_hit, i)))
        copies.append(assignment)
        if accumulated_weights:
            assignment = pystencils.Assignment(_get_dummy_symbol(),
                                               sympy.Function('atomicAdd')(
                                                   address_of(accumulated_weights[shifted]), weight))
            copies.append(assignment)

    assignments = AssignmentCollection(copies)
    ast = pystencils.create_kernel(assignments,
                                   target=compilation_target,
                                   data_type=patch_input_field.dtype,
                                   ghost_layers=max_offset,
                                   **compilation_kwargs)

    ast._body = Select(ast.body,
                       what=offset,
                       from_iterable=matching_stencil,
                       predicate=block_scores.center(n) < threshold,
                       counter_symbol=n,
                       hit_counter_symbol=nth_hit,
                       compilation_target=compilation_target,
                       max_selected=max_selected)
    return ast.compile()
Example No. 6
def generic_stationary_filter(input_field: pystencils.Field,
                              output_field: pystencils.Field,
                              stencil,
                              weighting_function,
                              normalize_weights=True):
    """generic_function_filter

    :param input_field:
    :type input_field: pystencils.Field
    :param output_field:
    :type output_field: pystencils.Field
    :param stencil:
    :param weighting_function: A function that takes a offset tuple and transfers it to weighting of the function value
    :param normalize_weights: whether or not to normalize weights to a sum of one
    """

    weights = 0
    weighted_sum = 0
    for s in stencil:
        weight = weighting_function(s)
        weights += weight
        weighted_sum += weight * input_field[s]

    assignments = AssignmentCollection(
        {output_field.center(): weighted_sum / weights if normalize_weights else weighted_sum})

    return assignments
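
A usage sketch for the stationary filter, assuming the helper above is importable; the Gaussian offset weighting and the 3x3 stencil are illustrative:

import math
import pystencils

src, dst = pystencils.fields('src, dst: float64[2D]')
stencil = [(dx, dy) for dx in (-1, 0, 1) for dy in (-1, 0, 1)]

# weight depends only on the offset, not on the image content
def gauss_weight(offset, sigma=1.0):
    return math.exp(-sum(o ** 2 for o in offset) / (2 * sigma ** 2))

assignments = generic_stationary_filter(src, dst, stencil, gauss_weight)
kernel = pystencils.create_kernel(assignments).compile()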
Example No. 7
def mean_filter(input_field: pystencils.Field, output_field: pystencils.Field,
                stencil):
    assignments = {
        output_field.center():
        sum(input_field[t] for t in stencil) / len(stencil)
    }

    return assignments
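
mean_filter returns a plain assignment dict; a sketch of turning it into a compiled 3x3 box filter (field names and array sizes are illustrative):

import numpy as np
import pystencils

src, dst = pystencils.fields('src, dst: float64[2D]')
stencil = [(dx, dy) for dx in (-1, 0, 1) for dy in (-1, 0, 1)]

assignments = pystencils.AssignmentCollection(mean_filter(src, dst, stencil))
kernel = pystencils.create_kernel(assignments).compile()

src_arr = np.random.rand(64, 64)
dst_arr = np.zeros_like(src_arr)
kernel(src=src_arr, dst=dst_arr)  # the border (ghost layer) of dst_arr is left untouched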
Example No. 8
def calc_wiener_coefficients(complex_field: Field, output_wieners: Field, sigma):
    assert complex_field.index_dimensions == 3
    assert output_wieners.index_dimensions == 2

    assignments = []
    norm_factor = complex_field.index_shape[0] * complex_field.index_shape[1]

    for stack_index in range(complex_field.index_shape[0]):
        for patch_index in range(complex_field.index_shape[1]):

            magnitude = sum(complex_field.center(stack_index, patch_index, i) ** 2 for i in (0, 1))
            val = magnitude / norm_factor
            wien = val / (val + sigma * sigma)

            assignments.append(
                pystencils.Assignment(output_wieners.center(stack_index, patch_index), wien)
            )

    return AssignmentCollection(assignments)
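
The two Wiener helpers are naturally used as a pair: coefficients are estimated from one transformed group and then applied to another. A sketch under assumed field names and group shape:

import pystencils

basic_estimate = pystencils.fields('basic_estimate(16, 8, 2): float32[2D]')
noisy_group = pystencils.fields('noisy_group(16, 8, 2): float32[2D]')
wieners = pystencils.fields('wieners(16, 8): float32[2D]')
group_weights = pystencils.fields('group_weights(16): float32[2D]')

calc_kernel = pystencils.create_kernel(
    calc_wiener_coefficients(basic_estimate, wieners, sigma=25 / 255)).compile()
apply_kernel = pystencils.create_kernel(
    apply_wieners(noisy_group, wieners, group_weights)).compile()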
Example No. 9
def collect_patches(block_scores: Field,
                    patch_input_field: Field,
                    destination_field: Field,
                    block_stencil,
                    matching_stencil,
                    threshold,
                    max_selected,
                    compilation_target,
                    **compilation_kwargs
                    ):
    max_offset = max(max(o) for o in matching_stencil)
    max_offset += max(max(o) for o in block_stencil)

    offset = pystencils_reco.typed_symbols('_o:%i' % patch_input_field.spatial_dimensions, 'int32')
    copies = []

    assert destination_field.index_dimensions == 2
    assert destination_field.index_shape[-1] == len(block_stencil)

    n, nth_hit = pystencils_reco.typed_symbols('_n, nth_hit', 'int32')
    for i, s in enumerate(block_stencil):
        shifted = tuple(o + c for o, c in zip(offset, s))  # matching offset plus block-stencil offset
        copies.append(pystencils.Assignment(destination_field.center(nth_hit, i), patch_input_field[shifted]))

    assignments = AssignmentCollection(copies)
    ast = pystencils.create_kernel(assignments, target=compilation_target,
                                   data_type=patch_input_field.dtype,
                                   ghost_layers=max_offset,
                                   **compilation_kwargs)
    # TODO: move the selection to a per-coordinate level
    ast._body = Select(ast.body,
                       what=offset,
                       from_iterable=matching_stencil,
                       predicate=block_scores.center(n) < threshold,
                       counter_symbol=n,
                       hit_counter_symbol=nth_hit,
                       max_selected=max_selected,
                       compilation_target=compilation_target)
    return ast.compile()
Example No. 10
def generic_guided_filter(input_field: pystencils.Field,
                          guide_field: pystencils.Field,
                          output_field: pystencils.Field,
                          stencil,
                          weighting_function,
                          normalize_weights=True):
    """Implements a generic non-stationary filter.

    The filter weight depends on the current stencil offset, the function value there and the central function value

    :param input_field:
    :type input_field: pystencils.Field
    :param guide_field:
    :type guide_field: pystencils.Field
    :param output_field:
    :type output_field: pystencils.Field
    :param stencil:   Describes filter kernel, an Iterable over all accessed relative offsets
    :param weighting_function:
                       A function that takes current offset,
                       the value of the filter image at that offset,
                       the value of the guide filter at that offset,
                       and the value of the filter image at the stencils center
    :param normalize_weights: whether or not to normalize weights to a sum of one
    """

    weights = 0
    weighted_sum = 0
    for s in stencil:
        weight = weighting_function(s, input_field[s], guide_field[s],
                                    input_field.center())
        weights += weight
        weighted_sum += weight * input_field[s]

    assignments = AssignmentCollection(
        {output_field.center(): weighted_sum / weights if normalize_weights else weighted_sum})

    return assignments
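
A sketch of a guided, bilateral-style filter built from the helper above; the edge-stopping weight, the 5x5 stencil and the field names are illustrative assumptions:

import sympy
import pystencils

src, guide, dst = pystencils.fields('src, guide, dst: float64[2D]')
stencil = [(dx, dy) for dx in range(-2, 3) for dy in range(-2, 3)]

# the weight decays where the guide value differs from the center value of the input
def guided_weight(offset, value, guide_value, center_value):
    return sympy.exp(-(guide_value - center_value) ** 2)

assignments = generic_guided_filter(src, guide, dst, stencil, guided_weight)
kernel = pystencils.create_kernel(assignments).compile()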
Example No. 11
def get_curl(input_field: ps.Field, curl_field: ps.Field):
    """Return a ps.AssignmentCollection describing the calculation of
    the curl given a 2d or 3d vector field [z,y,x](f) or [y,x](f)

    Note that the curl of a 2d vector field is defined in ℝ3!
    Only the non-zero z-component is returned

    Arguments:
        field {ps.Field} -- A field with index_dimensions <= 1
            Scalar fields are interpreted as a z-component

    Raises:
        NotImplementedError -- [description]
        NotImplementedError -- Only support 2d or 3d vector fields or scalar fields are supported

    Returns:
        ps.AssignmentCollection -- AssignmentCollection describing the calculation of the curl
    """
    assert input_field.index_dimensions <= 1, "Must be a vector or a scalar field"
    assert curl_field.index_dimensions == 1, "Must be a vector field"
    discretize = ps.fd.Discretization2ndOrder(dx=1)

    if input_field.index_dimensions == 0:
        dy = ps.fd.Diff(input_field, 0)
        dx = ps.fd.Diff(input_field, 1)
        f_x = ps.Assignment(curl_field.center(0), discretize(dy))
        f_y = ps.Assignment(curl_field.center(1), discretize(dx))
        return ps.AssignmentCollection([f_x, f_y], [])

    else:

        if input_field.index_shape[0] == 2:
            raise NotImplementedError()

        elif input_field.index_shape[0] == 3:
            raise NotImplementedError()
        else:
            raise NotImplementedError()
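
Only the scalar branch is implemented so far; a short sketch of applying it to a scalar field interpreted as the z-component (field names are illustrative):

import pystencils as ps

f = ps.fields('f: float64[2D]')
curl = ps.fields('curl(2): float64[2D]')

kernel = ps.create_kernel(get_curl(f, curl)).compile()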
Example No. 12
def forward_projection(volume: pystencils.Field,
                       projection: pystencils.Field,
                       projection_matrix,
                       step_size=1,
                       cubic_bspline_interpolation=False,
                       add_to_projector=False,
                       central_ray_point=None):
    # is_projection_stack = projection.spatial_dimensions == volume.spatial_dimensions

    interpolation_mode = 'cubic_spline' if cubic_bspline_interpolation else 'linear'
    volume_texture = pystencils.interpolation_astnodes.Interpolator(
        volume, interpolation_mode)
    ndim = volume.spatial_dimensions
    projection_matrix = pystencils_reco.ProjectiveMatrix(projection_matrix)

    t = pystencils_reco.typed_symbols('_parametrization', 'float32')
    texture_coordinates = sympy.Matrix(
        pystencils_reco.typed_symbols(f'_t:{ndim}', 'float32'))
    u = projection.physical_coordinates_staggered
    x = volume.index_to_physical(texture_coordinates)

    is_perspective = projection_matrix.matrix.cols == ndim + 1

    if is_perspective:
        eqn = projection_matrix @ sympy.Matrix([*x, 1]) - sympy.Matrix(
            [*(t * u), t])
    else:
        # this also works for perspective/cone beam projection (but may lead to unstable parametrization)
        eqn = projection_matrix @ x - u
    ray_equations = sympy.solve(eqn, texture_coordinates, rational=False)

    if not is_perspective:
        t = [t for t in texture_coordinates
             if t not in ray_equations.keys()][0]
        assert len(
            ray_equations.keys()
        ) == ndim - 1, "projection_matrix does not appear to define a projection"
    ray_equations = sympy.Matrix(
        [ray_equations[s] if s != t else t for s in texture_coordinates])

    projection_vector = sympy.diff(ray_equations, t)
    projection_vector_norm = projection_vector.norm()
    projection_vector /= projection_vector_norm

    conditions = pystencils_reco._geometry.coordinate_in_field_conditions(
        volume, ray_equations)

    if not central_ray_point:
        central_ray_point = [0] * projection.spatial_dimensions
    central_ray = projection_vector.subs({
        i: j
        for i, j in zip(pystencils.x_vector(projection.spatial_dimensions),
                        central_ray_point)
    })

    intersection_candidates = []
    for i in range(ndim):
        solution_min = sympy.solve(ray_equations[i], t, rational=False)
        solution_max = sympy.solve(ray_equations[i] - volume.spatial_shape[i],
                                   t,
                                   rational=False)
        intersection_candidates.extend(solution_min + solution_max)

    intersection_point1 = sympy.Piecewise(
        *[(f, sympy.And(*conditions).subs({t: f}))
          for f in intersection_candidates], (-0, True))
    intersection_point2 = sympy.Piecewise(
        *[(f, sympy.And(*conditions).subs({t: f}))
          for f in reversed(intersection_candidates)], (-0, True))
    assert intersection_point1 != intersection_point2, \
        "The intersections are unconditionally equal, reconstruction volume is not in detector FOV!"

    # perform an integer set analysis here?
    # space = isl.Space.create_from_names(isl.DEFAULT_CONTEXT, set=[str(t) for t in texture_coordinates])
    # ray_set = isl.BasicSet.universe(space)
    # for i, t in enumerate(texture_coordinates):
    #    # TODO: constraint construction unclear
    #    ray_set.add_constraint(isl.Constraint.ineq_from_names(space, {str(texture_coordinates): 1}))
    #    ray_set.add_constraint(isl.Constraint.ineq_from_names(space,
    #                                                        # {1: -volume.shape[i],
    # str(texture_coordinates): -1}))
    #    ray_set.add_constraint(isl.Constraint.eq_from_name(space, ray_equations[i].subs({ #TODO

    min_t = sympy.Min(intersection_point1, intersection_point2)
    max_t = sympy.Max(intersection_point1, intersection_point2)
    # parametrization_dim = list(ray_equations).index(t)
    # min_t = 0
    # max_t = volume.spatial_shape[parametrization_dim]

    line_integral, num_steps, min_t_tmp, max_t_tmp, intensity_weighting, step = pystencils.data_types.typed_symbols(
        'line_integral, num_steps, min_t_tmp, max_t_tmp, intensity_weighting, step',
        'float32')
    i = pystencils.data_types.TypedSymbol('i', 'int32')
    num_steps = pystencils.data_types.TypedSymbol('num_steps', 'int32')

    # step = step_size / projection_vector_norm
    # tex_coord = ray_equations.subs({t: min_t_tmp + i * step})
    tex_coord = ray_equations.subs({t: min_t_tmp}) + projection_vector * i

    intensity_weighting_sym = projection_vector.dot(central_ray)**2

    assignments = {
        min_t_tmp: min_t,
        max_t_tmp: max_t,
        num_steps: sympy.ceiling((max_t_tmp - min_t_tmp) / (step_size / projection_vector_norm)),
        line_integral: sympy.Sum(volume_texture.at(tex_coord), (i, 0, num_steps)),
        intensity_weighting: intensity_weighting_sym,
        projection.center(): (line_integral * step_size * intensity_weighting) +
                             (projection.center() if add_to_projector else 0)
        # projection.center(): (max_t_tmp - min_t_tmp) / step  # Uncomment to get path length
    }

    # def create_autodiff(self, constant_fields=None):
    # backward_assignments = backward_projection(AdjointField(projections),
    # AdjointField(volume),
    # projection_matrix,
    # 1)
    # self._autodiff = pystencils.autodiff.AutoDiffOp(
    # assignments, "op", constant_fields=constant_fields, backward_assignments=backward_assignments)

    # assignments._create_autodiff = types.MethodType(create_autodiff, assignments)

    return assignments
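
A heavily hedged sketch of setting up a cone-beam forward projection with the function above; the 3x4 projection matrix, the field names and the step size are placeholders, pystencils_reco is assumed to be installed, and the returned assignment dict still has to be turned into a kernel by the caller:

import sympy
import pystencils

volume = pystencils.fields('volume: float32[3D]')
projection = pystencils.fields('projection: float32[2D]')

# placeholder pinhole-style projection matrix (intrinsics times [R|t]); values are arbitrary
projection_matrix = sympy.Matrix([[1200, 0, 256, 0],
                                  [0, 1200, 256, 0],
                                  [0, 0, 1, 750]])

assignments = forward_projection(volume, projection, projection_matrix, step_size=1)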