Example #1
def thread2():
    time.sleep(0.2)
    def fiber1():
        time.sleep(0.4)
        f22.switch()
    def fiber2():
        pass
    f21 = fibers.Fiber(fiber1)
    f22 = fibers.Fiber(fiber2)
    f21.switch()
Example #2
    def __init__(self, m, func, args, kwargs):
        def _run():
            # runs inside the fiber: record this proc in thread-local storage,
            # run the target, and unregister from m when the fiber finishes
            _tls.current_proc = self
            self._is_started = 1
            try:
                return func(*args, **kwargs)
            except ProcExit:
                pass
            finally:
                m.removeg()

        self.m = m
        self.fiber = fibers.Fiber(_run)
        self.waiting = False
        self.sleeping = False
        self.param = None
        self._is_started = 0
Example #3
    def do_async(self, func):
        f = fibers.Fiber(target=func)
        self._fibers.append(f)
Example #4
def thread1():
    def fiber1():
        time.sleep(0.4)
    f11 = fibers.Fiber(fiber1)
    f11.switch()
Example #5
    def start(self):
        self.fiber = fibers.Fiber(self.handler)
        self.fiber.switch()
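The snippets in Examples #1-#5 are excerpts and omit their imports. As a minimal self-contained sketch of the same pattern (assuming the python-fibers package, whose Fiber takes a target callable and is resumed with switch(), as the examples above show):

import fibers

def greet():
    print('running inside the fiber')

f = fibers.Fiber(target=greet)
f.switch()  # switches into greet(); control returns here when the fiber finishes

Examples #1 and #4 apply the same pattern inside separate threads, so each thread creates and switches into its own fibers.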
Example #6
import numpy
import vtk

# This function is excerpted from a larger module: `fibers` and `similarity`
# are companion modules of that package, `Parallel` and `delayed` come from
# joblib, and USE_PARALLEL is a module-level flag selecting the parallel path.

def laplacian_of_gaussian(inpd,
                          fiber_distance_sigma=25,
                          points_per_fiber=30,
                          n_jobs=2,
                          upper_thresh=30):
    """ Filter nearby fibers, using LoG weights.
    
    The pairwise fiber distance matrix is computed, then fibers are
    averaged with their neighbors using LoG weighting.  This is
    essentially a fiber subtraction operation, giving vectors pointing
    from the center fiber under the kernel, to all nearby fibers. Thus
    the output of this operation is not a fiber, but we compute
    properties of the output that might be interesting and related to
    fibers. We summarize the result using the average vector at each
    fiber point (output is its magnitude, similar to edge
    strength). The covariance of the vectors is also
    investigated. This matrix would be spherical in an isotropic
    region such as a tract center (tube/line detector), or planar in a
    sheetlike tract (sheet detector).

    The equation is: (1-d^2/sigma^2) exp(-d^2/(2*sigma^2)), and
    weights are normalized in the neighborhood (weighted averaging).
    """

    sigmasq = fiber_distance_sigma * fiber_distance_sigma

    # polydata to array conversion, fixed-length fiber representation
    fiber_array = fibers.FiberArray()
    fiber_array.points_per_fiber = points_per_fiber
    fiber_array.convert_from_polydata(inpd)

    fiber_indices = range(0, fiber_array.number_of_fibers)

    # pairwise distance matrix
    if USE_PARALLEL:
        distances = Parallel(n_jobs=n_jobs, verbose=1)(
            delayed(similarity.fiber_distance)(fiber_array.get_fiber(
                lidx), fiber_array, 0, 'Hausdorff') for lidx in fiber_indices)
        distances = numpy.array(distances)
    else:
        distances = numpy.zeros(
            (fiber_array.number_of_fibers, fiber_array.number_of_fibers))
        for lidx in fiber_indices:
            distances[lidx, :] = similarity.fiber_distance(
                fiber_array.get_fiber(lidx), fiber_array, 0)

    # fiber list data structure initialization for easy fiber averaging
    fiber_list = list()
    for lidx in range(0, fiber_array.number_of_fibers):
        fiber_list.append(fiber_array.get_fiber(lidx))

    filter_vectors = list()
    filter_vector_magnitudes = list()
    filter_confidences = list()

    # gaussian smooth all fibers using local neighborhood
    for fidx in fiber_indices:
        if (fidx % 100) == 0:
            print(fidx, '/', fiber_array.number_of_fibers)

        current_fiber = fiber_list[fidx]

        # find indices of all nearby fibers
        # this includes the center fiber under the kernel
        indices = numpy.nonzero(distances[fidx] < upper_thresh)[0]
        local_fibers = list()
        local_weights = list()

        for idx in indices:
            dist = distances[fidx][idx]
            # compute filter kernel weights
            # NOTE: this uses a plain Gaussian weight; the LoG weight from the
            # docstring is the commented-out alternative below
            weight = numpy.exp(-(dist * dist) / sigmasq)
            #weight = (1 - (dist*dist)/sigmasq) * numpy.exp(-(dist*dist)/(2*sigmasq))
            local_fibers.append(fiber_list[idx])
            local_weights.append(weight)

        # actually perform the weighted average
        #mean_weight = numpy.mean(numpy.array(local_weights))
        #out_weights = local_weights[0]
        #for weight in local_weights[1:]:
        #    out_weights += weight
        # the weights must sum to 0 for LoG
        # (response in constant region is 0)
        #mean_weight = out_weights / len(local_weights)
        #local_normed_weights = list()
        #for weight in local_weights:
        #    local_normed_weights.append(weight - mean_weight)

        #match_fiber = local_fibers[0]
        #out_vector = local_fibers[0] * local_normed_weights[0]
        idx = 0
        for fiber in local_fibers:
            #out_vector += fiber
            # ensure fiber ordering by matching to current fiber only
            # otherwise the order is undefined after fiber subtraction
            matched_fiber = current_fiber.match_order(fiber)
            #filtered_fiber = matched_version * local_normed_weights[idx]
            #filtered_fiber = matched_version * local_weights[idx]
            if idx == 0:
                out_vector = fibers.Fiber()
                out_vector.points_per_fiber = points_per_fiber
                out_vector.r = numpy.zeros(points_per_fiber)
                out_vector.a = numpy.zeros(points_per_fiber)
                out_vector.s = numpy.zeros(points_per_fiber)
            #filtered_fiber = match_fiber.match_order(fiber)
            #out_vector.r = (out_vector.r + matched_fiber.r) * local_weights[idx]
            #out_vector.a = (out_vector.a + matched_fiber.a) * local_weights[idx]
            #out_vector.s = (out_vector.s + matched_fiber.s) * local_weights[idx]
            out_vector.r += (current_fiber.r -
                             matched_fiber.r) * local_weights[idx]
            out_vector.a += (current_fiber.a -
                             matched_fiber.a) * local_weights[idx]
            out_vector.s += (current_fiber.s -
                             matched_fiber.s) * local_weights[idx]
            idx += 1

        total_weights = numpy.sum(numpy.array(local_weights))
        out_vector = out_vector / total_weights

        filter_vectors.append(out_vector)
        filter_confidences.append(total_weights)

        filter_vector_magnitudes.append(numpy.sqrt(
            numpy.multiply(out_vector.r, out_vector.r) +
            numpy.multiply(out_vector.a, out_vector.a) +
            numpy.multiply(out_vector.s, out_vector.s)))
        #filter_vector_magnitudes.append(numpy.sum(out_vector.r))

    # output a new polydata with fixed-length fibers and the new vector field
    # from the filtering
    outpd = fiber_array.convert_to_polydata()
    vectors = vtk.vtkFloatArray()
    vectors.SetName('FiberDifferenceVectors')
    vectors.SetNumberOfComponents(3)
    for vec in filter_vectors:
        for idx in range(points_per_fiber):
            vectors.InsertNextTuple3(vec.r[idx], vec.a[idx], vec.s[idx])
    magnitudes = vtk.vtkFloatArray()
    magnitudes.SetName('FiberDifferenceMagnitudes')
    magnitudes.SetNumberOfComponents(1)
    for mag in filter_vector_magnitudes:
        for idx in range(points_per_fiber):
            magnitudes.InsertNextTuple1(mag[idx])
    confidences = vtk.vtkFloatArray()
    confidences.SetName('FiberDifferenceConfidences')
    confidences.SetNumberOfComponents(1)
    for conf in filter_confidences:
        for idx in range(points_per_fiber):
            confidences.InsertNextTuple1(conf)

    outpd.GetPointData().AddArray(vectors)
    outpd.GetPointData().SetActiveVectors('FiberDifferenceVectors')

    outpd.GetPointData().AddArray(confidences)
    outpd.GetPointData().SetActiveScalars('FiberDifferenceConfidences')

    outpd.GetPointData().AddArray(magnitudes)
    outpd.GetPointData().SetActiveScalars('FiberDifferenceMagnitudes')

    # color by the weights or "local density"
    # color output by the number of fibers that each output fiber corresponds to
    #outcolors = vtk.vtkFloatArray()
    #outcolors.SetName('KernelDensity')
    #for weight in next_weights:
    #    outcolors.InsertNextTuple1(weight)
    #inpd.GetCellData().AddArray(outcolors)
    #inpd.GetCellData().SetActiveScalars('KernelDensity')
    #outcolors = vtk.vtkFloatArray()
    #outcolors.SetName('EdgeMagnitude')
    #for magnitude in filter_vector_magnitudes:
    #    outcolors.InsertNextTuple1(magnitude)
    #inpd.GetCellData().AddArray(outcolors)
    #inpd.GetCellData().SetActiveScalars('EdgeMagnitude')

    return outpd, numpy.array(filter_vector_magnitudes)
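The docstring above gives the LoG weight (1 - d^2/sigma^2) exp(-d^2/(2*sigma^2)), while the loop itself currently applies the plain Gaussian exp(-d^2/sigma^2) and leaves the LoG form commented out. As an illustrative, hypothetical sketch (not part of the function above) of how the docstring's normalized weights behave across a small neighborhood of distances:

import numpy

def log_weights(distances, sigma=25.0):
    # LoG kernel from the docstring, normalized within the neighborhood
    d2 = numpy.asarray(distances, dtype=float) ** 2
    w = (1.0 - d2 / sigma ** 2) * numpy.exp(-d2 / (2.0 * sigma ** 2))
    return w / numpy.sum(w)

# the center fiber (d = 0) receives the largest weight; fibers near the
# kernel edge contribute weakly or even negatively
print(log_weights([0.0, 10.0, 20.0, 28.0]))

With the default fiber_distance_sigma of 25, neighbors farther than one sigma receive negative LoG weights, which is what makes the filter act as a fiber subtraction (edge-detection) operation rather than plain smoothing.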