Example #1
import numpy as np

# `atrous` and the helpers used below (get_structures, connectivity_graph,
# supp_from_connectivity, _dtype_) are assumed importable from the
# surrounding project modules.
def just_denoise(arr,
                 k=3,
                 level=5,
                 noise_std=None,
                 coefs=None,
                 supp=None,
                 min_nscales=2):
    """Denoise `arr`, keeping only wavelet coefficients that belong to
    structures connected across at least `min_nscales` scales."""
    if np.iterable(k):
        level = len(k)
    if coefs is None:
        coefs = atrous.decompose(arr, level)
    if noise_std is None:
        if arr.ndim > 2:
            noise_std = atrous.estimate_sigma_mad(coefs[0], True)
        else:
            noise_std = atrous.estimate_sigma(arr, coefs)
    ## calculate support taking only positive coefficients (light sources)
    if supp is None:
        supp = atrous.get_support(coefs,
                                  np.array(k, _dtype_) * noise_std,
                                  modulus=False)
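    # group significant coefficients into per-scale structures and keep
    # connected components spanning at least min_nscales levels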
    structures = get_structures(coefs, supp)
    g = connectivity_graph(structures, min_nscales)
    new_supp = supp_from_connectivity(g, level)
    return atrous.rec_with_support(coefs, new_supp)
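A minimal usage sketch, assuming the project modules above are importable; the synthetic image and its bright patch are made up for illustration:

import numpy as np

img = 0.5 * np.random.randn(128, 128)    # hypothetical noisy background
img[40:60, 40:60] += 5.0                 # hypothetical bright source
clean = just_denoise(img, k=3, level=5)  # keeps structures seen on >= 2 scales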
Example #2
import numpy as np

# `atrous`, `mmt`, `multiscale`, `lib` and the MVM helpers used below are
# assumed importable from the surrounding project modules.
def find_objects(
        arr,
        k=3,
        level=5,
        noise_std=None,
        coefs=None,
        supp=None,
        dec_fn=atrous.decompose,
        retraw=False,  # testing only: 1 -> raw objects, 2 -> their supports
        start_scale=0,
        weights=None,
        deblendp=True,
        min_px_size=200,
        min_nscales=2,
        rec_variant=2,
        modulus=False):
    """Use MVM to find objects in the input array.

    Parameters:
      - `arr`: (`numpy array`) -- 1D, 2D or 3D ``numpy`` array. Input data.
      - `k` : (`number`) -- threshold to regard wavelet coefficient as
        significant, in :math:`\\times \\sigma` (in noise standard deviations)
      - `level`: (`int`) -- level of wavelet transform
      - `noise_std`: (`number` or `None`) -- if known, provide noise
        :math:`\\sigma`
      - `coefs`: if already calculated, provide wavelet coefficients
      - `supp`: if already calculated, provide support of significant wavelet
        coefficients
      - `start_scale`: (`int`) -- start reconstruction at this scale
	(decomposition level)
      - `weights`: (`list` of numbers) -- weight coefficients at different
        levels before reconstruction
      - `min_px_size`: an `MVMNode` should contain at least this number of
        pixels
      - `min_nscales`: an object should have at least this scales/levels
      - `modulus`: if False, only search for light sources
      - retraw : only used for debugging

    Returns:
      a `list` of recovered objects as *embedddings* around non-zero voxels.
      see `embedding` function for details
    
    """
    if np.iterable(k):
        level = len(k)
    if coefs is None:
        coefs = dec_fn(arr, level)
    if noise_std is None:
        # MAD-based estimate from the finest-scale coefficients
        noise_std = atrous.estimate_sigma_mad(coefs[0], True)
    sigmaej = atrous.sigmaej
    if dec_fn == mmt.decompose_mwt:
        # the per-scale noise std table depends on the decomposition used
        sigmaej = mmt.sigmaej_mwts2
    if supp is None:
        # threshold the coefficients; with modulus=False only positive
        # coefficients (light sources) enter the support
        supp = multiscale.threshold_w(coefs,
                                      np.array(k, _dtype_) * noise_std,
                                      modulus=modulus,
                                      sigmaej=sigmaej)
    if weights is None:
        weights = np.ones(level)
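    # group significant coefficients into per-scale structures and link
    # them across scales into a connectivity graph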
    structures = get_structures(coefs, supp)
    g = connectivity_graph(structures)
    if deblendp:
        gdeblended = deblend_all(g, coefs, min_nscales)  # destructive
    else:
        gdeblended = [r for r in g if nscales(r) >= min_nscales]

    def check(x):
        # an object must cover more than min_px_size pixels
        return len(tree_locations2(x)) > min_px_size

    objects = sorted((x for x in gdeblended if check(x)),
                     key=tree_mass,
                     reverse=True)
    if retraw == 1:
        return objects
    if retraw == 2:
        return [supp_from_obj(o, start_scale) for o in objects]
    # note: even if we decompose with mmt.decompose_mwt
    # we use atrous.decompose for object reconstruction because
    # we don't expect too many outliers and this way it's faster
    pipelines = [
        lib.flcompose(lambda x1, x2: supp_from_obj(x1, x2, weights=weights),
                      lambda x: multiscale.simple_rec(coefs, x), embedding),
        lib.flcompose(
            lambda x1, x2: supp_from_obj(x1, x2, weights=weights),
            lambda x: multiscale.simple_rec_iterative(
                coefs, x, positive_only=(not modulus)), embedding)
    ]
    recovered = (pipelines[rec_variant - 1](obj, start_scale)
                 for obj in objects)
    return [x for x in recovered if np.sum(x[0] > 0) > min_px_size]
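A minimal usage sketch for `find_objects`, again assuming the project modules are importable; the synthetic frame is hypothetical, and only the first element of each returned object (an array, per the size filter above) is relied upon:

import numpy as np

frame = 0.3 * np.random.randn(256, 256)        # hypothetical noise background
frame[100:130, 100:130] += 4.0                 # hypothetical object
objs = find_objects(frame, k=3, level=5, min_px_size=200)
for obj in objs:
    arr0 = obj[0]                              # embedded object array
    print(arr0.shape, int(np.sum(arr0 > 0)))   # size in non-zero voxels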