Example #1
def load_params(indf):
    """ Takes a pandas.DataFrame generated by grism and reads it into an
    lmfit.Parameters object.
    """
    df = indf.copy()
    p = lf.Parameters()
    p.clear()
    df.loc['Contin', 'Identifier'] = 'x'  # set_value() was removed from pandas
    if 'SigMin' not in df.columns:
        df['SigMin'] = 0.1
    if 'SigMax' not in df.columns:
        df['SigMax'] = 30.
    if 'AmpMin' not in df.columns:
        df['AmpMin'] = 0.001
    if 'AmpMax' not in df.columns:
        df['AmpMax'] = 5000.
    if 'WavMin' not in df.columns:
        df['WavMin'] = df.Pos - 10.
    if 'WavMax' not in df.columns:
        df['WavMax'] = df.Pos + 10.
    df.Pos += df['Line center']

    for comp in df.index:
        if comp == 'Contin':
            varval = df.loc[comp]['Ampl'] + .0001
            p.add('Contin_Ampl', value=varval, min=-10., max=10000.,
                  vary=sp.invert(df.loc[comp]['Lock']))
            continue
        for col in df.columns:
            if '_stddev' in col:
                continue
            if col in ['Ampl', 'Sigma', 'Pos', 'Gamma']:
                name = comp + '_' + col
                value = df.loc[comp][col]
                if col == 'Pos':
                    varmin = df.loc[comp]['Line center'] + \
                        df.loc[comp]['WavMin']
                    varmax = df.loc[comp]['Line center'] + \
                        df.loc[comp]['WavMax']
                    vary = sp.invert(df.loc[comp]['Lock'][0])
                elif col == 'Sigma':
                    varmin = df.loc[comp]['SigMin']
                    varmax = df.loc[comp]['SigMax']
                    vary = sp.invert(df.loc[comp]['Lock'][1])
                elif col == 'Ampl':
                    varmin = 0.001  # note: the AmpMin column above is not used here
                    varmax = df.loc[comp]['AmpMax']
                    vary = sp.invert(df.loc[comp]['Lock'][2])
                else:  # Gamma has no Lock flag, so leave it free and unbounded
                    varmin = None
                    varmax = None
                    vary = True
                p.add(name, value, min=varmin, max=varmax, vary=vary)
    return p
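The examples on this page all revolve around scipy.invert, which in the scipy releases these snippets target was simply a re-export of numpy.invert: the elementwise bitwise-NOT ufunc, which acts as a logical NOT on boolean input. A minimal sketch of how the Lock flags above turn into lmfit vary switches; the three-element Lock list is an assumption inferred from the indexing in the loop:

import numpy as np  # older scipy releases re-exported this ufunc as scipy.invert

lock = np.array([True, False, True])  # hypothetical Lock flags for Pos, Sigma, Ampl
vary = np.invert(lock)                # elementwise NOT -> array([False,  True, False])
assert list(vary) == [not flag for flag in lock]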
Example #2
def load_exps():
    _FEATURES = None
    exp_data = dict()
    exp_labs = dict()
    for mpath in mat_paths:
        exp = format_exp(mpath)
        matobj = sio.loadmat(mpath)
        feats = format_featnames(matobj)
        data = format_data(matobj)
        mask = sp.asarray([sp.invert(has_complex(arr)) for arr in data])
        feats = feats[mask]
        order = sp.argsort(feats)
        feats = [f.lower() for f in feats[order]]
        data = data[mask][order].real
        labels = format_labels(matobj)
        if _FEATURES is None:
            _FEATURES = feats
        assert feats == _FEATURES, 'Features do not match! %s\n%s' % (
            ' '.join(feats), ' '.join(_FEATURES))
        assert data.shape[1] == len(
            labels), 'Labels (%d) do not match time points (%d)' % (
                len(labels), data.shape[1])
        exp_data[exp] = data
        exp_labs[exp] = labels
    return exp_data, exp_labs, _FEATURES
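has_complex is defined elsewhere in that project; a hedged stand-in built on numpy.iscomplex shows how the boolean mask assembled with sp.invert keeps only the purely real feature rows:

import numpy as np

def has_complex(arr):
    # stand-in for the project's helper: True if any entry has a nonzero imaginary part
    return bool(np.iscomplex(arr).any())

data = np.array([[1.0, 2.0], [3.0 + 1j, 4.0], [5.0, 6.0]])
mask = np.asarray([np.invert(has_complex(arr)) for arr in data])
print(data[mask].real)  # keeps rows 0 and 2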
Example #3
def load_params(indf):
    df = indf.copy()
    p = lf.Parameters()
    p.clear()
    df.Pos += df['Line center']
    df.loc['Contin', 'Identifier'] = 'x'  # set_value() was removed from pandas
    for comp in df.index:
        print(comp)
        if comp == 'Contin':
            varval = df.loc[comp]['Ampl'] + .0001
            p.add('Contin_Ampl', value=varval, min=0.,
                  vary=sp.invert(df.loc[comp]['Lock']))
            continue
        for col in df.columns:
            if col in ['Ampl', 'Sigma', 'Pos', 'Gamma']:
                name = comp + '_' + col
                value = df.loc[comp][col]
                if col == 'Pos':
                    varmin = value - 10
                    varmax = value + 10
                    vary = sp.invert(df.loc[comp]['Lock'][0])
                elif col == 'Sigma':
                    varmin = 1e-8
                    varmax = 1e8
                    vary = sp.invert(df.loc[comp]['Lock'][1])
                elif col == 'Ampl':
                    varmin = 0.
                    varmax = None
                    vary = sp.invert(df.loc[comp]['Lock'][2])
                else:  # Gamma has no Lock flag, so leave it free and unbounded
                    varmin = None
                    varmax = None
                    vary = True
                p.add(name, value, min=varmin, max=varmax, vary=vary)
    return p
Example #4
def fit(self, X, y):
    num_tasks = X.shape[0]
    X = np.array(X)
    y = np.array(y)
    # count samples per task *before* flattening (the original did this after,
    # so it iterated over single samples instead of task blocks)
    sample_per_task = [task.shape[0] for task in X]
    X = [item for task in X for item in task]
    K = self.kernel.compute(X, X)
    # label-weighted kernel; the original multiplied the already-flattened X here,
    # but only the kernel matrix K matches these shapes
    Q = np.multiply(np.multiply(K, y), y.T)
    # task id of every flattened sample
    delta = np.repeat(np.arange(num_tasks), sample_per_task)
    # boost entries for sample pairs that belong to the same task
    for i in range(len(delta)):
        for j in range(len(delta)):
            if delta[i] == delta[j]:
                Q[i, j] *= 1 + num_tasks / self.mu + 1
    # scipy.invert is the elementwise bitwise NOT, not a matrix inverse;
    # solving the linear system is what the original presumably intended
    alpha = np.linalg.solve(Q, np.ones(y.shape[0]))
    return alpha
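The corrected double loop above can also be written as one broadcast comparison; a sketch under the assumption that the intended condition is "samples i and j belong to the same task":

import numpy as np

delta = np.repeat(np.arange(3), [2, 1, 2])    # task id of each of 5 samples
same_task = delta[:, None] == delta[None, :]  # boolean (5, 5) pair mask
Q = np.ones((5, 5))
Q[same_task] *= 1 + 3 / 2.0 + 1               # num_tasks = 3, mu = 2.0, factor as in fit()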
Example #5
def tri_flat(array):
    """Return the strict upper-triangle entries of a square array as a flat vector."""
    R = array.shape[0]
    # SP.tri is True on and below the diagonal; inverting leaves the strict upper triangle
    mask = SP.asarray(SP.invert(SP.tri(R, R, dtype=bool)), dtype=float)
    x, y = mask.nonzero()
    return array[x, y]
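A quick check of what tri_flat returns (SP is the usual alias for scipy, and numpy behaves identically here):

import numpy as np

array = np.arange(9).reshape(3, 3)
# np.tri(R, R, dtype=bool) is True on and below the diagonal; inverting it
# leaves the strict upper triangle, so tri_flat picks out [1, 2, 5].
mask = np.asarray(np.invert(np.tri(3, 3, dtype=bool)), dtype=float)
x, y = mask.nonzero()
print(array[x, y])  # -> [1 2 5]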
Example #6
def herm_sqrt_inv(x,
                  zero_tol=1E-15,
                  sanity_checks=False,
                  return_rank=False,
                  sc_data=''):
    if isinstance(x, mm.eyemat):
        x_sqrt = x
        x_sqrt_i = x
        rank = x.shape[0]
    else:
        try:
            ev = x.diag  #simple_diag_matrix
            EV = None
        except AttributeError:
            ev, EV = la.eigh(x)

        zeros = ev <= zero_tol  #throw away negative results too!

        ev_sqrt = sp.sqrt(ev)

        err = sp.seterr(divide='ignore', invalid='ignore')
        try:
            ev_sqrt_i = 1 / ev_sqrt
            ev_sqrt[zeros] = 0
            ev_sqrt_i[zeros] = 0
        finally:
            sp.seterr(divide=err['divide'], invalid=err['invalid'])

        if EV is None:
            x_sqrt = mm.simple_diag_matrix(ev_sqrt, dtype=x.dtype)
            x_sqrt_i = mm.simple_diag_matrix(ev_sqrt_i, dtype=x.dtype)
        else:
            B = mm.mmul_diag(ev_sqrt, EV.conj().T)
            x_sqrt = EV.dot(B)

            B = mm.mmul_diag(ev_sqrt_i, EV.conj().T)
            x_sqrt_i = EV.dot(B)

        rank = x.shape[0] - np.count_nonzero(zeros)

        if sanity_checks:
            if ev.min() < -zero_tol:
                log.warning(
                    "Sanity Fail in herm_sqrt_inv(): Throwing away negative eigenvalues! %s %s",
                    ev.min(), sc_data)

            if not np.allclose(x_sqrt.dot(x_sqrt), x):
                log.warning(
                    "Sanity Fail in herm_sqrt_inv(): x_sqrt is bad! %s %s",
                    la.norm(x_sqrt.dot(x_sqrt) - x), sc_data)

            if EV is None:
                nulls = sp.zeros(x.shape[0])
                nulls[zeros] = 1
                nulls = sp.diag(nulls)
            else:  #if we did an EVD then we use the eigenvectors
                nulls = EV.copy()
                nulls[:, sp.invert(zeros)] = 0
                nulls = nulls.dot(nulls.conj().T)

            eye = np.eye(x.shape[0])
            if not np.allclose(x_sqrt.dot(x_sqrt_i), eye - nulls):
                log.warning(
                    "Sanity Fail in herm_sqrt_inv(): x_sqrt_i is bad! %s %s",
                    la.norm(x_sqrt.dot(x_sqrt_i) - eye + nulls), sc_data)

    if return_rank:
        return x_sqrt, x_sqrt_i, rank
    else:
        return x_sqrt, x_sqrt_i
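The sp.invert(zeros) line in the sanity checks builds a projector onto the numerical null space. A standalone numpy sketch of the same idea, without the project's mm matrix helpers:

import numpy as np

x = np.diag([4.0, 1.0, 0.0])  # Hermitian, rank 2
ev, EV = np.linalg.eigh(x)
zeros = ev <= 1e-15           # eigenvalues treated as numerically zero

ev_sqrt = np.sqrt(np.clip(ev, 0, None))
ev_sqrt_i = np.zeros_like(ev_sqrt)
ev_sqrt_i[~zeros] = 1 / ev_sqrt[~zeros]
ev_sqrt[zeros] = 0

x_sqrt = EV @ np.diag(ev_sqrt) @ EV.conj().T
x_sqrt_i = EV @ np.diag(ev_sqrt_i) @ EV.conj().T

# the same null-space projector the sanity check builds with sp.invert(zeros)
nulls = EV.copy()
nulls[:, np.invert(zeros)] = 0
nulls = nulls.dot(nulls.conj().T)

assert np.allclose(x_sqrt @ x_sqrt_i, np.eye(3) - nulls)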
Example #7
    matx_active = signs * matx[:, active]

    u, s, vh = scipy.linalg.svd(matx_active, full_matrices=False)
    matg = vh.T @ scipy.diag(s**2) @ vh
    matg_inv = vh.T @ scipy.diag(scipy.reciprocal(s**2)) @ vh
    vec1 = scipy.ones(len(active))
    scalara = (matg_inv.sum())**(-.5)

    vecw = scalara * matg_inv.sum(axis=1)
    vecw = np.reshape(vecw, (vecw.shape[0], 1))
    vecu = matx_active @ vecw

    veca = matx.T @ vecu

    if k < size_predictor - 1:
        inactive = indices_predictor[scipy.invert(mask_maxc)]
        arr_gamma = scipy.concatenate([(maxc - np.take(vecc, inactive)) /
                                       (scalara - np.take(veca, inactive)),
                                       (maxc + np.take(vecc, inactive)) /
                                       (scalara + np.take(veca, inactive))
                                       ]).ravel()
        scalargamma = arr_gamma[arr_gamma > 0].min()
    else:
        scalargamma = maxc / scalara

    vecy_fitted += (scalargamma * vecu).T
    betas[active] += scalargamma * signs
    beta_lars.append(list(betas))

fig = plt.figure(figsize=(20, 20))
ax = fig.add_subplot(1, 1, 1)
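In this LARS excerpt (the body of the iteration loop; matx, active, signs, betas and the rest come from the enclosing scope), scipy.invert(mask_maxc) selects the predictors that are still inactive. A toy version of that selection, where mask_maxc flags the columns already in the active set:

import numpy as np

indices_predictor = np.arange(5)
mask_maxc = np.array([False, True, False, True, False])  # columns already active
inactive = indices_predictor[np.invert(mask_maxc)]       # -> array([0, 2, 4])
print(inactive)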
Example #8
def main():
    # parse cmd arguments
    parser = getParser()
    parser.parse_args()
    args = getArguments(parser)

    # prepare logger
    logger = Logger.getInstance()
    if args.debug:
        logger.setLevel(logging.DEBUG)
    elif args.verbose:
        logger.setLevel(logging.INFO)

    logger.info("Selected viscous type is {}".format(args.type))

    # iterate over input images
    for image in args.images:

        # get and prepare image data
        logger.info("Loading image {} using NiBabel...".format(image))
        image_gradient = load(image)

        # get and prepare image data
        image_gradient_data = scipy.squeeze(image_gradient.get_data())

        logger.debug(
            "Intensity range of gradient image is ({}, {})".format(image_gradient_data.min(), image_gradient_data.max())
        )

        # build output file name and check for its existence, if not in sections mode
        if "sections" != args.type:
            # build output file name
            image_viscous_name = (
                args.folder
                + "/"
                + image.split("/")[-1][:-4]
                + "_viscous_{}_sec_{}_ds_{}".format(args.type, args.sections, args.dsize)
            )
            image_viscous_name += image.split("/")[-1][-4:]

            # check if output file exists
            if not args.force:
                if os.path.exists(image_viscous_name):
                    logger.warning("The output file {} already exists. Skipping this image.".format(image_viscous_name))
                    continue

        # execute plain closing i.e. a closing operation over the whole image, if in plain mode
        if "plain" == args.type:
            # prepare the disc structure (a ball with a diameter of (args.dsize * 2 + 1))
            disc = iterate_structure(generate_binary_structure(3, 1), args.dsize).astype(scipy.int_)

            # apply closing
            logger.info("Applying the morphology over whole image at once...")
            image_viscous_data = grey_closing(image_gradient_data, footprint=disc)

            # save resulting gradient image
            logger.info("Saving resulting gradient image as {}...".format(image_viscous_name))
            image_viscous = image_like(image_viscous_data, image_gradient)
            save(image_viscous, image_viscous_name)

            # skip other morphologies
            continue

        # create gradient images flattened histogram
        bins = hist_flatened(image_gradient_data, args.sections)
        logger.debug("{} bins created".format(len(bins) - 1))

        # check if the number of bins is consistent
        if args.sections != len(bins) - 1:
            raise Exception(
                "Inconsistency between the number of requested and created bins ({} to {})".format(
                    args.sections, len(bins) - 1
                )
            )

        # prepare result file
        image_viscous_data = image_gradient_data

        # transform the gradient image's topography (note: the content of one bin is bins[slice - 1] <= content < bins[slice])
        logger.info("Applying the viscous morphological operations {} times...".format(args.sections))
        for slice in range(1, args.sections + 1):

            # build output file name and check for its existence, if in sections mode
            if "sections" == args.type:
                # build output file name
                image_viscous_name = (
                    args.folder
                    + "/"
                    + image.split("/")[-1][:-4]
                    + "_viscous_{}_sec_{}_ds_{}_sl_{}".format(args.type, args.sections, args.dsize, slice)
                )
                image_viscous_name += image.split("/")[-1][-4:]

                # check if output file exists
                if not args.force:
                    if os.path.exists(image_viscous_name):
                        logger.warning(
                            "The output file {} already exists. Skipping this slice.".format(image_viscous_name)
                        )
                        continue

                # prepare result file
                image_viscous_data = image_gradient_data

            # create masks to extract the affected voxels (i.e. the current slice of the topographic image representation)
            mask_greater = image_gradient_data >= bins[slice]  # all voxels which are over the current slice
            mask_lower = image_gradient_data < bins[slice - 1]  # all voxels which are under the current slice
            mask_equal = scipy.invert(mask_greater | mask_lower)  # all voxels in the current slice
            if "mercury" == args.type:
                dsize = int((args.dsize / float(args.sections)) * (slice))
                disc = iterate_structure(generate_binary_structure(3, 1), dsize).astype(scipy.int_)
                mask_equal_or_greater = mask_equal | mask_greater
                image_threshold_data = image_gradient_data * mask_equal_or_greater
            elif "oil" == args.type:
                dsize = int((args.dsize / float(args.sections)) * (args.sections - slice + 1))
                disc = iterate_structure(generate_binary_structure(3, 1), dsize).astype(scipy.int_)
                image_threshold_data = image_gradient_data.copy()
                mask_equal_or_lower = mask_equal | mask_lower
                # set all voxels over the current slice to the max of all voxels in the current slice
                image_threshold_data[mask_greater] = image_threshold_data[mask_equal_or_lower].max()
            elif "sections" == args.type:
                dsize = args.dsize
                disc = iterate_structure(generate_binary_structure(3, 1), args.dsize).astype(scipy.int_)
                image_threshold_data = image_gradient_data.copy()
                # set all voxels under the current slice to zero
                image_threshold_data[mask_lower] = 0
                # set all voxels over the current slice to the max of all voxels in the current slice
                image_threshold_data[mask_greater] = image_threshold_data[mask_equal].max()

            logger.debug(
                "{} of {} voxels belong to this level.".format(
                    len(mask_equal.nonzero()[0]), scipy.prod(image_threshold_data.shape)
                )
            )

            # apply the closing with the appropriate disc size
            logger.debug(
                "Applying a disk of {} to all values >= {} and < {}...".format(dsize, bins[slice - 1], bins[slice])
            )
            image_closed_data = grey_closing(image_threshold_data, footprint=disc)

            # add result of this slice to the general results
            image_viscous_data = scipy.maximum(image_viscous_data, image_closed_data)

            # save created output file, if in sections mode
            if "sections" == args.type:
                # save resulting gradient image
                logger.info("Saving resulting gradient image as {}...".format(image_viscous_name))
                image_viscous = image_like(image_viscous_data, image_gradient)
                save(image_viscous, image_viscous_name)

        # save created output file, if not in sections mode
        if "sections" != args.type:
            # save resulting gradient image
            logger.info("Saving resulting gradient image as {}...".format(image_viscous_name))
            image_viscous = image_like(image_viscous_data, image_gradient)
            save(image_viscous, image_viscous_name)

    logger.info("Successfully terminated.")
Example #10
def main():
    # parse cmd arguments
    parser = getParser()
    parser.parse_args()
    args = getArguments(parser)

    # prepare logger
    logger = Logger.getInstance()
    if args.debug: logger.setLevel(logging.DEBUG)
    elif args.verbose: logger.setLevel(logging.INFO)

    logger.info(
        'Executing weighted viscous morphology with {} ({} bins).'.format(
            ','.join(map(str, args.func)), len(args.func)))

    # iterate over input images
    for image in args.images:

        # build output file name
        image_viscous_name = args.folder + '/' + image.split(
            '/')[-1][:-4] + '_wviscous_' + '_'.join(map(str, args.func))
        image_viscous_name += image.split('/')[-1][-4:]

        # check if output file exists
        if not args.force:
            if os.path.exists(image_viscous_name):
                logger.warning(
                    'The output file {} already exists. Skipping this image.'.
                    format(image_viscous_name))
                continue

        # get and prepare image data
        logger.info('Loading image {} using NiBabel...'.format(image))
        image_gradient = load(image)

        # get and prepare image data
        image_gradient_data = scipy.squeeze(image_gradient.get_data())

        # prepare result image and extract required attributes of input image
        if args.debug:
            logger.debug(
                'Intensity range of gradient image is ({}, {})'.format(
                    image_gradient_data.min(), image_gradient_data.max()))

        # create gradient images flattened histogram
        bins = hist_flatened(image_gradient_data, len(args.func))
        logger.debug('{} bins created'.format(len(bins) - 1))

        # check if the number of bins is consistent
        if len(args.func) != len(bins) - 1:
            raise Exception(
                'Inconsistency between the number of requested and created bins ({} to {})'
                .format(len(args.func), len(bins) - 1))

        # prepare result file
        image_viscous_data = image_gradient_data

        # transform the gradient images topography
        logger.info(
            'Applying the viscous morphological operations on {} sections...'.
            format(len(args.func)))
        for sl in range(1, len(args.func) + 1):

            # create sphere to use in this step
            if 0 >= args.func[sl - 1]:
                continue  # a sphere of size 0 or below leads to no changes and is not executed
            sphere = iterate_structure(generate_binary_structure(3, 1),
                                       args.func[sl - 1]).astype(scipy.int_)

            # create masks to extract the affected voxels (i.e. the current slice of the topographic image representation)
            mask_greater = image_gradient_data >= bins[sl]  # all voxels which are over the current slice
            mask_lower = image_gradient_data < bins[sl - 1]  # all voxels which are under the current slice
            mask_equal = scipy.invert(mask_greater | mask_lower)  # all voxels in the current slice

            # extract slice
            image_threshold_data = image_gradient_data.copy()
            # set all voxels under the current slice to zero
            image_threshold_data[mask_lower] = 0
            # set all voxels over the current slice to the max of all voxels in the current slice
            image_threshold_data[mask_greater] = image_threshold_data[mask_equal].max()

            logger.debug('{} of {} voxels belong to this level.'.format(
                len(mask_equal.nonzero()[0]),
                scipy.prod(image_threshold_data.shape)))

            # apply the closing with the appropriate sphere
            logger.debug(
                'Applying a disk of {} to all values >= {} and < {} (sec {})...'
                .format(args.func[sl - 1], bins[sl - 1], bins[sl], sl))
            image_closed_data = grey_closing(image_threshold_data,
                                             footprint=sphere)

            # add result of this slice to the general results
            image_viscous_data = scipy.maximum(image_viscous_data,
                                               image_closed_data)

        # save resulting gradient image
        logger.info('Saving resulting gradient image as {}...'.format(
            image_viscous_name))
        image_viscous = image_like(image_viscous_data, image_gradient)
        save(image_viscous, image_viscous_name)

    logger.info('Successfully terminated.')