def run_compute_feature_images(yamlfile):
    """Drive the compute_feature_images workflow.

    Loads the workflow parameters from *yamlfile*, opens a log file in the
    configured result folder, runs compute_feature_images() and stops the
    logger.  Any failure is routed to the IPL error handler.

    :param yamlfile: path of the yaml parameter file
    """
    ipl = IPL(yaml=yamlfile)

    # Indent all subsequent log messages by one level.
    ipl.set_indent(1)

    params = rdict(data=ipl.get_params())
    ipl.startlogger(filename=params['resultfolder'] + 'compute_feature_images.log',
                    type='w', name='ComputeFeatureImages')

    try:

        compute_feature_images(ipl)

        ipl.logging('')
        ipl.stoplogger()

    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # still propagate; errout handles the logging of the failure.
        ipl.errout('Unexpected error')
# Esempio n. 2
# 0
def run_paths_of_merges(yamlfile, logging=True):
    """Drive the paths_of_merges workflow.

    :param yamlfile: path of the yaml parameter file
    :param logging: when True, log to 'paths_of_merges.log' in the result
        folder; otherwise start the default logger.  (The parameter name
        shadows the stdlib ``logging`` module — kept for interface
        compatibility.)
    """
    ipl = IPL(yaml=yamlfile)

    # Indent all subsequent log messages by one level.
    ipl.set_indent(1)

    params = rdict(data=ipl.get_params())
    if logging:
        ipl.startlogger(filename=params['resultfolder'] +
                        'paths_of_merges.log',
                        type='w',
                        name='PathsOfMerges')
    else:
        ipl.startlogger()

    try:

        paths_of_merges(ipl, params['debug'])

        ipl.logging('')
        ipl.stoplogger()

    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # still propagate.
        ipl.errout('Unexpected error')
def run_find_border_contacts(yamlfile, logging=True):
    """Drive the find_border_contacts workflow.

    :param yamlfile: path of the yaml parameter file
    :param logging: when True, log to 'find_border_contacts.log' in the
        result folder; otherwise start the default logger.  (The name
        shadows the stdlib ``logging`` module — kept for interface
        compatibility.)
    """
    ipl = IPL(yaml=yamlfile)

    # Indent all subsequent log messages by one level.
    ipl.set_indent(1)

    params = rdict(data=ipl.get_params())
    if logging:
        ipl.startlogger(filename=params['resultfolder'] + 'find_border_contacts.log',
                        type='w', name='FindBorderContacts')
    else:
        ipl.startlogger()

    try:

        find_border_contacts(ipl)

        ipl.logging('')
        ipl.stoplogger()

    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # still propagate.
        ipl.errout('Unexpected error')
# Esempio n. 4
# 0
def run_random_forest(yamlfile,
                      logging=True,
                      make_only_feature_array=False,
                      debug=False,
                      write=True):
    """Drive the random forest classification workflow.

    :param yamlfile: path of the yaml parameter file
    :param logging: when True, log to 'random_forest.log' in the result
        folder; otherwise start the default logger
    :param make_only_feature_array: only build the feature arrays and skip
        the classification itself
    :param debug: forwarded to random_forest()
    :param write: when True, write result and evaluation to the configured
        results file
    """
    ipl = IPL(yaml=yamlfile)

    # Indent all subsequent log messages by one level.
    ipl.set_indent(1)

    params = rdict(data=ipl.get_params())
    if logging:
        ipl.startlogger(filename=params['resultfolder'] + 'random_forest.log',
                        type='w',
                        name='RandomForest')
    else:
        ipl.startlogger()

    try:

        if make_only_feature_array:
            make_feature_arrays(ipl)
        else:
            result = IPL()
            result['result'], result['evaluation'] = random_forest(ipl,
                                                                   debug=debug)

            if write:
                result.write(filepath=params['resultfolder'] +
                             params['resultsfile'])

        ipl.logging('')
        ipl.stoplogger()

    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # still propagate.
        ipl.errout('Unexpected error')
def run_remove_small_objects(yamlfile):
    """Drive the remove_small_objects workflow.

    The labels image is located via the yaml spec
    (datafolder/labelsfile, source key 'labelsname'); data is loaded
    lazily (``nodata=True``).

    :param yamlfile: path of the yaml parameter file
    """
    ipl = IPL(
        yaml=yamlfile,
        yamlspec={'path': 'datafolder', 'filename': 'labelsfile', 'skeys': 'labelsname'},
        recursive_search=True,
        nodata=True
    )

    # Set indentation of the logging
    ipl.set_indent(1)

    params = ipl.get_params()
    ipl.startlogger(filename=params['resultfolder'] + 'remove_small_objects.log',
                    type='w', name='RemoveSmallObjects')

    try:

        ipl.logging('\nipl datastructure: \n\n{}', ipl.datastructure2string(maxdepth=3))

        remove_small_objects(ipl)

        ipl.logging('\nFinal datastructure: \n\n{}', ipl.datastructure2string(maxdepth=3))

        ipl.logging('')
        ipl.stoplogger()

    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # still propagate.
        ipl.errout('Unexpected error')
def run_random_forest(yamlfile, logging=True, make_only_feature_array=False, debug=False, write=True):
    """Drive the random forest classification workflow.

    :param yamlfile: path of the yaml parameter file
    :param logging: when True, log to 'random_forest.log' in the result
        folder; otherwise start the default logger
    :param make_only_feature_array: only build the feature arrays and skip
        the classification itself
    :param debug: forwarded to random_forest()
    :param write: when True, write result and evaluation to the configured
        results file
    """
    ipl = IPL(yaml=yamlfile)

    # Indent all subsequent log messages by one level.
    ipl.set_indent(1)

    params = rdict(data=ipl.get_params())
    if logging:
        ipl.startlogger(filename=params['resultfolder'] + 'random_forest.log', type='w', name='RandomForest')
    else:
        ipl.startlogger()

    try:

        if make_only_feature_array:
            make_feature_arrays(ipl)
        else:
            result = IPL()
            result['result'], result['evaluation'] = random_forest(ipl, debug=debug)

            if write:
                result.write(filepath=params['resultfolder'] + params['resultsfile'])

        ipl.logging('')
        ipl.stoplogger()

    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # still propagate.
        ipl.errout('Unexpected error')
# Esempio n. 7
# 0
def run_remove_small_objects(yamlfile):
    """Drive the remove_small_objects workflow.

    The labels image is located via the yaml spec
    (datafolder/labelsfile, source key 'labelsname'); data is loaded
    lazily (``nodata=True``).

    :param yamlfile: path of the yaml parameter file
    """
    ipl = IPL(yaml=yamlfile,
              yamlspec={
                  'path': 'datafolder',
                  'filename': 'labelsfile',
                  'skeys': 'labelsname'
              },
              recursive_search=True,
              nodata=True)

    # Set indentation of the logging
    ipl.set_indent(1)

    params = ipl.get_params()
    ipl.startlogger(filename=params['resultfolder'] +
                    'remove_small_objects.log',
                    type='w',
                    name='RemoveSmallObjects')

    try:

        ipl.logging('\nipl datastructure: \n\n{}',
                    ipl.datastructure2string(maxdepth=3))

        remove_small_objects(ipl)

        ipl.logging('\nFinal datastructure: \n\n{}',
                    ipl.datastructure2string(maxdepth=3))

        ipl.logging('')
        ipl.stoplogger()

    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # still propagate.
        ipl.errout('Unexpected error')
    #     tkeys='true',
    #     castkey=None
    # )
    # params = hfp.get_params()
    # hfp.logging('params = {}', params)
    # hfp.data_from_file(
    #     filepath=params['intermedfolder'] + params['pathsfalsefile'],
    #     tkeys='false',
    #     castkey=None
    # )
    hfp.startlogger(filename=params["intermedfolder"] + "features_of_paths.log", type="a")

    try:

        hfp.code2log(inspect.stack()[0][1])
        hfp.logging("")
        hfp.yaml2log()
        hfp.logging("")

        hfp.logging("\nhfp datastructure: \n\n{}", hfp.datastructure2string(maxdepth=1))

        # Done: Iterate over paths and accumulate features
        # Done: Implement data iterator

        # Done: Make path image (true)
        # Done: Make path image (false)
        # TODO: Get topological features
        # Done:     Topological feature: Length
        # TODO:     Topological feature: Statistics on curvature
        # TODO: Get data features on path (raw, probabilities, distance transform)
        # TODO: Get data features on end points (raw, probabilities, distance transform)
                       'filename': 'featurefile'
                   })
    params = features.get_params()
    thisparams = params['random_forest']
    features.startlogger(filename=params['resultfolder'] + 'random_forest.log',
                         type='w')

    try:

        # Copy the script file and the parameters to the scriptsfolder
        copy(inspect.stack()[0][1], params['scriptsfolder'])
        copy(yamlfile,
             params['scriptsfolder'] + 'random_forest.parameters.yml')
        # Write script and parameters to the logfile
        features.code2log(inspect.stack()[0][1])
        features.logging('')
        features.yaml2log()
        features.logging('')

        features.logging('\nfeatures datastructure: \n---\n{}',
                         features.datastructure2string(maxdepth=2))

        result = random_forest_iteration(features)

        result.write(filepath=params['intermedfolder'] +
                     params['randforestfile'])

        features.logging('\nFinal dictionary structure:\n---\n{}',
                         features.datastructure2string(maxdepth=2))
        features.logging('')
        features.stoplogger()
# Esempio n. 10
# 0
from hdf5_image_processing import Hdf5ImageProcessingLib as IPL
import os
import numpy as np

__author__ = 'jhennies'

if __name__ == '__main__':

    # The parameter file is expected to live next to this script.
    yamlfile = os.path.dirname(os.path.abspath(__file__)) + '/parameters.yml'
    ipl = IPL(yaml=yamlfile)

    ipl.logging('Parameters: {}', ipl.get_params())
    params = ipl.get_params()

    # Load the 'raw' dataset from the CREMI sample A volume into key 'raw'.
    ipl.data_from_file(filepath=params['datafolder'] +
                       'cremi.splA.raw_neurons.crop.h5',
                       skeys='raw',
                       tkeys='raw')

    # Crop to the sub-volume [10:110, 200:712, 200:712]
    # (assumes (z, y, x) axis order -- TODO confirm against the data).
    ipl.crop_bounding_rect(np.s_[10:110, 200:712, 200:712], keys='raw')

    # Write the cropped volume; the filename encodes the crop bounds.
    ipl.write(filepath=params['datafolder'] +
              'cremi.splA.raw_neurons.crop.crop_10-200-200_110-712-712.h5')
                       skeys=params['locmaxnames'][0],
                       recursive_search=True,
                       integrate=True)
    ipl.data_from_file(params['intermedfolder'] + params['locmaxborderfile'],
                       skeys=(params['locmaxbordernames'][0], params['locmaxbordernames'][2]),
                       recursive_search=True,
                       integrate=True)

    try:

        # Copy the script file and the parameters to the scriptsfolder
        copy(inspect.stack()[0][1], params['scriptsfolder'])
        copy(yamlfile, params['scriptsfolder'] + 'paths_within_labels.parameters.yml')
        # Write script and parameters to the logfile
        ipl.code2log(inspect.stack()[0][1])
        ipl.logging('')
        ipl.yaml2log()
        ipl.logging('')

        ipl.logging('\nipl datastructure: \n\n{}', ipl.datastructure2string(maxdepth=3))

        paths = paths_within_labels_image_iteration(ipl)

        paths.write(filepath=params['intermedfolder'] + params['pathstruefile'])

        ipl.logging('\nFinal dictionary structure:\n---\n{}', ipl.datastructure2string())
        ipl.logging('')
        ipl.stoplogger()

    except:
# Esempio n. 12
# 0
              castkey=None)
    params = hfp.get_params()
    thisparams = params['localmax_on_disttransf']
    hfp.startlogger(filename=params['resultfolder'] +
                    'localmax_on_disttransf.log',
                    type='w')

    try:

        # Copy the script file and the parameters to the scriptsfolder
        copy(inspect.stack()[0][1], params['scriptsfolder'])
        copy(yamlfile,
             params['scriptsfolder'] + 'localmax_on_disttransf.parameters.yml')
        # Write script and parameters to the logfile
        hfp.code2log(inspect.stack()[0][1])
        hfp.logging('')
        hfp.yaml2log()
        hfp.logging('')

        hfp.logging('\nhfp datastructure: \n\n{}',
                    hfp.datastructure2string(maxdepth=1))

        localmax_on_disttransf(hfp, ('disttransf', 'disttransfm'))

        hfp.write(filepath=params['intermedfolder'] + params['locmaxfile'])

        hfp.logging('\nFinal dictionary structure:\n---\n{}',
                    hfp.datastructure2string())
        hfp.logging('')
        hfp.stoplogger()
# Esempio n. 13
# 0
    hfp.data_from_file(filepath=params['intermedfolder'] +
                       params['pathsfalsefile'],
                       tkeys='false',
                       castkey=None)

    hfp.data_from_file(filepath=params['intermedfolder'] +
                       params['locmaxfile'],
                       skeys=('disttransf', 'disttransfm'),
                       tkeys=('disttransf', 'disttransfm'))

    hfp.startlogger()

    try:

        hfp.logging('hfp datastructure:\n---\n{}---',
                    hfp.datastructure2string(maxdepth=2))

        hfp.anytask(lib.getvaluesfromcoords,
                    reciprocal=True,
                    keys='disttransfm',
                    indict=hfp['false', '6155_9552'],
                    tkeys='result_false')

        hfp.logging('hfp datastructure:\n---\n{}---',
                    hfp.datastructure2string(maxdepth=2))

        # y = []
        # maxlen = 0
        # for d, k, v, kl in hfp['result_false'].data_iterator():
        #     y.append(v)
        #     x = range(0, len(v))
# Esempio n. 14
# 0
from hdf5_image_processing import Hdf5ImageProcessingLib as IPL
import os
import numpy as np


__author__ = 'jhennies'


if __name__ == '__main__':

    # The parameter file is expected to live next to this script.
    yamlfile = os.path.dirname(os.path.abspath(__file__)) + '/parameters.yml'
    ipl = IPL(
        yaml=yamlfile
    )

    ipl.logging('Parameters: {}', ipl.get_params())
    params = ipl.get_params()

    # Load the 'raw' dataset from the CREMI sample A volume into key 'raw'.
    ipl.data_from_file(filepath=params['datafolder'] + 'cremi.splA.raw_neurons.crop.h5',
                       skeys='raw',
                       tkeys='raw')

    # Crop to the sub-volume [10:110, 200:712, 200:712]
    # (assumes (z, y, x) axis order -- TODO confirm against the data).
    ipl.crop_bounding_rect(np.s_[10:110, 200:712, 200:712], keys='raw')

    # Write the cropped volume; the filename encodes the crop bounds.
    ipl.write(filepath=params['datafolder'] + 'cremi.splA.raw_neurons.crop.crop_10-200-200_110-712-712.h5')
# Esempio n. 15
# 0
        filepath=params['intermedfolder'] + params['locmaxborderfile'],
        skeys=('disttransf', 'disttransfm'),
        tkeys=('disttransf', 'disttransfm')
    )

    hfp.data_from_file(
        filepath=params['datafolder'] + params['rawdatafile'],
        skeys=params['rawdataname'],
        tkeys='raw'
    )

    hfp.startlogger()

    try:

        hfp.logging('hfp datastructure:\n---\n{}---', hfp.datastructure2string(maxdepth=2))

        # hfp.anytask(lib.getvaluesfromcoords,
        #     reciprocal=True,
        #     keys='disttransfm',
        #     indict=hfp['false', '6155_9552'],
        #     tkeys='result_false'
        # )

        # hfp.anytask(lib.getvaluesfromcoords,
        #             reciprocal=True,
        #             keys='disttransf',
        #             indict=hfp['true', 'border', '27'],
        #             tkeys='result_true')

        # hfp.logging('hfp datastructure:\n---\n{}---', hfp.datastructure2string(maxdepth=2))
# Esempio n. 16
# 0
        castkey=None
    )
    params = hfp.get_params()
    thisparams = params['find_border_contacts']
    hfp.data_from_file(params['intermedfolder'] + params['largeobjmfile'],
                       skeys=params['largeobjmnames'][0], tkeys='largeobjm')
    hfp.startlogger(filename=params['resultfolder'] + 'find_orphans.log', type='w')

    try:

        # Copy the script file and the parameters to the scriptsfolder
        copy(inspect.stack()[0][1], params['scriptsfolder'])
        copy(yamlfile, params['scriptsfolder'] + 'find_orphans.parameters.yml')
        # Write script and parameters to the logfile
        hfp.code2log(inspect.stack()[0][1])
        hfp.logging('')
        hfp.yaml2log()
        hfp.logging('')

        hfp.logging('\nhfp datastructure: \n---\n{}', hfp.datastructure2string(maxdepth=1))

        if thisparams['return_bordercontact_images']:
            bordercontacts = find_border_contacts(hfp, ('largeobj', 'largeobjm'))
        else:
            find_border_contacts(hfp, ('largeobj', 'largeobjm'))

        hfp.write(filepath=params['intermedfolder'] + params['locmaxborderfile'])
        if thisparams['return_bordercontact_images']:
            bordercontacts.write(filepath=params['intermedfolder'] + params['bordercontactsfile'])

        hfp.logging('\nFinal hfp dictionary structure:\n---\n{}', hfp.datastructure2string())
        yaml=yamlfile,
        yamlspec={'path': 'datafolder', 'filename': 'labelsfile'},
        tkeys='labels',
        castkey=None
    )
    params = hfp.get_params()
    hfp.startlogger(filename=params['resultfolder']+'remove_small_objects.log', type='a')

    try:

        # Create folder for scripts
        if not os.path.exists(params['scriptsfolder']):
            os.makedirs(params['scriptsfolder'])
        else:
            if params['overwriteresults']:
                hfp.logging('remove_small_objects: Warning: Scriptsfolder already exists and content will be overwritten...\n')
            else:
                raise IOError('remove_small_objects: Error: Scriptsfolder already exists!')

        # Create folder for intermediate results
        if not os.path.exists(params['intermedfolder']):
            os.makedirs(params['intermedfolder'])
        else:
            if params['overwriteresults']:
                hfp.logging('remove_small_objects: Warning: Intermedfolder already exists and content will be overwritten...\n')
            else:
                raise IOError('remove_small_objects: Error: Intermedfolder already exists!')

        # Copy the script file and the parameters to the scriptsfolder
        copy(inspect.stack()[0][1], params['scriptsfolder'])
        copy(yamlfile, params['scriptsfolder'] + 'remove_small_objects.parameters.yml')
        recursive_search=True        
    )
    params = ipl.get_params()
    thisparams = params['find_border_contacts']
    ipl.data_from_file(params['intermedfolder'] + params['largeobjmfile'],
                       skeys=params['largeobjmnames'][0], recursive_search=True, integrate=True)
    ipl.startlogger(filename=params['resultfolder'] + 'find_border_contacts.log', type='w')

    try:

        # Copy the script file and the parameters to the scriptsfolder
        copy(inspect.stack()[0][1], params['scriptsfolder'])
        copy(yamlfile, params['scriptsfolder'] + 'find_border_contacts.parameters.yml')
        # Write script and parameters to the logfile
        ipl.code2log(inspect.stack()[0][1])
        ipl.logging('')
        ipl.yaml2log()
        ipl.logging('')

        ipl.logging('\nipl datastructure: \n---\n{}', ipl.datastructure2string(maxdepth=3))

        bordercontacts = find_border_contacts_image_iteration(ipl)

        ipl.write(filepath=params['intermedfolder'] + params['locmaxborderfile'])
        if thisparams['return_bordercontact_images']:
            bordercontacts.write(filepath=params['intermedfolder'] + params['bordercontactsfile'])

        ipl.logging('\nFinal ipl dictionary structure:\n---\n{}', ipl.datastructure2string())
        if thisparams['return_bordercontact_images']:
            ipl.logging('\nFinal bordercontacts dictionary structure:\n---\n{}', bordercontacts.datastructure2string())
# Esempio n. 19
# 0
from hdf5_image_processing import Hdf5ImageProcessingLib as IPL
import processing_lib as lib

# Sample A probs
# Load the sample A segmentation volume (hard-coded local mount --
# verify the path before running on another machine).
probs_a = IPL(
    filepath=
    '/mnt/localdata01/jhennies/neuraldata/cremi_2016/sample_A_train_betas/sample_A_train_mcseg_beta_0.5.h5'
)

probs_a.logging('Probs A datastructure\n---\n{}',
                probs_a.datastructure2string())

# Swap axes 0 and 2 of every contained image.
probs_a.anytask(lib.swapaxes, 0, 2)

probs_a.write(
    '/mnt/localdata01/jhennies/neuraldata/cremi_2016/sample_A_train_betas/cremi.splA.train.seg_beta_0.5.crop.h5'
)

# Split each volume into 2 halves along the z axis (axis 0); the two
# halves are stored under result keys '0' and '1'.
reskeys = ('0', '1')
split_probs_a = IPL()
split_probs_a['z'] = probs_a.anytask(lib.split,
                                     2,
                                     axis=0,
                                     result_keys=reskeys,
                                     return_only=True,
                                     rtrntype=IPL)
split_probs_a['y'] = probs_a.anytask(lib.split,
                                     2,
                                     axis=1,
                                     result_keys=reskeys,
                                     return_only=True,
# Esempio n. 20
# 0
    infiles = [
        '/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splA.train.probs.crop.h5',
        '/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splA.train.raw_neurons.crop.h5'
    ]
    outfiles = [
        '/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splA.train.probs.crop.crop_x10_110_y200_712_z200_712.split_xyz.h5',
        '/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splA.train.raw_neurons.crop.crop_x10_110_y200_712_z200_712.split_xyz.h5'
    ]

    for i in xrange(0, len(infiles)):

        ipl = IPL(
            filepath=infiles[i]
        )
        ipl.logging('Datastructure\n---\n{}', ipl.datastructure2string())

        ipl.crop_bounding_rect(bounds=np.s_[10:110, 200:712, 200:712])

        def shape(image):
            return image.shape
        print ipl.datastructure2string(function=shape)

        ipl_split = split_in_xyz(ipl)

        ipl_split.write(filepath=outfiles[i])

    # # Sample A
    # sample_a = IPL(
    #     filepath='/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splA.raw_neurons.crop.h5'
    # )
                       skeys=params['largeobjmnames'][0],
                       recursive_search=True,
                       integrate=True)
    ipl.startlogger(filename=params['resultfolder'] +
                    'find_border_contacts.log',
                    type='w')

    try:

        # Copy the script file and the parameters to the scriptsfolder
        copy(inspect.stack()[0][1], params['scriptsfolder'])
        copy(yamlfile,
             params['scriptsfolder'] + 'find_border_contacts.parameters.yml')
        # Write script and parameters to the logfile
        ipl.code2log(inspect.stack()[0][1])
        ipl.logging('')
        ipl.yaml2log()
        ipl.logging('')

        ipl.logging('\nipl datastructure: \n---\n{}',
                    ipl.datastructure2string(maxdepth=3))

        bordercontacts = find_border_contacts_image_iteration(ipl)

        ipl.write(filepath=params['intermedfolder'] +
                  params['locmaxborderfile'])
        if thisparams['return_bordercontact_images']:
            bordercontacts.write(filepath=params['intermedfolder'] +
                                 params['bordercontactsfile'])

        ipl.logging('\nFinal ipl dictionary structure:\n---\n{}',
              recursive_search=True)
    params = ipl.get_params()
    thisparams = params['localmax_on_disttransf']
    ipl.startlogger(filename=params['resultfolder'] +
                    'localmax_on_disttransf.log',
                    type='w')

    try:

        # Copy the script file and the parameters to the scriptsfolder
        copy(inspect.stack()[0][1], params['scriptsfolder'])
        copy(yamlfile,
             params['scriptsfolder'] + 'localmax_on_disttransf.parameters.yml')
        # Write script and parameters to the logfile
        ipl.code2log(inspect.stack()[0][1])
        ipl.logging('')
        ipl.yaml2log()
        ipl.logging('')

        ipl.logging('\nipl datastructure: \n\n{}',
                    ipl.datastructure2string(maxdepth=3))

        localmax_on_disttransf_image_iteration(ipl)

        ipl.write(filepath=params['intermedfolder'] + params['locmaxfile'])

        ipl.logging('\nFinal dictionary structure:\n---\n{}',
                    ipl.datastructure2string())
        ipl.logging('')
        ipl.stoplogger()
# Esempio n. 23
# 0

if __name__ == "__main__":

    # Earlier run for CREMI sample B, kept commented out for reference.
    # cremi = IPL(filepath='/mnt/localdata02/jhennies/neuraldata/cremi_2016/sample_B_20160501.hdf')
    #
    # cremi.logging('Datastructure:\n---\n{}', cremi.datastructure2string())
    #
    # images = IPL(data={
    #     'raw': cremi['volumes', 'raw'],
    #     'neuron_ids': cremi['volumes', 'labels', 'neuron_ids']
    # })
    #
    # images.logging('Datastructure:\n---\n{}', images.datastructure2string())
    #
    # images.write('/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splB.raw_neurons.crop.h5')



    # Load CREMI sample C (hard-coded local mount -- verify the path
    # before running on another machine).
    cremi = IPL(filepath='/mnt/localdata02/jhennies/neuraldata/cremi_2016/sample_C_20160501.hdf')

    cremi.logging('Datastructure:\n---\n{}', cremi.datastructure2string())

    # Extract the raw volume and the neuron-id labels into a new container.
    images = IPL(data={
        'raw': cremi['volumes', 'raw'],
        'neuron_ids': cremi['volumes', 'labels', 'neuron_ids']
    })

    images.logging('Datastructure:\n---\n{}', images.datastructure2string())

    images.write('/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splC.raw_neurons.crop.h5')
# Esempio n. 24
# 0
    features = IPL(
        yaml=yamlfile,
        yamlspec={'path': 'intermedfolder', 'filename': 'featurefile'}
    )
    params = features.get_params()
    thisparams = params['random_forest']
    features.startlogger(filename=params['resultfolder'] + 'random_forest.log', type='w')

    try:

        # Copy the script file and the parameters to the scriptsfolder
        copy(inspect.stack()[0][1], params['scriptsfolder'])
        copy(yamlfile, params['scriptsfolder'] + 'random_forest.parameters.yml')
        # Write script and parameters to the logfile
        features.code2log(inspect.stack()[0][1])
        features.logging('')
        features.yaml2log()
        features.logging('')

        features.logging('\nfeatures datastructure: \n---\n{}', features.datastructure2string(maxdepth=2))

        result = random_forest_iteration(features)

        result.write(filepath=params['intermedfolder'] + params['randforestfile'])

        features.logging('\nFinal dictionary structure:\n---\n{}', features.datastructure2string(maxdepth=2))
        features.logging('')
        features.stoplogger()

    except:
        raise
    ipl = IPL(
        yaml=yamlfile,
        yamlspec={'path': 'datafolder', 'filename': 'labelsfile'}
    )
    params = ipl.get_params()
    ipl.startlogger(filename=params['resultfolder']+'remove_small_objects.log', type='a')

    try:

        # Create folder for scripts
        if not os.path.exists(params['scriptsfolder']):
            os.makedirs(params['scriptsfolder'])
        else:
            if params['overwriteresults']:
                ipl.logging('remove_small_objects: Warning: Scriptsfolder already exists and content will be overwritten...\n')
            else:
                raise IOError('remove_small_objects: Error: Scriptsfolder already exists!')

        # Create folder for intermediate results
        if not os.path.exists(params['intermedfolder']):
            os.makedirs(params['intermedfolder'])
        else:
            if params['overwriteresults']:
                ipl.logging('remove_small_objects: Warning: Intermedfolder already exists and content will be overwritten...\n')
            else:
                raise IOError('remove_small_objects: Error: Intermedfolder already exists!')

        # Copy the script file and the parameters to the scriptsfolder
        copy(inspect.stack()[0][1], params['scriptsfolder'])
        copy(yamlfile, params['scriptsfolder'] + 'remove_small_objects.parameters.yml')
                  'filename': 'labelsfile'
              })
    params = ipl.get_params()
    ipl.startlogger(filename=params['resultfolder'] +
                    'remove_small_objects.log',
                    type='a')

    try:

        # Create folder for scripts
        if not os.path.exists(params['scriptsfolder']):
            os.makedirs(params['scriptsfolder'])
        else:
            if params['overwriteresults']:
                ipl.logging(
                    'remove_small_objects: Warning: Scriptsfolder already exists and content will be overwritten...\n'
                )
            else:
                raise IOError(
                    'remove_small_objects: Error: Scriptsfolder already exists!'
                )

        # Create folder for intermediate results
        if not os.path.exists(params['intermedfolder']):
            os.makedirs(params['intermedfolder'])
        else:
            if params['overwriteresults']:
                ipl.logging(
                    'remove_small_objects: Warning: Intermedfolder already exists and content will be overwritten...\n'
                )
            else:
# Esempio n. 27
# 0
    # split_sample_a = IPL()
    # split_sample_a['z'] = sample_a.anytask(lib.split, 2, axis=0, result_keys=reskeys, return_only=True)
    # split_sample_a['y'] = sample_a.anytask(lib.split, 2, axis=1, result_keys=reskeys, return_only=True)
    # split_sample_a['x'] = sample_a.anytask(lib.split, 2, axis=2, result_keys=reskeys, return_only=True)
    #
    # split_sample_a = split_sample_a.switch_levels(1, 2)
    # sample_a.logging('Split sample A datastructure\n---\n{}', split_sample_a.datastructure2string())
    #
    # split_sample_a.write('/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splA.raw_neurons.crop.split_xyz.h5')

    # Sample B
    sample = IPL(
        filepath='/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splB.raw_neurons.crop.h5'
    )

    sample.logging('Sample B datastructure\n---\n{}', sample.datastructure2string())

    reskeys = ('0', '1')
    split_sample = IPL()
    split_sample['z'] = sample.anytask(lib.split, 2, axis=0, result_keys=reskeys, return_only=True)
    split_sample['y'] = sample.anytask(lib.split, 2, axis=1, result_keys=reskeys, return_only=True)
    split_sample['x'] = sample.anytask(lib.split, 2, axis=2, result_keys=reskeys, return_only=True)

    split_sample = split_sample.switch_levels(1, 2)
    sample.logging('Split sample B datastructure\n---\n{}', split_sample.datastructure2string())

    split_sample.write('/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splB.raw_neurons.crop.split_xyz.h5')

    # Sample C
    sample = IPL(
        filepath='/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splC.raw_neurons.crop.h5'
# Esempio n. 28
# 0
    # )
    # params = hfp.get_params()
    # hfp.logging('params = {}', params)
    # hfp.data_from_file(
    #     filepath=params['intermedfolder'] + params['pathsfalsefile'],
    #     tkeys='false',
    #     castkey=None
    # )
    hfp.startlogger(filename=params['intermedfolder'] +
                    'features_of_paths.log',
                    type='a')

    try:

        hfp.code2log(inspect.stack()[0][1])
        hfp.logging('')
        hfp.yaml2log()
        hfp.logging('')

        hfp.logging('\nhfp datastructure: \n\n{}',
                    hfp.datastructure2string(maxdepth=1))

        # Done: Iterate over paths and accumulate features
        # Done: Implement data iterator

        # Done: Make path image (true)
        # Done: Make path image (false)
        # TODO: Get topological features
        # Done:     Topological feature: Length
        # TODO:     Topological feature: Statistics on curvature
        # TODO: Get data features on path (raw, probabilities, distance transform)
    ipl.rename_layer('largeobjm', 'false')
    ipl.remove_layer('path')

    # ipl['false', 'border'] = IPL(data=ipl['largeobjm', 'border_locmax_m', 'path'])
    # ipl['false', 'locmax'] = IPL(data=ipl['largeobjm', 'locmaxm', 'path'])
    # ipl.pop('largeobjm')
    #
    # ipl.pop('pathsim')
    # ipl.pop('overlay')

    ipl.startlogger(filename=params['resultfolder']+'features_of_paths.log', type='w')

    try:

        ipl.code2log(inspect.stack()[0][1])
        ipl.logging('')
        ipl.yaml2log()
        ipl.logging('')

        ipl.logging('\nipl datastructure: \n\n{}', ipl.datastructure2string(maxdepth=4))

        # Done: Make path image (true)
        # Done: Make path image (false)
        # TODO: Get topological features
        # TODO:     Topological feature: Length (with respect to anisotropy!)
        # TODO:     Topological feature: Statistics on curvature
        # TODO: Get data features on path (raw, probabilities, distance transform)
        # TODO: Get data features on end points (raw, probabilities, distance transform)
        # Done: Cross-computation of two ImageProcessing instances

        # Done: This is total bullshit! I need to iterate over all paths and extract the region features individually!
        yaml=yamlfile,
        yamlspec={'path': 'datafolder', 'filename': 'largeobjfile', 'skeys': 'largeobjname'},
        tkeys='largeobj',
        castkey=None
    )
    params = hfp.get_params()
    hfp.startlogger(filename=params['resultfolder'] + 'merge_adjacent_objects.log', type='w')

    try:

        # Copy the script file and the parameters to the scriptsfolder
        copy(inspect.stack()[0][1], params['scriptsfolder'])
        copy(yamlfile, params['scriptsfolder'] + 'merge_adjacent_objects.parameters.yml')
        # Write script and parameters to the logfile
        hfp.code2log(inspect.stack()[0][1])
        hfp.logging('')
        hfp.yaml2log()
        hfp.logging('')

        hfp.logging('\nhfp datastructure: \n\n{}', hfp.datastructure2string(maxdepth=1))

        merge_adjacent_objects(hfp)

        hfp.write(filepath=params['intermedfolder'] + params['largeobjmfile'])

        hfp.logging('\nFinal dictionary structure:\n---\n{}', hfp.datastructure2string())
        hfp.logging('')
        hfp.stoplogger()

    except:
# Esempio n. 31 (extraction artifact — original snippet boundary)
# 0
if __name__ == "__main__":

    # Input volumes and the matching output paths for the cropped+split results.
    infiles = [
        '/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splA.train.probs.crop.h5',
        '/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splA.train.raw_neurons.crop.h5'
    ]
    outfiles = [
        '/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splA.train.probs.crop.crop_x10_110_y200_712_z200_712.split_xyz.h5',
        '/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splA.train.raw_neurons.crop.crop_x10_110_y200_712_z200_712.split_xyz.h5'
    ]

    def shape(image):
        # Lets datastructure2string report array shapes instead of content.
        return image.shape

    for infile, outfile in zip(infiles, outfiles):

        ipl = IPL(filepath=infile)
        ipl.logging('Datastructure\n---\n{}', ipl.datastructure2string())

        # Crop to the bounding box encoded in the output filenames
        # (slices 10:110, 200:712, 200:712 in the volume's axis order).
        ipl.crop_bounding_rect(bounds=np.s_[10:110, 200:712, 200:712])

        print(ipl.datastructure2string(function=shape))

        ipl_split = split_in_xyz(ipl)
        ipl_split.write(filepath=outfile)

    # # Sample A
    # sample_a = IPL(
    #     filepath='/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splA.raw_neurons.crop.h5'
# Esempio n. 32 (extraction artifact — original snippet boundary)
# 0
    # # Sample B (same extraction, performed earlier):
    # cremi = IPL(filepath='/mnt/localdata02/jhennies/neuraldata/cremi_2016/sample_B_20160501.hdf')
    # cremi.logging('Datastructure:\n---\n{}', cremi.datastructure2string())
    # images = IPL(data={
    #     'raw': cremi['volumes', 'raw'],
    #     'neuron_ids': cremi['volumes', 'labels', 'neuron_ids']
    # })
    # images.logging('Datastructure:\n---\n{}', images.datastructure2string())
    # images.write('/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splB.raw_neurons.crop.h5')

    # Sample C: pull the raw volume and the neuron-id labels out of the CREMI
    # hdf container and re-save them under flat keys.
    cremi = IPL(filepath='/mnt/localdata02/jhennies/neuraldata/cremi_2016/sample_C_20160501.hdf')

    cremi.logging('Datastructure:\n---\n{}', cremi.datastructure2string())

    selected = {
        'raw': cremi['volumes', 'raw'],
        'neuron_ids': cremi['volumes', 'labels', 'neuron_ids']
    }
    images = IPL(data=selected)

    images.logging('Datastructure:\n---\n{}', images.datastructure2string())

    images.write('/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splC.raw_neurons.crop.h5')
# Esempio n. 33 (extraction artifact — original snippet boundary)
# 0
    # split_sample_a['z'] = sample_a.anytask(lib.split, 2, axis=0, result_keys=reskeys, return_only=True)
    # split_sample_a['y'] = sample_a.anytask(lib.split, 2, axis=1, result_keys=reskeys, return_only=True)
    # split_sample_a['x'] = sample_a.anytask(lib.split, 2, axis=2, result_keys=reskeys, return_only=True)
    #
    # split_sample_a = split_sample_a.switch_levels(1, 2)
    # sample_a.logging('Split sample A datastructure\n---\n{}', split_sample_a.datastructure2string())
    #
    # split_sample_a.write('/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splA.raw_neurons.crop.split_xyz.h5')

    # Sample B: load the cropped raw/neuron volume and split it in half along
    # each spatial axis (this fragment is truncated before the x-axis split).
    sample = IPL(
        filepath=
        '/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splB.raw_neurons.crop.h5'
    )

    sample.logging('Sample B datastructure\n---\n{}',
                   sample.datastructure2string())

    # Each split produces two halves, stored under the keys '0' and '1'.
    reskeys = ('0', '1')
    split_sample = IPL()
    # Halve along z (axis 0); result placed under the 'z' key.
    split_sample['z'] = sample.anytask(lib.split,
                                       2,
                                       axis=0,
                                       result_keys=reskeys,
                                       return_only=True)
    # Halve along y (axis 1); result placed under the 'y' key.
    split_sample['y'] = sample.anytask(lib.split,
                                       2,
                                       axis=1,
                                       result_keys=reskeys,
                                       return_only=True)
    split_sample['x'] = sample.anytask(lib.split,
                                       2,
              tkeys='labels',
              castkey=None)
    params = hfp.get_params()
    hfp.startlogger(filename=params['resultfolder'] +
                    'remove_small_objects.log',
                    type='a')

    try:

        # Create folder for scripts
        if not os.path.exists(params['scriptsfolder']):
            os.makedirs(params['scriptsfolder'])
        else:
            if params['overwriteresults']:
                hfp.logging(
                    'remove_small_objects: Warning: Scriptsfolder already exists and content will be overwritten...\n'
                )
            else:
                raise IOError(
                    'remove_small_objects: Error: Scriptsfolder already exists!'
                )

        # Create folder for intermediate results
        if not os.path.exists(params['intermedfolder']):
            os.makedirs(params['intermedfolder'])
        else:
            if params['overwriteresults']:
                hfp.logging(
                    'remove_small_objects: Warning: Intermedfolder already exists and content will be overwritten...\n'
                )
            else:
# Esempio n. 35 (extraction artifact — original snippet boundary)
# 0
from hdf5_image_processing import Hdf5ImageProcessingLib as IPL
import processing_lib as lib

# Sample A probabilities: load, reorder the axes, then split into octants,
# mirroring the preprocessing applied to the raw/neuron volumes.
probs_a = IPL(
    filepath='/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splA.probs_cantorV1.h5'
)
probs_a.logging('Probs A datastructure\n---\n{}', probs_a.datastructure2string())

# Exchange the first and last axes in place (e.g. xyz -> zyx; verify with data).
probs_a.anytask(lib.swapaxes, 0, 2)
probs_a.write('/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splA.train.probs.crop.h5')

# Halve the volume along every spatial axis; halves are keyed '0' and '1'.
reskeys = ('0', '1')
split_probs_a = IPL()
for axis_index, axis_label in enumerate(('z', 'y', 'x')):
    split_probs_a[axis_label] = probs_a.anytask(
        lib.split, 2, axis=axis_index,
        result_keys=reskeys, return_only=True, rtrntype=IPL
    )

# Swap hierarchy levels 1 and 2 so the half-index becomes the outer key
# (presumably; confirm against switch_levels' implementation).
split_probs_a = split_probs_a.switch_levels(1, 2)
probs_a.logging('Split sample A datastructure\n---\n{}', split_probs_a.datastructure2string())

split_probs_a.write('/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splA.train.probs.crop.split_xyz.h5')