def run_remove_small_objects(yamlfile):

    ipl = IPL(
        yaml=yamlfile,
        yamlspec={'path': 'datafolder', 'filename': 'labelsfile', 'skeys': 'labelsname'},
        recursive_search=True,
        nodata=True
    )

    # Set indentation of the logging
    ipl.set_indent(1)

    params = ipl.get_params()
    ipl.startlogger(filename=params['resultfolder'] + 'remove_small_objects.log', type='w', name='RemoveSmallObjects')

    try:

        # # Copy the script file and the parameters to the scriptsfolder
        # copy(inspect.stack()[0][1], params['scriptsfolder'])
        # copy(yamlfile, params['scriptsfolder'] + 'remove_small_objects.parameters.yml')

        ipl.logging('\nipl datastructure: \n\n{}', ipl.datastructure2string(maxdepth=3))

        remove_small_objects(ipl)

        ipl.logging('\nFinal datastructure: \n\n{}', ipl.datastructure2string(maxdepth=3))

        # ipl.write(filepath=params['intermedfolder'] + params['largeobjfile'])

        ipl.logging('')
        ipl.stoplogger()

    except:

        ipl.errout('Unexpected error')
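
# Usage sketch (assumptions, not project documentation): the parameter YAML is
# expected to provide at least the keys referenced above -- 'datafolder',
# 'labelsfile', 'labelsname' and 'resultfolder'; the file name passed below is
# hypothetical.
#
#     datafolder: /path/to/input/
#     labelsfile: labels.h5
#     labelsname: labels
#     resultfolder: /path/to/results/

if __name__ == '__main__':
    run_remove_small_objects('remove_small_objects.parameters.yml')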
Example #2
def split_in_xyz(ipl):

    ipl.logging('Datastructure\n---\n{}', ipl.datastructure2string())

    reskeys = ('0', '1')
    ipl_split = IPL()
    ipl_split['z'] = ipl.anytask(lib.split,
                                 2,
                                 axis=0,
                                 result_keys=reskeys,
                                 return_only=True,
                                 rtrntype=IPL)
    ipl_split['y'] = ipl.anytask(lib.split,
                                 2,
                                 axis=1,
                                 result_keys=reskeys,
                                 return_only=True,
                                 rtrntype=IPL)
    ipl_split['x'] = ipl.anytask(lib.split,
                                 2,
                                 axis=2,
                                 result_keys=reskeys,
                                 return_only=True,
                                 rtrntype=IPL)

    ipl_split = ipl_split.switch_levels(1, 2)
    ipl.logging('Split sample datastructure\n---\n{}',
                ipl_split.datastructure2string())

    return ipl_split
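
# Standalone sketch (pure numpy, not the project's processing_lib) of what
# split_in_xyz produces conceptually: each volume is halved along z, y and x,
# and the two halves per axis are keyed '0' and '1'. The helper below is an
# illustration only.
import numpy as np

def split_in_xyz_sketch(volume):
    """Return {'z': {'0': ..., '1': ...}, 'y': {...}, 'x': {...}} for one 3D array."""
    return {
        name: dict(zip(('0', '1'), np.split(volume, 2, axis=axis)))
        for axis, name in enumerate(('z', 'y', 'x'))
    }

halves = split_in_xyz_sketch(np.zeros((100, 512, 512), dtype=np.uint8))
# halves['z']['0'].shape == (50, 512, 512); halves['x']['1'].shape == (100, 512, 256)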
Example #3
def run_remove_small_objects(yamlfile):

    ipl = IPL(yaml=yamlfile,
              yamlspec={
                  'path': 'datafolder',
                  'filename': 'labelsfile',
                  'skeys': 'labelsname'
              },
              recursive_search=True,
              nodata=True)

    # Set indentation of the logging
    ipl.set_indent(1)

    params = ipl.get_params()
    ipl.startlogger(filename=params['resultfolder'] +
                    'remove_small_objects.log',
                    type='w',
                    name='RemoveSmallObjects')

    try:

        # # Copy the script file and the parameters to the scriptsfolder
        # copy(inspect.stack()[0][1], params['scriptsfolder'])
        # copy(yamlfile, params['scriptsfolder'] + 'remove_small_objects.parameters.yml')

        ipl.logging('\nipl datastructure: \n\n{}',
                    ipl.datastructure2string(maxdepth=3))

        remove_small_objects(ipl)

        ipl.logging('\nFinal datastructure: \n\n{}',
                    ipl.datastructure2string(maxdepth=3))

        # ipl.write(filepath=params['intermedfolder'] + params['largeobjfile'])

        ipl.logging('')
        ipl.stoplogger()

    except:

        ipl.errout('Unexpected error')
def split_in_xyz(ipl):

    ipl.logging('Datastructure\n---\n{}', ipl.datastructure2string())

    reskeys = ('0', '1')
    ipl_split = IPL()
    ipl_split['z'] = ipl.anytask(lib.split, 2, axis=0, result_keys=reskeys, return_only=True, rtrntype=IPL)
    ipl_split['y'] = ipl.anytask(lib.split, 2, axis=1, result_keys=reskeys, return_only=True, rtrntype=IPL)
    ipl_split['x'] = ipl.anytask(lib.split, 2, axis=2, result_keys=reskeys, return_only=True, rtrntype=IPL)

    ipl_split = ipl_split.switch_levels(1, 2)
    ipl.logging('Split sample datastructure\n---\n{}', ipl_split.datastructure2string())

    return ipl_split
if __name__ == "__main__":

    infiles = [
        '/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splA.train.probs.crop.h5',
        '/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splA.train.raw_neurons.crop.h5'
    ]
    outfiles = [
        '/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splA.train.probs.crop.crop_x10_110_y200_712_z200_712.split_xyz.h5',
        '/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splA.train.raw_neurons.crop.crop_x10_110_y200_712_z200_712.split_xyz.h5'
    ]

    for i in xrange(0, len(infiles)):

        ipl = IPL(
            filepath=infiles[i]
        )
        ipl.logging('Datastructure\n---\n{}', ipl.datastructure2string())

        ipl.crop_bounding_rect(bounds=np.s_[10:110, 200:712, 200:712])

        def shape(image):
            return image.shape
        print ipl.datastructure2string(function=shape)

        ipl_split = split_in_xyz(ipl)

        ipl_split.write(filepath=outfiles[i])

    # # Sample A
    # sample_a = IPL(
    #     filepath='/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splA.raw_neurons.crop.h5'
    # )
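
# Sketch of what the crop bounds used in the loop above denote: np.s_ builds a
# slice expression, so np.s_[10:110, 200:712, 200:712] keeps z in [10, 110),
# y in [200, 712) and x in [200, 712). Assuming crop_bounding_rect applies such
# a slice to every image, a (125, 1250, 1250) volume becomes (100, 512, 512).
import numpy as np

bounds = np.s_[10:110, 200:712, 200:712]
volume = np.zeros((125, 1250, 1250), dtype=np.uint8)   # toy stand-in volume
assert volume[bounds].shape == (100, 512, 512)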
Example #6
    # split_sample_a['y'] = sample_a.anytask(lib.split, 2, axis=1, result_keys=reskeys, return_only=True)
    # split_sample_a['x'] = sample_a.anytask(lib.split, 2, axis=2, result_keys=reskeys, return_only=True)
    #
    # split_sample_a = split_sample_a.switch_levels(1, 2)
    # sample_a.logging('Split sample A datastructure\n---\n{}', split_sample_a.datastructure2string())
    #
    # split_sample_a.write('/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splA.raw_neurons.crop.split_xyz.h5')

    # Sample B
    sample = IPL(
        filepath=
        '/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splB.raw_neurons.crop.h5'
    )

    sample.logging('Sample B datastructure\n---\n{}',
                   sample.datastructure2string())

    reskeys = ('0', '1')
    split_sample = IPL()
    split_sample['z'] = sample.anytask(lib.split,
                                       2,
                                       axis=0,
                                       result_keys=reskeys,
                                       return_only=True)
    split_sample['y'] = sample.anytask(lib.split,
                                       2,
                                       axis=1,
                                       result_keys=reskeys,
                                       return_only=True)
    split_sample['x'] = sample.anytask(lib.split,
                                       2,
                                       axis=2,
                                       result_keys=reskeys,
                                       return_only=True)

    # split_sample_a = IPL()
    # split_sample_a['z'] = sample_a.anytask(lib.split, 2, axis=0, result_keys=reskeys, return_only=True)
    # split_sample_a['y'] = sample_a.anytask(lib.split, 2, axis=1, result_keys=reskeys, return_only=True)
    # split_sample_a['x'] = sample_a.anytask(lib.split, 2, axis=2, result_keys=reskeys, return_only=True)
    #
    # split_sample_a = split_sample_a.switch_levels(1, 2)
    # sample_a.logging('Split sample A datastructure\n---\n{}', split_sample_a.datastructure2string())
    #
    # split_sample_a.write('/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splA.raw_neurons.crop.split_xyz.h5')

    # Sample B
    sample = IPL(
        filepath='/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splB.raw_neurons.crop.h5'
    )

    sample.logging('Sample B datastructure\n---\n{}', sample.datastructure2string())

    reskeys = ('0', '1')
    split_sample = IPL()
    split_sample['z'] = sample.anytask(lib.split, 2, axis=0, result_keys=reskeys, return_only=True)
    split_sample['y'] = sample.anytask(lib.split, 2, axis=1, result_keys=reskeys, return_only=True)
    split_sample['x'] = sample.anytask(lib.split, 2, axis=2, result_keys=reskeys, return_only=True)

    split_sample = split_sample.switch_levels(1, 2)
    sample.logging('Split sample B datastructure\n---\n{}', split_sample.datastructure2string())

    split_sample.write('/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splB.raw_neurons.crop.split_xyz.h5')

    # Sample C
    sample = IPL(
        filepath='/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splC.raw_neurons.crop.h5'
    )
Example #8
    # cremi = IPL(filepath='/mnt/localdata02/jhennies/neuraldata/cremi_2016/sample_B_20160501.hdf')
    #
    # cremi.logging('Datastructure:\n---\n{}', cremi.datastructure2string())
    #
    # images = IPL(data={
    #     'raw': cremi['volumes', 'raw'],
    #     'neuron_ids': cremi['volumes', 'labels', 'neuron_ids']
    # })
    #
    # images.logging('Datastructure:\n---\n{}', images.datastructure2string())
    #
    # images.write('/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splB.raw_neurons.crop.h5')

    cremi = IPL(
        filepath=
        '/mnt/localdata02/jhennies/neuraldata/cremi_2016/sample_C_20160501.hdf'
    )

    cremi.logging('Datastructure:\n---\n{}', cremi.datastructure2string())

    images = IPL(
        data={
            'raw': cremi['volumes', 'raw'],
            'neuron_ids': cremi['volumes', 'labels', 'neuron_ids']
        })

    images.logging('Datastructure:\n---\n{}', images.datastructure2string())

    images.write(
        '/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splC.raw_neurons.crop.h5'
    )
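
# Quick sanity check of the written file with plain h5py (assuming IPL.write
# stores the dictionary keys above, 'raw' and 'neuron_ids', as entries of the
# HDF5 file; the exact layout depends on IPL's implementation).
import h5py

with h5py.File('/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splC.raw_neurons.crop.h5', 'r') as f:
    print(list(f.keys()))       # expected to contain 'raw' and 'neuron_ids'
    print(f['raw'].shape)       # works if 'raw' is stored as a dataset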
                       integrate=True)
    ipl.data_from_file(params['intermedfolder'] + params['locmaxborderfile'],
                       skeys=(params['locmaxbordernames'][0], params['locmaxbordernames'][2]),
                       recursive_search=True,
                       integrate=True)

    try:

        # Copy the script file and the parameters to the scriptsfolder
        copy(inspect.stack()[0][1], params['scriptsfolder'])
        copy(yamlfile, params['scriptsfolder'] + 'paths_within_labels.parameters.yml')
        # Write script and parameters to the logfile
        ipl.code2log(inspect.stack()[0][1])
        ipl.logging('')
        ipl.yaml2log()
        ipl.logging('')

        ipl.logging('\nipl datastructure: \n\n{}', ipl.datastructure2string(maxdepth=3))

        paths = paths_within_labels_image_iteration(ipl)

        paths.write(filepath=params['intermedfolder'] + params['pathstruefile'])

        ipl.logging('\nFinal dictionary structure:\n---\n{}', ipl.datastructure2string())
        ipl.logging('')
        ipl.stoplogger()

    except:

        ipl.errout('Unexpected error')
                raise IOError(
                    'remove_small_objects: Error: Intermedfolder already exists!'
                )

        # Copy the script file and the parameters to the scriptsfolder
        copy(inspect.stack()[0][1], params['scriptsfolder'])
        copy(yamlfile,
             params['scriptsfolder'] + 'remove_small_objects.parameters.yml')
        # Write script and parameters to the logfile
        ipl.code2log(inspect.stack()[0][1])
        ipl.logging('')
        ipl.yaml2log()
        ipl.logging('')

        ipl.logging('\nipl datastructure: \n\n{}',
                    ipl.datastructure2string(maxdepth=3))

        remove_small_objects(ipl)

        ipl.logging('\nFinal datastructure: \n\n{}',
                    ipl.datastructure2string(maxdepth=3))

        ipl.write(filepath=params['intermedfolder'] + params['largeobjfile'])

        ipl.logging('')
        ipl.stoplogger()

    except:

        ipl.errout('Unexpected error')
Example #11
        filepath=params['intermedfolder'] + params['locmaxborderfile'],
        skeys=('disttransf', 'disttransfm'),
        tkeys=('disttransf', 'disttransfm')
    )

    hfp.data_from_file(
        filepath=params['datafolder'] + params['rawdatafile'],
        skeys=params['rawdataname'],
        tkeys='raw'
    )

    hfp.startlogger()

    try:

        hfp.logging('hfp datastructure:\n---\n{}---', hfp.datastructure2string(maxdepth=2))

        # hfp.anytask(lib.getvaluesfromcoords,
        #     reciprocal=True,
        #     keys='disttransfm',
        #     indict=hfp['false', '6155_9552'],
        #     tkeys='result_false'
        # )

        # hfp.anytask(lib.getvaluesfromcoords,
        #             reciprocal=True,
        #             keys='disttransf',
        #             indict=hfp['true', 'border', '27'],
        #             tkeys='result_true')

        # hfp.logging('hfp datastructure:\n---\n{}---', hfp.datastructure2string(maxdepth=2))
Example #12
    hfp.data_from_file(params['intermedfolder'] + params['largeobjmfile'],
                       skeys=params['largeobjmnames'][0], tkeys='largeobjm')
    hfp.startlogger(filename=params['resultfolder'] + 'find_orphans.log', type='w')

    try:

        # Copy the script file and the parameters to the scriptsfolder
        copy(inspect.stack()[0][1], params['scriptsfolder'])
        copy(yamlfile, params['scriptsfolder'] + 'find_orphans.parameters.yml')
        # Write script and parameters to the logfile
        hfp.code2log(inspect.stack()[0][1])
        hfp.logging('')
        hfp.yaml2log()
        hfp.logging('')

        hfp.logging('\nhfp datastructure: \n---\n{}', hfp.datastructure2string(maxdepth=1))

        if thisparams['return_bordercontact_images']:
            bordercontacts = find_border_contacts(hfp, ('largeobj', 'largeobjm'))
        else:
            find_border_contacts(hfp, ('largeobj', 'largeobjm'))

        hfp.write(filepath=params['intermedfolder'] + params['locmaxborderfile'])
        if thisparams['return_bordercontact_images']:
            bordercontacts.write(filepath=params['intermedfolder'] + params['bordercontactsfile'])

        hfp.logging('\nFinal hfp dictionary structure:\n---\n{}', hfp.datastructure2string())
        if thisparams['return_bordercontact_images']:
            hfp.logging('\nFinal bordercontacts dictionary structure:\n---\n{}', bordercontacts.datastructure2string())

        hfp.logging('')
        if not os.path.exists(params['intermedfolder']):
            os.makedirs(params['intermedfolder'])
        else:
            if params['overwriteresults']:
                hfp.logging('remove_small_objects: Warning: Intermedfolder already exists and content will be overwritten...\n')
            else:
                raise IOError('remove_small_objects: Error: Intermedfolder already exists!')

        # Copy the script file and the parameters to the scriptsfolder
        copy(inspect.stack()[0][1], params['scriptsfolder'])
        copy(yamlfile, params['scriptsfolder'] + 'remove_small_objects.parameters.yml')
        # Write script and parameters to the logfile
        hfp.code2log(inspect.stack()[0][1])
        hfp.logging('')
        hfp.yaml2log()
        hfp.logging('')

        hfp.logging('\nhfp datastructure: \n\n{}', hfp.datastructure2string(maxdepth=1))

        remove_small_objects(hfp)

        hfp.write(filepath=params['intermedfolder'] + params['largeobjfile'])

        hfp.logging('')
        hfp.stoplogger()

    except:

        hfp.errout('Unexpected error')
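
# Conceptual sketch only: the project's remove_small_objects() above works on
# the hfp/ipl data structure, but the analogous operation on a plain labelled
# numpy volume is available in scikit-image.
import numpy as np
from skimage.morphology import remove_small_objects as sk_remove_small_objects

labels = np.zeros((4, 64, 64), dtype=np.int64)
labels[:, :2, :2] = 1            # small object: 4 * 2 * 2 = 16 voxels
labels[:, 10:40, 10:40] = 2      # large object: 4 * 30 * 30 = 3600 voxels
large_only = sk_remove_small_objects(labels, min_size=100)
# label 1 is removed (set to 0), label 2 survives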

from hdf5_image_processing import Hdf5ImageProcessingLib as IPL
import processing_lib as lib

# Sample A probs
probs_a = IPL(
    filepath=
    '/mnt/localdata01/jhennies/neuraldata/cremi_2016/sample_A_train_betas/sample_A_train_mcseg_beta_0.5.h5'
)

probs_a.logging('Probs A datastructure\n---\n{}',
                probs_a.datastructure2string())

probs_a.anytask(lib.swapaxes, 0, 2)

probs_a.write(
    '/mnt/localdata01/jhennies/neuraldata/cremi_2016/sample_A_train_betas/cremi.splA.train.seg_beta_0.5.crop.h5'
)

reskeys = ('0', '1')
split_probs_a = IPL()
split_probs_a['z'] = probs_a.anytask(lib.split,
                                     2,
                                     axis=0,
                                     result_keys=reskeys,
                                     return_only=True,
                                     rtrntype=IPL)
split_probs_a['y'] = probs_a.anytask(lib.split,
                                     2,
                                     axis=1,
                                     result_keys=reskeys,
                                     return_only=True,
                                     rtrntype=IPL)

from hdf5_image_processing import Hdf5ImageProcessingLib as IPL
import processing_lib as lib

# Sample A probs
probs_a = IPL(
    filepath='/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splA.probs_cantorV1.h5'
)

probs_a.logging('Probs A datastructure\n---\n{}', probs_a.datastructure2string())

probs_a.anytask(lib.swapaxes, 0, 2)

probs_a.write('/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splA.train.probs.crop.h5')

reskeys = ('0', '1')
split_probs_a = IPL()
split_probs_a['z'] = probs_a.anytask(lib.split, 2, axis=0, result_keys=reskeys, return_only=True, rtrntype=IPL)
split_probs_a['y'] = probs_a.anytask(lib.split, 2, axis=1, result_keys=reskeys, return_only=True, rtrntype=IPL)
split_probs_a['x'] = probs_a.anytask(lib.split, 2, axis=2, result_keys=reskeys, return_only=True, rtrntype=IPL)

split_probs_a = split_probs_a.switch_levels(1, 2)
probs_a.logging('Split sample A datastructure\n---\n{}', split_probs_a.datastructure2string())

split_probs_a.write('/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splA.train.probs.crop.split_xyz.h5')
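
# Pure-numpy sketch of the axis swap performed above (assuming lib.swapaxes
# applies numpy's swapaxes to each image): exchanging axes 0 and 2 turns an
# (x, y, z) ordered array into (z, y, x) order, returning a view.
import numpy as np

probs = np.zeros((10, 20, 30), dtype=np.float32)   # toy (x, y, z) ordered volume
probs_zyx = np.swapaxes(probs, 0, 2)
assert probs_zyx.shape == (30, 20, 10)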
Example #16
        if not os.path.exists(params['intermedfolder']):
            os.makedirs(params['intermedfolder'])
        else:
            if params['overwriteresults']:
                hfp.logging('remove_small_objects: Warning: Intermedfolder already exists and content will be overwritten...\n')
            else:
                raise IOError('remove_small_objects: Error: Intermedfolder already exists!')

        # Copy the script file and the parameters to the scriptsfolder
        copy(inspect.stack()[0][1], params['scriptsfolder'])
        copy(yamlfile, params['scriptsfolder'] + 'remove_small_objects.parameters.yml')
        # Write script and parameters to the logfile
        hfp.code2log(inspect.stack()[0][1])
        hfp.logging('')
        hfp.yaml2log()
        hfp.logging('')

        hfp.logging('\nhfp datastructure: \n\n{}', hfp.datastructure2string(maxdepth=1))

        remove_small_objects(hfp, 'labels')

        hfp.write(filepath=params['intermedfolder'] + params['largeobjfile'])

        hfp.logging('')
        hfp.stoplogger()

    except:

        hfp.errout('Unexpected error')

    )
    params = features.get_params()
    thisparams = params['random_forest']
    features.startlogger(filename=params['resultfolder'] + 'random_forest.log', type='w')

    try:

        # Copy the script file and the parameters to the scriptsfolder
        copy(inspect.stack()[0][1], params['scriptsfolder'])
        copy(yamlfile, params['scriptsfolder'] + 'random_forest.parameters.yml')
        # Write script and parameters to the logfile
        features.code2log(inspect.stack()[0][1])
        features.logging('')
        features.yaml2log()
        features.logging('')

        features.logging('\nfeatures datastructure: \n---\n{}', features.datastructure2string(maxdepth=2))

        result = random_forest_iteration(features)

        result.write(filepath=params['intermedfolder'] + params['randforestfile'])

        features.logging('\nFinal dictionary structure:\n---\n{}', features.datastructure2string(maxdepth=2))
        features.logging('')
        features.stoplogger()

    except:
        raise
        features.errout('Unexpected error')
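
# Minimal sketch of the classification step that random_forest_iteration()
# presumably wraps (an assumption about the project code): a scikit-learn random
# forest on a feature matrix X (one row per path) with binary labels y
# (true vs. false merge path). All data below is toy data.
import numpy as np
from sklearn.ensemble import RandomForestClassifier

rng = np.random.RandomState(0)
X = rng.rand(200, 16)                     # 200 paths, 16 features each
y = rng.randint(0, 2, size=200)           # toy true/false path labels
rf = RandomForestClassifier(n_estimators=100, random_state=0)
rf.fit(X[:150], y[:150])
print(rf.score(X[150:], y[150:]))         # accuracy on the held-out paths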


if __name__ == "__main__":

    # cremi = IPL(filepath='/mnt/localdata02/jhennies/neuraldata/cremi_2016/sample_B_20160501.hdf')
    #
    # cremi.logging('Datastructure:\n---\n{}', cremi.datastructure2string())
    #
    # images = IPL(data={
    #     'raw': cremi['volumes', 'raw'],
    #     'neuron_ids': cremi['volumes', 'labels', 'neuron_ids']
    # })
    #
    # images.logging('Datastructure:\n---\n{}', images.datastructure2string())
    #
    # images.write('/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splB.raw_neurons.crop.h5')



    cremi = IPL(filepath='/mnt/localdata02/jhennies/neuraldata/cremi_2016/sample_C_20160501.hdf')

    cremi.logging('Datastructure:\n---\n{}', cremi.datastructure2string())

    images = IPL(data={
        'raw': cremi['volumes', 'raw'],
        'neuron_ids': cremi['volumes', 'labels', 'neuron_ids']
    })

    images.logging('Datastructure:\n---\n{}', images.datastructure2string())

    images.write('/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splC.raw_neurons.crop.h5')
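
# Sketch (an assumption, not IPL's actual implementation) of what
# IPL(filepath=...) conceptually does: mirror the HDF5 hierarchy into nested
# dicts so that access like cremi['volumes', 'labels', 'neuron_ids'] corresponds
# to the HDF5 path /volumes/labels/neuron_ids.
import h5py

def h5_to_dict(group):
    out = {}
    for key, item in group.items():
        if isinstance(item, h5py.Group):
            out[key] = h5_to_dict(item)     # recurse into subgroups
        else:
            out[key] = item[()]             # read the full dataset into memory
    return out

with h5py.File('/mnt/localdata02/jhennies/neuraldata/cremi_2016/sample_C_20160501.hdf', 'r') as f:
    data = h5_to_dict(f)
# data['volumes']['labels']['neuron_ids'] is then a plain numpy array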
Example #19
                    'localmax_on_disttransf.log',
                    type='w')

    try:

        # Copy the script file and the parameters to the scriptsfolder
        copy(inspect.stack()[0][1], params['scriptsfolder'])
        copy(yamlfile,
             params['scriptsfolder'] + 'localmax_on_disttransf.parameters.yml')
        # Write script and parameters to the logfile
        hfp.code2log(inspect.stack()[0][1])
        hfp.logging('')
        hfp.yaml2log()
        hfp.logging('')

        hfp.logging('\nhfp datastructure: \n\n{}',
                    hfp.datastructure2string(maxdepth=1))

        localmax_on_disttransf(hfp, ('disttransf', 'disttransfm'))

        hfp.write(filepath=params['intermedfolder'] + params['locmaxfile'])

        hfp.logging('\nFinal dictionary structure:\n---\n{}',
                    hfp.datastructure2string())
        hfp.logging('')
        hfp.stoplogger()

    except:

        hfp.errout('Unexpected error')
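
# Standalone sketch of the general technique (not the project's
# localmax_on_disttransf): compute the Euclidean distance transform of a binary
# object mask and take its local maxima as seed coordinates, e.g. for a
# subsequent watershed.
import numpy as np
from scipy.ndimage import distance_transform_edt
from skimage.feature import peak_local_max

mask = np.zeros((64, 64), dtype=bool)
mask[8:30, 8:30] = True
mask[28:56, 28:56] = True                            # two overlapping squares
disttransf = distance_transform_edt(mask)
seeds = peak_local_max(disttransf, min_distance=5)   # (row, col) coordinates
print(seeds)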
        yaml=yamlfile,
        yamlspec={'path': 'intermedfolder', 'filename': 'largeobjfile'}
    )
    params = ipl.get_params()
    ipl.startlogger(filename=params['resultfolder'] + 'merge_adjacent_objects.log', type='w')

    try:

        # Copy the script file and the parameters to the scriptsfolder
        copy(inspect.stack()[0][1], params['scriptsfolder'])
        copy(yamlfile, params['scriptsfolder'] + 'merge_adjacent_objects.parameters.yml')
        # Write script and parameters to the logfile
        ipl.code2log(inspect.stack()[0][1])
        ipl.logging('')
        ipl.yaml2log()
        ipl.logging('')

        ipl.logging('\nipl datastructure: \n\n{}', ipl.datastructure2string(maxdepth=3))

        merged = merge_adjacent_objects_image_iteration(ipl)

        merged.write(filepath=params['intermedfolder'] + params['largeobjmfile'])

        ipl.logging('\nFinal dictionary structure:\n---\n{}', merged.datastructure2string())
        ipl.logging('')
        ipl.stoplogger()

    except:

        ipl.errout('Unexpected error')
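
# Sketch of one way to find adjacent label pairs on a plain label volume (an
# illustration of the general idea behind merge_adjacent_objects, not its actual
# implementation): collect pairs of different nonzero labels that touch
# face-to-face along any axis.
import numpy as np

def adjacent_label_pairs(labels):
    pairs = set()
    for axis in range(labels.ndim):
        a = np.swapaxes(labels, 0, axis)
        lo, hi = a[:-1], a[1:]                            # face-neighbouring slabs
        touching = (lo != hi) & (lo != 0) & (hi != 0)
        for u, v in zip(lo[touching], hi[touching]):
            pairs.add((int(min(u, v)), int(max(u, v))))
    return pairs

toy_labels = np.array([[1, 1, 2],
                       [1, 3, 2],
                       [0, 3, 3]])
print(adjacent_label_pairs(toy_labels))   # pairs (1, 2), (1, 3) and (2, 3)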
        tkeys='largeobj',
        castkey=None
    )
    params = hfp.get_params()
    hfp.startlogger(filename=params['resultfolder'] + 'merge_adjacent_objects.log', type='w')

    try:

        # Copy the script file and the parameters to the scriptsfolder
        copy(inspect.stack()[0][1], params['scriptsfolder'])
        copy(yamlfile, params['scriptsfolder'] + 'merge_adjacent_objects.parameters.yml')
        # Write script and parameters to the logfile
        hfp.code2log(inspect.stack()[0][1])
        hfp.logging('')
        hfp.yaml2log()
        hfp.logging('')

        hfp.logging('\nhfp datastructure: \n\n{}', hfp.datastructure2string(maxdepth=1))

        merge_adjacent_objects(hfp)

        hfp.write(filepath=params['intermedfolder'] + params['largeobjmfile'])

        hfp.logging('\nFinal dictionary structure:\n---\n{}', hfp.datastructure2string())
        hfp.logging('')
        hfp.stoplogger()

    except:

        hfp.errout('Unexpected error')
    # ipl['false', 'locmax'] = IPL(data=ipl['largeobjm', 'locmaxm', 'path'])
    # ipl.pop('largeobjm')
    #
    # ipl.pop('pathsim')
    # ipl.pop('overlay')

    ipl.startlogger(filename=params['resultfolder']+'features_of_paths.log', type='w')

    try:

        ipl.code2log(inspect.stack()[0][1])
        ipl.logging('')
        ipl.yaml2log()
        ipl.logging('')

        ipl.logging('\nipl datastructure: \n\n{}', ipl.datastructure2string(maxdepth=4))

        # Done: Make path image (true)
        # Done: Make path image (false)
        # TODO: Get topological features
        # TODO:     Topological feature: Length (with respect to anisotropy!)
        # TODO:     Topological feature: Statistics on curvature
        # TODO: Get data features on path (raw, probabilities, distance transform)
        # TODO: Get data features on end points (raw, probabilities, distance transform)
        # Done: Cross-computation of two ImageProcessing instances

        # Done: This is total bullshit! I need to iterate over all paths and extract the region features individually!

        # Store all feature images in here
        disttransf_images = IPL(
            yaml=yamlfile,
                         type='w')

    try:

        # Copy the script file and the parameters to the scriptsfolder
        copy(inspect.stack()[0][1], params['scriptsfolder'])
        copy(yamlfile,
             params['scriptsfolder'] + 'random_forest.parameters.yml')
        # Write script and parameters to the logfile
        features.code2log(inspect.stack()[0][1])
        features.logging('')
        features.yaml2log()
        features.logging('')

        features.logging('\nfeatures datastructure: \n---\n{}',
                         features.datastructure2string(maxdepth=2))

        result = random_forest_iteration(features)

        result.write(filepath=params['intermedfolder'] +
                     params['randforestfile'])

        features.logging('\nFinal dictionary structure:\n---\n{}',
                         features.datastructure2string(maxdepth=2))
        features.logging('')
        features.stoplogger()

    except:
        raise
        features.errout('Unexpected error')
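
# Sketch for the "path length with respect to anisotropy" feature mentioned in
# the TODO list of the features_of_paths example above (an illustration, not the
# project's implementation): scale each voxel step by the (hypothetical) voxel
# size per axis and sum the Euclidean step lengths.
import numpy as np

def path_length(path_coords, anisotropy=(10.0, 1.0, 1.0)):
    """path_coords: sequence of (z, y, x) voxel coordinates along one path."""
    steps = np.diff(np.asarray(path_coords, dtype=float), axis=0)
    steps *= np.asarray(anisotropy)          # voxel steps -> physical units
    return float(np.sqrt((steps ** 2).sum(axis=1)).sum())

print(path_length([(0, 0, 0), (1, 0, 0), (1, 3, 4)]))   # 10.0 + 5.0 = 15.0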
Example #24
if __name__ == "__main__":

    infiles = [
        '/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splA.train.probs.crop.h5',
        '/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splA.train.raw_neurons.crop.h5'
    ]
    outfiles = [
        '/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splA.train.probs.crop.crop_x10_110_y200_712_z200_712.split_xyz.h5',
        '/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splA.train.raw_neurons.crop.crop_x10_110_y200_712_z200_712.split_xyz.h5'
    ]

    for i in xrange(0, len(infiles)):

        ipl = IPL(filepath=infiles[i])
        ipl.logging('Datastructure\n---\n{}', ipl.datastructure2string())

        ipl.crop_bounding_rect(bounds=np.s_[10:110, 200:712, 200:712])

        def shape(image):
            return image.shape

        print ipl.datastructure2string(function=shape)

        ipl_split = split_in_xyz(ipl)

        ipl_split.write(filepath=outfiles[i])

    # # Sample A
    # sample_a = IPL(
    #     filepath='/mnt/localdata02/jhennies/neuraldata/cremi_2016/cremi.splA.raw_neurons.crop.h5'