def run_paths_of_merges(yamlfile, logging=True):

    ipl = IPL(yaml=yamlfile)

    ipl.set_indent(1)

    params = rdict(data=ipl.get_params())
    if logging:
        ipl.startlogger(filename=params['resultfolder'] + 'paths_of_merges.log',
                        type='w', name='PathsOfMerges')
    else:
        ipl.startlogger()

    try:

        # # Copy the script file and the parameters to the scriptsfolder
        # copy(inspect.stack()[0][1], params['scriptsfolder'])
        # copy(yamlfile, params['scriptsfolder'] + 'paths_of_merges.parameters.yml')

        # ipl.logging('\nInitial datastructure: \n\n{}', ipl.datastructure2string(maxdepth=3))

        paths_of_merges(ipl, params['debug'])

        # ipl.logging('\nFinal datastructure: \n\n{}', ipl.datastructure2string(maxdepth=3))

        # ipl.write(filepath=params['intermedfolder'] + params['largeobjfile'])

        ipl.logging('')
        ipl.stoplogger()

    except:
        ipl.errout('Unexpected error')
def run_random_forest(yamlfile, logging=True, make_only_feature_array=False, debug=False, write=True):

    ipl = IPL(yaml=yamlfile)

    ipl.set_indent(1)

    params = rdict(data=ipl.get_params())
    if logging:
        ipl.startlogger(filename=params['resultfolder'] + 'random_forest.log',
                        type='w', name='RandomForest')
    else:
        ipl.startlogger()

    try:

        # # Copy the script file and the parameters to the scriptsfolder
        # copy(inspect.stack()[0][1], params['scriptsfolder'])
        # copy(yamlfile, params['scriptsfolder'] + 'random_forest.parameters.yml')

        # ipl.logging('\nInitial datastructure: \n\n{}', ipl.datastructure2string(maxdepth=3))

        if make_only_feature_array:
            make_feature_arrays(ipl)
        else:
            result = IPL()
            result['result'], result['evaluation'] = random_forest(ipl, debug=debug)

            # ipl.logging('\nFinal datastructure: \n\n{}', ipl.datastructure2string(maxdepth=3))

            if write:
                result.write(filepath=params['resultfolder'] + params['resultsfile'])

        ipl.logging('')
        ipl.stoplogger()

    except:
        ipl.errout('Unexpected error')
def run_remove_small_objects(yamlfile):

    ipl = IPL(
        yaml=yamlfile,
        yamlspec={'path': 'datafolder', 'filename': 'labelsfile', 'skeys': 'labelsname'},
        recursive_search=True,
        nodata=True
    )

    # Set indentation of the logging
    ipl.set_indent(1)

    params = ipl.get_params()
    ipl.startlogger(filename=params['resultfolder'] + 'remove_small_objects.log',
                    type='w', name='RemoveSmallObjects')

    try:

        # # Copy the script file and the parameters to the scriptsfolder
        # copy(inspect.stack()[0][1], params['scriptsfolder'])
        # copy(yamlfile, params['scriptsfolder'] + 'remove_small_objects.parameters.yml')

        ipl.logging('\nipl datastructure: \n\n{}', ipl.datastructure2string(maxdepth=3))

        remove_small_objects(ipl)

        ipl.logging('\nFinal datastructure: \n\n{}', ipl.datastructure2string(maxdepth=3))

        # ipl.write(filepath=params['intermedfolder'] + params['largeobjfile'])

        ipl.logging('')
        ipl.stoplogger()

    except:
        ipl.errout('Unexpected error')
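# ---------------------------------------------------------------------------
# Hedged usage sketch (added for illustration, not part of the original code):
# one way the run_* wrappers above might be chained from a driver script. The
# function name and the call order are assumptions inferred from the wrappers'
# names; only the signatures defined above are relied on.
def run_pipeline_sketch(yamlfile):
    # Filter out small objects, compute the paths of merges, then train and
    # evaluate the random forest and write the result to the results folder.
    run_remove_small_objects(yamlfile)
    run_paths_of_merges(yamlfile, logging=True)
    run_random_forest(yamlfile, logging=True, debug=False, write=True)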
if __name__ == '__main__':

    resultsfolder = '/mnt/localdata02/jhennies/neuraldata/results/cremi_2016/161110_random_forest_of_paths/'

    yamlfile = resultsfolder + '/parameters.yml'

    features = IPL(yaml=yamlfile,
                   yamlspec={'path': 'intermedfolder', 'filename': 'featurefile'})
    params = features.get_params()
    thisparams = params['random_forest']
    features.startlogger(filename=params['resultfolder'] + 'random_forest.log', type='w')

    try:

        # Copy the script file and the parameters to the scriptsfolder
        copy(inspect.stack()[0][1], params['scriptsfolder'])
        copy(yamlfile, params['scriptsfolder'] + 'random_forest.parameters.yml')
        # Write script and parameters to the logfile
        features.code2log(inspect.stack()[0][1])
        features.logging('')
        features.yaml2log()
        features.logging('')

        features.logging('\nfeatures datastructure: \n---\n{}',
                         features.datastructure2string(maxdepth=2))
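# ---------------------------------------------------------------------------
# Hedged sketch (added for illustration, not part of the original code): the
# "copy the running script and its parameter file to the scriptsfolder, then
# mirror both into the logfile" preamble recurs in the __main__ blocks of this
# section and could be factored into a helper along these lines. The helper
# name is hypothetical, copy is assumed to be shutil.copy (matching the call
# pattern above), and code2log/yaml2log/logging are the IPL methods used above.
import inspect
from shutil import copy

def log_script_and_params(ipl_obj, yamlfile, params, paramcopy_name):
    # inspect.stack()[1][1] is the file of the caller, i.e. the running script
    scriptfile = inspect.stack()[1][1]
    copy(scriptfile, params['scriptsfolder'])
    copy(yamlfile, params['scriptsfolder'] + paramcopy_name)
    # Write script and parameters to the logfile
    ipl_obj.code2log(scriptfile)
    ipl_obj.logging('')
    ipl_obj.yaml2log()
    ipl_obj.logging('')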
yamlfile = os.path.dirname(os.path.abspath(__file__)) + '/parameters.yml'

hfp = IPL(yaml=yamlfile,
          yamlspec={'path': 'intermedfolder', 'filename': 'locmaxborderfile',
                    'skeys': {'locmaxbordernames': (2, 3)}},
          tkeys=('disttransf', 'disttransfm'),
          castkey=None)
params = hfp.get_params()
thisparams = params['localmax_on_disttransf']
hfp.startlogger(filename=params['resultfolder'] + 'localmax_on_disttransf.log', type='w')

try:

    # Copy the script file and the parameters to the scriptsfolder
    copy(inspect.stack()[0][1], params['scriptsfolder'])
    copy(yamlfile, params['scriptsfolder'] + 'localmax_on_disttransf.parameters.yml')
    # Write script and parameters to the logfile
    hfp.code2log(inspect.stack()[0][1])
    hfp.logging('')
    hfp.yaml2log()
    hfp.logging('')

    hfp.logging('\nhfp datastructure: \n\n{}',
if __name__ == '__main__':

    resultsfolder = '/mnt/localdata02/jhennies/neuraldata/results/cremi_2016/161110_random_forest_of_paths/'

    yamlfile = resultsfolder + '/parameters.yml'

    ipl = IPL(yaml=yamlfile,
              yamlspec={'path': 'datafolder', 'filename': 'labelsfile'})
    params = ipl.get_params()
    ipl.startlogger(filename=params['resultfolder'] + 'remove_small_objects.log', type='a')

    try:

        # Create folder for scripts
        if not os.path.exists(params['scriptsfolder']):
            os.makedirs(params['scriptsfolder'])
        else:
            if params['overwriteresults']:
                ipl.logging(
                    'remove_small_objects: Warning: Scriptsfolder already exists and content will be overwritten...\n'
                )
            else:
                raise IOError(
                    'remove_small_objects: Error: Scriptsfolder already exists!'
                )
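# ---------------------------------------------------------------------------
# Hedged sketch (added for illustration, not part of the original code): the
# create-or-overwrite guard for the scriptsfolder shown above, written as a
# standalone helper. The helper name and the message prefix parameter are
# hypothetical; the behaviour mirrors the block above (create the folder if it
# is missing, warn when overwriting is allowed, raise IOError otherwise).
import os

def prepare_scriptsfolder(ipl_obj, params, prefix='remove_small_objects'):
    if not os.path.exists(params['scriptsfolder']):
        os.makedirs(params['scriptsfolder'])
    elif params['overwriteresults']:
        ipl_obj.logging(
            '{}: Warning: Scriptsfolder already exists and content will be '
            'overwritten...\n'.format(prefix)
        )
    else:
        raise IOError('{}: Error: Scriptsfolder already exists!'.format(prefix))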
    tkeys='true',
    castkey=None)

params = hfp.get_params()
hfp.data_from_file(filepath=params['intermedfolder'] + params['pathsfalsefile'],
                   tkeys='false',
                   castkey=None)
hfp.data_from_file(filepath=params['intermedfolder'] + params['locmaxfile'],
                   skeys=('disttransf', 'disttransfm'),
                   tkeys=('disttransf', 'disttransfm'))

hfp.startlogger()

try:

    hfp.logging('hfp datastructure:\n---\n{}---', hfp.datastructure2string(maxdepth=2))

    hfp.anytask(lib.getvaluesfromcoords,
                reciprocal=True,
                keys='disttransfm',
                indict=hfp['false', '6155_9552'],
                tkeys='result_false')

    hfp.logging('hfp datastructure:\n---\n{}---', hfp.datastructure2string(maxdepth=2))
if __name__ == '__main__':

    resultsfolder = '/mnt/localdata02/jhennies/neuraldata/results/cremi_2016/161110_random_forest_of_paths/'

    yamlfile = resultsfolder + '/parameters.yml'

    ipl = IPL(yaml=yamlfile,
              yamlspec={'path': 'intermedfolder', 'filename': 'largeobjfile'})
    params = ipl.get_params()
    ipl.startlogger(filename=params['resultfolder'] + 'merge_adjacent_objects.log', type='w')

    try:

        # Copy the script file and the parameters to the scriptsfolder
        copy(inspect.stack()[0][1], params['scriptsfolder'])
        copy(yamlfile, params['scriptsfolder'] + 'merge_adjacent_objects.parameters.yml')
        # Write script and parameters to the logfile
        ipl.code2log(inspect.stack()[0][1])
        ipl.logging('')
        ipl.yaml2log()
        ipl.logging('')

        ipl.logging('\nipl datastructure: \n\n{}',
# ipl['true', 'locmax'] = IPL(data=ipl['largeobj', 'locmax', 'path'])
# ipl.pop('largeobj')

ipl.data_from_file(filepath=params['intermedfolder'] + params['pathsfalsefile'],
                   skeys='path', recursive_search=True, integrate=True)
ipl.rename_layer('largeobjm', 'false')
ipl.remove_layer('path')

# ipl['false', 'border'] = IPL(data=ipl['largeobjm', 'border_locmax_m', 'path'])
# ipl['false', 'locmax'] = IPL(data=ipl['largeobjm', 'locmaxm', 'path'])
# ipl.pop('largeobjm')
#
# ipl.pop('pathsim')
# ipl.pop('overlay')

ipl.startlogger(filename=params['resultfolder'] + 'features_of_paths.log', type='w')

try:

    ipl.code2log(inspect.stack()[0][1])
    ipl.logging('')
    ipl.yaml2log()
    ipl.logging('')

    ipl.logging('\nipl datastructure: \n\n{}', ipl.datastructure2string(maxdepth=4))

    # Done: Make path image (true)
    # Done: Make path image (false)
    # TODO: Get topological features
    # TODO: Topological feature: Length (with respect to anisotropy!)
    # TODO: Topological feature: Statistics on curvature
ipl = IPL(yaml=yamlfile,
          yamlspec={'path': 'intermedfolder', 'filename': 'largeobjfile', 'skeys': 'largeobjname'},
          recursive_search=True)
params = ipl.get_params()
thisparams = params['find_border_contacts']
ipl.data_from_file(params['intermedfolder'] + params['largeobjmfile'],
                   skeys=params['largeobjmnames'][0],
                   recursive_search=True, integrate=True)
ipl.startlogger(filename=params['resultfolder'] + 'find_border_contacts.log', type='w')

try:

    # Copy the script file and the parameters to the scriptsfolder
    copy(inspect.stack()[0][1], params['scriptsfolder'])
    copy(yamlfile, params['scriptsfolder'] + 'find_border_contacts.parameters.yml')
    # Write script and parameters to the logfile
    ipl.code2log(inspect.stack()[0][1])
    ipl.logging('')
    ipl.yaml2log()
    ipl.logging('')

    ipl.logging('\nipl datastructure: \n---\n{}',
if __name__ == '__main__':

    yamlfile = os.path.dirname(os.path.abspath(__file__)) + '/parameters.yml'

    hfp = IPL(
        yaml=yamlfile,
        yamlspec={'path': 'intermedfolder', 'filename': 'largeobjfile', 'skeys': 'largeobjname'},
        tkeys='largeobj',
        castkey=None
    )
    params = hfp.get_params()
    thisparams = params['find_border_contacts']
    hfp.data_from_file(params['intermedfolder'] + params['largeobjmfile'],
                       skeys=params['largeobjmnames'][0],
                       tkeys='largeobjm')
    hfp.startlogger(filename=params['resultfolder'] + 'find_orphans.log', type='w')

    try:

        # Copy the script file and the parameters to the scriptsfolder
        copy(inspect.stack()[0][1], params['scriptsfolder'])
        copy(yamlfile, params['scriptsfolder'] + 'find_orphans.parameters.yml')
        # Write script and parameters to the logfile
        hfp.code2log(inspect.stack()[0][1])
        hfp.logging('')
        hfp.yaml2log()
        hfp.logging('')

        hfp.logging('\nhfp datastructure: \n---\n{}', hfp.datastructure2string(maxdepth=1))

        if thisparams['return_bordercontact_images']:
yamlfile = os.path.dirname(os.path.abspath(__file__)) + '/parameters.yml'

hfp = IPL(yaml=yamlfile,
          yamlspec={'path': 'intermedfolder', 'filename': 'locmaxborderfile',
                    'skeys': {'locmaxbordernames': (0, 2)}},
          tkeys=('border_locmax', 'disttransf'),
          castkey=None)
params = hfp.get_params()
thisparams = params['paths_within_labels']
hfp.startlogger(filename=params['resultfolder'] + 'paths_within_labels.log', type='w')
hfp.data_from_file(params['intermedfolder'] + params['largeobjfile'],
                   skeys=params['largeobjname'],
                   tkeys='largeobj')
hfp.data_from_file(params['intermedfolder'] + params['locmaxfile'],
                   skeys=params['locmaxnames'][0],
                   tkeys='locmax')

try:

    # Copy the script file and the parameters to the scriptsfolder
    copy(inspect.stack()[0][1], params['scriptsfolder'])
    copy(yamlfile, params['scriptsfolder'] + 'paths_within_labels.parameters.yml')
    # Write script and parameters to the logfile
    # pass


if __name__ == '__main__':

    yamlfile = os.path.dirname(os.path.abspath(__file__)) + '/parameters.yml'

    hfp = IPL(
        yaml=yamlfile,
        yamlspec={'path': 'intermedfolder', 'filename': 'locmaxborderfile',
                  'skeys': {'locmaxbordernames': (1, 3)}},
        tkeys=('border_locmax_m', 'disttransfm'),
        castkey=None
    )
    params = hfp.get_params()
    thisparams = params['paths_of_partners']
    hfp.startlogger(filename=params['resultfolder'] + 'paths_of_partners.log', type='w')
    hfp.data_from_file(params['intermedfolder'] + params['largeobjfile'],
                       skeys=params['largeobjname'],
                       tkeys='largeobj')
    hfp.data_from_file(params['intermedfolder'] + params['largeobjmfile'],
                       skeys=(params['largeobjmnames'][0], params['largeobjmnames'][4]),
                       tkeys=('largeobjm', 'change_hash'))
    hfp.data_from_file(params['intermedfolder'] + params['locmaxfile'],
                       skeys=params['locmaxnames'][0],
                       tkeys='locmaxm')

    try:

        # Copy the script file and the parameters to the scriptsfolder
        copy(inspect.stack()[0][1], params['scriptsfolder'])
        copy(yamlfile, params['scriptsfolder'] + 'paths_of_partners.parameters.yml')
# # TODO: Insert code here
# hfp = Hdf5ImageProcessingLib(
#     yaml=yamlfile,
#     yamlspec={'path': 'intermedfolder', 'filename': 'pathstruefile'},
#     tkeys='true',
#     castkey=None
# )
# params = hfp.get_params()
# hfp.logging('params = {}', params)
# hfp.data_from_file(
#     filepath=params['intermedfolder'] + params['pathsfalsefile'],
#     tkeys='false',
#     castkey=None
# )

hfp.startlogger(filename=params['intermedfolder'] + 'features_of_paths.log', type='a')

try:

    hfp.code2log(inspect.stack()[0][1])
    hfp.logging('')
    hfp.yaml2log()
    hfp.logging('')

    hfp.logging('\nhfp datastructure: \n\n{}', hfp.datastructure2string(maxdepth=1))

    # Done: Iterate over paths and accumulate features
    # Done: Implement data iterator