def run_find_border_contacts(yamlfile, logging=True):
    """Run the find-border-contacts step of the pipeline.

    :param yamlfile: Path to the YAML parameter file used to configure the IPL.
    :param logging: When True, log to '<resultfolder>/find_border_contacts.log';
        otherwise start the default logger.
    """
    ipl = IPL(yaml=yamlfile)
    ipl.set_indent(1)
    params = rdict(data=ipl.get_params())
    if logging:
        ipl.startlogger(filename=params['resultfolder'] + 'find_border_contacts.log',
                        type='w', name='FindBorderContacts')
    else:
        ipl.startlogger()

    try:
        find_border_contacts(ipl)

        ipl.logging('')
        ipl.stoplogger()

    except Exception:
        # Narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt are
        # not swallowed by the error handler.
        ipl.errout('Unexpected error')
def run_remove_small_objects(yamlfile):
    """Run the remove-small-objects step of the pipeline.

    Loads the labels dataset lazily (nodata=True) according to the yamlspec,
    logs the data structure before and after, and removes small objects.

    :param yamlfile: Path to the YAML parameter file used to configure the IPL.
    """
    ipl = IPL(
        yaml=yamlfile,
        yamlspec={'path': 'datafolder', 'filename': 'labelsfile', 'skeys': 'labelsname'},
        recursive_search=True,
        nodata=True
    )
    # Set indentation of the logging
    ipl.set_indent(1)
    params = ipl.get_params()
    ipl.startlogger(filename=params['resultfolder'] + 'remove_small_objects.log',
                    type='w', name='RemoveSmallObjects')

    try:
        ipl.logging('\nipl datastructure: \n\n{}', ipl.datastructure2string(maxdepth=3))

        remove_small_objects(ipl)

        ipl.logging('\nFinal datastructure: \n\n{}', ipl.datastructure2string(maxdepth=3))

        ipl.logging('')
        ipl.stoplogger()

    except Exception:
        # Narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt are
        # not swallowed by the error handler.
        ipl.errout('Unexpected error')
def run_compute_feature_images(yamlfile):
    """Run the compute-feature-images step of the pipeline.

    :param yamlfile: Path to the YAML parameter file used to configure the IPL.
    """
    ipl = IPL(yaml=yamlfile)
    ipl.set_indent(1)
    params = rdict(data=ipl.get_params())
    ipl.startlogger(filename=params['resultfolder'] + 'compute_feature_images.log',
                    type='w', name='ComputeFeatureImages')

    try:
        compute_feature_images(ipl)

        ipl.logging('')
        ipl.stoplogger()

    except Exception:
        # Narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt are
        # not swallowed by the error handler.
        ipl.errout('Unexpected error')
def run_paths_of_merges(yamlfile, logging=True):
    """Run the paths-of-merges step of the pipeline.

    :param yamlfile: Path to the YAML parameter file used to configure the IPL.
    :param logging: When True, log to '<resultfolder>/paths_of_merges.log';
        otherwise start the default logger.
    """
    ipl = IPL(yaml=yamlfile)
    ipl.set_indent(1)
    params = rdict(data=ipl.get_params())
    if logging:
        ipl.startlogger(filename=params['resultfolder'] + 'paths_of_merges.log',
                        type='w', name='PathsOfMerges')
    else:
        ipl.startlogger()

    try:
        paths_of_merges(ipl, params['debug'])

        ipl.logging('')
        ipl.stoplogger()

    except Exception:
        # Narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt are
        # not swallowed by the error handler.
        ipl.errout('Unexpected error')
def run_random_forest(yamlfile, logging=True, make_only_feature_array=False, debug=False, write=True):
    """Run the random-forest step of the pipeline.

    :param yamlfile: Path to the YAML parameter file used to configure the IPL.
    :param logging: When True, log to '<resultfolder>/random_forest.log';
        otherwise start the default logger.
    :param make_only_feature_array: When True, only build the feature arrays
        and skip classification (no result is produced or written).
    :param debug: Passed through to random_forest().
    :param write: When True, write the classification result to
        '<resultfolder>/<resultsfile>'.
    """
    ipl = IPL(yaml=yamlfile)
    ipl.set_indent(1)
    params = rdict(data=ipl.get_params())
    if logging:
        ipl.startlogger(filename=params['resultfolder'] + 'random_forest.log',
                        type='w', name='RandomForest')
    else:
        ipl.startlogger()

    try:
        if make_only_feature_array:
            make_feature_arrays(ipl)
        else:
            result = IPL()
            result['result'], result['evaluation'] = random_forest(ipl, debug=debug)

            # BUGFIX: 'result' only exists on this branch. Previously the
            # write happened unconditionally after the if/else, raising a
            # NameError when make_only_feature_array and write were both True
            # (masked by the bare 'except:').
            if write:
                result.write(filepath=params['resultfolder'] + params['resultsfile'])

        ipl.logging('')
        ipl.stoplogger()

    except Exception:
        # Narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt are
        # not swallowed by the error handler.
        ipl.errout('Unexpected error')
def run_remove_small_objects(yamlfile):
    """Run the remove-small-objects step of the pipeline.

    NOTE(review): this function is defined twice in this file with an
    identical body; at import time the later definition silently shadows the
    earlier one — the duplicate should be deleted.

    :param yamlfile: Path to the YAML parameter file used to configure the IPL.
    """
    ipl = IPL(
        yaml=yamlfile,
        yamlspec={'path': 'datafolder', 'filename': 'labelsfile', 'skeys': 'labelsname'},
        recursive_search=True,
        nodata=True
    )
    # Set indentation of the logging
    ipl.set_indent(1)
    params = ipl.get_params()
    ipl.startlogger(filename=params['resultfolder'] + 'remove_small_objects.log',
                    type='w', name='RemoveSmallObjects')

    try:
        ipl.logging('\nipl datastructure: \n\n{}', ipl.datastructure2string(maxdepth=3))

        remove_small_objects(ipl)

        ipl.logging('\nFinal datastructure: \n\n{}', ipl.datastructure2string(maxdepth=3))

        ipl.logging('')
        ipl.stoplogger()

    except Exception:
        # Narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt are
        # not swallowed by the error handler.
        ipl.errout('Unexpected error')
def run_random_forest(yamlfile, logging=True, make_only_feature_array=False, debug=False, write=True):
    """Run the random-forest step of the pipeline.

    NOTE(review): this function is defined twice in this file with an
    identical body; at import time the later definition silently shadows the
    earlier one — the duplicate should be deleted.

    :param yamlfile: Path to the YAML parameter file used to configure the IPL.
    :param logging: When True, log to '<resultfolder>/random_forest.log';
        otherwise start the default logger.
    :param make_only_feature_array: When True, only build the feature arrays
        and skip classification (no result is produced or written).
    :param debug: Passed through to random_forest().
    :param write: When True, write the classification result to
        '<resultfolder>/<resultsfile>'.
    """
    ipl = IPL(yaml=yamlfile)
    ipl.set_indent(1)
    params = rdict(data=ipl.get_params())
    if logging:
        ipl.startlogger(filename=params['resultfolder'] + 'random_forest.log',
                        type='w', name='RandomForest')
    else:
        ipl.startlogger()

    try:
        if make_only_feature_array:
            make_feature_arrays(ipl)
        else:
            result = IPL()
            result['result'], result['evaluation'] = random_forest(ipl, debug=debug)

            # BUGFIX: 'result' only exists on this branch. Previously the
            # write happened unconditionally after the if/else, raising a
            # NameError when make_only_feature_array and write were both True
            # (masked by the bare 'except:').
            if write:
                result.write(filepath=params['resultfolder'] + params['resultsfile'])

        ipl.logging('')
        ipl.stoplogger()

    except Exception:
        # Narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt are
        # not swallowed by the error handler.
        ipl.errout('Unexpected error')
) params = features.get_params() thisparams = params['random_forest'] features.startlogger(filename=params['resultfolder'] + 'random_forest.log', type='w') try: # Copy the script file and the parameters to the scriptsfolder copy(inspect.stack()[0][1], params['scriptsfolder']) copy(yamlfile, params['scriptsfolder'] + 'random_forest.parameters.yml') # Write script and parameters to the logfile features.code2log(inspect.stack()[0][1]) features.logging('') features.yaml2log() features.logging('') features.logging('\nfeatures datastructure: \n---\n{}', features.datastructure2string(maxdepth=2)) result = random_forest_iteration(features) result.write(filepath=params['intermedfolder'] + params['randforestfile']) features.logging('\nFinal dictionary structure:\n---\n{}', features.datastructure2string(maxdepth=2)) features.logging('') features.stoplogger() except: raise features.errout('Unexpected error')
type='w') try: # Copy the script file and the parameters to the scriptsfolder copy(inspect.stack()[0][1], params['scriptsfolder']) copy(yamlfile, params['scriptsfolder'] + 'random_forest.parameters.yml') # Write script and parameters to the logfile features.code2log(inspect.stack()[0][1]) features.logging('') features.yaml2log() features.logging('') features.logging('\nfeatures datastructure: \n---\n{}', features.datastructure2string(maxdepth=2)) result = random_forest_iteration(features) result.write(filepath=params['intermedfolder'] + params['randforestfile']) features.logging('\nFinal dictionary structure:\n---\n{}', features.datastructure2string(maxdepth=2)) features.logging('') features.stoplogger() except: raise features.errout('Unexpected error')
tkeys='largeobj', castkey=None ) params = hfp.get_params() hfp.startlogger(filename=params['resultfolder'] + 'merge_adjacent_objects.log', type='w') try: # Copy the script file and the parameters to the scriptsfolder copy(inspect.stack()[0][1], params['scriptsfolder']) copy(yamlfile, params['scriptsfolder'] + 'merge_adjacent_objects.parameters.yml') # Write script and parameters to the logfile hfp.code2log(inspect.stack()[0][1]) hfp.logging('') hfp.yaml2log() hfp.logging('') hfp.logging('\nhfp datastructure: \n\n{}', hfp.datastructure2string(maxdepth=1)) merge_adjacent_objects(hfp) hfp.write(filepath=params['intermedfolder'] + params['largeobjmfile']) hfp.logging('\nFinal dictionary structure:\n---\n{}', hfp.datastructure2string()) hfp.logging('') hfp.stoplogger() except: hfp.errout('Unexpected error')
integrate=True) ipl.data_from_file(params['intermedfolder'] + params['locmaxborderfile'], skeys=(params['locmaxbordernames'][0], params['locmaxbordernames'][2]), recursive_search=True, integrate=True) try: # Copy the script file and the parameters to the scriptsfolder copy(inspect.stack()[0][1], params['scriptsfolder']) copy(yamlfile, params['scriptsfolder'] + 'paths_within_labels.parameters.yml') # Write script and parameters to the logfile ipl.code2log(inspect.stack()[0][1]) ipl.logging('') ipl.yaml2log() ipl.logging('') ipl.logging('\nipl datastructure: \n\n{}', ipl.datastructure2string(maxdepth=3)) paths = paths_within_labels_image_iteration(ipl) paths.write(filepath=params['intermedfolder'] + params['pathstruefile']) ipl.logging('\nFinal dictionary structure:\n---\n{}', ipl.datastructure2string()) ipl.logging('') ipl.stoplogger() except: ipl.errout('Unexpected error')
'localmax_on_disttransf.log', type='w') try: # Copy the script file and the parameters to the scriptsfolder copy(inspect.stack()[0][1], params['scriptsfolder']) copy(yamlfile, params['scriptsfolder'] + 'localmax_on_disttransf.parameters.yml') # Write script and parameters to the logfile hfp.code2log(inspect.stack()[0][1]) hfp.logging('') hfp.yaml2log() hfp.logging('') hfp.logging('\nhfp datastructure: \n\n{}', hfp.datastructure2string(maxdepth=1)) localmax_on_disttransf(hfp, ('disttransf', 'disttransfm')) hfp.write(filepath=params['intermedfolder'] + params['locmaxfile']) hfp.logging('\nFinal dictionary structure:\n---\n{}', hfp.datastructure2string()) hfp.logging('') hfp.stoplogger() except: hfp.errout('Unexpected error')
raise IOError( 'remove_small_objects: Error: Intermedfolder already exists!' ) # Copy the script file and the parameters to the scriptsfolder copy(inspect.stack()[0][1], params['scriptsfolder']) copy(yamlfile, params['scriptsfolder'] + 'remove_small_objects.parameters.yml') # Write script and parameters to the logfile ipl.code2log(inspect.stack()[0][1]) ipl.logging('') ipl.yaml2log() ipl.logging('') ipl.logging('\nipl datastructure: \n\n{}', ipl.datastructure2string(maxdepth=3)) remove_small_objects(ipl) ipl.logging('\nFinal datastructure: \n\n{}', ipl.datastructure2string(maxdepth=3)) ipl.write(filepath=params['intermedfolder'] + params['largeobjfile']) ipl.logging('') ipl.stoplogger() except: ipl.errout('Unexpected error')
# hfp.pop('result_true') hfp.pop('true') hfp.pop('false') hfp.logging('hfp datastructure:\n---\n{}---', hfp.datastructure2string(maxdepth=2)) print hfp['result.6720_13067.8'] y = [] for k, v in hfp.iteritems(): if v: try: y.append(v) x = range(0, len(v)) plt.plot(x, v) # plt.show() lab.savefig(params['intermedfolder'] + 'plots/' + k + '.png') plt.clf() except ValueError: pass except: hfp.errout('Unexpected error', traceback) hfp.stoplogger()
# Done: This is total bullshit! I need to iterate over all paths and extract the region features individually! # Store all feature images in here disttransf_images = IPL( yaml=yamlfile, yamlspec={'path': 'intermedfolder', 'filename': 'locmaxborderfile', 'skeys': {'locmaxbordernames': (2, 3)}}, recursive_search=True ) feature_images = IPL( yaml=yamlfile, yamlspec={'path': 'datafolder', 'filename': 'rawdatafile', 'skeys': 'rawdataname'}, recursive_search=True ) ipl.logging('\nDisttransf images datastructure: \n---\n{}', disttransf_images.datastructure2string(maxdepth=4)) ipl.logging('\nFeature images datastructure: \n---\n{}', feature_images.datastructure2string(maxdepth=4)) feature_images.astype(np.float32) # features = IPL() features = features_of_paths_image_iteration(ipl, disttransf_images, feature_images) features.write(filepath=params['intermedfolder'] + params['featurefile']) ipl.logging('\nFinal datastructure:\n---\n{}', features.datastructure2string()) ipl.logging('') ipl.stoplogger() except ValueError: ipl.errout('Unexpected error', traceback)
# hfp.pop('result_true') hfp.pop('true') hfp.pop('false') hfp.pop('raw') hfp.logging('hfp datastructure:\n---\n{}---', hfp.datastructure2string(maxdepth=2)) # print hfp['result.6720_13067.8'] y = [] for k, v in hfp.iteritems(): if v.values()[0]: try: # y.append(v) x = range(0, len(v.values()[0])) y = np.swapaxes(np.array(v.values()), 0, 1) plt.plot(x, y) # plt.show() lab.savefig(params['intermedfolder'] + 'plots/' + k + '.png') plt.clf() except ValueError: pass except: hfp.errout('Unexpected error', traceback) hfp.stoplogger()
# # feature_images.data_from_file(params['']) # # hfp.logging('\ndisttransf_images datastructure: \n\n{}', disttransf_images.datastructure2string(maxdepth=1)) # # # This is for the path images # paths = IPL() # # # Add the feature images to the paths dictionary # # paths.set_data_dict(feature_images.get_data(), append=True) # # # Create the path images for feature accumulator # make_path_images(paths, hfp, disttransf_images['disttransf'].shape) # # paths.write(filepath='/media/julian/Daten/neuraldata/cremi_2016/test.h5') # # # This is for the features # features = IPL() # # # Get features along the paths # make_features_paths(paths, disttransf_images, feature_images, features) # # hfp.logging('\nCalculated features: \n-------------------\n{}-------------------\n', features.datastructure2string()) # hfp.logging('Possible features: \n{}', features['paths_false', 'disttransfm'].supportedFeatures()) features.write(filepath=params["intermedfolder"] + params["featurefile"]) hfp.logging("") hfp.stoplogger() except ValueError: hfp.errout("Unexpected error", traceback)