def run_paths_of_merges(yamlfile, logging=True):
    """Run the paths_of_merges processing step.

    Loads parameters from *yamlfile* into an IPL instance, optionally starts a
    file logger in the configured result folder, runs ``paths_of_merges``, and
    shuts the logger down. Any failure is reported through ``ipl.errout``.

    :param yamlfile: path to the YAML parameter file
    :param logging: when True, log to '<resultfolder>paths_of_merges.log'
        (mode 'w'); otherwise start the default logger
    """
    ipl = IPL(yaml=yamlfile)
    ipl.set_indent(1)
    params = rdict(data=ipl.get_params())
    if logging:
        ipl.startlogger(
            filename=params['resultfolder'] + 'paths_of_merges.log',
            type='w', name='PathsOfMerges'
        )
    else:
        ipl.startlogger()

    try:
        # # Copy the script file and the parameters to the scriptsfolder
        # copy(inspect.stack()[0][1], params['scriptsfolder'])
        # copy(yamlfile, params['scriptsfolder'] + 'paths_of_merges.parameters.yml')

        # ipl.logging('\nInitial datastructure: \n\n{}', ipl.datastructure2string(maxdepth=3))

        paths_of_merges(ipl, params['debug'])

        # ipl.logging('\nFinal datastructure: \n\n{}', ipl.datastructure2string(maxdepth=3))

        # ipl.write(filepath=params['intermedfolder'] + params['largeobjfile'])

        ipl.logging('')
        ipl.stoplogger()

    except Exception:
        # Narrowed from a bare 'except:' so KeyboardInterrupt / SystemExit
        # are not swallowed by the error handler.
        ipl.errout('Unexpected error')
def run_random_forest(yamlfile, logging=True, make_only_feature_array=False,
                      debug=False, write=True):
    """Run the random-forest classification step.

    Loads parameters from *yamlfile* into an IPL instance, optionally starts a
    file logger, and either only builds the feature arrays or runs the random
    forest and (optionally) writes the result file.

    :param yamlfile: path to the YAML parameter file
    :param logging: when True, log to '<resultfolder>random_forest.log'
        (mode 'w'); otherwise start the default logger
    :param make_only_feature_array: when True, only call make_feature_arrays
        and skip classification (nothing is written in this case)
    :param debug: forwarded to random_forest
    :param write: when True, write the result to
        '<resultfolder><resultsfile>'
    """
    ipl = IPL(yaml=yamlfile)
    ipl.set_indent(1)
    params = rdict(data=ipl.get_params())
    if logging:
        ipl.startlogger(
            filename=params['resultfolder'] + 'random_forest.log',
            type='w', name='RandomForest'
        )
    else:
        ipl.startlogger()

    try:
        # # Copy the script file and the parameters to the scriptsfolder
        # copy(inspect.stack()[0][1], params['scriptsfolder'])
        # copy(yamlfile, params['scriptsfolder'] + 'random_forest.parameters.yml')

        # ipl.logging('\nInitial datastructure: \n\n{}', ipl.datastructure2string(maxdepth=3))

        if make_only_feature_array:
            make_feature_arrays(ipl)
        else:
            result = IPL()
            result['result'], result['evaluation'] = random_forest(ipl, debug=debug)

            # ipl.logging('\nFinal datastructure: \n\n{}', ipl.datastructure2string(maxdepth=3))

            # BUGFIX: 'result' is only bound in this branch; writing it at the
            # outer level raised NameError when make_only_feature_array=True
            # and write=True (the error was then masked by the bare except).
            if write:
                result.write(filepath=params['resultfolder'] + params['resultsfile'])

        ipl.logging('')
        ipl.stoplogger()

    except Exception:
        # Narrowed from a bare 'except:' so KeyboardInterrupt / SystemExit
        # are not swallowed by the error handler.
        ipl.errout('Unexpected error')
def run_remove_small_objects(yamlfile):
    """Run the remove_small_objects processing step.

    Loads the label data referenced by *yamlfile* (datafolder/labelsfile,
    skeys from 'labelsname', recursive search, no data loaded yet) into an
    IPL instance, logs the data structure, runs ``remove_small_objects``,
    and shuts the logger down.

    :param yamlfile: path to the YAML parameter file
    """
    ipl = IPL(
        yaml=yamlfile,
        yamlspec={
            'path': 'datafolder',
            'filename': 'labelsfile',
            'skeys': 'labelsname'
        },
        recursive_search=True,
        nodata=True
    )

    # Set indentation of the logging
    ipl.set_indent(1)

    params = ipl.get_params()
    ipl.startlogger(
        filename=params['resultfolder'] + 'remove_small_objects.log',
        type='w', name='RemoveSmallObjects'
    )

    try:
        # # Copy the script file and the parameters to the scriptsfolder
        # copy(inspect.stack()[0][1], params['scriptsfolder'])
        # copy(yamlfile, params['scriptsfolder'] + 'remove_small_objects.parameters.yml')

        ipl.logging('\nipl datastructure: \n\n{}', ipl.datastructure2string(maxdepth=3))

        remove_small_objects(ipl)

        ipl.logging('\nFinal datastructure: \n\n{}', ipl.datastructure2string(maxdepth=3))

        # ipl.write(filepath=params['intermedfolder'] + params['largeobjfile'])

        ipl.logging('')
        ipl.stoplogger()

    except Exception:
        # Narrowed from a bare 'except:' so KeyboardInterrupt / SystemExit
        # are not swallowed by the error handler.
        ipl.errout('Unexpected error')
return result if __name__ == '__main__': resultsfolder = '/mnt/localdata02/jhennies/neuraldata/results/cremi_2016/161110_random_forest_of_paths/' yamlfile = resultsfolder + '/parameters.yml' features = IPL(yaml=yamlfile, yamlspec={ 'path': 'intermedfolder', 'filename': 'featurefile' }) params = features.get_params() thisparams = params['random_forest'] features.startlogger(filename=params['resultfolder'] + 'random_forest.log', type='w') try: # Copy the script file and the parameters to the scriptsfolder copy(inspect.stack()[0][1], params['scriptsfolder']) copy(yamlfile, params['scriptsfolder'] + 'random_forest.parameters.yml') # Write script and parameters to the logfile features.code2log(inspect.stack()[0][1]) features.logging('') features.yaml2log() features.logging('')
from hdf5_image_processing import Hdf5ImageProcessingLib as IPL
import os
import numpy as np

__author__ = 'jhennies'


if __name__ == '__main__':

    # The parameter file lives next to this script.
    yamlfile = os.path.dirname(os.path.abspath(__file__)) + '/parameters.yml'

    ipl = IPL(yaml=yamlfile)
    ipl.logging('Parameters: {}', ipl.get_params())
    params = ipl.get_params()

    # Load the raw channel from the CREMI sample A crop.
    source_file = params['datafolder'] + 'cremi.splA.raw_neurons.crop.h5'
    ipl.data_from_file(filepath=source_file, skeys='raw', tkeys='raw')

    # Cut out the sub-volume [10:110, 200:712, 200:712].
    crop_region = np.s_[10:110, 200:712, 200:712]
    ipl.crop_bounding_rect(crop_region, keys='raw')

    # Store the cropped volume under a name encoding the crop bounds.
    target_file = params['datafolder'] + 'cremi.splA.raw_neurons.crop.crop_10-200-200_110-712-712.h5'
    ipl.write(filepath=target_file)
if __name__ == '__main__': yamlfile = os.path.dirname(os.path.abspath(__file__)) + '/parameters.yml' hfp = IPL(yaml=yamlfile, yamlspec={ 'path': 'intermedfolder', 'filename': 'locmaxborderfile', 'skeys': { 'locmaxbordernames': (2, 3) } }, tkeys=('disttransf', 'disttransfm'), castkey=None) params = hfp.get_params() thisparams = params['localmax_on_disttransf'] hfp.startlogger(filename=params['resultfolder'] + 'localmax_on_disttransf.log', type='w') try: # Copy the script file and the parameters to the scriptsfolder copy(inspect.stack()[0][1], params['scriptsfolder']) copy(yamlfile, params['scriptsfolder'] + 'localmax_on_disttransf.parameters.yml') # Write script and parameters to the logfile hfp.code2log(inspect.stack()[0][1]) hfp.logging('') hfp.yaml2log()
ipl[kl].setlogger(ipl.getlogger()) ipl[kl] = accumulate_small_objects(ipl[kl], k, thisparams) if __name__ == '__main__': resultsfolder = '/mnt/localdata02/jhennies/neuraldata/results/cremi_2016/161110_random_forest_of_paths/' yamlfile = resultsfolder + '/parameters.yml' ipl = IPL(yaml=yamlfile, yamlspec={ 'path': 'datafolder', 'filename': 'labelsfile' }) params = ipl.get_params() ipl.startlogger(filename=params['resultfolder'] + 'remove_small_objects.log', type='a') try: # Create folder for scripts if not os.path.exists(params['scriptsfolder']): os.makedirs(params['scriptsfolder']) else: if params['overwriteresults']: ipl.logging( 'remove_small_objects: Warning: Scriptsfolder already exists and content will be overwritten...\n' ) else: