# Random-forest pipeline step: load the feature file, run the random-forest
# iteration, and write the result to the intermediate folder.
#
# NOTE(review): this fragment was whitespace-collapsed onto a single line and is
# truncated at both ends — the head of the IPL(...) call and the closing
# `except` handler are not visible. Both were reconstructed from the parallel
# (near-duplicate) random-forest block elsewhere in this file; TODO confirm
# against the original script. Consider deduplicating the two copies.
features = IPL(
    yaml=yamlfile,
    yamlspec={'path': 'intermedfolder', 'filename': 'featurefile'}
)
params = features.get_params()
thisparams = params['random_forest']
features.startlogger(filename=params['resultfolder'] + 'random_forest.log', type='w')

try:

    # Copy the script file and the parameters to the scriptsfolder
    copy(inspect.stack()[0][1], params['scriptsfolder'])
    copy(yamlfile, params['scriptsfolder'] + 'random_forest.parameters.yml')
    # Write script and parameters to the logfile
    features.code2log(inspect.stack()[0][1])
    features.logging('')
    features.yaml2log()
    features.logging('')

    features.logging('\nfeatures datastructure: \n---\n{}',
                     features.datastructure2string(maxdepth=2))

    result = random_forest_iteration(features)

    result.write(filepath=params['intermedfolder'] + params['randforestfile'])

    features.logging('\nFinal dictionary structure:\n---\n{}',
                     features.datastructure2string(maxdepth=2))
    features.logging('')

except:
    # NOTE(review): the original handler is truncated from this fragment;
    # re-raise so failures are not silently swallowed — TODO confirm original.
    raise
# Merge-adjacent-objects pipeline step: load the large-objects dataset, merge
# adjacent objects, and write the merged result to the intermediate folder.
#
# NOTE(review): this fragment was whitespace-collapsed onto a single line;
# restored to conventional formatting. The body of the trailing bare `except:`
# is truncated from the source, so it is completed with a re-raise — TODO
# confirm the original handler.
hfp = IPL(
    yaml=yamlfile,
    yamlspec={'path': 'datafolder', 'filename': 'largeobjfile', 'skeys': 'largeobjname'},
    tkeys='largeobj',
    castkey=None
)
params = hfp.get_params()
hfp.startlogger(filename=params['resultfolder'] + 'merge_adjacent_objects.log', type='w')

try:

    # Copy the script file and the parameters to the scriptsfolder
    copy(inspect.stack()[0][1], params['scriptsfolder'])
    copy(yamlfile, params['scriptsfolder'] + 'merge_adjacent_objects.parameters.yml')
    # Write script and parameters to the logfile
    hfp.code2log(inspect.stack()[0][1])
    hfp.logging('')
    hfp.yaml2log()
    hfp.logging('')

    hfp.logging('\nhfp datastructure: \n\n{}', hfp.datastructure2string(maxdepth=1))

    merge_adjacent_objects(hfp)

    hfp.write(filepath=params['intermedfolder'] + params['largeobjmfile'])

    hfp.logging('\nFinal dictionary structure:\n---\n{}', hfp.datastructure2string())
    hfp.logging('')
    hfp.stoplogger()

except:
    # NOTE(review): handler truncated in the source; re-raise so errors are
    # not silently swallowed — TODO confirm original handler.
    raise
# Paths-within-labels pipeline step: pull local maxima (and border maxima) into
# the already-initialized `ipl` object, compute paths within labels, and write
# them to the intermediate folder.
#
# NOTE(review): this fragment was whitespace-collapsed onto a single line;
# restored to conventional formatting. `ipl` is initialized before this
# fragment (not visible here), and the body of the trailing bare `except:` is
# truncated, so it is completed with a re-raise — TODO confirm the original.
ipl.data_from_file(
    params['intermedfolder'] + params['locmaxfile'],
    skeys=params['locmaxnames'][0],
    recursive_search=True,
    integrate=True
)
ipl.data_from_file(
    params['intermedfolder'] + params['locmaxborderfile'],
    skeys=(params['locmaxbordernames'][0], params['locmaxbordernames'][2]),
    recursive_search=True,
    integrate=True
)

try:

    # Copy the script file and the parameters to the scriptsfolder
    copy(inspect.stack()[0][1], params['scriptsfolder'])
    copy(yamlfile, params['scriptsfolder'] + 'paths_within_labels.parameters.yml')
    # Write script and parameters to the logfile
    ipl.code2log(inspect.stack()[0][1])
    ipl.logging('')
    ipl.yaml2log()
    ipl.logging('')

    ipl.logging('\nipl datastructure: \n\n{}', ipl.datastructure2string(maxdepth=3))

    paths = paths_within_labels_image_iteration(ipl)

    paths.write(filepath=params['intermedfolder'] + params['pathstruefile'])

    ipl.logging('\nFinal dictionary structure:\n---\n{}', ipl.datastructure2string())
    ipl.logging('')
    ipl.stoplogger()

except:
    # NOTE(review): handler truncated in the source; re-raise so errors are
    # not silently swallowed — TODO confirm original handler.
    raise
# Local-maxima-on-distance-transform pipeline step: load the distance
# transforms, compute local maxima on them, and write the result to the
# intermediate folder.
#
# NOTE(review): this fragment was whitespace-collapsed onto a single line and
# its head is truncated — only the trailing arguments of the IPL(...) call are
# visible (`tkeys=..., castkey=None)`). The call head below is reconstructed
# from the parallel blocks in this file (yamlspec not visible) — TODO confirm.
hfp = IPL(
    yaml=yamlfile,
    # TODO(review): yamlspec argument truncated in the source — confirm and restore.
    tkeys=('disttransf', 'disttransfm'),
    castkey=None
)
params = hfp.get_params()
thisparams = params['localmax_on_disttransf']
hfp.startlogger(filename=params['resultfolder'] + 'localmax_on_disttransf.log', type='w')

try:

    # Copy the script file and the parameters to the scriptsfolder
    copy(inspect.stack()[0][1], params['scriptsfolder'])
    copy(yamlfile, params['scriptsfolder'] + 'localmax_on_disttransf.parameters.yml')
    # Write script and parameters to the logfile
    hfp.code2log(inspect.stack()[0][1])
    hfp.logging('')
    hfp.yaml2log()
    hfp.logging('')

    hfp.logging('\nhfp datastructure: \n\n{}', hfp.datastructure2string(maxdepth=1))

    localmax_on_disttransf(hfp, ('disttransf', 'disttransfm'))

    hfp.write(filepath=params['intermedfolder'] + params['locmaxfile'])

    hfp.logging('\nFinal dictionary structure:\n---\n{}', hfp.datastructure2string())
    hfp.logging('')
    hfp.stoplogger()

except:
    # NOTE(review): the `except` clause for this `try` is not visible in the
    # source fragment; completed with a re-raise — TODO confirm original.
    raise
# Remove-small-objects pipeline step: guard against clobbering an existing
# intermediate folder, then remove small objects and write the large objects
# to the intermediate folder.
#
# NOTE(review): this fragment was whitespace-collapsed onto a single line and
# begins at a dangling `else:` — the matching `if` is truncated. It is
# reconstructed below from the customary pattern in these scripts (create the
# intermediate folder when it does not exist) — TODO confirm against the
# original script.
import os

if not os.path.exists(params['intermedfolder']):
    # TODO(review): reconstructed branch — original condition/body not visible.
    os.makedirs(params['intermedfolder'])
else:
    if params['overwriteresults']:
        ipl.logging(
            'remove_small_objects: Warning: Intermedfolder already exists and content will be overwritten...\n'
        )
    else:
        raise IOError(
            'remove_small_objects: Error: Intermedfolder already exists!'
        )

# Copy the script file and the parameters to the scriptsfolder
copy(inspect.stack()[0][1], params['scriptsfolder'])
copy(yamlfile, params['scriptsfolder'] + 'remove_small_objects.parameters.yml')
# Write script and parameters to the logfile
ipl.code2log(inspect.stack()[0][1])
ipl.logging('')
ipl.yaml2log()
ipl.logging('')

ipl.logging('\nipl datastructure: \n\n{}', ipl.datastructure2string(maxdepth=3))

remove_small_objects(ipl)

ipl.logging('\nFinal datastructure: \n\n{}', ipl.datastructure2string(maxdepth=3))

ipl.write(filepath=params['intermedfolder'] + params['largeobjfile'])
ipl.logging('')
# Random-forest pipeline step: load the feature file, run the random-forest
# iteration, and write the result to the intermediate folder.
#
# NOTE(review): this fragment was whitespace-collapsed onto a single line;
# restored to conventional formatting. It is a near-duplicate (superset) of an
# earlier fragment in this file — consider deduplicating. The body of the
# trailing bare `except:` is truncated from the source, so it is completed
# with a re-raise — TODO confirm the original handler.
features = IPL(
    yaml=yamlfile,
    yamlspec={'path': 'intermedfolder', 'filename': 'featurefile'}
)
params = features.get_params()
thisparams = params['random_forest']
features.startlogger(filename=params['resultfolder'] + 'random_forest.log', type='w')

try:

    # Copy the script file and the parameters to the scriptsfolder
    copy(inspect.stack()[0][1], params['scriptsfolder'])
    copy(yamlfile, params['scriptsfolder'] + 'random_forest.parameters.yml')
    # Write script and parameters to the logfile
    features.code2log(inspect.stack()[0][1])
    features.logging('')
    features.yaml2log()
    features.logging('')

    features.logging('\nfeatures datastructure: \n---\n{}',
                     features.datastructure2string(maxdepth=2))

    result = random_forest_iteration(features)

    result.write(filepath=params['intermedfolder'] + params['randforestfile'])

    features.logging('\nFinal dictionary structure:\n---\n{}',
                     features.datastructure2string(maxdepth=2))
    features.logging('')
    features.stoplogger()

except:
    # NOTE(review): handler truncated in the source; re-raise so errors are
    # not silently swallowed — TODO confirm original handler.
    raise