class config(HasTraits): uuid = traits.Str(desc="UUID") # Directories working_dir = traits.Directory( mandatory=True, desc="Location of the Nipype working directory") sink_dir = traits.Directory( mandatory=True, desc="Location where the BIP will store the results") crash_dir = traits.Directory(mandatory=False, desc="Location to store crash files") save_script_only = traits.Bool(False) # Execution run_using_plugin = traits.Bool( False, usedefault=True, desc="True to run pipeline with plugin, False to run serially") plugin = traits.Enum("PBS", "MultiProc", "SGE", "Condor", usedefault=True, desc="plugin to use, if run_using_plugin=True") plugin_args = traits.Dict({"qsub_args": "-q many"}, usedefault=True, desc='Plugin arguments.') test_mode = traits.Bool( False, mandatory=False, usedefault=True, desc='Affects whether where and if the workflow keeps its \ intermediary files. True to keep intermediary files. ' ) timeout = traits.Float(14.0) # DataGrabber datagrabber = traits.Instance(Data, ())
class EELSConfig(t.HasTraits):
    """Traits preferences for EELS model fitting: GOS table location,
    fine-structure fitting options, and edge-separation thresholds.

    Fix: corrected typos in user-facing ``label``/``desc`` strings
    ("lenght" -> "length", "lower that" -> "lower than", "disable" ->
    "disabled", etc.).  Trait names, types and defaults are unchanged.
    """

    # Location of the Generalised Oscillator Strength tables needed to
    # build EELS edge components; default is guessed at class creation.
    eels_gos_files_path = t.Directory(
        guess_gos_path(),
        label='GOS directory',
        desc='The GOS files are required to create the EELS edge components')
    fine_structure_width = t.CFloat(
        30,
        label='Fine structure length',
        desc='The default length of the fine structure from the edge onset')
    fine_structure_active = t.CBool(
        False,
        label='Enable fine structure',
        desc="If enabled, the regions of the EELS spectrum defined as fine "
             "structure will be fitted with a spline. Please note that "
             "enabling this feature only makes sense when the model is "
             "convolved to account for multiple scattering")
    # 0 = no smoothing, 1 = maximally smooth spline.
    fine_structure_smoothing = t.Range(
        0., 1., value=0.3,
        label='Fine structure smoothing factor',
        desc='The lower the value the smoother the fine structure spline fit')
    synchronize_cl_with_ll = t.CBool(False)
    preedge_safe_window_width = t.CFloat(
        2,
        label='Pre-onset region (in eV)',
        desc='Some functions need to define the regions between two '
             'ionisation edges. Due to limited energy resolution or chemical '
             'shift, the region is limited on its higher energy side by '
             'the next ionisation edge onset minus an offset defined by this '
             'parameter')
    min_distance_between_edges_for_fine_structure = t.CFloat(
        0,
        label='Minimum distance between edges',
        desc='When automatically setting the fine structure energy regions, '
             'the fine structure of an EELS edge component is automatically '
             'disabled if the next ionisation edge onset distance to the '
             'higher energy side of the fine structure region is lower than '
             'the value of this parameter')
class config(BaseWorkflowConfig): uuid = traits.Str(desc="UUID") # Directories sink_dir = traits.Directory(mandatory=True, desc="Location where the BIP will store the results") save_script_only = traits.Bool(False) # DataGrabber datagrabber = traits.Instance(Data, ())
def edition_widget(engine, environment):
    ''' Edition GUI for FSL config - see
    :class:`~capsul.qt_gui.widgets.settings_editor.SettingsEditor`
    '''
    from soma.qt_gui.controller_widget import ScrollControllerWidget
    from soma.controller import Controller
    import types
    import traits.api as traits

    def validate_config(widget):
        # Persist the edited controller values into the settings session,
        # mapping traits.Undefined to None for storage.
        ctrl = widget.controller_widget.controller
        with widget.engine.settings as session:
            stored = session.config('fsl', widget.environment)
            values = {'config_id': 'fsl'}
            for name in ('directory', 'config', 'prefix'):
                val = getattr(ctrl, name)
                values[name] = None if val is traits.Undefined else val
            if stored is None:
                session.new_config('fsl', widget.environment, values)
            else:
                for name, val in values.items():
                    if name != 'config_id':
                        setattr(stored, name, val)

    # Build the editable controller exposing the three FSL settings.
    controller = Controller()
    controller.add_trait(
        'directory',
        traits.Directory(traits.Undefined,
                         desc='Directory where FSL is installed'))
    controller.add_trait(
        'config',
        traits.File(traits.Undefined, output=False,
                    desc='Parameter to specify the fsl.sh path'))
    controller.add_trait(
        'prefix',
        traits.String(traits.Undefined,
                      desc='Prefix to add to FSL commands'))

    # Pre-populate the controller from any existing configuration.
    conf = engine.settings.select_configurations(environment, {'fsl': 'any'})
    if conf:
        fconf = conf.get('capsul.engine.module.fsl', {})
        for name in ('directory', 'config', 'prefix'):
            setattr(controller, name, fconf.get(name, traits.Undefined))

    widget = ScrollControllerWidget(controller, live=True)
    widget.engine = engine
    widget.environment = environment
    # validate_config becomes the widget's accept/apply hook.
    widget.accept = types.MethodType(validate_config, widget)
    return widget
class config(HasTraits):
    """Normalization workflow configuration (Python 2 era code: note the
    ``print`` statement in the button handler)."""

    uuid = traits.Str(desc="UUID")  # unique id for this workflow run
    # Directories
    working_dir = Directory(mandatory=True, desc="Location of the Nipype working directory")
    base_dir = Directory(os.path.abspath('.'),mandatory=True,
                         desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(mandatory=True, desc="Location where the BIP will store the results")
    crash_dir = Directory(mandatory=False, desc="Location to store crash files")
    # Execution
    run_using_plugin = Bool(False, usedefault=True,
                            desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS", "MultiProc", "SGE", "Condor",
                         usedefault=True,
                         desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"}, usedefault=True,
                              desc='Plugin arguments.')
    test_mode = Bool(False, mandatory=False, usedefault=True,
                     desc='Affects whether where and if the workflow keeps its \
intermediary files. True to keep intermediary files. ')
    timeout = traits.Float(14.0)
    # Subjects
    datagrabber = traits.Instance(Data, ())  # input file source (project type)
    #Normalization
    norm_template = traits.File(mandatory=True,desc='Template to warp to')
    use_nearest = traits.Bool(False,desc="use nearest neighbor interpolation")
    do_segment = traits.Bool(True)
    surf_dir = traits.Directory()
    moving_images_4D = traits.Bool(True, usedefault=True, desc="True if your moving image inputs \
are time series images, False if they are 3-dimensional")
    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()
    save_script_only = traits.Bool(False)
    # Buttons
    check_func_datagrabber = Button("Check")

    def _check_func_datagrabber_fired(self):
        # Fired by the "Check" button: verify that each template pattern
        # resolves to real files for every subject.
        subs = self.subjects
        template = [self.inputs_template, self.meanfunc_template,
                    self.fsl_mat_template, self.unwarped_brain_template,
                    self.affine_transformation_template,
                    self.warp_field_template]
        for s in subs:
            for t in template:
                try:
                    # single-field template: only the subject id is substituted
                    temp = glob(os.path.join(self.base_dir,t%s))
                except TypeError:
                    # two-field template: needs (subject, fwhm); retry once
                    # per smoothing kernel
                    temp = []
                    for f in self.fwhm:
                        temp.append(glob(os.path.join(self.base_dir,t%(s,f))))
                print temp
def init_default_traits(self):
    """Automatically initialise necessary parameters for nipype or capsul"""
    if 'output_directory' not in self.user_traits():
        self.add_trait(
            "output_directory",
            traits.Directory(output=False, optional=True, userlevel=1))

    # The remaining traits are only relevant for SPM-based processes.
    if self.requirement is None or 'spm' not in self.requirement:
        return

    if 'use_mcr' not in self.user_traits():
        self.add_trait("use_mcr", traits.Bool(optional=True, userlevel=1))
    if 'paths' not in self.user_traits():
        self.add_trait(
            "paths",
            InputMultiObject(traits.Directory(), optional=True, userlevel=1))
    if 'matlab_cmd' not in self.user_traits():
        self.add_trait(
            "matlab_cmd", traits_extension.Str(optional=True, userlevel=1))
    if 'mfile' not in self.user_traits():
        self.add_trait("mfile", traits.Bool(optional=True, userlevel=1))
    if 'spm_script_file' not in self.user_traits():
        spm_script_file_desc = (
            'The location of the output SPM matlab '
            'script automatically generated at the '
            'run step time (a string representing '
            'a file).')
        self.add_trait(
            "spm_script_file",
            File(output=True,
                 optional=True,
                 input_filename=True,
                 userlevel=1,
                 desc=spm_script_file_desc))
def test_trait(self):
    """Check trait helpers: defined-value detection and pathname typing."""
    # A concrete value counts as defined; empty string, None and the
    # traits.Undefined sentinel do not.
    self.assertTrue(is_trait_value_defined(5))
    for empty_value in ("", None, traits.Undefined):
        self.assertFalse(is_trait_value_defined(empty_value))

    # Only File/Directory handlers make a trait a pathname trait.
    ctrait = traits.CTrait(0)
    ctrait.handler = traits.Float()
    self.assertFalse(is_trait_pathname(ctrait))
    ctrait.handler = traits.File()
    self.assertTrue(is_trait_pathname(ctrait))
    ctrait.handler = traits.Directory()
    self.assertTrue(is_trait_pathname(ctrait))
def edition_widget(engine, environment):
    ''' Edition GUI for axon config - see
    :class:`~capsul.qt_gui.widgets.settings_editor.SettingsEditor`
    '''
    from soma.qt_gui.controller_widget import ScrollControllerWidget
    from soma.controller import Controller
    import types
    import traits.api as traits

    def validate_config(widget):
        # Store the edited values; an empty/Undefined shared_directory is
        # persisted as None.
        ctrl = widget.controller_widget.controller
        with widget.engine.settings as session:
            stored = session.config('axon', widget.environment)
            shared = ctrl.shared_directory
            values = {
                'config_id': 'axon',
                'user_level': ctrl.user_level,
                'shared_directory':
                    None if shared in (None, traits.Undefined, '') else shared,
            }
            if stored is None:
                session.new_config('axon', widget.environment, values)
            else:
                for key in ('shared_directory', 'user_level'):
                    setattr(stored, key, values[key])

    # Editable controller exposing the two axon settings.
    controller = Controller()
    controller.add_trait(
        'shared_directory',
        traits.Directory(desc='Directory where BrainVisa '
                         'shared data is installed'))
    controller.add_trait(
        'user_level',
        traits.Int(desc='0: basic, 1: advanced, 2: expert, or more. '
                   'used to display or hide some advanced features or '
                   'process parameters that would be confusing to a novice '
                   'user'))

    # Pre-populate the controller from any existing configuration.
    conf = engine.settings.select_configurations(environment, {'axon': 'any'})
    if conf:
        axon_conf = conf.get('capsul.engine.module.axon', {})
        controller.shared_directory = axon_conf.get('shared_directory',
                                                    traits.Undefined)
        controller.user_level = axon_conf.get('user_level', 0)

    widget = ScrollControllerWidget(controller, live=True)
    widget.engine = engine
    widget.environment = environment
    # validate_config becomes the widget's accept/apply hook.
    widget.accept = types.MethodType(validate_config, widget)
    return widget
class config(BaseWorkflowConfig): uuid = traits.Str(desc="UUID") # Directories base_dir = Directory( os.path.abspath('.'), mandatory=True, desc='Base directory of data. (Should be subject-independent)') sink_dir = Directory(mandatory=True, desc="Location where the BIP will store the results") # Subjects datagrabber = traits.Instance(Data, ()) #Normalization norm_template = traits.File(mandatory=True, desc='Template to warp to') use_nearest = traits.Bool(False, desc="use nearest neighbor interpolation") do_segment = traits.Bool(True) surf_dir = traits.Directory() moving_images_4D = traits.Bool(True, usedefault=True, desc="True if your moving image inputs \ are time series images, False if they are 3-dimensional" ) # Advanced Options use_advanced_options = traits.Bool() advanced_script = traits.Code() save_script_only = traits.Bool(False) # Buttons check_func_datagrabber = Button("Check") def _check_func_datagrabber_fired(self): subs = self.subjects template = [ self.inputs_template, self.meanfunc_template, self.fsl_mat_template, self.unwarped_brain_template, self.affine_transformation_template, self.warp_field_template ] for s in subs: for t in template: try: temp = glob(os.path.join(self.base_dir, t % s)) except TypeError: temp = [] for f in self.fwhm: temp.append( glob(os.path.join(self.base_dir, t % (s, f)))) print temp
def edition_widget(engine, environment):
    ''' Edition GUI for python config - see
    :class:`~capsul.qt_gui.widgets.settings_editor.SettingsEditor`

    Builds a controller widget exposing the python executable path and the
    list of directories to prepend to ``sys.path``; its ``accept`` hook
    persists the values into the engine settings.

    Fix: removed the unused ``from soma.qt_gui.qt_backend import Qt``
    import -- nothing in this function references Qt directly, and the
    import needlessly pulled in the GUI backend.
    '''
    from soma.qt_gui.controller_widget import ScrollControllerWidget
    from soma.controller import Controller
    import types
    import traits.api as traits

    def validate_config(widget):
        # Persist the edited values; an empty/Undefined executable is
        # stored as None.
        controller = widget.controller_widget.controller
        with widget.engine.settings as session:
            conf = session.config('python', widget.environment)
            values = {'config_id': 'python', 'path': controller.path}
            if controller.executable in (None, traits.Undefined, ''):
                values['executable'] = None
            else:
                values['executable'] = controller.executable
            if conf is None:
                session.new_config('python', widget.environment, values)
            else:
                for k in ('path', 'executable'):
                    setattr(conf, k, values[k])

    # Editable controller exposing the two python settings.
    controller = Controller()
    controller.add_trait('executable',
                         traits.Str(desc='Full path of the python executable'))
    controller.add_trait(
        'path',
        traits.List(traits.Directory(), [],
                    desc='paths to prepend to sys.path'))

    # Pre-populate the controller from any existing configuration.
    conf = engine.settings.select_configurations(environment,
                                                 {'python': 'any'})
    if conf:
        controller.executable = conf.get('capsul.engine.module.python',
                                         {}).get('executable',
                                                 traits.Undefined)
        controller.path = conf.get('capsul.engine.module.python',
                                   {}).get('path', [])

    widget = ScrollControllerWidget(controller, live=True)
    widget.engine = engine
    widget.environment = environment
    # validate_config becomes the widget's accept/apply hook.
    widget.accept = types.MethodType(validate_config, widget)
    return widget
def edition_widget(engine, environment):
    ''' Edition GUI for AFNI config - see
    :class:`~capsul.qt_gui.widgets.settings_editor.SettingsEditor`
    '''
    from soma.qt_gui.controller_widget import ScrollControllerWidget
    from soma.controller import Controller
    import types
    import traits.api as traits

    def validate_config(widget):
        # Persist the edited directory, mapping Undefined to None.
        ctrl = widget.controller_widget.controller
        with widget.engine.settings as session:
            stored = session.config('afni', widget.environment)
            directory = ctrl.directory
            values = {
                'config_id': 'afni',
                'directory':
                    None if directory is traits.Undefined else directory,
            }
            if stored is None:
                session.new_config('afni', widget.environment, values)
            else:
                stored.directory = values['directory']

    # Editable controller exposing the AFNI installation directory.
    controller = Controller()
    controller.add_trait(
        'directory',
        traits.Directory(traits.Undefined,
                         desc='Directory where AFNI is installed'))

    # Pre-populate the controller from any existing configuration.
    conf = engine.settings.select_configurations(environment, {'afni': 'any'})
    if conf:
        afni_conf = conf.get('capsul.engine.module.afni', {})
        controller.directory = afni_conf.get('directory', traits.Undefined)

    widget = ScrollControllerWidget(controller, live=True)
    widget.engine = engine
    widget.environment = environment
    # validate_config becomes the widget's accept/apply hook.
    widget.accept = types.MethodType(validate_config, widget)
    return widget
class FileFrame(ta.HasTraits):
    """
    Frame for file selecting.

    Presents a directory/file browser restricted to PDFs; the two buttons
    append the current selection to the module-global ``files_selected``
    list.
    """
    # Hard-coded defaults pointing at the author's local tree.
    def_file = '/home/jackdra/LQCD/Scripts/EDM_paper/graphs/FF/FullFFFit/Neutron_ContFit_a.pdf'
    def_folder = '/home/jackdra/LQCD/Scripts/EDM_paper/graphs/FF/FullFFFit/'
    file_directory = ta.Directory(def_folder)
    file_name = ta.File(def_file, filter=['*.pdf'])
    Add_File = ta.Button()
    Add_Folder = ta.Button()
    # Undo_Add = ta.Button()
    view = tua.View(
        tua.HSplit(
            tua.Item('file_directory', style='custom', springy=True),
            tua.Item('file_name', style='custom', springy=True),
            tua.VGroup(tua.Item('file_directory', springy=True),
                       tua.Item('file_name', springy=True),
                       tua.Item('Add_File', show_label=False),
                       tua.Item('Add_Folder', show_label=False)
                       # tua.Item('Undo_Add',show_label=False),
                       )),
        resizable=True,
        height=1000,
        width=1500)

    def _file_name_changed(self):
        # Keep the directory trait in sync with the selected file.
        self.file_directory = '/'.join(self.file_name.split('/')[:-1]) + '/'

    def _file_directory_changed(self):
        # When the folder changes, select its first PDF (if any).
        file_list = GetAllPDF(self.file_directory)
        if len(file_list) > 0:
            self.file_name = GetAllPDF(self.file_directory)[0]

    def _Add_File_fired(self):
        # Append the current file to the module-global selection list.
        global files_selected
        files_selected.file_list.append(self.file_name)

    def _Add_Folder_fired(self):
        # Append every PDF in the current folder to the global selection.
        global files_selected
        files_selected.file_list += GetAllPDF(self.file_directory)
class newDarkPictureDialog(traits.HasTraits):
    """Dialog for computing a new dark-reference picture from a folder of
    eagle log images; the Auto Filename button derives the output file name
    from the acquisition parameters."""
    # pathSourceImages = traits.Directory( os.path.join("\\\\192.168.16.71","Humphry","Data","eagleLogs") )
    pathSourceImages = traits.Directory( eagleLogsFolder )
    pathNewDarkPicture = traits.File( defaultDarkPictureFilename,
                                      editor = traitsui.FileEditor(dialog_style='save') )
    cancelButton = traitsui.Action(name = 'Cancel', action = '_cancel')
    okButton = traitsui.Action(name = 'Calculate dark picture', action = '_ok')
    date = traits.String( time.strftime('%Y %m %d'), desc='Date' )
    camera = traits.String( "Andor1" )
    interval = traits.Float(0.003)
    filterCountLi = traits.Int(1)
    temperature = traits.Float(-40.0)
    autoFilename = traits.Button('Auto Filename')
    traits_view = traitsui.View(
        traitsui.Group(
            traitsui.Item('pathSourceImages'),
            traitsui.Group(
                traitsui.Item('date'),
                traitsui.Item('camera'),
                traitsui.Item('interval'),
                traitsui.Item('temperature'),
                traitsui.Item('autoFilename'),
                label='Auto Filename',
                show_border=True ),
            traitsui.Item('pathNewDarkPicture') ),
        buttons = [cancelButton, okButton],
        handler = newDarkPictureDialogHandler() )

    def _autoFilename_fired(self):
        # Build "<date> - dark <camera> - interval <i> temperature <t>.npy";
        # dots are replaced with underscores so the numeric values survive
        # in the filename (done before appending the .npy extension).
        filename = self.date + ' - dark ' + self.camera + ' - '
        filename += 'interval {} '.format(self.interval)
        filename += 'temperature {} '.format(self.temperature)
        filename = filename.replace('.','_')
        # filename += '.gz'
        filename += '.npy'
        # NOTE(review): joining onto defaultDarkPictureFilename looks odd --
        # the name suggests a file, not a folder; confirm it actually holds
        # a directory path.
        path = os.path.join( defaultDarkPictureFilename, self.camera)
        if not os.path.exists( path ):
            os.mkdir( path )
        self.pathNewDarkPicture = os.path.join( path, filename )
class EELSConfig(t.HasTraits):
    """EELS preferences: location of the Generalised Oscillator Strength
    tables used to build EELS edge components."""
    eels_gos_files_path = t.Directory(
        guess_gos_path(),  # best-guess default computed at class creation
        label='GOS directory',
        desc='The GOS files are required to create the EELS edge components')
class config(HasTraits):
    """Workflow configuration.  The per-template subject traits that the
    datagrabber replaced are kept below inside a bare triple-quoted string
    acting as a block comment."""

    uuid = traits.Str(desc="UUID")  # unique id for this workflow run
    # Directories
    working_dir = Directory(mandatory=True, desc="Location of the Nipype working directory")
    base_dir = Directory(
        os.path.abspath('.'),
        mandatory=True,
        desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(mandatory=True,
                         desc="Location where the BIP will store the results")
    crash_dir = Directory(mandatory=False,
                          desc="Location to store crash files")
    # Execution
    run_using_plugin = Bool(
        False,
        usedefault=True,
        desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS", "MultiProc", "SGE", "Condor",
                         usedefault=True,
                         desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"}, usedefault=True,
                              desc='Plugin arguments.')
    test_mode = Bool(
        False, mandatory=False, usedefault=True,
        desc='Affects whether where and if the workflow keeps its \
intermediary files. True to keep intermediary files. '
    )
    timeout = traits.Float(14.0)
    # Subjects
    # NOTE(review): the triple-quoted block below is a string literal used
    # as a comment; the traits it defines were superseded by datagrabber.
    """ subjects = traits.List(traits.Str, mandatory=True, usedefault=True,
                          desc="Subject id's. Note: These MUST match the subject id's in the \
Freesurfer directory. For simplicity, the subject id's should \
also match with the location of individual functional files.")
    fwhm=traits.List(traits.Float())
    inputs_template = traits.String('%s/preproc/output/fwhm_%s/*.nii.gz')
    meanfunc_template = traits.String('%s/preproc/mean/*_mean.nii.gz')
    fsl_mat_template = traits.String('%s/preproc/bbreg/*.mat')
    unwarped_brain_template = traits.String('%s/smri/unwarped_brain/*.nii*')
    affine_transformation_template = traits.String('%s/smri/affine_transformation/*.nii*')
    warp_field_template = traits.String('%s/smri/warped_field/*.nii*')"""
    datagrabber = traits.Instance(Data, ())  # input file source (project type)
    #Normalization
    norm_template = traits.File(mandatory=True, desc='Template to warp to')
    do_segment = traits.Bool(True)
    surf_dir = traits.Directory()
    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()
    # Buttons
    check_func_datagrabber = Button("Check")

    def _check_func_datagrabber_fired(self):
        # Fired by the "Check" button: verify that each template pattern
        # resolves to real files for every subject.
        subs = self.subjects
        template = [
            self.inputs_template, self.meanfunc_template,
            self.fsl_mat_template, self.unwarped_brain_template,
            self.affine_transformation_template, self.warp_field_template
        ]
        for s in subs:
            for t in template:
                try:
                    # single-field template: only the subject id substituted
                    temp = glob(os.path.join(self.base_dir, t % s))
                except TypeError:
                    # two-field template: needs (subject, fwhm); retry once
                    # per smoothing kernel
                    temp = []
                    for f in self.fwhm:
                        temp.append(
                            glob(os.path.join(self.base_dir, t % (s, f))))
                print temp
class DiffStatus(tr.HasStrictTraits):
    """Searches a git history for the commit whose tree is closest (by
    diff line count) to a reference source directory."""
    repo = tr.Directory()   # git working copy (contains .git)
    src = tr.Directory()    # reference source tree to compare against
    commits = tr.Dict(tr.Str, tr.Instance(CommitInfo))  # short hash -> info
    first = tr.Str(desc="hash of most recent commit")
    knots = tr.Dict(tr.Instance(CommitInfo), tr.Instance(MBInfo))

    @property
    def _gitargs(self):
        # git invocation pinned to self.repo, independent of the cwd.
        gp = os.path.abspath(self.repo)
        return [
            'git',
            '--git-dir=%s' % os.path.join(gp, '.git'),
            '--work-tree=%s' % gp
        ]

    def _ask_git(self, args, *a, **kw):
        # Run git capturing stdout; returns (returncode, output bytes).
        try:
            ret = subprocess.check_output(self._gitargs + args,
                                          shell=False,
                                          *a,
                                          **kw)
        except subprocess.CalledProcessError as ex:
            return ex.returncode, ex.output
            # NOTE(review): everything below is unreachable (it follows a
            # return), and "or True" would make the raise unconditional
            # anyway -- looks like leftover debugging; confirm intent.
            print('nonzero result: ', ex.returncode, ex.cmd, ex.output)
            if ex.returncode == -1 or True:
                raise
        return 0, ret

    def _call_git(self, args, *a, **kw):
        # Run git for its side effects; tolerate nonzero exit codes except
        # a launch failure (-1).
        try:
            subprocess.check_call(self._gitargs + args, shell=False, *a, **kw)
        except subprocess.CalledProcessError as ex:
            if ex.returncode == -1:
                raise
            print('nonzero result: ', ex.returncode, ex.cmd, ex.output)

    def build_tree(self, *leafs):
        # Populate self.commits with the parent/child graph reachable from
        # the given leaf refs; remember the first (most recent) commit.
        code, data = self._ask_git(['rev-list', '--parents'] + list(leafs))
        assert code >= 0
        data = data.decode('ascii')
        first = None
        for line in data.split('\n'):
            if not line.strip():
                continue
            # rev-list --parents: "<hash> <parent1> <parent2> ..."
            hashes = line.split()
            hash = hashes[0]
            if first is None:
                first = hash
            parents = hashes[1:]
            commit = self.commits.setdefault(hash[:7], CommitInfo(hash=hash))
            parents = [
                self.commits.setdefault(p[:7], CommitInfo(hash=p))
                for p in parents
            ]
            commit.parents = parents
            for p in parents:
                p.children.add(commit)
        self.first = first

    def find_net(self):
        """ Find all commits that don't have exactly one parent and one
        child. """
        ret = dict()
        for c in self.commits.values():
            if len(c.children) == 1 and len(c.parents) == 1:
                continue
            n = ret.setdefault(c, MBInfo(commit=c))
            # For each branch off this knot, skip along the linear chain of
            # single-parent/single-child commits to the next knot.
            for cc in c.children:
                tmp = cc
                while len(tmp.children) == 1 and len(tmp.parents) == 1:
                    tmp, = tmp.children
                nc = ret.setdefault(tmp, MBInfo(commit=tmp))
                n.children[cc] = nc
            for cp in c.parents:
                tmp = cp
                while len(tmp.children) == 1 and len(tmp.parents) == 1:
                    tmp, = tmp.parents
                np = ret.setdefault(tmp, MBInfo(commit=tmp))
                n.parents[cp] = np
        self.knots = ret

    def fastdiff(self, c):
        # Metric: number of diff lines between commit c's checkout and
        # self.src, memoised on the CommitInfo object.
        if c.diff is not None:
            return c.diff
        self._call_git(['checkout', '-B', 'tmp', c.hash])
        gp = os.path.abspath(self.repo)
        sp = os.path.abspath(self.src)
        try:
            raw = subprocess.check_output(['diff', '-burN', '-x.git', gp, sp])
        except subprocess.CalledProcessError as ex:
            # diff exits nonzero when files differ; only re-raise on a
            # launch failure.
            if ex.returncode < 0:
                raise
            # print('nonzero result: ',ex.returncode,ex.cmd,ex.output[:200])
            raw = ex.output
        ret = raw.decode('utf-8', 'replace')
        ret = len(ret.split('\n'))
        c.diff = ret
        return ret

    def neighbouring_knots(self, commit):
        # Neighbourhood in the condensed knot graph built by find_net().
        node = self.knots.get(commit, None)
        if node is None:
            # fall-back to neighbouring commits
            # NOTE(review): neighbouring_commits requires a commit argument,
            # so this call would raise TypeError -- confirm intended
            # behaviour before relying on this fallback.
            return self.neighbouring_commits()
        return set(node.children.values()) | set(node.parents.values())

    def neighbouring_commits(self, commit):
        # Neighbourhood in the raw commit graph.
        return set(commit.children) | set(commit.parents)

    def follow_commits(self,
                       node,
                       n=100,
                       reset=True,
                       jump='knots',
                       metric='fastdiff'):
        # Greedy local search: repeatedly pick the cheapest frontier node
        # (by `metric`), expand it through the `jump` neighbourhood, and
        # track the best node seen.  `reset` restarts the budget whenever
        # a new best is found.
        if isinstance(metric, str):
            metric = getattr(self, metric)
        if isinstance(jump, str):
            jump = getattr(self, 'neighbouring_' + jump)
        seen = set()
        bestn = node
        bestv = metric(node)
        upcoming = {node: bestv}
        while upcoming and len(seen) < n:
            # Select the frontier node with the minimal recorded value.
            next = min(upcoming.values())
            for k, v in upcoming.items():
                if v == next:
                    node = k
                    break
            else:
                raise RuntimeError
            del upcoming[node]
            cur = metric(node)
            seen.add(node)
            print('%7s (%9d) ' % (node.hash[:7], cur),
                  'best!' if cur < bestv else '')
            if cur < bestv:
                bestv = cur
                bestn = node
                if reset:
                    # restart the search from the new best point
                    seen = set()
                    upcoming = {}
            for new in jump(node) - seen:
                prev = upcoming.get(new, None)
                upcoming[new] = cur if prev is None else min(cur, prev)
        return bestn
def edition_widget(engine, environment):
    ''' Edition GUI for SPM config - see
    :class:`~capsul.qt_gui.widgets.settings_editor.SettingsEditor`
    '''
    from soma.qt_gui.controller_widget import ScrollControllerWidget
    from soma.controller import Controller
    import types
    import traits.api as traits

    def validate_config(widget):
        # Persist the edited values; the config id encodes both the SPM
        # version and whether the standalone build is used.
        ctrl = widget.controller_widget.controller
        with widget.engine.settings as session:
            directory = ctrl.directory
            values = {
                'directory':
                    None if directory in (None, traits.Undefined, '')
                    else directory,
                'standalone': ctrl.standalone,
                'version': ctrl.version,
            }
            suffix = '-standalone' if ctrl.standalone else ''
            id = 'spm%s%s' % (ctrl.version, suffix)
            values['config_id'] = id
            query = 'config_id == "%s"' % id
            # NOTE(review): unlike the other config widgets, the lookup is
            # done in the 'global' environment rather than
            # widget.environment -- confirm this is intentional.
            conf = session.config('spm', 'global', selection=query)
            if conf is None:
                session.new_config('spm', widget.environment, values)
            else:
                for key in ('directory', 'standalone', 'version'):
                    setattr(conf, key, values[key])

    # Editable controller exposing the three SPM settings.
    controller = Controller()
    controller.add_trait(
        "directory",
        traits.Directory(traits.Undefined, output=False,
                         desc="Directory containing SPM."))
    controller.add_trait(
        "standalone",
        traits.Bool(True, desc="If True, use the standalone version of SPM."))
    controller.add_trait(
        'version',
        traits.Str(traits.Undefined, output=False,
                   desc='Version string for SPM: "12", "8", etc.'))

    # Pre-populate the controller from any existing configuration.
    conf = engine.settings.select_configurations(environment, {'spm': 'any'})
    if conf:
        spm_conf = conf.get('capsul.engine.module.spm', {})
        controller.directory = spm_conf.get('directory', traits.Undefined)
        controller.standalone = spm_conf.get('standalone', True)
        controller.version = spm_conf.get('version', '12')
        # TODO handle several configs

    widget = ScrollControllerWidget(controller, live=True)
    widget.engine = engine
    widget.environment = environment
    # validate_config becomes the widget's accept/apply hook.
    widget.accept = types.MethodType(validate_config, widget)
    return widget
class Matplotlibify(traits.HasTraits):
    """GUI for exporting a LogFilePlot (or a pair of them) to a standalone
    matplotlib script.  Works by filling in placeholder tokens of a
    template script with values from the plot(s), then writing, executing
    or printing the generated script (Python 2 era code: note ``unicode``,
    ``iteritems`` and the ``exec ... in ns`` statements)."""
    logFilePlotReference = traits.Instance(
        logFilePlots.plotObjects.logFilePlot.LogFilePlot)
    plotPropertiesList = traits.List(PlotProperties)
    logFilePlot1 = traits.Any()  # replaced by mapped traits in __init__
    logFilePlot2 = traits.Any()
    logFilePlotsReference = traits.Instance(
        logFilePlots.LogFilePlots)  #refernce to logFilePlots object
    isPriviliged = traits.Bool(False)
    hardCodeLegendBool = traits.Bool(
        False,
        desc=
        "click if you want to write your own legend otherwise it will generate legend based on series and legend replacement dict"
    )
    hardCodeLegendString = traits.String(
        "", desc="comma seperated string for each legend entry")
    #xLim = traits.Tuple()
    replacementStrings = {}  # placeholder token -> replacement value
    savedPrintsDirectory = traits.Directory(
        os.path.join("\\\\ursa", "AQOGroupFolder", "Experiment Humphry",
                     "Data", "savedPrints"))
    showWaterMark = traits.Bool(True)
    matplotlibifyMode = traits.Enum("default", "dual plot")
    generatePlotScriptButton = traits.Button("generate plot")
    showPlotButton = traits.Button("show")
    #templatesFolder = os.path.join( os.path.expanduser('~'),"Google Drive","Thesis","python scripts","matplotlibify")
    templatesFolder = os.path.join("\\\\ursa", "AQOGroupFolder",
                                   "Experiment Humphry",
                                   "Experiment Control And Software",
                                   "LogFilePlots", "matplotlibify",
                                   "templates")
    templateFile = traits.File(
        os.path.join(templatesFolder, "matplotlibifyDefaultTemplate.py"))
    generatedScriptLocation = traits.File(
        os.path.join(os.path.expanduser('~'), "Google Drive", "Thesis",
                     "python scripts", "matplotlibify", "debug.py"))
    saveToOneNote = traits.Button("Save to OneNote")
    printButton = traits.Button("print")
    dualPlotMode = traits.Enum('sharedXY', 'sharedX', 'sharedY', 'stacked',
                               'stackedX', 'stackedY')
    logLibrarianReference = None  # optionally set by the Librarian

    # --- traitsui layout ---
    secondPlotGroup = traitsui.VGroup(
        traitsui.Item("matplotlibifyMode", label="mode"),
        traitsui.HGroup(
            traitsui.Item("logFilePlot1",
                          visible_when="matplotlibifyMode=='dual plot'"),
            traitsui.Item("logFilePlot2",
                          visible_when="matplotlibifyMode=='dual plot'"),
            traitsui.Item('dualPlotMode',
                          visible_when="matplotlibifyMode=='dual plot'",
                          show_label=False)),
    )
    plotPropertiesGroup = traitsui.Item(
        "plotPropertiesList",
        editor=traitsui.ListEditor(style="custom"),
        show_label=False,
        resizable=True)
    generalGroup = traitsui.VGroup(
        traitsui.Item("showWaterMark", label="show watermark"),
        traitsui.HGroup(
            traitsui.Item("hardCodeLegendBool", label="hard code legend?"),
            traitsui.Item("hardCodeLegendString",
                          show_label=False,
                          visible_when="hardCodeLegendBool")),
        traitsui.Item("templateFile"),
        traitsui.Item("generatedScriptLocation", visible_when='isPriviliged'),
        traitsui.Item('generatePlotScriptButton', visible_when='isPriviliged'),
        traitsui.Item('showPlotButton'),
        traitsui.Item(
            'saveToOneNote', enabled_when='True'
        ),  # was deactivated for some time, probably there was an error, I try to debug this now
        traitsui.Item('printButton'))
    traits_view = traitsui.View(secondPlotGroup,
                                plotPropertiesGroup,
                                generalGroup,
                                resizable=True,
                                kind='live')

    def __init__(self, **traitsDict):
        super(Matplotlibify, self).__init__(**traitsDict)
        self.plotPropertiesList = [PlotProperties(self.logFilePlotReference)]
        self.generateReplacementStrings()
        # logFilePlot1/2 become mapped traits: tab name -> plot object
        # (the mapped value is accessed via the shadow name logFilePlotN_).
        self.add_trait(
            "logFilePlot1",
            traits.Trait(
                self.logFilePlotReference.logFilePlotsTabName, {
                    lfp.logFilePlotsTabName: lfp
                    for lfp in self.logFilePlotsReference.lfps
                }))
        self.add_trait(
            "logFilePlot2",
            traits.Trait(
                self.logFilePlotReference.logFilePlotsTabName, {
                    lfp.logFilePlotsTabName: lfp
                    for lfp in self.logFilePlotsReference.lfps
                }))

    def generateReplacementStrings(self):
        # Rebuild the placeholder -> value map from the current plot
        # properties and the global options.
        self.replacementStrings = {}
        if self.matplotlibifyMode == 'default':
            specific = self.plotPropertiesList[
                0].getReplacementStringsSpecific(identifier="")
            generic = self.getGlobalReplacementStrings()
            self.replacementStrings.update(specific)
            self.replacementStrings.update(generic)
        elif self.matplotlibifyMode == 'dual plot':
            specific1 = self.plotPropertiesList[
                0].getReplacementStringsSpecific(identifier="lfp1.")
            specific2 = self.plotPropertiesList[
                1].getReplacementStringsSpecific(identifier="lfp2.")
            generic = self.getGlobalReplacementStrings()
            self.replacementStrings.update(specific1)
            self.replacementStrings.update(specific2)
            self.replacementStrings.update(generic)
        for key in self.replacementStrings.keys(
        ):  #wrap strings in double quotes
            logger.info("%s = %s" % (self.replacementStrings[key],
                                     type(self.replacementStrings[key])))
            if isinstance(self.replacementStrings[key], (str, unicode)):
                if self.replacementStrings[key].startswith("def "):
                    continue  #if it is a function definition then dont wrap in quotes!
                else:
                    self.replacementStrings[key] = unicode(
                        self.wrapInQuotes(self.replacementStrings[key]))

    def getGlobalReplacementStrings(self, identifier=""):
        """generates the replacement strings that are specific to a log
        file plot """
        return {
            '{{%shardCodeLegendBool}}' % identifier: self.hardCodeLegendBool,
            '{{%shardCodeLegendString}}' % identifier:
            self.hardCodeLegendString,
            '{{%smatplotlibifyMode}}' % identifier: self.matplotlibifyMode,
            '{{%sshowWaterMark}}' % identifier: self.showWaterMark,
            '{{%sdualPlotMode}}' % identifier: self.dualPlotMode
        }

    def wrapInQuotes(self, string):
        return '"%s"' % string

    def _isPriviliged_default(self):
        # NOTE(review): os.path.join("C:", ...) yields a drive-relative
        # path on Windows (no backslash after "C:") -- confirm this check
        # behaves as intended.
        if os.path.exists(
                os.path.join("C:", "Users", "tharrison", "Google Drive",
                             "Thesis", "python scripts", "matplotlibify")):
            return True
        else:
            return False

    def _generatedScriptLocation_default(self):
        # Default output script name derived from the log file name and
        # the plotted axes, de-duplicated with a numeric suffix.
        root = os.path.join("C:", "Users", "tharrison", "Google Drive",
                            "Thesis", "python scripts", "matplotlibify")
        head, tail = os.path.split(self.logFilePlotReference.logFile)
        matplotlibifyName = os.path.splitext(tail)[0] + "-%s-vs-%s" % (
            self.plotPropertiesList[0]._yAxisLabel_default(),
            self.plotPropertiesList[0]._xAxisLabel_default())
        baseName = os.path.join(root, matplotlibifyName)
        filename = baseName + ".py"
        c = 0
        # NOTE(review): this tests filename + ".py" (a double ".py.py"
        # extension) while filename already ends in ".py" -- confirm
        # whether the intent was os.path.exists(filename).
        while os.path.exists(filename + ".py"):
            filename = baseName + "-%s.py" % c
            c += 1
        return filename

    def replace_all(self, text, replacementDictionary):
        # Substitute every placeholder token in the template text.
        for placeholder, new in replacementDictionary.iteritems():
            text = text.replace(placeholder, str(new))
        return text

    def _generatePlotScriptButton_fired(self):
        self.writePlotScriptToFile(self.generatedScriptLocation)

    def writePlotScriptToFile(self, path):
        """writes the script that generates the plot to the path """
        # NOTE(review): the `path` parameter is ignored; the output always
        # goes to self.generatedScriptLocation -- confirm intent.
        logger.info("attempting to generate matplotlib script...")
        self.generateReplacementStrings()
        with open(self.templateFile, "rb") as template:
            text = self.replace_all(template.read(), self.replacementStrings)
        with open(self.generatedScriptLocation, "wb") as output:
            output.write(text)
        logger.info("succesfully generated matplotlib script at location %s "
                    % self.generatedScriptLocation)

    def autoSavePlotWithMatplotlib(self, path):
        """runs the script with an appended plt.save() and
        plt.close("all")"""
        logger.info("attempting to save matplotlib plot...")
        self.generateReplacementStrings()
        with open(self.templateFile, "rb") as template:
            text = self.replace_all(template.read(), self.replacementStrings)
        ns = {}
        saveCode = "\n\nplt.savefig(r'%s', dpi=300)\nplt.close('all')" % path
        logger.info("executing save statement:%s" % saveCode)
        text += saveCode
        # Execute the generated script in an isolated namespace.
        exec text in ns
        logger.info("exec completed succesfully...")

    def _showPlotButton_fired(self):
        logger.info("attempting to show matplotlib plot...")
        self.generateReplacementStrings()
        with open(self.templateFile, "rb") as template:
            text = self.replace_all(template.read(), self.replacementStrings)
        ns = {}
        exec text in ns
        logger.info("exec completed succesfully...")

    def _saveToOneNote_fired(self):
        """calls the lfp function to save the file in the log folder and
        then save it to oneNote. This way there is no oneNote code in
        matplotlibify"""
        if self.logLibrarianReference is None:
            self.logFilePlotReference.savePlotAsImage(self)
        else:
            self.logFilePlotReference.savePlotAsImage(
                self, self.logLibrarianReference)

    def _matplotlibifyMode_changed(self):
        """change default template depending on whether or not this is a
        double axis plot """
        if self.matplotlibifyMode == "default":
            self.templateFile = os.path.join(
                self.templatesFolder, "matplotlibifyDefaultTemplate.py")
            self.plotPropertiesList = [
                PlotProperties(self.logFilePlotReference)
            ]
        elif self.matplotlibifyMode == "dual plot":
            self.templateFile = os.path.join(
                self.templatesFolder, "matplotlibifyDualPlotTemplate.py")
            if len(self.plotPropertiesList) > 1:
                self.plotPropertiesList[1] = PlotProperties(
                    self.logFilePlot2_)  #or should it be logFilePlot2_???
                logger.info("chanigng second element of plot properties list")
            elif len(self.plotPropertiesList) == 1:
                self.plotPropertiesList.append(
                    PlotProperties(self.logFilePlot2_))
                logger.info("appending to plot properties list")
            else:
                logger.error(
                    "there only be 1 or 2 elements in plot properties but found %s elements"
                    % len(self.plotPropertiesList))

    def _logFilePlot1_changed(self):
        """logFilePlot1 changed so update plotPropertiesList """
        logger.info("logFilePlot1 changed. updating plotPropertiesList")
        self.plotPropertiesList[0] = PlotProperties(self.logFilePlot1_)

    def _logFilePlot2_changed(self):
        """logFilePlot2 changed. updating plotPropertiesList """
        self.plotPropertiesList[1] = PlotProperties(self.logFilePlot2_)

    def dualPlotModeUpdates(self):
        """called when either _logFilePlot1 or _logFilePLot2 change """
        # Choose a dual-plot layout based on which axes the two plots share.
        if (self.logFilePlot1_.xAxis == self.logFilePlot2_.xAxis
                ):  #Twin X 2 y axes mode
            if self.logFilePlot1_.yAxis == self.logFilePlot2_.yAxis:
                self.dualPlotMode = 'sharedXY'
            else:
                self.dualPlotMode = 'sharedX'
        elif self.logFilePlot1_.yAxis == self.logFilePlot2_.yAxis:
            self.dualPlotMode = 'sharedY'
        else:
            self.dualPlotMode = 'stacked'

    def _printButton_fired(self):
        """uses windows built in print image functionality to send png of
        plot to printer """
        logFolder, tail = os.path.split(self.logFilePlotReference.logFile)
        #logName = tail.strip(".csv")+" - "+str(self.selectedLFP.xAxis)+" vs "+str(self.selectedLFP.yAxis)
        imageFileName = os.path.join(logFolder, "temporary_print.png")
        self.logFilePlotReference.savePlotAsImage(self,
                                                  name=imageFileName,
                                                  oneNote=False)
        logger.info("attempting to use windows native printing dialog")
        os.startfile(os.path.normpath(imageFileName), "print")
        logger.info("saving to savedPrints folder")
        head, tail = os.path.split(self._generatedScriptLocation_default())
        tail = tail.replace(".py", ".png")
        dst = os.path.join(self.savedPrintsDirectory, tail)
        shutil.copyfile(os.path.normpath(imageFileName), dst)
        logger.info("saved to savedPrints folder")
def edition_widget(engine, environment):
    ''' Edition GUI for FOM config - see
    :class:`~capsul.qt_gui.widgets.settings_editor.SettingsEditor`
    '''
    from soma.qt_gui.controller_widget import ScrollControllerWidget
    from soma.controller import Controller
    import types

    def validate_config(widget):
        # Push the values currently shown in the GUI controller back into
        # the engine's settings session for this environment.
        controller = widget.controller_widget.controller
        with widget.engine.settings as session:
            conf = session.config('fom', widget.environment)
            values = {'config_id': 'fom'}
            for key in ('input_fom', 'output_fom', 'shared_fom',
                        'volumes_format', 'meshes_format', 'auto_fom',
                        'fom_path', 'input_directory', 'output_directory'):
                value = getattr(controller, key)
                if value is traits.Undefined:
                    # Undefined traits are stored as None, except list-valued
                    # ones which become empty lists.
                    value = [] if key in ('fom_path', ) else None
                values[key] = value
            if conf is None:
                session.new_config('fom', widget.environment, values)
            else:
                for key, value in values.items():
                    if key == 'config_id':
                        continue
                    setattr(conf, key, value)

    # Build the controller exposing every editable FOM setting.
    controller = Controller()
    controller.add_trait(
        'input_fom',
        traits.Str(traits.Undefined, output=False, desc='input FOM'))
    controller.add_trait(
        'output_fom',
        traits.Str(traits.Undefined, output=False, desc='output FOM'))
    controller.add_trait(
        'shared_fom',
        traits.Str(traits.Undefined, output=False, desc='shared data FOM'))
    controller.add_trait(
        'volumes_format',
        traits.Str(traits.Undefined, output=False,
                   desc='Format used for volumes'))
    controller.add_trait(
        'meshes_format',
        traits.Str(traits.Undefined, output=False,
                   desc='Format used for meshes'))
    controller.add_trait(
        'auto_fom',
        traits.Bool(
            True,
            output=False,
            desc='Look in all FOMs when a process is not found (in '
            'addition to the standard share/foms). Note that auto_fom '
            'looks for the first FOM matching the process to get '
            'completion for, and does not handle ambiguities. Moreover '
            'it brings an overhead (typically 6-7 seconds) the first '
            'time it is used since it has to parse all available FOMs.'))
    controller.add_trait(
        'fom_path',
        traits.List(
            traits.Directory(output=False),
            desc='list of additional directories where to look for FOMs'))
    # FIXME: until directories are included in another config module
    controller.add_trait(
        'input_directory',
        traits.Directory(traits.Undefined, output=False,
                         desc='input study data directory'))
    controller.add_trait(
        'output_directory',
        traits.Directory(traits.Undefined, output=False,
                         desc='output study data directory'))

    # Seed the controller from the currently-selected configuration, if any.
    conf = engine.settings.select_configurations(environment, {'fom': 'any'})
    if conf:
        fconf = conf.get('capsul.engine.module.fom', {})
        for key in ('input_fom', 'output_fom', 'shared_fom',
                    'volumes_format', 'meshes_format', 'auto_fom',
                    'fom_path', 'input_directory', 'output_directory'):
            setattr(controller, key, fconf.get(key, traits.Undefined))

    widget = ScrollControllerWidget(controller, live=True)
    widget.engine = engine
    widget.environment = environment
    # validate_config becomes the widget's accept() method.
    widget.accept = types.MethodType(validate_config, widget)
    return widget
class ImageReferenceDialog(traits.HasTraits):
    """Dialog for saving a named, described reference image, optionally
    mirrored to a OneNote page.

    Collects a short name and a free-text description, writes the raw image,
    a screenshot, the sequence XML and a comments file into a dated folder
    under logReferenceDirectory, and (if saveToOneNote) builds a OneNote page
    linking to all of them.
    """
    referenceName = traits.String(
        desc="name of reference, should be short as it is used in filenames etc. date will be added automatically")
    referenceDescription = traits.String(
        desc="text block to describe reference image in detail")
    saveToOneNote = traits.Bool(
        True,
        desc="if True, then when user clicks save it will attempt to write details to a page in OneNote")
    # UNC path; "\\\\ursa" == two literal backslashes + "ursa" (consistent
    # with the other UNC paths in this file; also avoids the invalid "\u"
    # escape sequence that "\\\ursa" relies on).
    logReferenceDirectory = traits.Directory(
        os.path.join("\\\\ursa", "AQOGroupFolder", "Experiment Humphry",
                     "Data", "eagleReferences"),
        desc="directory into which references are saved")
    referenceFolder = None      # set by createReferenceDirectory
    currentImageArray = None    # numpy array of the raw image
    currentPixmap = None        # Qt pixmap screenshot of the eagle screen
    saveButton = traits.Button("Save")
    notebookName = traits.String("Humphry's Notebook")
    sectionName = traits.String("Eagle References")

    mainGroup = traitsui.Group(
        traitsui.Item("referenceName", label="Reference Name"),
        traitsui.Item("referenceDescription",
                      label="referenceDescription",
                      style="custom"),
        traitsui.Item("saveToOneNote", label="Save to OneNote?"),
        traitsui.Item("saveButton"))

    traits_view = traitsui.View(mainGroup,
                                title="Save Image Reference",
                                resizable=True,
                                handler=ImageReferenceDialogHandler())

    def __init__(self, currentImageArray, currentPixmap, sequenceXML,
                 extraDetails=None, **traitsDict):
        """construct an ImageReferenceDialog object.

        extraDetails defaults to an empty dict; None is used as the default
        value to avoid the shared-mutable-default-argument pitfall (the old
        default of {} was shared between all calls).
        """
        super(ImageReferenceDialog, self).__init__(**traitsDict)
        self.referenceName = ""
        self.referenceFolder = None
        self.currentImageArray = currentImageArray
        self.currentPixmap = currentPixmap
        self.sequenceXML = sequenceXML
        self.extraDetails = {} if extraDetails is None else extraDetails
        self.referenceDescription = self.generateExtraDetailsText(
            self.extraDetails)

    def createReferenceDirectory(self):
        """creates a new reference folder. does nothing if reference name is
        not defined or if name already exists"""
        if self.referenceName == "":
            logger.warning("no log file defined. Will not log")
            return
        #generate folders if they don't exist
        todayString = time.strftime("%Y-%m-%d ", time.gmtime(time.time()))
        self.referenceFolder = os.path.join(
            self.logReferenceDirectory, todayString + self.referenceName)
        if not os.path.isdir(self.referenceFolder):
            logger.info("creating a new reference folder %s" %
                        self.referenceFolder)
            os.mkdir(self.referenceFolder)

    def generateExtraDetailsText(self, extraDetails):
        """produce a formatted block of text from dictionary """
        preamble = "\nAutomated Extra Details:\n\n"
        stringList = [
            str(key) + " --> " + str(value)
            for key, value in extraDetails.iteritems()
        ]
        return preamble + "\n".join(stringList)

    def saveReferenceTextToFile(self):
        """append the description to comments.txt in the reference folder
        and return its path (returns None if no folder is defined)."""
        if self.referenceFolder is None or self.referenceName == "":
            logger.warning(
                "cannot save reference to textfile as no reference Folder is defined")
            return
        textFile = os.path.join(self.referenceFolder, "comments.txt")
        with open(textFile, "a+") as commentsFile:
            commentsFile.write(self.referenceDescription)
        return textFile

    def createOneNotePage(self, paths):
        """use OneNote module to make a pretty one Note page.

        paths is the dict built by saveReference with keys rawImage,
        screenshot, comments and sequenceName.
        """
        import oneNotePython
        import oneNotePython.eagleReferencesOneNote
        #get name of onenote page to be created
        todayString = time.strftime("%Y-%m-%d ", time.gmtime(time.time()))
        referenceName = todayString + self.referenceName
        #create page
        eagleRefOneNote = oneNotePython.eagleReferencesOneNote.EagleReferenceOneNote(
            notebookName=self.notebookName, sectionName=self.sectionName)
        eagleRefOneNote.createNewEagleReferencePage(referenceName)
        #add description text
        eagleRefOneNote.setOutline("description",
                                   self.referenceDescription,
                                   rewrite=False)
        #add images and links
        eagleRefOneNote.addRawImage(
            paths["rawImage"], self.currentImageArray.shape,
            rewrite=False)  # TODO or is shape in array in the wrong order?????!
        eagleRefOneNote.addScreenshot(
            paths["screenshot"],
            (self.currentPixmap.width(), self.currentPixmap.height()),
            rewrite=False)
        eagleRefOneNote.setDataOutline(referenceName,
                                       self.referenceFolder,
                                       paths["rawImage"],
                                       paths["screenshot"],
                                       paths["sequenceName"],
                                       paths["comments"],
                                       rewrite=False)
        #write data to onenote
        eagleRefOneNote.currentPage.rewritePage()
        #now to get resizing done well we want to completely repull the XML
        #and data. brute force method:
        eagleRefOneNote = oneNotePython.eagleReferencesOneNote.EagleReferenceOneNote(
            notebookName=self.notebookName, sectionName=self.sectionName)
        page = eagleRefOneNote.setPage(
            referenceName)  #this sets current page of eagleOneNote
        eagleRefOneNote.organiseOutlineSizes()

    def saveImageAndScreenshot(self):
        """pixmap of eagle screen and name of image file are passed to
        ImageReferenceDialog on creation. This function saves them and
        returns imageName and screenshotName"""
        imageName = os.path.join(self.referenceFolder,
                                 self.referenceName + " raw image.png")
        scipy.misc.imsave(imageName, self.currentImageArray)
        screenshotName = os.path.join(self.referenceFolder,
                                      self.referenceName + " screenshot.png")
        self.currentPixmap.save(screenshotName, "png")
        return imageName, screenshotName

    def saveXML(self):
        """save a copy of the XML to the reference folder"""
        if self.sequenceXML is None:
            logger.warning("sequence XML was none. Not saving sequence")
            return ""
        todayString = time.strftime("%Y-%m-%d ", time.gmtime(time.time()))
        referenceName = todayString + self.referenceName
        sequenceName = os.path.join(self.referenceFolder,
                                    referenceName + ".ctr")
        sequenceNameBackup = os.path.join(
            self.referenceFolder, referenceName + "-BACKUP of Original.xml")
        self.sequenceXML.write(sequenceName)
        self.sequenceXML.write(sequenceNameBackup)
        return sequenceName

    def saveReference(self):
        """main function called by the imageReferenceDialog that performs
        the save"""
        self.createReferenceDirectory()
        commentsName = self.saveReferenceTextToFile()
        imageName, screenshotName = self.saveImageAndScreenshot()
        sequenceName = self.saveXML()
        if self.saveToOneNote:
            paths = {
                "rawImage": imageName,
                "screenshot": screenshotName,
                "comments": commentsName,
                "sequenceName": sequenceName
            }
            self.createOneNotePage(paths)
class ImagePlotInspector(traits.HasTraits):
    """Main Experiment Eagle window: image plot with cross-sections,
    fits, physics panel and log-file plotter."""

    #Traits view definitions:
    settingsGroup = traitsui.VGroup(
        traitsui.VGroup(
            traitsui.Item("watchFolderBool", label="Watch Folder?"),
            traitsui.HGroup(
                traitsui.Item("selectedFile", label="Select a File"),
                visible_when="not watchFolderBool"),
            traitsui.HGroup(
                traitsui.Item("watchFolder", label="Select a Directory"),
                visible_when="watchFolderBool"),
            traitsui.HGroup(
                traitsui.Item("searchString", label="Filename sub-string"),
                visible_when="watchFolderBool"),
            label="File Settings",
            show_border=True),
        traitsui.VGroup(
            traitsui.HGroup('autoRangeColor', 'colorMapRangeLow',
                            'colorMapRangeHigh'),
            traitsui.HGroup('horizontalAutoRange', 'horizontalLowerLimit',
                            'horizontalUpperLimit'),
            traitsui.HGroup('verticalAutoRange', 'verticalLowerLimit',
                            'verticalUpperLimit'),
            label="axis limits",
            show_border=True),
        traitsui.VGroup(
            traitsui.HGroup('object.model.scale', 'object.model.offset'),
            traitsui.HGroup(
                traitsui.Item('object.model.pixelsX', label="Pixels X"),
                traitsui.Item('object.model.pixelsY', label="Pixels Y")),
            traitsui.HGroup(
                traitsui.Item('object.model.ODCorrectionBool',
                              label="Correct OD?"),
                traitsui.Item('object.model.ODSaturationValue',
                              label="OD saturation value")),
            traitsui.HGroup(
                traitsui.Item('contourLevels', label="Contour Levels"),
                traitsui.Item('colormap', label="Colour Map")),
            traitsui.HGroup(
                traitsui.Item('fixAspectRatioBool',
                              label="Fix Plot Aspect Ratio?")),
            traitsui.HGroup(
                traitsui.Item('updatePhysicsBool',
                              label="Update Physics with XML?")),
            traitsui.HGroup(
                traitsui.Item("cameraModel",
                              label="Update Camera Settings to:")),
            label="advanced",
            show_border=True),
        label="settings")

    plotGroup = traitsui.Group(
        traitsui.Item('container',
                      editor=ComponentEditor(size=(800, 600)),
                      show_label=False))
    fitsGroup = traitsui.Group(traitsui.Item('fitList',
                                             style="custom",
                                             editor=traitsui.ListEditor(
                                                 use_notebook=True,
                                                 selected="selectedFit",
                                                 deletable=False,
                                                 export='DockWindowShell',
                                                 page_name=".name"),
                                             label="Fits",
                                             show_label=False),
                               springy=True)
    mainPlotGroup = traitsui.HSplit(plotGroup, fitsGroup, label="Image")
    fftGroup = traitsui.Group(label="Fourier Transform")
    physicsGroup = traitsui.Group(traitsui.Item(
        "physics",
        editor=traitsui.InstanceEditor(),
        style="custom",
        show_label=False),
                                  label="Physics")
    logFilePlotGroup = traitsui.Group(traitsui.Item(
        "logFilePlotObject",
        editor=traitsui.InstanceEditor(),
        style="custom",
        show_label=False),
                                      label="Log File Plotter")
    eagleMenubar = traitsmenu.MenuBar(
        traitsmenu.Menu(
            traitsui.Action(name='Save Image Copy As...',
                            action='_save_image_as'),
            traitsui.Action(name='Save Image Copy',
                            action='_save_image_default'),
            name="File",
        ))
    traits_view = traitsui.View(settingsGroup,
                                mainPlotGroup,
                                physicsGroup,
                                logFilePlotGroup,
                                buttons=traitsmenu.NoButtons,
                                menubar=eagleMenubar,
                                handler=EagleHandler,
                                title="Experiment Eagle",
                                statusbar="selectedFile",
                                icon=pyface.image_resource.ImageResource(
                                    os.path.join('icons', 'eagles.ico')),
                                resizable=True)

    model = CameraImage()
    physics = physicsProperties.physicsProperties.PhysicsProperties(
    )  #create a physics properties object
    logFilePlotObject = logFilePlot.LogFilePlot()
    fitList = model.fitList
    selectedFit = traits.Instance(fits.Fit)
    drawFitRequest = traits.Event
    drawFitBool = traits.Bool(False)  # true when drawing a fit as well
    selectedFile = traits.File()
    watchFolderBool = traits.Bool(False)
    watchFolder = traits.Directory()
    searchString = traits.String(
        desc=
        "sub string that must be contained in file name for it to be shown in Eagle. Can be used to allow different instances of Eagle to detect different saved images."
    )
    oldFiles = set()
    contourLevels = traits.Int(15)
    colormap = traits.Enum(colormaps.color_map_name_dict.keys())
    autoRangeColor = traits.Bool(True)
    colorMapRangeLow = traits.Float
    colorMapRangeHigh = traits.Float
    horizontalAutoRange = traits.Bool(True)
    horizontalLowerLimit = traits.Float
    horizontalUpperLimit = traits.Float
    verticalAutoRange = traits.Bool(True)
    verticalLowerLimit = traits.Float
    verticalUpperLimit = traits.Float
    fixAspectRatioBool = traits.Bool(False)
    updatePhysicsBool = traits.Bool(True)
    cameraModel = traits.Enum("Custom", "ALTA0", "ANDOR0", "ALTA1", "ANDOR1")

    #---------------------------------------------------------------------------
    # Private Traits
    #---------------------------------------------------------------------------
    _image_index = traits.Instance(chaco.GridDataSource)
    _image_value = traits.Instance(chaco.ImageData)
    _cmap = traits.Trait(colormaps.jet, traits.Callable)

    #---------------------------------------------------------------------------
    # Public View interface
    #---------------------------------------------------------------------------
    def __init__(self, *args, **kwargs):
        super(ImagePlotInspector, self).__init__(*args, **kwargs)
        self.create_plot()
        # wire every fit back to this inspector and to the physics object
        for fit in self.fitList:
            fit.imageInspectorReference = self
            fit.physics = self.physics
        self.selectedFit = self.fitList[0]
        self.logFilePlotObject.physicsReference = self.physics
        logger.info("initialisation of experiment Eagle complete")

    def create_plot(self):
        """Build the main image plot, its axes/tools/inspectors, the two
        cross-section plots and the colorbar, and assemble them into
        self.container."""
        # Create the mapper, etc
        self._image_index = chaco.GridDataSource(scipy.array([]),
                                                 scipy.array([]),
                                                 sort_order=("ascending",
                                                             "ascending"))
        image_index_range = chaco.DataRange2D(self._image_index)
        self._image_index.on_trait_change(self._metadata_changed,
                                          "metadata_changed")
        self._image_value = chaco.ImageData(data=scipy.array([]),
                                            value_depth=1)
        image_value_range = chaco.DataRange1D(self._image_value)

        # Create the contour plots
        #self.polyplot = ContourPolyPlot(index=self._image_index,
        self.polyplot = chaco.CMapImagePlot(
            index=self._image_index,
            value=self._image_value,
            index_mapper=chaco.GridMapper(range=image_index_range),
            color_mapper=self._cmap(image_value_range))

        # Add a left axis to the plot
        left = chaco.PlotAxis(orientation='left',
                              title="y",
                              mapper=self.polyplot.index_mapper._ymapper,
                              component=self.polyplot)
        self.polyplot.overlays.append(left)

        # Add a bottom axis to the plot
        bottom = chaco.PlotAxis(orientation='bottom',
                                title="x",
                                mapper=self.polyplot.index_mapper._xmapper,
                                component=self.polyplot)
        self.polyplot.overlays.append(bottom)

        # Add some tools to the plot
        self.polyplot.tools.append(
            tools.PanTool(self.polyplot,
                          constrain_key="shift",
                          drag_button="middle"))
        self.polyplot.overlays.append(
            tools.ZoomTool(component=self.polyplot,
                           tool_mode="box",
                           always_on=False))

        # crosshair-style line inspectors feeding the cross-section plots
        self.lineInspectorX = clickableLineInspector.ClickableLineInspector(
            component=self.polyplot,
            axis='index_x',
            inspect_mode="indexed",
            write_metadata=True,
            is_listener=False,
            color="white")
        self.lineInspectorY = clickableLineInspector.ClickableLineInspector(
            component=self.polyplot,
            axis='index_y',
            inspect_mode="indexed",
            write_metadata=True,
            color="white",
            is_listener=False)
        self.polyplot.overlays.append(self.lineInspectorX)
        self.polyplot.overlays.append(self.lineInspectorY)

        self.boxSelection2D = boxSelection2D.BoxSelection2D(
            component=self.polyplot)
        self.polyplot.overlays.append(self.boxSelection2D)

        # Add these two plots to one container
        self.centralContainer = chaco.OverlayPlotContainer(
            padding=0, use_backbuffer=True, unified_draw=True)
        self.centralContainer.add(self.polyplot)

        # Create a colorbar
        cbar_index_mapper = chaco.LinearMapper(range=image_value_range)
        self.colorbar = chaco.ColorBar(
            index_mapper=cbar_index_mapper,
            plot=self.polyplot,
            padding_top=self.polyplot.padding_top,
            padding_bottom=self.polyplot.padding_bottom,
            padding_right=40,
            resizable='v',
            width=30)

        # shared data source for both cross-section plots
        self.plotData = chaco.ArrayPlotData(
            line_indexHorizontal=scipy.array([]),
            line_valueHorizontal=scipy.array([]),
            scatter_indexHorizontal=scipy.array([]),
            scatter_valueHorizontal=scipy.array([]),
            scatter_colorHorizontal=scipy.array([]),
            fitLine_indexHorizontal=scipy.array([]),
            fitLine_valueHorizontal=scipy.array([]))

        self.crossPlotHorizontal = chaco.Plot(self.plotData, resizable="h")
        self.crossPlotHorizontal.height = 100
        self.crossPlotHorizontal.padding = 20
        self.crossPlotHorizontal.plot(
            ("line_indexHorizontal", "line_valueHorizontal"),
            line_style="dot")
        self.crossPlotHorizontal.plot(
            ("scatter_indexHorizontal", "scatter_valueHorizontal",
             "scatter_colorHorizontal"),
            type="cmap_scatter",
            name="dot",
            color_mapper=self._cmap(image_value_range),
            marker="circle",
            marker_size=4)
        self.crossPlotHorizontal.index_range = self.polyplot.index_range.x_range

        self.plotData.set_data("line_indexVertical", scipy.array([]))
        self.plotData.set_data("line_valueVertical", scipy.array([]))
        self.plotData.set_data("scatter_indexVertical", scipy.array([]))
        self.plotData.set_data("scatter_valueVertical", scipy.array([]))
        self.plotData.set_data("scatter_colorVertical", scipy.array([]))
        self.plotData.set_data("fitLine_indexVertical", scipy.array([]))
        self.plotData.set_data("fitLine_valueVertical", scipy.array([]))

        self.crossPlotVertical = chaco.Plot(self.plotData,
                                            width=140,
                                            orientation="v",
                                            resizable="v",
                                            padding=20,
                                            padding_bottom=160)
        self.crossPlotVertical.plot(
            ("line_indexVertical", "line_valueVertical"), line_style="dot")
        self.crossPlotVertical.plot(
            ("scatter_indexVertical", "scatter_valueVertical",
             "scatter_colorVertical"),
            type="cmap_scatter",
            name="dot",
            color_mapper=self._cmap(image_value_range),
            marker="circle",
            marker_size=4)
        self.crossPlotVertical.index_range = self.polyplot.index_range.y_range

        # Create a container and add components
        self.container = chaco.HPlotContainer(padding=40,
                                              fill_padding=True,
                                              bgcolor="white",
                                              use_backbuffer=False)
        inner_cont = chaco.VPlotContainer(padding=40, use_backbuffer=True)
        inner_cont.add(self.crossPlotHorizontal)
        inner_cont.add(self.centralContainer)
        self.container.add(self.colorbar)
        self.container.add(inner_cont)
        self.container.add(self.crossPlotVertical)

    def initialiseFitPlot(self):
        """called if this is the first Fit Plot to be drawn """
        xstep = 1.0
        ystep = 1.0
        self.contourXS = scipy.linspace(xstep / 2.,
                                        self.model.pixelsX - xstep / 2.,
                                        self.model.pixelsX - 1)
        self.contourYS = scipy.linspace(ystep / 2.,
                                        self.model.pixelsY - ystep / 2.,
                                        self.model.pixelsY - 1)
        logger.debug("contour initialise fit debug. xs shape %s" %
                     self.contourXS.shape)
        logger.debug("contour initialise xs= %s" % self.contourXS)
        self._fit_value = chaco.ImageData(data=scipy.array([]),
                                          value_depth=1)

        self.lineplot = chaco.ContourLinePlot(
            index=self._image_index,
            value=self._fit_value,
            index_mapper=chaco.GridMapper(
                range=self.polyplot.index_mapper.range),
            levels=self.contourLevels)
        self.centralContainer.add(self.lineplot)
        self.plotData.set_data("fitLine_indexHorizontal", self.model.xs)
        self.plotData.set_data("fitLine_indexVertical", self.model.ys)
        self.crossPlotVertical.plot(
            ("fitLine_indexVertical", "fitLine_valueVertical"),
            type="line",
            name="fitVertical")
        self.crossPlotHorizontal.plot(
            ("fitLine_indexHorizontal", "fitLine_valueHorizontal"),
            type="line",
            name="fitHorizontal")
        logger.debug("initialise fit plot %s " % self.crossPlotVertical.plots)

    def addFitPlot(self, fit):
        """add a contour plot on top using fitted data and add additional
        plots to sidebars (TODO) """
        logger.debug("adding fit plot with fit %s " % fit)
        if not fit.fitted:
            logger.error(
                "cannot add a fitted plot for unfitted data. Run fit first")
            return
        if not self.drawFitBool:
            logger.info("first fit plot so initialising contour plot")
            self.initialiseFitPlot()
        logger.info("attempting to set fit data")
        self.contourPositions = [
            scipy.tile(self.contourXS, len(self.contourYS)),
            scipy.repeat(self.contourYS, len(self.contourXS))
        ]  #for creating data necessary for gauss2D function
        zsravelled = fit.fitFunc(self.contourPositions,
                                 *fit._getCalculatedValues())
        self.contourZS = zsravelled.reshape(
            (len(self.contourYS), len(self.contourXS)))
        self._fit_value.data = self.contourZS
        self.container.invalidate_draw()
        self.container.request_redraw()
        self.drawFitBool = True

    def update(self, model):
        """Refresh the image plot and cross sections from the model's
        current data."""
        logger.info("updating plot")
        if self.autoRangeColor:
            self.colorbar.index_mapper.range.low = model.minZ
            self.colorbar.index_mapper.range.high = model.maxZ
        self._image_index.set_data(model.xs, model.ys)
        self._image_value.data = model.zs
        self.plotData.set_data("line_indexHorizontal", model.xs)
        self.plotData.set_data("line_indexVertical", model.ys)
        if self.drawFitBool:
            self.plotData.set_data("fitLine_indexHorizontal", self.contourXS)
            self.plotData.set_data("fitLine_indexVertical", self.contourYS)
        self.updatePlotLimits()
        self._image_index.metadata_changed = True
        self.container.invalidate_draw()
        self.container.request_redraw()

    #---------------------------------------------------------------------------
    # Event handlers
    #---------------------------------------------------------------------------

    def _metadata_changed(self, old, new):
        """ This function takes out a cross section from the image data, based
        on the line inspector selections, and updates the line and scatter
        plots."""
        if self.horizontalAutoRange:
            self.crossPlotHorizontal.value_range.low = self.model.minZ
            self.crossPlotHorizontal.value_range.high = self.model.maxZ
        if self.verticalAutoRange:
            self.crossPlotVertical.value_range.low = self.model.minZ
            self.crossPlotVertical.value_range.high = self.model.maxZ
        if self._image_index.metadata.has_key("selections"):
            selections = self._image_index.metadata["selections"]
            if not selections:
                # selections is an empty list: nothing to update (happens
                # before the mouse has been over the plot)
                return
            x_ndx, y_ndx = selections
            if y_ndx and x_ndx:
                self.plotData.set_data("line_valueHorizontal",
                                       self._image_value.data[y_ndx, :])
                self.plotData.set_data("line_valueVertical",
                                       self._image_value.data[:, x_ndx])
                xdata, ydata = self._image_index.get_data()
                xdata, ydata = xdata.get_data(), ydata.get_data()
                self.plotData.set_data("scatter_indexHorizontal",
                                       scipy.array([xdata[x_ndx]]))
                self.plotData.set_data("scatter_indexVertical",
                                       scipy.array([ydata[y_ndx]]))
                self.plotData.set_data(
                    "scatter_valueHorizontal",
                    scipy.array([self._image_value.data[y_ndx, x_ndx]]))
                self.plotData.set_data(
                    "scatter_valueVertical",
                    scipy.array([self._image_value.data[y_ndx, x_ndx]]))
                self.plotData.set_data(
                    "scatter_colorHorizontal",
                    scipy.array([self._image_value.data[y_ndx, x_ndx]]))
                self.plotData.set_data(
                    "scatter_colorVertical",
                    scipy.array([self._image_value.data[y_ndx, x_ndx]]))
                if self.drawFitBool:
                    self.plotData.set_data("fitLine_valueHorizontal",
                                           self._fit_value.data[y_ndx, :])
                    self.plotData.set_data("fitLine_valueVertical",
                                           self._fit_value.data[:, x_ndx])
        else:
            # no selection metadata: blank out the cross-section plots
            self.plotData.set_data("scatter_valueHorizontal",
                                   scipy.array([]))
            self.plotData.set_data("scatter_valueVertical", scipy.array([]))
            self.plotData.set_data("line_valueHorizontal", scipy.array([]))
            self.plotData.set_data("line_valueVertical", scipy.array([]))
            self.plotData.set_data("fitLine_valueHorizontal",
                                   scipy.array([]))
            self.plotData.set_data("fitLine_valueVertical", scipy.array([]))

    def _colormap_changed(self):
        self._cmap = colormaps.color_map_name_dict[self.colormap]
        if hasattr(self, "polyplot"):
            value_range = self.polyplot.color_mapper.range
            self.polyplot.color_mapper = self._cmap(value_range)
            value_range = self.crossPlotHorizontal.color_mapper.range
            self.crossPlotHorizontal.color_mapper = self._cmap(value_range)
            # FIXME: change when we decide how best to update plots using
            # the shared colormap in plot object
            self.crossPlotHorizontal.plots["dot"][0].color_mapper = self._cmap(
                value_range)
            self.crossPlotVertical.plots["dot"][0].color_mapper = self._cmap(
                value_range)
            self.container.request_redraw()

    def _colorMapRangeLow_changed(self):
        self.colorbar.index_mapper.range.low = self.colorMapRangeLow

    def _colorMapRangeHigh_changed(self):
        self.colorbar.index_mapper.range.high = self.colorMapRangeHigh

    def _horizontalLowerLimit_changed(self):
        self.crossPlotHorizontal.value_range.low = self.horizontalLowerLimit

    def _horizontalUpperLimit_changed(self):
        self.crossPlotHorizontal.value_range.high = self.horizontalUpperLimit

    def _verticalLowerLimit_changed(self):
        self.crossPlotVertical.value_range.low = self.verticalLowerLimit

    def _verticalUpperLimit_changed(self):
        self.crossPlotVertical.value_range.high = self.verticalUpperLimit

    def _autoRange_changed(self):
        # NOTE(review): no trait named 'autoRange' (or attributes
        # minz/maxz) is defined on this class, so this static handler
        # appears to be dead code inherited from an example — confirm
        # before relying on it.
        if self.autoRange:
            self.colorbar.index_mapper.range.low = self.minz
            self.colorbar.index_mapper.range.high = self.maxz

    def _num_levels_changed(self):
        # NOTE(review): no trait named 'num_levels' is defined here
        # (contourLevels is the visible trait) — this handler looks dead;
        # confirm before relying on it.
        if self.num_levels > 3:
            self.polyplot.levels = self.num_levels
            self.lineplot.levels = self.num_levels

    def _colorMapRangeLow_default(self):
        logger.debug("setting color map rangle low default")
        return self.model.minZ

    def _colorMapRangeHigh_default(self):
        return self.model.maxZ

    def _horizontalLowerLimit_default(self):
        return self.model.minZ

    def _horizontalUpperLimit_default(self):
        return self.model.maxZ

    def _verticalLowerLimit_default(self):
        return self.model.minZ

    def _verticalUpperLimit_default(self):
        return self.model.maxZ

    def _selectedFit_changed(self, selected):
        logger.debug("selected fit was changed")

    def _fixAspectRatioBool_changed(self):
        # zoom-range based aspect locking was tried previously but breaks
        # on zoom reset; fixing the container aspect ratio instead
        if self.fixAspectRatioBool:
            self.centralContainer.aspect_ratio = float(
                self.model.pixelsX) / float(self.model.pixelsY)
        else:
            self.centralContainer.aspect_ratio = None
        self.container.request_redraw()
        self.centralContainer.request_redraw()

    def updatePlotLimits(self):
        """just updates the values in the GUI """
        if self.autoRangeColor:
            self.colorMapRangeLow = self.model.minZ
            self.colorMapRangeHigh = self.model.maxZ
        if self.horizontalAutoRange:
            self.horizontalLowerLimit = self.model.minZ
            self.horizontalUpperLimit = self.model.maxZ
        if self.verticalAutoRange:
            self.verticalLowerLimit = self.model.minZ
            self.verticalUpperLimit = self.model.maxZ

    def _selectedFile_changed(self):
        self.model.getImageData(self.selectedFile)
        if self.updatePhysicsBool:
            self.physics.updatePhysics()
        for fit in self.fitList:
            fit.fitted = False
            fit.fittingStatus = fit.notFittedForCurrentStatus
            if fit.autoFitBool:
                # automatically start fitting for this Fit; starts a thread
                # to perform the fit. auto guess and auto draw are handled
                # automatically
                fit._fit_routine()
        self.update_view()
        # update log file plot if autorefresh is selected
        if self.logFilePlotObject.autoRefresh:
            try:
                self.logFilePlotObject.refreshPlot()
            except Exception as e:
                logger.error("failed to update log plot - %s...." %
                             e.message)

    def _cameraModel_changed(self):
        """camera model enum can be used as a helper. It just sets all the
        relevant editable parameters to the correct values. e.g. pixels
        size, etc.

        cameras: "Andor Ixon 3838", "Apogee ALTA"
        """
        logger.info("camera model changed")
        if self.cameraModel == "ANDOR0":
            self.model.pixelsX = 512
            self.model.pixelsY = 512
            self.physics.pixelSize = 16.0
            self.physics.magnification = 2.0
            self.searchString = "ANDOR0"
        elif self.cameraModel == "ALTA0":
            self.model.pixelsX = 768
            self.model.pixelsY = 512
            self.physics.pixelSize = 9.0
            self.physics.magnification = 0.5
            self.searchString = "ALTA0"
        elif self.cameraModel == "ALTA1":
            self.model.pixelsX = 768
            self.model.pixelsY = 512
            self.physics.pixelSize = 9.0
            self.physics.magnification = 4.25
            self.searchString = "ALTA1"
        elif self.cameraModel == "ANDOR1":
            self.model.pixelsX = 512
            self.model.pixelsY = 512
            self.physics.pixelSize = 16.0
            self.physics.magnification = 2.0
            self.searchString = "ANDOR1"
        else:
            logger.error("unrecognised camera model")
        self.refreshFitReferences()
        self.model.getImageData(self.selectedFile)

    def refreshFitReferences(self):
        """When aspects of the image change so that the fits need to have
        properties updated, it should be done by this function"""
        for fit in self.fitList:
            fit.endX = self.model.pixelsX
            fit.endY = self.model.pixelsY

    def _pixelsX_changed(self):
        """If pixelsX or pixelsY change, we must send the new arrays to the
        fit functions """
        logger.info("pixels X Change detected")
        self.refreshFitReferences()
        self.update(self.model)
        self.model.getImageData(self.selectedFile)

    def _pixelsY_changed(self):
        """If pixelsX or pixelsY change, we must send the new arrays to the
        fit functions """
        logger.info("pixels Y Change detected")
        self.refreshFitReferences()
        self.update(self.model)
        self.model.getImageData(self.selectedFile)

    @traits.on_trait_change('model')
    def update_view(self):
        if self.model is not None:
            self.update(self.model)

    def _save_image(self, originalFilePath, newFilePath):
        """given the original file path this saves a new copy to new File
        path """
        shutil.copy2(originalFilePath, newFilePath)

    def _save_image_as(self):
        """ opens a save as dialog and allows user to save a copy of current
        image to a custom location with a custom name"""
        # snapshot the path so it can't be affected by auto update while the
        # dialog is open
        originalFilePath = str(self.selectedFile)
        file_wildcard = str("PNG (*.png)|All files|*")
        default_directory = os.path.join("\\\\ursa", "AQOGroupFolder",
                                         "Experiment Humphry", "Data",
                                         "savedEagleImages")
        dialog = FileDialog(action="save as",
                            default_directory=default_directory,
                            wildcard=file_wildcard)
        dialog.open()
        if dialog.return_code == OK:
            self._save_image(originalFilePath, dialog.path)
            logger.debug("custom image copy made")

    def _save_image_default(self):
        head, tail = os.path.split(self.selectedFile)
        default_file = os.path.join("\\\\ursa", "AQOGroupFolder",
                                    "Experiment Humphry", "Data",
                                    "savedEagleImages", tail)
        self._save_image(self.selectedFile, default_file)
        logger.debug("default image copy made")
class PipelineConfiguration(traits.HasTraits):
    """Configuration for a Connectome Mapper (CMP) pipeline run.

    Holds every user-editable processing parameter as a trait, plus
    helper methods that derive the canonical directory layout inside a
    subject's working directory and locate bundled data files (gradient
    tables, parcellation atlases, binaries).
    """

    # project settings
    project_dir = traits.Directory(
        exists=False, desc="data path to where the project is stored")

    # project metadata (for connectome file)
    project_metadata = traits.Dict(
        desc="project metadata to be stored in the connectome file")

    # DEPRECATED: this field is deprecated after version >1.0.2
    generator = traits.Str()

    # parcellation scheme
    parcellation_scheme = traits.Enum("NativeFreesurfer",
                                      ["Lausanne2008", "NativeFreesurfer"],
                                      desc="used parcellation scheme")

    # choose between 'L' (linear) and 'N' (non-linear) and 'B' (bbregister)
    registration_mode = traits.Enum(
        "Linear", ["Linear", "Nonlinear", "BBregister"],
        desc="registration mode: linear or non-linear or bbregister")

    diffusion_imaging_model = traits.Enum("DSI", ["DSI", "DTI", "QBALL"])

    # DSI
    nr_of_gradient_directions = traits.Str('515')
    nr_of_sampling_directions = traits.Str('181')
    odf_recon_param = traits.Str('-b0 1 -dsi -p 4 -sn 0')
    hardi_recon_param = traits.Str('-b0 1 -p 3 -sn 0')

    # DTI
    gradient_table_file = traits.File(exists=False)
    gradient_table = traits.Enum('siemens_64', [
        'custom', 'mgh_dti_006', 'mgh_dti_018', 'mgh_dti_030', 'mgh_dti_042',
        'mgh_dti_060', 'mgh_dti_072', 'mgh_dti_090', 'mgh_dti_120',
        'mgh_dti_144', 'siemens_06', 'siemens_12', 'siemens_20',
        'siemens_256', 'siemens_30', 'siemens_64'
    ])
    nr_of_b0 = traits.Str('1')
    max_b0_val = traits.Str('1000')
    dti_recon_param = traits.Str('')
    dtb_dtk2dir_param = traits.Str('')

    # tractography
    streamline_param = traits.Str('--angle 60 --seeds 32')

    # registration
    lin_reg_param = traits.Str('-usesqform -nosearch -dof 6 -cost mutualinfo')
    nlin_reg_bet_T2_param = traits.Str('-f 0.35 -g 0.15')
    nlin_reg_bet_b0_param = traits.Str('-f 0.2 -g 0.2')
    nlin_reg_fnirt_param = traits.Str(
        '--subsamp=8,4,2,2 --miter==5,5,5,5 --lambda=240,120,90,30 --splineorder=3 --applyinmask=0,0,1,1 --applyrefmask=0,0,1,1'
    )
    bb_reg_param = traits.Str('--init-header --dti')

    # dicom converter
    do_convert_diffusion = traits.Bool(True)
    do_convert_T1 = traits.Bool(True)
    do_convert_T2 = traits.Bool(False)
    do_convert_fMRI = traits.Bool(False)

    # rsfmri
    # choose between 'L' (linear) and 'B' (bbregister)
    rsfmri_registration_mode = traits.Enum(
        "Linear", ["Linear", "BBregister"],
        desc="registration mode: linear or bbregister")
    rsfmri_lin_reg_param = traits.Str(
        '-usesqform -nosearch -dof 6 -cost mutualinfo')
    rsfmri_bb_reg_param = traits.Str('--init-header --dti')
    do_save_mat = traits.Bool(True)

    # rsfmri PREPROCESSING STEPS
    rsfmri_slice_timing = traits.Enum("none", [
        "none", "bottom-top interleaved", "top-bottom interleaved",
        "bottom-top", "top-bottom"
    ], desc="time slicing mode")
    rsfmri_smoothing = traits.Str('0')
    rsfmri_discard = traits.Str('5')
    rsfmri_nuisance_global = traits.Bool(False)
    rsfmri_nuisance_WM = traits.Bool(True)
    rsfmri_nuisance_CSF = traits.Bool(True)
    rsfmri_nuisance_motion = traits.Bool(True)
    rsfmri_detrending = traits.Bool(True)
    rsfmri_lowpass = traits.Str('1')
    rsfmri_scrubbing_parameters = traits.Bool(True)
    rsfmri_scrubbing_apply = traits.Bool(True)
    rsfmri_scrubbing_FD = traits.Str('0.5')
    rsfmri_scrubbing_DVARS = traits.Str('5')

    # DEPRECATED:
    subject_raw_glob_diffusion = traits.Str("*.*")
    subject_raw_glob_T1 = traits.Str("*.*")
    subject_raw_glob_T2 = traits.Str("*.*")
    extract_diffusion_metadata = traits.Bool(False)

    # subject
    subject_name = traits.Str()
    subject_timepoint = traits.Str()
    subject_workingdir = traits.Directory()
    # lazily created in get_logger(); not a trait on purpose
    subject_logger = None
    # NOTE(review): plain class-level list — shared across all instances
    # unless reassigned per instance; confirm HasTraits semantics before
    # relying on per-subject mutation.
    subject_metadata = [
        KeyValue(key='description', value=''),
        KeyValue(key='', value=''),
        KeyValue(key='', value=''),
        KeyValue(key='', value=''),
        KeyValue(key='', value=''),
        KeyValue(key='', value=''),
    ]

    # which pipeline stages are enabled
    active_createfolder = traits.Bool(True)
    active_dicomconverter = traits.Bool(False)
    active_registration = traits.Bool(False)
    active_segmentation = traits.Bool(False)
    active_parcellation = traits.Bool(False)
    active_applyregistration = traits.Bool(False)
    active_reconstruction = traits.Bool(False)
    active_tractography = traits.Bool(False)
    active_fiberfilter = traits.Bool(False)
    active_connectome = traits.Bool(False)
    active_statistics = traits.Bool(False)
    active_cffconverter = traits.Bool(False)
    active_rsfmri_registration = traits.Bool(False)
    active_rsfmri_preprocessing = traits.Bool(False)
    active_rsfmri_connectionmatrix = traits.Bool(False)
    skip_completed_stages = traits.Bool(False)

    # metadata
    creator = traits.Str()
    email = traits.Str()
    publisher = traits.Str()
    created = traits.Date()
    modified = traits.Date()
    license = traits.Str()
    # rights = traits.Str()
    reference = traits.Str()
    # relation = traits.Str()
    species = traits.Str('H**o sapiens')
    description = traits.Str()

    # segmentation
    recon_all_param = traits.Str('-all -no-isrunning')

    # parcellation
    custompar_nrroi = traits.Int()
    custompar_nodeinfo = traits.File()
    custompar_volumeparcell = traits.File()

    # fiber filtering
    apply_splinefilter = traits.Bool(
        True, desc='apply the spline filtering from diffusion toolkit')
    apply_fiberlength = traits.Bool(True, desc='apply cutoff to fiber lengths')
    fiber_cutoff_lower = traits.Float(
        20.0,
        desc='cut fibers that are shorter in length than given length in mm')
    fiber_cutoff_upper = traits.Float(
        500.0,
        desc='cut fibers that are longer in length than given length in mm')

    # measures
    connection_P0 = traits.Bool(False)
    connection_gfa = traits.Bool(False)
    connection_kurtosis = traits.Bool(False)
    connection_skewness = traits.Bool(False)
    connection_adc = traits.Bool(False)
    connection_fa = traits.Bool(False)

    # cff converter
    cff_fullnetworkpickle = traits.Bool(
        True,
        desc='stores the full network pickle generated by connectome creation')
    cff_cmatpickle = traits.Bool(True)
    cff_originalfibers = traits.Bool(True, desc='stores original fibers')
    cff_filteredfibers = traits.Bool(True, desc='stores filtered fibers')
    cff_finalfiberlabels = traits.Bool(
        True, desc='stores final fibers and their labelarrays')
    cff_fiberarr = traits.Bool(True)
    cff_rawdiffusion = traits.Bool(True)
    cff_scalars = traits.Bool(True)
    cff_rawT1 = traits.Bool(True)
    cff_rawT2 = traits.Bool(True)
    cff_roisegmentation = traits.Bool(
        True, desc='stores multi-resolution parcellation volumes')
    cff_surfaces = traits.Bool(True,
                               desc='stores individually genertated surfaces')
    cff_surfacelabels = traits.Bool(
        True, desc='stores individually genertated surfaces')

    # do you want to do manual white matter mask correction?
    wm_handling = traits.Enum(
        1, [1, 2, 3],
        desc="in what state should the freesurfer step be processed")

    # custom parcellation
    parcellation = traits.Dict(
        desc="provide the dictionary with your parcellation.")

    # start up fslview
    inspect_registration = traits.Bool(
        False, desc='start fslview to inspect the the registration results')
    fsloutputtype = traits.Enum('NIFTI', ['NIFTI'])

    # connectome creation
    compute_curvature = traits.Bool(False)

    # email notification, needs a local smtp server
    # sudo apt-get install postfix
    emailnotify = traits.ListStr(
        [], desc='the email address to send stage completion status message')

    freesurfer_home = traits.Directory(exists=False, desc="path to Freesurfer")
    fsl_home = traits.Directory(exists=False, desc="path to FSL")
    dtk_home = traits.Directory(exists=False, desc="path to diffusion toolkit")
    # NOTE(review): dtk_matrices is only set in __init__ when DTDIR is in
    # the environment, yet consistency_check() reads it unconditionally —
    # confirm a trait/default should be declared for it.

    # This file stores descriptions of the inputs/outputs to each stage of
    # the CMP pipeline. It can be queried using the PipelineStatus python
    # object
    pipeline_status_file = traits.Str("cmp.status")

    # Pipeline status object (class-level; shared by all configurations)
    pipeline_status = pipeline_status.PipelineStatus()

    def _get_lausanne_parcellation(self, parcel="NativeFreesurfer"):
        """Return the parcellation dictionary for the given scheme.

        Parameters
        ----------
        parcel : str
            Either "Lausanne2008" (multi-scale) or anything else, which
            falls back to the single-scale NativeFreesurfer dictionary.
        """
        if parcel == "Lausanne2008":
            return {
                'scale33': {
                    'number_of_regions': 83,
                    # contains name, url, color, freesurfer_label, etc.
                    # used for connection matrix
                    'node_information_graphml': op.join(
                        self.get_lausanne_parcellation_path('resolution83'),
                        'resolution83.graphml'),
                    # scalar node values on fsaverage? or atlas?
                    'surface_parcellation': None,
                    # scalar node values in fsaverage volume?
                    'volume_parcellation': None,
                    # the subdirectory name from where to copy
                    # parcellations, with hemispheric wildcard
                    'fs_label_subdir_name': 'regenerated_%s_36',
                    # should we subtract the cortical rois for the white
                    # matter mask?
                    'subtract_from_wm_mask': 1,
                },
                'scale60': {
                    'number_of_regions': 129,
                    'node_information_graphml': op.join(
                        self.get_lausanne_parcellation_path('resolution150'),
                        'resolution150.graphml'),
                    'surface_parcellation': None,
                    'volume_parcellation': None,
                    'fs_label_subdir_name': 'regenerated_%s_60',
                    'subtract_from_wm_mask': 1,
                },
                'scale125': {
                    'number_of_regions': 234,
                    'node_information_graphml': op.join(
                        self.get_lausanne_parcellation_path('resolution258'),
                        'resolution258.graphml'),
                    'surface_parcellation': None,
                    'volume_parcellation': None,
                    'fs_label_subdir_name': 'regenerated_%s_125',
                    'subtract_from_wm_mask': 1,
                },
                'scale250': {
                    'number_of_regions': 463,
                    'node_information_graphml': op.join(
                        self.get_lausanne_parcellation_path('resolution500'),
                        'resolution500.graphml'),
                    'surface_parcellation': None,
                    'volume_parcellation': None,
                    'fs_label_subdir_name': 'regenerated_%s_250',
                    'subtract_from_wm_mask': 1,
                },
                'scale500': {
                    'number_of_regions': 1015,
                    'node_information_graphml': op.join(
                        self.get_lausanne_parcellation_path('resolution1015'),
                        'resolution1015.graphml'),
                    'surface_parcellation': None,
                    'volume_parcellation': None,
                    'fs_label_subdir_name': 'regenerated_%s_500',
                    'subtract_from_wm_mask': 1,
                },
            }
        else:
            return {
                'freesurferaparc': {
                    'number_of_regions': 83,
                    # contains name, url, color, freesurfer_label, etc.
                    # used for connection matrix
                    'node_information_graphml': op.join(
                        self.get_lausanne_parcellation_path(
                            'freesurferaparc'), 'resolution83.graphml'),
                    # scalar node values on fsaverage? or atlas?
                    'surface_parcellation': None,
                    # scalar node values in fsaverage volume?
                    'volume_parcellation': None,
                }
            }

    def __init__(self, **kwargs):
        """Initialize defaults and discover tool paths from the environment.

        Keyword arguments are forwarded to HasTraits so traits can be
        set at construction time.
        """
        # NOTE: In python 2.6, object.__init__ no longer accepts input
        # arguments.  HasTraits does not define an __init__ and
        # therefore these args were being ignored.
        super(PipelineConfiguration, self).__init__(**kwargs)

        # the default parcellation provided
        self.parcellation = self._get_lausanne_parcellation(
            parcel="NativeFreesurfer")

        self.can_use_dipy = dipy_here

        # no email notify
        self.emailnotify = []

        # default gradient table for DTI
        self.gradient_table_file = self.get_cmp_gradient_table('siemens_64')

        # try to discover paths from environment variables; missing
        # variables are tolerated and left for consistency_check() to flag
        try:
            self.freesurfer_home = op.join(os.environ['FREESURFER_HOME'])
            self.fsl_home = op.join(os.environ['FSLDIR'])
            self.dtk_home = os.environ['DTDIR']
            self.dtk_matrices = op.join(self.dtk_home, 'matrices')
        except KeyError:
            pass

        # FSL must produce uncompressed NIFTI; export for child processes.
        # (Previously this env var was redundantly assigned twice.)
        self.fsloutputtype = 'NIFTI'
        os.environ['FSLOUTPUTTYPE'] = self.fsloutputtype

    def consistency_check(self):
        """ Provides a checking facility for configuration objects

        Raises
        ------
        Exception
            If the project directory is missing, required metadata is
            empty, a required software path does not exist, or no
            subject working directory is set.
        """
        # project name not empty
        if not op.exists(self.project_dir):
            msg = 'Your project directory does not exist!'
            raise Exception(msg)

        # check metadata
        if self.creator == '':
            raise Exception('You need to enter creator metadata!')
        if self.publisher == '':
            raise Exception('You need to enter publisher metadata!')
        if self.email == '':
            raise Exception('You need to enter email of a contact person!')

        # check if software paths exists
        pas = {
            'configuration.freesurfer_home': self.freesurfer_home,
            'configuration.fsl_home': self.fsl_home,
            'configuration.dtk_home': self.dtk_home,
            'configuration.dtk_matrices': self.dtk_matrices
        }
        for k, p in pas.items():
            if not op.exists(p):
                msg = 'Required software path for %s does not exists: %s' % (
                    k, p)
                raise Exception(msg)

        if self.subject_workingdir == '':
            msg = 'No working directory defined for subject'
            raise Exception(msg)

    def get_cmp_home(self):
        """ Return the cmp home path """
        return op.dirname(__file__)

    def get_rawdata(self):
        """ Return raw data path for the subject """
        return op.join(self.get_subj_dir(), 'RAWDATA')

    def get_log(self):
        """ Get subject log dir """
        return op.join(self.get_subj_dir(), 'LOG')

    def get_logname(self, suffix='.log'):
        """ Get a generic name for the log and pickle files """
        a = dt.datetime.now()
        return 'pipeline-%s-%02i%02i-%s-%s%s' % (
            a.date().isoformat(), a.time().hour, a.time().minute,
            self.subject_name, self.subject_timepoint, suffix)

    def get_logger(self):
        """ Get the logger instance created """
        if self.subject_logger is None:
            # setup logger for the subject (created lazily on first use)
            self.subject_logger = \
                getLog(os.path.join(self.get_log(), self.get_logname()))
        return self.subject_logger

    def get_rawglob(self, modality):
        """ DEPRECATED: Get the file name endings for modality """
        if modality == 'diffusion':
            if not self.subject_raw_glob_diffusion == '':
                return self.subject_raw_glob_diffusion
            else:
                raise Exception('No raw_glob_diffusion defined for subject')
        elif modality == 'T1':
            if not self.subject_raw_glob_T1 == '':
                return self.subject_raw_glob_T1
            else:
                raise Exception('No raw_glob_T1 defined for subject')
        elif modality == 'T2':
            if not self.subject_raw_glob_T2 == '':
                return self.subject_raw_glob_T2
            else:
                raise Exception('No raw_glob_T2 defined for subject')

    def get_dicomfiles(self, modality):
        """ Get a list of dicom files for the requested modality. Tries to
        discover them automatically

        Raises
        ------
        Exception
            If the modality is unknown or no DICOM files are found.
        """
        from glob import glob
        if modality == 'diffusion':
            pat = self.get_raw_diffusion()
        elif modality == 'T1':
            pat = self.get_rawt1()
        elif modality == 'T2':
            pat = self.get_rawt2()
        elif modality == 'fMRI':
            pat = self.get_rawrsfmri()
        else:
            # previously an unknown modality fell through and produced a
            # confusing NameError on 'pat'
            raise Exception('Unknown modality: %s' % modality)
        # discover files with *.* and *
        difiles = sorted(glob(op.join(pat, '*.*')) + glob(op.join(pat, '*')))
        # exclude potential .nii and .nii.gz files
        difiles = [
            e for e in difiles
            if not e.endswith('.nii') and not e.endswith('.nii.gz')
        ]
        # check if no files and throw exception
        if len(difiles) == 0:
            raise Exception('Could not find any DICOM files in folder %s' %
                            pat)
        return difiles

    def get_rawrsfmri(self):
        """ Get raw functional MRI path for subject """
        return op.join(self.get_rawdata(), 'fMRI')

    def get_rawt1(self):
        """ Get raw structural MRI T1 path for subject """
        return op.join(self.get_rawdata(), 'T1')

    def get_rawt2(self):
        """ Get raw structural MRI T2 path for subject """
        return op.join(self.get_rawdata(), 'T2')

    def get_subj_dir(self):
        """ Return the subject working directory """
        return self.subject_workingdir

    def get_raw_diffusion(self):
        """ Get the raw diffusion path for subject """
        if self.diffusion_imaging_model == 'DSI':
            return op.join(self.get_subj_dir(), 'RAWDATA', 'DSI')
        elif self.diffusion_imaging_model == 'DTI':
            return op.join(self.get_subj_dir(), 'RAWDATA', 'DTI')
        elif self.diffusion_imaging_model == 'QBALL':
            return op.join(self.get_subj_dir(), 'RAWDATA', 'QBALL')

    def get_fs(self):
        """ Returns the subject root folder path for freesurfer files """
        return op.join(self.get_subj_dir(), 'FREESURFER')

    def get_stats(self):
        """ Return statistic output path """
        return op.join(self.get_subj_dir(), 'STATS')

    def get_cffdir(self):
        """ Returns path to store connectome file """
        return op.join(self.get_cmp(), 'cff')

    def get_nifti(self):
        """ Returns the subject root folder path for nifti files """
        return op.join(self.get_subj_dir(), 'NIFTI')

    def get_nifti_trafo(self):
        """ Returns the path to the subjects transformation / registration
        matrices """
        return op.join(self.get_nifti(), 'transformations')

    def get_nifti_bbregister(self):
        """ Returns the path to the subjects transformation / registration
        matrices, bbregister mode """
        return op.join(self.get_nifti(), 'bbregister')

    def get_diffusion_metadata(self):
        """ Diffusion metadata, i.e. where gradient_table.txt is stored """
        return op.join(self.get_nifti(), 'diffusion_metadata')

    def get_nifti_wm_correction(self):
        """ Returns the path to the subjects wm_correction path """
        return op.join(self.get_nifti(), 'wm_correction')

    def get_cmp(self):
        """ Return the subject's CMP output root """
        return op.join(self.get_subj_dir(), 'CMP')

    def get_cmp_rawdiff(self):
        """ Return the raw diffusion output folder """
        return op.join(self.get_cmp(), 'raw_diffusion')

    def get_cmp_rawdiff_reconout(self):
        """ Returns the output path for diffusion reconstruction without
        prefix"""
        if self.diffusion_imaging_model == 'DSI':
            return op.join(self.get_cmp(), 'raw_diffusion', 'odf_0')
        elif self.diffusion_imaging_model == 'DTI':
            return op.join(self.get_cmp(), 'raw_diffusion', 'dti_0')
        elif self.diffusion_imaging_model == 'QBALL':
            return op.join(self.get_cmp(), 'raw_diffusion', 'qball_0')

    def get_cmp_rawdiff_resampled(self):
        """ Return folder for diffusion data resampled to 2x2x2 mm """
        return op.join(self.get_cmp_rawdiff(), '2x2x2')

    def get_cmp_fsout(self):
        """ Return folder for freesurfer-derived outputs """
        return op.join(self.get_cmp(), 'fs_output')

    def get_cmp_fibers(self):
        """ Return folder for tractography fibers """
        return op.join(self.get_cmp(), 'fibers')

    def get_cmp_scalars(self):
        """ Return folder for scalar maps """
        return op.join(self.get_cmp(), 'scalars')

    def get_cmp_matrices(self):
        """ Return folder for connection matrices """
        return op.join(self.get_cmp_fibers(), 'matrices')

    def get_cmp_fmri(self):
        """ Return folder for fMRI outputs """
        return op.join(self.get_cmp(), 'fMRI')

    def get_cmp_fmri_preproc(self):
        """ Return folder for fMRI preprocessing outputs """
        return op.join(self.get_cmp_fmri(), 'preprocessing')

    def get_cmp_fmri_matrices(self):
        """ Return folder for fMRI connection matrices """
        return op.join(self.get_cmp_fmri(), 'matrices')

    def get_cmp_fmri_timeseries(self):
        """ Return folder for fMRI time series """
        return op.join(self.get_cmp_fmri(), 'timeseries')

    def get_cmp_tracto_mask(self):
        """ Return folder of the high-resolution tractography mask """
        return op.join(self.get_cmp_fsout(), 'HR')

    def get_cmp_tracto_mask_tob0(self):
        """ Return folder of the tractography mask registered to b0 """
        return op.join(self.get_cmp_fsout(), 'HR__registered-TO-b0')

    def get_custom_gradient_table(self):
        """ Returns the absolute path to the custom gradient table
        with optional b-values in the 4th row """
        return self.gradient_table_file

    def get_cmp_gradient_table(self, name):
        """ Return default gradient tables shipped with CMP. These are
        mainly derived from Diffusion Toolkit """
        cmp_path = op.dirname(__file__)
        return op.join(cmp_path, 'data', 'diffusion', 'gradient_tables',
                       name + '.txt')

    def get_dtb_streamline_vecs_file(self, as_text=False):
        """ Returns the odf directions file used for DTB_streamline

        Parameters
        ----------
        as_text : bool
            If True return the .txt variant, otherwise the .dat binary.
        """
        cmp_path = op.dirname(__file__)
        if as_text:
            return op.join(cmp_path, 'data', 'diffusion', 'odf_directions',
                           '181_vecs.txt')
        else:
            return op.join(cmp_path, 'data', 'diffusion', 'odf_directions',
                           '181_vecs.dat')

    # XXX
    def get_cmp_scalarfields(self):
        """ Returns a list with tuples with the scalar field name and the
        absolute path to its nifti file """
        ret = []

        if self.diffusion_imaging_model == 'DSI':
            # add gfa per default
            ret.append(('gfa', op.join(self.get_cmp_scalars(),
                                       'dsi_gfa.nii.gz')))
            # XXX: add adc per default
        elif self.diffusion_imaging_model == 'DTI':
            # nothing to add yet for DTI
            pass

        return ret

    def get_dtk_dsi_matrix(self):
        """ Returns the DSI matrix from Diffusion Toolkit

        The parameters have to be set in the configuration object with
        keys:
        1. number of gradient directions : 'nr_of_gradient_directions'
        2. number of sampling directions : 'nr_of_sampling_directions'

        Example
        -------
        confobj.nr_of_gradient_directions = 515
        confobj.nr_of_sampling_directions = 181
        Returns matrix including absolute path to DSI_matrix_515x181.dat

        Raises
        ------
        Exception
            If the matrix file does not exist in dtk_matrices.
        """
        grad = self.nr_of_gradient_directions
        samp = self.nr_of_sampling_directions
        fpath = op.join(self.dtk_matrices,
                        "DSI_matrix_%sx%s.dat" % (grad, samp))
        if not op.exists(fpath):
            msg = "DSI matrix does not exists: %s" % fpath
            raise Exception(msg)
        return fpath

    def get_lausanne_atlas(self, name=None):
        """ Return the absolute path to the lausanne parcellation atlas
        for the resolution name

        Raises
        ------
        Exception
            If *name* is not one of the atlases shipped with CMP.
        """
        cmp_path = op.dirname(__file__)
        provided_atlases = [
            'myatlas_36_rh.gcs', 'myatlasP1_16_rh.gcs', 'myatlasP17_28_rh.gcs',
            'myatlasP29_36_rh.gcs', 'myatlas_60_rh.gcs', 'myatlas_125_rh.gcs',
            'myatlas_250_rh.gcs', 'myatlas_36_lh.gcs', 'myatlasP1_16_lh.gcs',
            'myatlasP17_28_lh.gcs', 'myatlasP29_36_lh.gcs',
            'myatlas_60_lh.gcs', 'myatlas_125_lh.gcs', 'myatlas_250_lh.gcs'
        ]
        if name in provided_atlases:
            return op.join(cmp_path, 'data', 'colortable_and_gcs',
                           'my_atlas_gcs', name)
        else:
            msg = "Atlas %s does not exists" % name
            raise Exception(msg)

    def get_freeview_lut(self, name):
        """ Returns the Look-Up-Table as text file for a given parcellation
        scheme in a dictionary """
        cmp_path = op.dirname(__file__)
        if name == "NativeFreesurfer":
            return {
                'freesurferaparc':
                op.join(cmp_path, 'data', 'parcellation', 'nativefreesurfer',
                        'freesurferaparc', 'FreeSurferColorLUT_adapted.txt')
            }
        else:
            return ""

    def get_lausanne_parcellation_path(self, parcellationname):
        """ Return the bundled data folder for the given parcellation name,
        validated against the currently selected parcellation scheme.

        Raises
        ------
        Exception
            If *parcellationname* is not valid for the active scheme.
        """
        cmp_path = op.dirname(__file__)

        if self.parcellation_scheme == "Lausanne2008":
            allowed_default_parcel = [
                'resolution83', 'resolution150', 'resolution258',
                'resolution500', 'resolution1015'
            ]
            if parcellationname in allowed_default_parcel:
                return op.join(cmp_path, 'data', 'parcellation',
                               'lausanne2008', parcellationname)
            else:
                msg = "Not a valid default parcellation name for the lausanne2008 parcellation scheme"
                raise Exception(msg)
        else:
            allowed_default_parcel = ['freesurferaparc']
            if parcellationname in allowed_default_parcel:
                return op.join(cmp_path, 'data', 'parcellation',
                               'nativefreesurfer', parcellationname)
            else:
                msg = "Not a valid default parcellation name for the NativeFreesurfer parcellation scheme"
                raise Exception(msg)

    def get_cmp_binary_path(self):
        """ Returns the path to the binary files for the current platform
        and architecture

        Raises
        ------
        Exception
            If no binaries are shipped for this platform.
        """
        if sys.platform == 'linux2':
            import platform as pf
            if '32' in pf.architecture()[0]:
                return op.join(op.dirname(__file__), "binary", "linux2",
                               "bit32")
            elif '64' in pf.architecture()[0]:
                return op.join(op.dirname(__file__), "binary", "linux2",
                               "bit64")
        else:
            # fix: 'raise' on a plain string is a TypeError at runtime;
            # wrap the message in an Exception instead
            raise Exception('No binary files compiled for your platform!')

    def get_pipeline_status_file(self):
        """Returns the absolute path of the pipeline status file"""
        return op.join(self.get_subj_dir(), self.pipeline_status_file)

    def init_pipeline_status(self):
        """Create the 'cmp.status'.  The 'cmp.status' file contains
        information about the inputs/outputs of each pipeline stage"""
        status_file = op.join(self.get_subj_dir(), self.pipeline_status_file)
        self.pipeline_status.Pipeline.name = "cmp"
        self.pipeline_status.SaveToFile(status_file)

    def update_pipeline_status(self):
        """Update the pipeline status on disk with the current status
        in memory"""
        status_file = op.join(self.get_subj_dir(), self.pipeline_status_file)
        self.pipeline_status.SaveToFile(status_file)