class N4ITKInputSpec(CommandLineInputSpec):
    """Input spec for the N4ITK bias-field-correction command-line tool.

    Fixes relative to the generated original:
    - ``exists`` metadata was the string ``"True"``; it must be the boolean
      ``True`` for traits to actually validate that the input file exists.
    - ``traits.List`` received its element trait as a string (e.g.
      ``"traits.Int"``); it must be the trait class so elements are validated.
    """
    inputImageName = File(exists=True, argstr="--inputimage %s")
    maskImageName = File(exists=True, argstr="--maskimage %s")
    outputImageName = traits.Either(traits.Bool, File, argstr="--outputimage %s")
    outputBiasFieldName = traits.Either(traits.Bool, File, argstr="--outputbiasfield %s")
    Force2D = traits.Bool(argstr="--force2D ")
    numberOfIterations = traits.List(traits.Int, sep=",", argstr="--iterations %d")
    convergenceThreshold = traits.Float(argstr="--convergencethreshold %f")
    initialMeshResolution = traits.List(traits.Float, sep=",", argstr="--meshresolution %f")
    splineDistance = traits.Float(argstr="--splinedistance %f")
    shrinkFactor = traits.Int(argstr="--shrinkfactor %d")
    bsplineOrder = traits.Int(argstr="--bsplineorder %d")
    weightImageName = File(exists=True, argstr="--weightimage %s")
    alpha = traits.Float(argstr="--bsplinealpha %f")
    beta = traits.Float(argstr="--bsplinebeta %f")
    histogramSharpening = traits.List(traits.Float, sep=",", argstr="--histogramsharpening %f")
    biasFieldFullWidthAtHalfMaximum = traits.Float(argstr="--biasFieldFullWidthAtHalfMaximum %f")
    # NOTE(review): "weiner" is a typo for "Wiener", but the flag name is the
    # underlying tool's CLI contract, so it is kept as-is.
    weinerFilterNoise = traits.Float(argstr="--weinerFilterNoise %f")
    numberOfHistogramBins = traits.Int(argstr="--numberOfHistogramBins %d")
class itkBinaryThresholdImageInputSpec(CommandLineInputSpec):
    """Positional input spec for the itkBinaryThresholdImage CLI wrapper.

    All arguments are mandatory and passed positionally in the order:
    input file, file mode, min, max, output file.  The ``min``/``max``
    trait names mirror the tool's interface and are kept even though
    they shadow builtins.
    """
    inFilename = File(argstr='%s', desc="inFilename", exists=True,
                      mandatory=True, position=0)
    fileMode = traits.Str(argstr='%s', desc="fileMode", exists=True,
                          mandatory=True, position=1)
    min = traits.Int(argstr='%s', desc="min", exists=True,
                     mandatory=True, position=2)
    max = traits.Int(argstr='%s', desc="max", exists=True,
                     mandatory=True, position=3)
    outFilename = traits.Str(argstr='%s', desc="outFilename", exists=True,
                             mandatory=True, position=4)
class gtractFiberTrackingInputSpec(CommandLineInputSpec):
    """Input spec for the gtractFiberTracking command-line tool.

    Fix: ``exists`` metadata was the string ``"True"``; it must be the
    boolean ``True`` for traits to actually check that the file exists.
    """
    inputTensorVolume = File(exists=True, argstr="--inputTensorVolume %s")
    inputAnisotropyVolume = File(exists=True, argstr="--inputAnisotropyVolume %s")
    inputStartingSeedsLabelMapVolume = File(exists=True, argstr="--inputStartingSeedsLabelMapVolume %s")
    startingSeedsLabel = traits.Int(argstr="--startingSeedsLabel %d")
    inputEndingSeedsLabelMapVolume = File(exists=True, argstr="--inputEndingSeedsLabelMapVolume %s")
    endingSeedsLabel = traits.Int(argstr="--endingSeedsLabel %d")
    inputTract = traits.Str(argstr="--inputTract %s")
    outputTract = traits.Str(argstr="--outputTract %s")
    writeXMLPolyDataFile = traits.Bool(argstr="--writeXMLPolyDataFile ")
    trackingMethod = traits.Enum("Guided", "Free", "Streamline", "GraphSearch",
                                 argstr="--trackingMethod %s")
    guidedCurvatureThreshold = traits.Float(argstr="--guidedCurvatureThreshold %f")
    maximumGuideDistance = traits.Float(argstr="--maximumGuideDistance %f")
    seedThreshold = traits.Float(argstr="--seedThreshold %f")
    trackingThreshold = traits.Float(argstr="--trackingThreshold %f")
    curvatureThreshold = traits.Float(argstr="--curvatureThreshold %f")
    branchingThreshold = traits.Float(argstr="--branchingThreshold %f")
    maximumBranchPoints = traits.Int(argstr="--maximumBranchPoints %d")
    useRandomWalk = traits.Bool(argstr="--useRandomWalk ")
    randomSeed = traits.Int(argstr="--randomSeed %d")
    branchingAngle = traits.Float(argstr="--branchingAngle %f")
    minimumLength = traits.Float(argstr="--minimumLength %f")
    maximumLength = traits.Float(argstr="--maximumLength %f")
    stepSize = traits.Float(argstr="--stepSize %f")
    useLoopDetection = traits.Bool(argstr="--useLoopDetection ")
    useTend = traits.Bool(argstr="--useTend ")
    tendF = traits.Float(argstr="--tendF %f")
    tendG = traits.Float(argstr="--tendG %f")
class BRAINSClassPlugsInputSpec(CommandLineInputSpec):
    """Input spec for the BRAINSClassPlugs command-line tool.

    Fixes relative to the generated original:
    - ``exists`` metadata was the string ``"True"`` instead of boolean ``True``.
    - ``traits.List`` received its element trait as a string (e.g.
      ``"traits.Float"``) instead of the trait class.
    """
    t1Volume = File(exists=True, argstr="--t1Volume %s")
    t2Volume = File(exists=True, argstr="--t2Volume %s")
    pdVolume = File(exists=True, argstr="--pdVolume %s")
    searchVolume = File(exists=True, argstr="--searchVolume %s")
    gmPlugs = traits.Either(traits.Bool, File, argstr="--gmPlugs %s")
    wmPlugs = traits.Either(traits.Bool, File, argstr="--wmPlugs %s")
    csfPlugs = traits.Either(traits.Bool, File, argstr="--csfPlugs %s")
    plugClassNames = traits.List(traits.Str, sep=",", argstr="--plugClassNames %s")
    t1ClassMeans = traits.List(traits.Float, sep=",", argstr="--t1ClassMeans %f")
    t2ClassMeans = traits.List(traits.Float, sep=",", argstr="--t2ClassMeans %f")
    pdClassMeans = traits.List(traits.Float, sep=",", argstr="--pdClassMeans %f")
    randomSeed = traits.Int(argstr="--randomSeed %d")
    numberOfPlugs = traits.Int(argstr="--numberOfPlugs %d")
    coverage = traits.Float(argstr="--coverage %f")
    permissiveness = traits.Float(argstr="--permissiveness %f")
    meanOutlier = traits.Float(argstr="--meanOutlier %f")
    varOutlier = traits.Float(argstr="--varOutlier %f")
    plugSize = traits.Float(argstr="--plugSize %f")
    partitions = traits.List(traits.Int, sep=",", argstr="--partitions %d")
    numberOfClassPlugs = traits.List(traits.Int, sep=",", argstr="--numberOfClassPlugs %d")
    bloodMode = traits.Enum("Manual", "Top", "Bottom", argstr="--bloodMode %s")
    bloodImage = traits.Enum("T1", "T2", "PD", argstr="--bloodImage %s")
    vbPlugs = File(exists=True, argstr="--vbPlugs %s")
class DtiSkullStripB0InputSpec(CommandLineInputSpec):
    """Positional input spec for the DtiSkullStripB0 CLI wrapper.

    Mandatory positional arguments: B0 image, clipped-output name,
    threshold, erode size, dilate size.
    """
    B0File = File(argstr='%s', desc="B0File", exists=True,
                  mandatory=True, position=0)
    ClippedB0File = traits.Str(argstr='%s', desc="ClippedB0File", exists=True,
                               mandatory=True, position=1)
    Threshold = traits.Int(argstr='%s', desc="Threshold", exists=True,
                           mandatory=True, position=2)
    ErodeSize = traits.Int(argstr='%s', desc="ErodeSize", exists=True,
                           mandatory=True, position=3)
    DilateSize = traits.Int(argstr='%s', desc="DilateSize", exists=True,
                            mandatory=True, position=4)
class gtractFastMarchingTrackingInputSpec(CommandLineInputSpec):
    """Input spec for the gtractFastMarchingTracking command-line tool.

    Fix: ``exists`` metadata was the string ``"True"``; it must be the
    boolean ``True`` for traits to actually check that the file exists.
    """
    inputTensorVolume = File(exists=True, argstr="--inputTensorVolume %s")
    inputAnisotropyVolume = File(exists=True, argstr="--inputAnisotropyVolume %s")
    inputCostVolume = File(exists=True, argstr="--inputCostVolume %s")
    inputStartingSeedsLabelMapVolume = File(exists=True, argstr="--inputStartingSeedsLabelMapVolume %s")
    startingSeedsLabel = traits.Int(argstr="--startingSeedsLabel %d")
    outputTract = traits.Str(argstr="--outputTract %s")
    writeXMLPolyDataFile = traits.Bool(argstr="--writeXMLPolyDataFile ")
    numberOfIterations = traits.Int(argstr="--numberOfIterations %d")
    seedThreshold = traits.Float(argstr="--seedThreshold %f")
    trackingThreshold = traits.Float(argstr="--trackingThreshold %f")
    costStepSize = traits.Float(argstr="--costStepSize %f")
    maximumStepSize = traits.Float(argstr="--maximumStepSize %f")
    minimumStepSize = traits.Float(argstr="--minimumStepSize %f")
class AnalogInputChannelViewer(traits.HasTraits):
    """Live chaco view of a single analog-input (ADC) channel.

    Fix: ``np.float`` was deprecated in NumPy 1.20 and removed in 1.24; it
    was always an alias for the builtin ``float``, which is used instead.
    """
    index = traits.Array(dtype=float)
    data = traits.Array(dtype=float)
    device_channel_num = traits.Int(label='ADC')

    traits_view = View(
        Group(
            Item('device_channel_num', style='readonly'),
            ChacoPlotItem(
                'index', 'data',
                # x_label = "elapsed time (sec)",
                x_label="index",
                y_label="data",
                show_label=False,
                # y-range padded one count beyond the 10-bit ADC span.
                y_bounds=(-1, 2**10 + 1),
                y_auto=False,
                resizable=True,
                title='Analog input',
            ),
        ),
        resizable=True,
        width=800,
        height=200,
    )
class JFIEmulatorClass(remote_traits.MaybeRemoteHasTraits):
    """base class of worker subclass, and also runs in GUI process"""
    max_voltage = traits.Float(4.0)
    volts_to_adc_units = traits.Float(1000)  # conversion factor
    BoardNum = traits.Int(0)
    chan_left = traits.Int(0)
    chan_right = traits.Int(1)
    min_angle = traits.Float(-45)  # in same units as sent by strokelitude
    max_angle = traits.Float(90)   # in same units as sent by strokelitude
    gain = UL.UNI4VOLTS  # works on USB 1208FS

    traits_view = View(
        Group((
            ## Item(name='max_voltage'),
            ## Item(name='volts_to_adc_units'),
        )),
    )
class compareTractInclusionInputSpec(CommandLineInputSpec):
    """Input spec for the compareTractInclusion command-line tool."""
    testFiber = traits.Str(argstr="--testFiber %s")
    standardFiber = traits.Str(argstr="--standardFiber %s")
    closeness = traits.Float(argstr="--closeness %f")
    numberOfPoints = traits.Int(argstr="--numberOfPoints %d")
    testForBijection = traits.Bool(argstr="--testForBijection ")
    testForFiberCardinality = traits.Bool(argstr="--testForFiberCardinality ")
    writeXMLPolyDataFile = traits.Bool(argstr="--writeXMLPolyDataFile ")
class image(TConfig):
    """Defaults for image display (matplotlib ``image`` rc group)."""
    aspect = T.Trait('equal', 'equal', 'auto', T.Float)
    interpolation = T.Trait('bilinear',
                            'bilinear', 'nearest', 'bicubic', 'spline16',
                            'spline36', 'hanning', 'hamming', 'hermite',
                            'kaiser', 'quadric', 'catrom', 'gaussian',
                            'bessel', 'mitchell', 'sinc', 'lanczos',
                            'blackman')
    cmap = T.Trait('jet', *mplT.colormaps)
    lut = T.Int(256)
    origin = T.Trait('upper', 'upper', 'lower')
class legend(TConfig):
    """Defaults for legend layout (matplotlib ``legend`` rc group)."""
    isaxes = T.true
    numpoints = T.Int(3)
    fontsize = T.Trait('medium', 'xx-small', 'x-small', 'small', 'medium',
                       'large', 'x-large', 'xx-large', T.Float)
    pad = T.Float(0.2)
    markerscale = T.Float(1.0)
    labelsep = T.Float(0.01)
    handlelen = T.Float(0.05)
    handletextsep = T.Float(0.02)
    axespad = T.Float(0.02)
    shadow = T.false
class ColumnEditor(traits.HasTraits):
    """Define the main column Editor class. Complex part is handled
    by get columns function below that defines the view and editor.

    Fixes relative to the original:
    - ``range(...)`` is a lazy object on Python 3; it is materialized with
      ``list()`` before being assigned to the ``columns`` List trait.
    - The boolean toggle was duplicated in both branches; it is hoisted out.
    """
    columns = traits.List()
    numberOfColumns = traits.Int()
    selectAllButton = traits.Button('Select/Deselect All')
    selectDeselectBool = traits.Bool(True)

    def _selectAllButton_fired(self):
        # Alternate between clearing the selection and selecting all columns.
        if self.selectDeselectBool:
            self.columns = []
        else:
            self.columns = list(range(0, self.numberOfColumns))
        self.selectDeselectBool = not self.selectDeselectBool
class legend(TConfig):
    """Defaults for legend placement and layout (matplotlib ``legend``
    rc group, including the ``loc`` anchor)."""
    loc = T.Trait('upper right',
                  'best', 'upper right', 'upper left', 'lower left',
                  'lower right', 'right', 'center left', 'center right',
                  'lower center', 'upper center', 'center')
    isaxes = T.true
    numpoints = T.Int(3)
    fontsize = T.Trait('medium', 'xx-small', 'x-small', 'small', 'medium',
                       'large', 'x-large', 'xx-large', T.Float)
    pad = T.Float(0.2)
    markerscale = T.Float(1.0)
    labelsep = T.Float(0.01)
    handlelen = T.Float(0.05)
    handletextsep = T.Float(0.02)
    axespad = T.Float(0.02)
    shadow = T.false
class IntRangeFeature(traits.HasTraits):
    """Defines a feature that is settable by slider."""
    # Dynamic range: slider bounds come from the low/high traits; the
    # backing value lives in value_.
    value = traits.Range('low', 'high', 'value_')
    value_ = traits.CInt(0.)
    low = traits.CInt(-10000.)
    high = traits.CInt(10000.)
    is_settable = traits.Bool(False)
    id = traits.Property(depends_on='name')
    index = traits.Int(0)
    name = 'gain'

    view = ui.View(ui.Item('value', show_label=False, style='custom'))

    def _get_id(self):
        # Property getter: map this feature's name to its numeric id.
        return _SINGLE_VALUED_FEATURES.get(self.name)
class DeviceTimer3State(traits.HasTraits):
    """encapsulate all (relevant) timer3 state on the device

    Making these variables a member of their own HasTraits class means
    that updates to the device can be treated in an atomic way.
    """
    # Timer/Counter3 state
    timer3_top = traits.Int(200)
    # Mapped trait: prescaler divisor -> clock select (CS) bit code.
    # Default divisor is 64 (code 0x03).
    timer3_CS = traits.Trait(64, {0.0: 0x00,  # off
                                  1.0: 0x01,
                                  8.0: 0x02,
                                  64.0: 0x03,
                                  256.0: 0x04,
                                  1024.0: 0x05})
    ocr3a = traits.Int
    ocr3b = traits.Int
    ocr3c = traits.Int
class itkRelabelComponentImageInputSpec(CommandLineInputSpec):
    """Positional input spec for the itkRelabelComponentImage CLI wrapper."""
    inFilename = File(argstr='%s', desc="inFilename", exists=True,
                      mandatory=True, position=0)
    fileMode = traits.Str(argstr='%s', desc="fileMode", exists=True,
                          mandatory=True, position=1)
    val = traits.Int(argstr='%s', desc="val", exists=True,
                     mandatory=True, position=2)
    outFilename = traits.Str(argstr='%s', desc="outFilename", exists=True,
                             mandatory=True, position=3)
class IPythonConfig(TConfig):
    """Class Docstring

    More

    And more...
    """
    m = T.Int(123)
    select = T.Trait('only', 'one', 'of', 'these')

    class InitOnly(TConfig):
        """ The Read-only part of the configuration.

        More than one line of info...
        """
        n = T.Int
        x = T.Float

    class Protocol(TConfig):
        """Specify the Protocol

        More text...
        """
        include = T.Str
        ptype = T.Str

        class Handler(TConfig):
            """Specify the handler, a string.

            More..."""
            key = T.Str
            key2 = T.Str

    class Machine(TConfig):
        """Set the machine by ip address and port number."""
        ip = T.Str
        port = T.Int
class itkObjectMorphologyInputSpec(CommandLineInputSpec):
    """Positional input spec for the itkObjectMorphology CLI wrapper."""
    inFilename = File(argstr='%s', desc="inFilename", exists=True,
                      mandatory=True, position=0)
    fileMode = traits.Str(argstr='%s', desc="fileMode", exists=True,
                          mandatory=True, position=1)
    var1 = traits.String(argstr='%s', desc="var1", exists=True,
                         mandatory=True, position=2)
    var2 = traits.String(argstr='%s', desc="var2", exists=True,
                         mandatory=True, position=3)
    # Comma-joined list of radii; the element type is carried as metadata.
    radius = traits.List(argstr='%s', type="traits.Float", sep=",",
                         desc="radius", exists=True, mandatory=True,
                         position=4)
    var3 = traits.Int(argstr='%s', desc="var3", exists=True,
                      mandatory=True, position=5)
    outFilename = traits.Str(argstr='%s', desc="outFilename", exists=True,
                             mandatory=True, position=6)
class Passenger(PrtEvent):
    """A passenger.

    Tracks the passenger's location over the simulation and accumulates
    time spent in each mode of transit: waiting (at a station), riding
    (in a vehicle), and walking (loc is None).

    Fixes relative to the original:
    - the Python-2-only ``raise Exception, msg`` statement is replaced by
      the call form, valid on both Python 2 and 3;
    - a large block of commented-out ``ui.TableEditor`` code was removed.
    """
    mass = traits.Int()
    _loc = traits.Either(traits.Instance('pyprt.sim.station.Station'),
                         traits.Instance('pyprt.sim.vehicle.BaseVehicle'),
                         None)

    traits_view = ui.View(ui.Item(name='label'),
                          ui.Item(name='ID'),
                          ui.Item(name='loc'),
                          ui.Item(name='mass'),  # in kg
                          ui.Item(name='trip_success'),
                          ui.Item(name='wait_time', format_func=sec_to_hms),
                          ui.Item(name='walk_time', format_func=sec_to_hms),
                          ui.Item(name='ride_time', format_func=sec_to_hms),
                          ui.Item(name='will_share'),
                          ui.Item(name='src_station'),
                          ui.Item(name='dest_station'),
                          ui.Item(name='load_delay', format_func=sec_to_hms),
                          ui.Item(name='unload_delay', format_func=sec_to_hms),
                          style='readonly',
                          handler=NoWritebackOnCloseHandler())

    def __init__(self, time, ID, src_station, dest_station,
                 load_delay, unload_delay, will_share, mass):
        super(Passenger, self).__init__(time, ID)
        self.src_station = src_station
        self.dest_station = dest_station
        self.load_delay = load_delay
        self.unload_delay = unload_delay
        self.will_share = will_share  # Willing to share pod (if same dest)
        self.mass = mass
        self.trip_success = False
        self._loc = src_station
        # For the following, where start and end are times in seconds, with
        # 0 being the start of the sim. An interval's end is None while it
        # is still open.
        self._wait_times = [[time, None, self._loc]]  # triples: [[start, end, loc], ...]
        self._walk_times = []  # pairs: [[start, end], ...]
        self._ride_times = []  # triples: [[start, end, vehicle], ...]
        self._start_time = time
        self._end_time = None

    @property
    def wait_time(self):
        """Total seconds spent waiting at stations (open interval counts
        up to the current sim time)."""
        total = 0
        for start, end, loc in self._wait_times:
            if end is None:
                total += Sim.now() - start
            else:
                total += end - start
        return total

    @property
    def ride_time(self):
        """Total seconds spent riding in vehicles."""
        total = 0
        for start, end, vehicle in self._ride_times:
            if end is None:
                total += Sim.now() - start
            else:
                total += end - start
        return total

    @property
    def walk_time(self):
        """Total seconds spent walking between stations."""
        total = 0
        for start, end in self._walk_times:
            if end is None:
                total += Sim.now() - start
            else:
                total += end - start
        return total

    @property
    def total_time(self):
        """Seconds from creation until trip completion (or until now if
        still traveling)."""
        if self._end_time is None:
            return Sim.now() - self._start_time
        else:
            return self._end_time - self._start_time

    def get_loc(self):
        return self._loc

    def set_loc(self, loc):
        """Changes the loc, and keeps track of how much time is spent in
        each mode of transit: waiting, riding, or walking."""
        ### Track time spent in each mode of transit ###
        # hasattr duck-typing is used instead of isinstance to avoid
        # circular imports (see fill_PassengerStatus).
        if self._loc is None:  # Was walking
            self._walk_times[-1][1] = Sim.now()
        elif hasattr(self._loc, 'vehicle_mass'):  # was in vehicle
            self._ride_times[-1][1] = Sim.now()
        elif hasattr(self._loc, 'platforms'):  # was at station
            self._wait_times[-1][1] = Sim.now()
        else:
            raise Exception("Unknown loc type")

        ### Note if trip is completed. ###
        if loc is self.dest_station:
            self._end_time = Sim.now()
            self.trip_success = True

        ### More time tracking ###
        if not self.trip_success:
            if loc is None:
                self._walk_times.append([Sim.now(), None])
            elif hasattr(loc, 'vehicle_mass'):  # isinstance(loc, BaseVehicle)
                self._ride_times.append([Sim.now(), None, loc])
            elif hasattr(loc, 'platforms'):  # isinstance(loc, TrackSegment)
                self._wait_times.append([Sim.now(), None, loc])
            else:
                raise Exception("Unknown loc type")
        self._loc = loc

    loc = property(get_loc, set_loc,
                   doc="loc is expected to be a Station, a "
                       "Vehicle, or None (which indicates walking from one "
                       "station to another). Setting the loc has "
                       "side-effects, see set_loc.")

    def walk(self, origin_station, dest_station, travel_time,
             cmd_msg, cmd_id):
        """Start a walk from origin_station; completion is scheduled via an
        AlarmClock that fires _post_walk after travel_time seconds."""
        assert self._loc is origin_station
        assert travel_time >= 0
        assert isinstance(cmd_msg, api.CtrlCmdPassengerWalk)
        assert isinstance(cmd_id, int)
        self.loc = None
        common.AlarmClock(Sim.now() + travel_time, self._post_walk,
                          dest_station, cmd_msg, cmd_id)

    def _post_walk(self, dest_station, cmd_msg, cmd_id):
        """Updates stats, changes location, and sends a
        SimCompletePassengerWalk message. To be called once the walk is
        complete."""
        assert self._loc is None
        self.loc = dest_station
        msg = api.SimCompletePassengerWalk()
        msg.msgID = cmd_id
        msg.cmd.CopyFrom(cmd_msg)
        msg.time = Sim.now()
        common.interface.send(api.SIM_COMPLETE_PASSENGER_WALK, msg)

    def fill_PassengerStatus(self, ps):
        """Populate a PassengerStatus protobuf from this passenger."""
        ps.pID = self.ID
        # I'd much rather use isinstance checks, but circular imports are
        # killing me
        if self._loc is None:
            ps.loc_type = api.WALKING
            ps.locID = api.NONE_ID
        elif hasattr(self._loc, 'vehicle_mass'):  # a vehicle
            ps.loc_type = api.VEHICLE
            ps.locID = self._loc.ID
        elif hasattr(self._loc, 'platforms'):  # a station
            ps.loc_type = api.STATION
            ps.locID = self._loc.ID
        else:
            # Fixed: was Python-2-only `raise Exception, "..."` syntax.
            raise Exception("Unknown passenger location type: %s" % self._loc)
        ps.src_stationID = self.src_station.ID
        ps.dest_stationID = self.dest_station.ID
        ps.creation_time = self._start_time
        ps.mass = self.mass
        ps.trip_success = self.trip_success
class DataAxis(t.HasTraits):
    """One axis of a dataset: calibration (scale/offset), size, and the
    current index/value used for navigation and slicing.

    Fix relative to the original: ``__repr__`` fell off the end of the
    method (returning ``None``) when ``name`` was ``None``, which makes
    ``repr()`` raise ``TypeError``; it now always returns a string.
    """
    name = t.Str()
    units = t.Str()
    scale = t.Float()
    offset = t.Float()
    size = t.Int()
    index_in_array = t.Int()
    low_value = t.Float()
    high_value = t.Float()
    value = t.Range('low_value', 'high_value')
    low_index = t.Int(0)
    high_index = t.Int()
    slice = t.Instance(slice)
    slice_bool = t.Bool(False)
    index = t.Range('low_index', 'high_index')
    axis = t.Array()

    def __init__(self, size, index_in_array, name='', scale=1., offset=0.,
                 units='undefined', slice_bool=False):
        super(DataAxis, self).__init__()
        self.name = name
        self.units = units
        self.scale = scale
        self.offset = offset
        self.size = size
        self.high_index = self.size - 1
        self.low_index = 0
        self.index = 0
        self.index_in_array = index_in_array
        self.update_axis()
        # Keep derived state in sync with the calibration traits.
        self.on_trait_change(self.update_axis, ['scale', 'offset', 'size'])
        self.on_trait_change(self.update_value, 'index')
        self.on_trait_change(self.set_index_from_value, 'value')
        self.on_trait_change(self._update_slice, 'slice_bool')
        self.on_trait_change(self.update_index_bounds, 'size')
        self.slice_bool = slice_bool

    def __repr__(self):
        # Fixed: always return a string; the original returned None when
        # self.name was None.
        if self.name is not None:
            return self.name + ' index: ' + str(self.index_in_array)
        return '<unnamed> index: ' + str(self.index_in_array)

    def update_index_bounds(self):
        self.high_index = self.size - 1

    def update_axis(self):
        self.axis = generate_axis(self.offset, self.scale, self.size)
        self.low_value, self.high_value = self.axis.min(), self.axis.max()
#        self.update_value()

    def _update_slice(self, value):
        if value is True:
            self.slice = slice(None)
        else:
            self.slice = None

    def get_axis_dictionary(self):
        adict = {
            'name': self.name,
            'scale': self.scale,
            'offset': self.offset,
            'size': self.size,
            'units': self.units,
            'index_in_array': self.index_in_array,
            'slice_bool': self.slice_bool}
        return adict

    def update_value(self):
        self.value = self.axis[self.index]

    def value2index(self, value):
        """Return the closest index to the given value if between the limits,
        otherwise it will return either the upper or lower limits

        Parameters
        ----------
        value : float

        Returns
        -------
        int
        """
        if value is None:
            return None
        else:
            index = int(round((value - self.offset) / \
                self.scale))
            if self.size > index >= 0:
                return index
            elif index < 0:
                messages.warning("The given value is below the axis limits")
                return 0
            else:
                messages.warning("The given value is above the axis limits")
                return int(self.size - 1)

    def index2value(self, index):
        return self.axis[index]

    def set_index_from_value(self, value):
        self.index = self.value2index(value)
        # If the value is above the limits we must correct the value
        self.value = self.index2value(self.index)

    def calibrate(self, value_tuple, index_tuple, modify_calibration=True):
        """Derive scale/offset from two (value, index) pairs; either apply
        them in place or return them."""
        scale = (value_tuple[1] - value_tuple[0]) /\
            (index_tuple[1] - index_tuple[0])
        offset = value_tuple[0] - scale * index_tuple[0]
        if modify_calibration is True:
            self.offset = offset
            self.scale = scale
        else:
            return offset, scale

    traits_view = \
        tui.View(
            tui.Group(
                tui.Group(
                    tui.Item(name='name'),
                    tui.Item(name='size', style='readonly'),
                    tui.Item(name='index_in_array', style='readonly'),
                    tui.Item(name='index'),
                    tui.Item(name='value', style='readonly'),
                    tui.Item(name='units'),
                    tui.Item(name='slice_bool', label='slice'),
                    show_border=True,),
                tui.Group(
                    tui.Item(name='scale'),
                    tui.Item(name='offset'),
                    label='Calibration',
                    show_border=True,),
                label="Data Axis properties",
                show_border=True,),
        )
class AxesManager(t.HasTraits):
    """Container for a dataset's DataAxis objects.

    Tracks which axes are slicing (signal) axes and which are navigation
    axes, and exposes keyboard navigation over the navigation axes.
    """
    axes = t.List(DataAxis)
    _slicing_axes = t.List()
    _non_slicing_axes = t.List()
    _step = t.Int(1)

    def __init__(self, axes_list):
        super(AxesManager, self).__init__()
        ncoord = len(axes_list)
        self.axes = [None] * ncoord
        for axis_dict in axes_list:
            self.axes[axis_dict['index_in_array']] = DataAxis(**axis_dict)
        slices = [i.slice_bool for i in self.axes
                  if hasattr(i, 'slice_bool')]
        # set_view is called only if there is no current view
        if not slices or np.all(np.array(slices) == False):
            self.set_view()
        self.set_signal_dimension()
        self.on_trait_change(self.set_signal_dimension, 'axes.slice')
        self.on_trait_change(self.set_signal_dimension, 'axes.index')

    def set_signal_dimension(self):
        """Recompute the slicing/navigation partition and cached indexes."""
        getitem_tuple = []
        indexes = []
        values = []
        self._slicing_axes = []
        self._non_slicing_axes = []
        for axis in self.axes:
            if axis.slice is None:
                # Navigation axis: contributes its current index.
                getitem_tuple.append(axis.index)
                indexes.append(axis.index)
                values.append(axis.value)
                self._non_slicing_axes.append(axis)
            else:
                # Signal axis: contributes its slice.
                getitem_tuple.append(axis.slice)
                self._slicing_axes.append(axis)
        self._getitem_tuple = getitem_tuple
        self._indexes = np.array(indexes)
        self._values = np.array(values)
        self.signal_dimension = len(self._slicing_axes)
        self.navigation_dimension = len(self._non_slicing_axes)
        self.navigation_shape = [axis.size
                                 for axis in self._non_slicing_axes]

    def set_not_slicing_indexes(self, nsi):
        for index, axis in zip(nsi, self.axes):
            axis.index = index

    def set_view(self, view='hyperspectrum'):
        """view : 'hyperspectrum' or 'image' """
        tl = [False] * len(self.axes)
        if view == 'hyperspectrum':
            # We limit the signal_dimension to 1 to get a spectrum
            tl[0] = True
        elif view == 'image':
            tl[:2] = True, True
        # NOTE(review): pop() consumes tl from the end, so the True flag(s)
        # land on the *last* axes — presumably intentional (signal axes
        # last in the array); confirm before changing.
        for axis in self.axes:
            axis.slice_bool = tl.pop()

    def set_slicing_axes(self, slicing_axes):
        '''Easily choose which axes are slicing

        Parameters
        ----------

        slicing_axes: tuple of ints
            A list of the axis indexes that we want to slice

        '''
        for axis in self.axes:
            axis.slice_bool = axis.index_in_array in slicing_axes

    def connect(self, f):
        """Call f whenever a navigation axis's index changes."""
        for axis in self.axes:
            if axis.slice is None:
                axis.on_trait_change(f, 'index')

    def disconnect(self, f):
        """Undo a previous connect(f)."""
        for axis in self.axes:
            if axis.slice is None:
                axis.on_trait_change(f, 'index', remove=True)

    def key_navigator(self, event):
        """Arrow/numpad navigation; PageUp/PageDown change the step size."""
        if len(self._non_slicing_axes) not in (1, 2):
            return
        x = self._non_slicing_axes[-1]
        if event.key == "right" or event.key == "6":
            x.index += self._step
        elif event.key == "left" or event.key == "4":
            x.index -= self._step
        elif event.key == "pageup":
            self._step += 1
        elif event.key == "pagedown":
            if self._step > 1:
                self._step -= 1
        if len(self._non_slicing_axes) == 2:
            y = self._non_slicing_axes[-2]
            if event.key == "up" or event.key == "8":
                y.index -= self._step
            elif event.key == "down" or event.key == "2":
                y.index += self._step

    def edit_axes_traits(self):
        for axis in self.axes:
            axis.edit_traits()

    def copy(self):
        return (copy.copy(self))

    def deepcopy(self):
        return (copy.deepcopy(self))

    def __deepcopy__(self, *args):
        return AxesManager(self._get_axes_dicts())

    def _get_axes_dicts(self):
        return [axis.get_axis_dictionary() for axis in self.axes]

    def _get_slicing_axes_dicts(self):
        axes_dicts = []
        for i, axis in enumerate(self._slicing_axes):
            adict = axis.get_axis_dictionary()
            adict['index_in_array'] = i
            axes_dicts.append(adict)
        return axes_dicts

    def _get_non_slicing_axes_dicts(self):
        axes_dicts = []
        for i, axis in enumerate(self._non_slicing_axes):
            adict = axis.get_axis_dictionary()
            adict['index_in_array'] = i
            axes_dicts.append(adict)
        return axes_dicts

    traits_view = tui.View(tui.Item('axes', style='custom'))
class itkConstantImageMathInputSpec(CommandLineInputSpec):
    """Positional input spec for the itkConstantImageMath CLI wrapper."""
    inFilename = File(argstr='%s', desc="inFilename", exists=True,
                      mandatory=True, position=0)
    fileMode = traits.Str(argstr='%s', desc="fileMode", exists=True,
                          mandatory=True, position=1)
    value = traits.Int(argstr='%s', desc="value", exists=True,
                       mandatory=True, position=2)
    operation = traits.String(argstr='%s', desc="operation", exists=True,
                              mandatory=True, position=3)
    outFilename = traits.Str(argstr='%s', desc="outFilename", exists=True,
                             mandatory=True, position=4)
class QuadMeshSmoothingInputSpec(CommandLineInputSpec):
    """Input spec for the QuadMeshSmoothing command-line tool.

    Fix: three flags had a doubled dash prefix (``----inputSurface``,
    ``----delaunayConforming``, ``----outputSurface``); corrected to the
    standard ``--`` prefix, consistent with the other flags in this class
    and with the sibling QuadMeshDecimationInputSpec.
    """
    inputSurface = traits.Str(argstr="--inputSurface %s")
    numberOfIterations = traits.Int(argstr="--numberOfIterations %d")
    relaxationFactor = traits.Float(argstr="--relaxationFactor %f")
    delaunayConforming = traits.Bool(argstr="--delaunayConforming ")
    outputSurface = traits.Str(argstr="--outputSurface %s")
class QuadMeshDecimationInputSpec(CommandLineInputSpec):
    """Input spec for the QuadMeshDecimation command-line tool."""
    inputSurface = traits.Str(argstr="--inputSurface %s")
    numberOfElements = traits.Int(argstr="--numberOfElements %d")
    topologyChange = traits.Bool(argstr="--topologyChange ")
    outputSurface = traits.Str(argstr="--outputSurface %s")
class StandardizeImageIntensityInputSpec(CommandLineInputSpec):
    """Positional input spec for the StandardizeImageIntensity CLI wrapper."""
    imageFilename = File(argstr='%s', desc="ImageFilename", exists=True,
                         mandatory=True, position=0)
    brainLabelImageFilename = File(argstr='%s', desc="BrainLabelImageFilename",
                                   exists=True, mandatory=True, position=1)
    resultImageFilename = traits.Str(argstr='%s', desc="ResultImageFilename",
                                     exists=True, mandatory=True, position=2)
    minLabel = traits.Int(argstr='%s', desc="MinLabel", exists=True,
                          mandatory=True, position=3)
    maxLabel = traits.Int(argstr='%s', desc="MaxLabel", exists=True,
                          mandatory=True, position=4)
class PipelineConfiguration(traits.HasTraits):
    """Full configuration of a Connectome Mapper pipeline run.

    Holds project/subject metadata, per-stage parameters (registration,
    reconstruction, tractography, fiber filtering, connectome creation),
    activation flags for each stage, and path helpers that derive the on-disk
    layout (``RAWDATA``, ``NIFTI``, ``CMP``, ``FREESURFER`` ...) from the
    subject working directory.

    Fixes relative to the generated original:
    - ``nlin_reg_fnirt_param``: ``--miter==5,5,5,5`` had a doubled ``=``.
    - ``species`` default restored to ``'H**o sapiens'`` (was garbled).
    - ``__init__`` set ``os.environ['FSLOUTPUTTYPE']`` twice; once is enough.
    - ``get_cmp_binary_path`` raised a bare string (a TypeError on modern
      Python); it now raises an ``Exception``.
    """

    # ---------------- project settings ----------------
    project_dir = traits.Directory(
        exists=False, desc="data path to where the project is stored")

    # project metadata (for connectome file)
    project_metadata = traits.Dict(
        desc="project metadata to be stored in the connectome file")

    # DEPRECATED: this field is deprecated after version >1.0.2
    generator = traits.Str()

    # parcellation scheme
    parcellation_scheme = traits.Enum("NativeFreesurfer",
                                      ["Lausanne2008", "NativeFreesurfer"],
                                      desc="used parcellation scheme")

    # choose between 'L' (linear) and 'N' (non-linear) and 'B' (bbregister)
    registration_mode = traits.Enum(
        "Linear", ["Linear", "Nonlinear", "BBregister"],
        desc="registration mode: linear or non-linear or bbregister")

    # choose between 'L' (linear) and 'B' (bbregister)
    rsfmri_registration_mode = traits.Enum(
        "Linear", ["Linear", "BBregister"],
        desc="registration mode: linear or bbregister")

    diffusion_imaging_model = traits.Enum("DSI", ["DSI", "DTI", "QBALL"])

    # DSI
    nr_of_gradient_directions = traits.Str('515')
    nr_of_sampling_directions = traits.Str('181')
    odf_recon_param = traits.Str('-b0 1 -dsi -p 4 -sn 0')
    hardi_recon_param = traits.Str('-b0 1 -p 3 -sn 0')

    # DTI
    gradient_table_file = traits.File(exists=False)
    gradient_table = traits.Enum('siemens_64', [
        'custom', 'mgh_dti_006', 'mgh_dti_018', 'mgh_dti_030', 'mgh_dti_042',
        'mgh_dti_060', 'mgh_dti_072', 'mgh_dti_090', 'mgh_dti_120',
        'mgh_dti_144', 'siemens_06', 'siemens_12', 'siemens_20',
        'siemens_256', 'siemens_30', 'siemens_64'
    ])
    nr_of_b0 = traits.Str('1')
    max_b0_val = traits.Str('1000')
    dti_recon_param = traits.Str('')
    dtb_dtk2dir_param = traits.Str('')

    # tractography
    streamline_param = traits.Str('--angle 60 --seeds 32')

    # registration
    lin_reg_param = traits.Str('-usesqform -nosearch -dof 6 -cost mutualinfo')
    nlin_reg_bet_T2_param = traits.Str('-f 0.35 -g 0.15')
    nlin_reg_bet_b0_param = traits.Str('-f 0.2 -g 0.2')
    # NOTE: '--miter==5,5,5,5' (doubled '=') corrected to '--miter=5,5,5,5'
    # to match fnirt's option syntax used by the other flags in this string.
    nlin_reg_fnirt_param = traits.Str(
        '--subsamp=8,4,2,2 --miter=5,5,5,5 '
        '--lambda=240,120,90,30 --splineorder=3 '
        '--applyinmask=0,0,1,1 --applyrefmask=0,0,1,1')
    bb_reg_param = traits.Str('--init-header --dti')

    # dicom converter
    do_convert_diffusion = traits.Bool(True)
    do_convert_T1 = traits.Bool(True)
    do_convert_T2 = traits.Bool(False)
    do_convert_fMRI = traits.Bool(False)

    # rsfmri
    rsfmri_lin_reg_param = traits.Str(
        '-usesqform -nosearch -dof 6 -cost mutualinfo')
    rsfmri_bb_reg_param = traits.Str('--init-header --dti')
    do_save_mat = traits.Bool(True)

    # DEPRECATED:
    subject_raw_glob_diffusion = traits.Str("*.*")
    subject_raw_glob_T1 = traits.Str("*.*")
    subject_raw_glob_T2 = traits.Str("*.*")
    extract_diffusion_metadata = traits.Bool(False)

    # subject
    subject_name = traits.Str()
    subject_timepoint = traits.Str()
    subject_workingdir = traits.Directory()
    # lazily created by get_logger(); not a trait on purpose
    subject_logger = None
    subject_metadata = [
        KeyValue(key='description', value=''),
        KeyValue(key='', value=''),
        KeyValue(key='', value=''),
        KeyValue(key='', value=''),
        KeyValue(key='', value=''),
        KeyValue(key='', value=''),
    ]

    # stage activation flags
    active_createfolder = traits.Bool(True)
    active_dicomconverter = traits.Bool(False)
    active_registration = traits.Bool(False)
    active_segmentation = traits.Bool(False)
    active_parcellation = traits.Bool(False)
    active_applyregistration = traits.Bool(False)
    active_reconstruction = traits.Bool(False)
    active_tractography = traits.Bool(False)
    active_fiberfilter = traits.Bool(False)
    active_connectome = traits.Bool(False)
    active_statistics = traits.Bool(False)
    active_rsfmri = traits.Bool(False)
    active_cffconverter = traits.Bool(False)
    skip_completed_stages = traits.Bool(False)

    # metadata
    creator = traits.Str()
    email = traits.Str()
    publisher = traits.Str()
    created = traits.Date()
    modified = traits.Date()
    license = traits.Str()
    # rights = traits.Str()
    reference = traits.Str()
    # relation = traits.Str()
    species = traits.Str('H**o sapiens')
    description = traits.Str()

    # segmentation
    recon_all_param = traits.Str('-all -no-isrunning')

    # parcellation
    custompar_nrroi = traits.Int()
    custompar_nodeinfo = traits.File()
    custompar_volumeparcell = traits.File()

    # fiber filtering
    apply_splinefilter = traits.Bool(
        True, desc='apply the spline filtering from diffusion toolkit')
    apply_fiberlength = traits.Bool(True, desc='apply cutoff to fiber lengths')
    fiber_cutoff_lower = traits.Float(
        20.0,
        desc='cut fibers that are shorter in length than given length in mm')
    fiber_cutoff_upper = traits.Float(
        500.0,
        desc='cut fibers that are longer in length than given length in mm')

    # measures
    connection_P0 = traits.Bool(False)
    connection_gfa = traits.Bool(False)
    connection_kurtosis = traits.Bool(False)
    connection_skewness = traits.Bool(False)
    connection_adc = traits.Bool(False)
    connection_fa = traits.Bool(False)

    # cff converter
    cff_fullnetworkpickle = traits.Bool(
        True,
        desc='stores the full network pickle generated by connectome creation')
    cff_cmatpickle = traits.Bool(True)
    cff_originalfibers = traits.Bool(True, desc='stores original fibers')
    cff_filteredfibers = traits.Bool(True, desc='stores filtered fibers')
    cff_finalfiberlabels = traits.Bool(
        True, desc='stores final fibers and their labelarrays')
    cff_fiberarr = traits.Bool(True)
    cff_rawdiffusion = traits.Bool(True)
    cff_scalars = traits.Bool(True)
    cff_rawT1 = traits.Bool(True)
    cff_rawT2 = traits.Bool(True)
    cff_roisegmentation = traits.Bool(
        True, desc='stores multi-resolution parcellation volumes')
    cff_surfaces = traits.Bool(True,
                               desc='stores individually genertated surfaces')
    cff_surfacelabels = traits.Bool(
        True, desc='stores individually genertated surfaces')

    # do you want to do manual white matter mask correction?
    wm_handling = traits.Enum(
        1, [1, 2, 3],
        desc="in what state should the freesurfer step be processed")

    # custom parcellation
    parcellation = traits.Dict(
        desc="provide the dictionary with your parcellation.")

    # start up fslview
    inspect_registration = traits.Bool(
        False, desc='start fslview to inspect the the registration results')
    fsloutputtype = traits.Enum('NIFTI', ['NIFTI'])

    # connectome creation
    compute_curvature = traits.Bool(False)

    # email notification, needs a local smtp server
    # sudo apt-get install postfix
    emailnotify = traits.ListStr(
        [], desc='the email address to send stage completion status message')

    freesurfer_home = traits.Directory(exists=False, desc="path to Freesurfer")
    fsl_home = traits.Directory(exists=False, desc="path to FSL")
    dtk_home = traits.Directory(exists=False, desc="path to diffusion toolkit")

    # This file stores descriptions of the inputs/outputs to each stage of the
    # CMP pipeline.  It can be queried using the PipelineStatus python object
    pipeline_status_file = traits.Str("cmp.status")

    # Pipeline status object (evaluated at class creation, before the
    # attribute shadows the imported ``pipeline_status`` module name).
    pipeline_status = pipeline_status.PipelineStatus()

    def _get_lausanne_parcellation(self, parcel="NativeFreesurfer"):
        """Return the parcellation dictionary for the given scheme.

        ``parcel`` selects either the multi-scale Lausanne2008 scheme or the
        single-resolution NativeFreesurfer scheme (the default).
        """
        if parcel == "Lausanne2008":
            return {
                'scale33': {
                    'number_of_regions': 83,
                    # contains name, url, color, freesurfer_label, etc.
                    # used for connection matrix
                    'node_information_graphml': op.join(
                        self.get_lausanne_parcellation_path('resolution83'),
                        'resolution83.graphml'),
                    # scalar node values on fsaverage? or atlas?
                    'surface_parcellation': None,
                    # scalar node values in fsaverage volume?
                    'volume_parcellation': None,
                    # the subdirectory name from where to copy parcellations,
                    # with hemispheric wildcard
                    'fs_label_subdir_name': 'regenerated_%s_36',
                    # should we subtract the cortical rois for the white
                    # matter mask?
                    'subtract_from_wm_mask': 1,
                },
                'scale60': {
                    'number_of_regions': 129,
                    'node_information_graphml': op.join(
                        self.get_lausanne_parcellation_path('resolution150'),
                        'resolution150.graphml'),
                    'surface_parcellation': None,
                    'volume_parcellation': None,
                    'fs_label_subdir_name': 'regenerated_%s_60',
                    'subtract_from_wm_mask': 1,
                },
                'scale125': {
                    'number_of_regions': 234,
                    'node_information_graphml': op.join(
                        self.get_lausanne_parcellation_path('resolution258'),
                        'resolution258.graphml'),
                    'surface_parcellation': None,
                    'volume_parcellation': None,
                    'fs_label_subdir_name': 'regenerated_%s_125',
                    'subtract_from_wm_mask': 1,
                },
                'scale250': {
                    'number_of_regions': 463,
                    'node_information_graphml': op.join(
                        self.get_lausanne_parcellation_path('resolution500'),
                        'resolution500.graphml'),
                    'surface_parcellation': None,
                    'volume_parcellation': None,
                    'fs_label_subdir_name': 'regenerated_%s_250',
                    'subtract_from_wm_mask': 1,
                },
                'scale500': {
                    'number_of_regions': 1015,
                    'node_information_graphml': op.join(
                        self.get_lausanne_parcellation_path('resolution1015'),
                        'resolution1015.graphml'),
                    'surface_parcellation': None,
                    'volume_parcellation': None,
                    'fs_label_subdir_name': 'regenerated_%s_500',
                    'subtract_from_wm_mask': 1,
                },
            }
        else:
            return {
                'freesurferaparc': {
                    'number_of_regions': 83,
                    # contains name, url, color, freesurfer_label, etc.
                    # used for connection matrix
                    'node_information_graphml': op.join(
                        self.get_lausanne_parcellation_path('freesurferaparc'),
                        'resolution83.graphml'),
                    # scalar node values on fsaverage? or atlas?
                    'surface_parcellation': None,
                    # scalar node values in fsaverage volume?
                    'volume_parcellation': None,
                }
            }

    def __init__(self, **kwargs):
        # NOTE: In python 2.6, object.__init__ no longer accepts input
        # arguments.  HasTraits does not define an __init__ and
        # therefore these args were being ignored.
        super(PipelineConfiguration, self).__init__(**kwargs)

        # the default parcellation provided
        self.parcellation = self._get_lausanne_parcellation(
            parcel="NativeFreesurfer")

        self.can_use_dipy = dipy_here

        # no email notify
        self.emailnotify = []

        # default gradient table for DTI
        self.gradient_table_file = self.get_cmp_gradient_table('siemens_64')

        # try to discover paths from environment variables
        try:
            self.freesurfer_home = op.join(os.environ['FREESURFER_HOME'])
            self.fsl_home = op.join(os.environ['FSLDIR'])
            self.dtk_home = os.environ['DTDIR']
            self.dtk_matrices = op.join(self.dtk_home, 'matrices')
        except KeyError:
            pass

        # the original set FSLOUTPUTTYPE twice with the same value;
        # a single assignment suffices
        self.fsloutputtype = 'NIFTI'
        os.environ['FSLOUTPUTTYPE'] = self.fsloutputtype

    def consistency_check(self):
        """ Provides a checking facility for configuration objects """

        # project name not empty
        if not op.exists(self.project_dir):
            msg = 'Your project directory does not exist!'
            raise Exception(msg)

        # check metadata
        if self.creator == '':
            raise Exception('You need to enter creator metadata!')
        if self.publisher == '':
            raise Exception('You need to enter publisher metadata!')
        if self.email == '':
            raise Exception('You need to enter email of a contact person!')

        # check if software paths exists
        pas = {
            'configuration.freesurfer_home': self.freesurfer_home,
            'configuration.fsl_home': self.fsl_home,
            'configuration.dtk_home': self.dtk_home,
            'configuration.dtk_matrices': self.dtk_matrices
        }
        for k, p in pas.items():
            if not op.exists(p):
                msg = 'Required software path for %s does not exists: %s' % (
                    k, p)
                raise Exception(msg)

        if self.subject_workingdir == '':
            msg = 'No working directory defined for subject'
            raise Exception(msg)

    def get_cmp_home(self):
        """ Return the cmp home path """
        return op.dirname(__file__)

    def get_rawdata(self):
        """ Return raw data path for the subject """
        return op.join(self.get_subj_dir(), 'RAWDATA')

    def get_log(self):
        """ Get subject log dir """
        return op.join(self.get_subj_dir(), 'LOG')

    def get_logname(self, suffix='.log'):
        """ Get a generic name for the log and pickle files """
        a = dt.datetime.now()
        return 'pipeline-%s-%02i%02i-%s-%s%s' % (
            a.date().isoformat(), a.time().hour, a.time().minute,
            self.subject_name, self.subject_timepoint, suffix)

    def get_logger(self):
        """ Get the logger instance created """
        if self.subject_logger is None:
            # setup logger for the subject
            self.subject_logger = \
                getLog(os.path.join(self.get_log(), self.get_logname()))
        return self.subject_logger

    def get_rawglob(self, modality):
        """ DEPRECATED: Get the file name endings for modality """
        if modality == 'diffusion':
            if not self.subject_raw_glob_diffusion == '':
                return self.subject_raw_glob_diffusion
            else:
                raise Exception('No raw_glob_diffusion defined for subject')
        elif modality == 'T1':
            if not self.subject_raw_glob_T1 == '':
                return self.subject_raw_glob_T1
            else:
                raise Exception('No raw_glob_T1 defined for subject')
        elif modality == 'T2':
            if not self.subject_raw_glob_T2 == '':
                return self.subject_raw_glob_T2
            else:
                raise Exception('No raw_glob_T2 defined for subject')

    def get_dicomfiles(self, modality):
        """ Get a list of dicom files for the requested modality.

        Tries to discover them automatically.

        Raises an exception when the modality folder holds no DICOM files
        (``.nii``/``.nii.gz`` files are excluded from the search).
        """
        from glob import glob
        if modality == 'diffusion':
            pat = self.get_raw_diffusion()
        elif modality == 'T1':
            pat = self.get_rawt1()
        elif modality == 'T2':
            pat = self.get_rawt2()
        elif modality == 'fMRI':
            pat = self.get_rawrsfmri()
        # discover files with *.* and *
        difiles = sorted(glob(op.join(pat, '*.*')) + glob(op.join(pat, '*')))
        # exclude potential .nii and .nii.gz files
        difiles = [
            e for e in difiles
            if not e.endswith('.nii') and not e.endswith('.nii.gz')
        ]
        # check if no files and throw exception
        if len(difiles) == 0:
            raise Exception('Could not find any DICOM files in folder %s' %
                            pat)
        return difiles

    def get_rawrsfmri(self):
        """ Get raw functional MRI path for subject """
        return op.join(self.get_rawdata(), 'fMRI')

    def get_rawt1(self):
        """ Get raw structural MRI T1 path for subject """
        return op.join(self.get_rawdata(), 'T1')

    def get_rawt2(self):
        """ Get raw structural MRI T2 path for subject """
        return op.join(self.get_rawdata(), 'T2')

    def get_subj_dir(self):
        """ Return the subject working directory """
        return self.subject_workingdir

    def get_raw_diffusion(self):
        """ Get the raw diffusion path for subject """
        if self.diffusion_imaging_model == 'DSI':
            return op.join(self.get_subj_dir(), 'RAWDATA', 'DSI')
        elif self.diffusion_imaging_model == 'DTI':
            return op.join(self.get_subj_dir(), 'RAWDATA', 'DTI')
        elif self.diffusion_imaging_model == 'QBALL':
            return op.join(self.get_subj_dir(), 'RAWDATA', 'QBALL')

    def get_fs(self):
        """ Returns the subject root folder path for freesurfer files """
        return op.join(self.get_subj_dir(), 'FREESURFER')

    def get_stats(self):
        """ Return statistic output path """
        return op.join(self.get_subj_dir(), 'STATS')

    def get_cffdir(self):
        """ Returns path to store connectome file """
        return op.join(self.get_cmp(), 'cff')

    def get_nifti(self):
        """ Returns the subject root folder path for nifti files """
        return op.join(self.get_subj_dir(), 'NIFTI')

    def get_nifti_trafo(self):
        """ Returns the path to the subjects transformation / registration
        matrices """
        return op.join(self.get_nifti(), 'transformations')

    def get_nifti_bbregister(self):
        """ Returns the path to the subjects transformation / registration
        matrices, bbregister mode """
        return op.join(self.get_nifti(), 'bbregister')

    def get_diffusion_metadata(self):
        """ Diffusion metadata, i.e. where gradient_table.txt is stored """
        return op.join(self.get_nifti(), 'diffusion_metadata')

    def get_nifti_wm_correction(self):
        """ Returns the path to the subjects wm_correction path """
        return op.join(self.get_nifti(), 'wm_correction')

    def get_cmp(self):
        """ Returns the root CMP output folder for the subject """
        return op.join(self.get_subj_dir(), 'CMP')

    def get_cmp_rawdiff(self, ):
        """ Returns the raw diffusion folder inside CMP """
        return op.join(self.get_cmp(), 'raw_diffusion')

    def get_cmp_rawdiff_reconout(self):
        """ Returns the output path for diffusion reconstruction
        without prefix"""
        if self.diffusion_imaging_model == 'DSI':
            return op.join(self.get_cmp(), 'raw_diffusion', 'odf_0')
        elif self.diffusion_imaging_model == 'DTI':
            return op.join(self.get_cmp(), 'raw_diffusion', 'dti_0')
        elif self.diffusion_imaging_model == 'QBALL':
            return op.join(self.get_cmp(), 'raw_diffusion', 'qball_0')

    def get_cmp_rawdiff_resampled(self):
        """ Returns the path to the 2x2x2-resampled raw diffusion data """
        return op.join(self.get_cmp_rawdiff(), '2x2x2')

    def get_cmp_fsout(self):
        """ Returns the freesurfer output folder inside CMP """
        return op.join(self.get_cmp(), 'fs_output')

    def get_cmp_fibers(self):
        """ Returns the fibers folder inside CMP """
        return op.join(self.get_cmp(), 'fibers')

    def get_cmp_scalars(self):
        """ Returns the scalar maps folder inside CMP """
        return op.join(self.get_cmp(), 'scalars')

    def get_cmp_matrices(self):
        """ Returns the connection matrices folder inside the fibers path """
        return op.join(self.get_cmp_fibers(), 'matrices')

    def get_cmp_fmri(self):
        """ Returns the fMRI folder inside CMP """
        return op.join(self.get_cmp(), 'fMRI')

    def get_cmp_tracto_mask(self):
        """ Returns the high-resolution tractography mask folder """
        return op.join(self.get_cmp_fsout(), 'HR')

    def get_cmp_tracto_mask_tob0(self):
        """ Returns the tractography mask registered to b0 """
        return op.join(self.get_cmp_fsout(), 'HR__registered-TO-b0')

    def get_custom_gradient_table(self):
        """ Returns the absolute path to the custom gradient table
        with optional b-values in the 4th row """
        return self.gradient_table_file

    def get_cmp_gradient_table(self, name):
        """ Return default gradient tables shipped with CMP. These are
        mainly derived from Diffusion Toolkit """
        cmp_path = op.dirname(__file__)
        return op.join(cmp_path, 'data', 'diffusion', 'gradient_tables',
                       name + '.txt')

    def get_dtb_streamline_vecs_file(self, as_text=False):
        """ Returns the odf directions file used for DTB_streamline """
        cmp_path = op.dirname(__file__)
        if as_text:
            return op.join(cmp_path, 'data', 'diffusion', 'odf_directions',
                           '181_vecs.txt')
        else:
            return op.join(cmp_path, 'data', 'diffusion', 'odf_directions',
                           '181_vecs.dat')

    # XXX
    def get_cmp_scalarfields(self):
        """ Returns a list with tuples with the scalar field name and the
        absolute path to its nifti file """
        ret = []
        if self.diffusion_imaging_model == 'DSI':
            # add gfa per default
            ret.append(('gfa',
                        op.join(self.get_cmp_scalars(), 'dsi_gfa.nii.gz')))
            # XXX: add adc per default
        elif self.diffusion_imaging_model == 'DTI':
            # nothing to add yet for DTI
            pass
        return ret

    def get_dtk_dsi_matrix(self):
        """ Returns the DSI matrix from Diffusion Toolkit

        The parameters have to be set in the configuration object with keys:
        1. number of gradient directions : 'nr_of_gradient_directions'
        2. number of sampling directions : 'nr_of_sampling_directions'

        Example
        -------
        confobj.nr_of_gradient_directions = 515
        confobj.nr_of_sampling_directions = 181

        Returns matrix including absolute path to DSI_matrix_515x181.dat
        """
        grad = self.nr_of_gradient_directions
        samp = self.nr_of_sampling_directions
        fpath = op.join(self.dtk_matrices,
                        "DSI_matrix_%sx%s.dat" % (grad, samp))
        if not op.exists(fpath):
            msg = "DSI matrix does not exists: %s" % fpath
            raise Exception(msg)
        return fpath

    def get_lausanne_atlas(self, name=None):
        """ Return the absolute path to the lausanne parcellation atlas
        for the resolution name """
        cmp_path = op.dirname(__file__)
        provided_atlases = [
            'myatlas_36_rh.gcs', 'myatlasP1_16_rh.gcs', 'myatlasP17_28_rh.gcs',
            'myatlasP29_36_rh.gcs', 'myatlas_60_rh.gcs', 'myatlas_125_rh.gcs',
            'myatlas_250_rh.gcs', 'myatlas_36_lh.gcs', 'myatlasP1_16_lh.gcs',
            'myatlasP17_28_lh.gcs', 'myatlasP29_36_lh.gcs',
            'myatlas_60_lh.gcs', 'myatlas_125_lh.gcs', 'myatlas_250_lh.gcs'
        ]
        if name in provided_atlases:
            return op.join(cmp_path, 'data', 'colortable_and_gcs',
                           'my_atlas_gcs', name)
        else:
            msg = "Atlas %s does not exists" % name
            raise Exception(msg)

    def get_freeview_lut(self, name):
        """ Returns the Look-Up-Table as text file for a given parcellation
        scheme in a dictionary """
        cmp_path = op.dirname(__file__)
        if name == "NativeFreesurfer":
            return {
                'freesurferaparc':
                op.join(cmp_path, 'data', 'parcellation', 'nativefreesurfer',
                        'freesurferaparc', 'FreeSurferColorLUT_adapted.txt')
            }
        else:
            return ""

    def get_lausanne_parcellation_path(self, parcellationname):
        """ Return the data path for the given parcellation resolution,
        validated against the current parcellation scheme """
        cmp_path = op.dirname(__file__)
        if self.parcellation_scheme == "Lausanne2008":
            allowed_default_parcel = [
                'resolution83', 'resolution150', 'resolution258',
                'resolution500', 'resolution1015'
            ]
            if parcellationname in allowed_default_parcel:
                return op.join(cmp_path, 'data', 'parcellation',
                               'lausanne2008', parcellationname)
            else:
                msg = "Not a valid default parcellation name for the lausanne2008 parcellation scheme"
                raise Exception(msg)
        else:
            allowed_default_parcel = ['freesurferaparc']
            if parcellationname in allowed_default_parcel:
                return op.join(cmp_path, 'data', 'parcellation',
                               'nativefreesurfer', parcellationname)
            else:
                msg = "Not a valid default parcellation name for the NativeFreesurfer parcellation scheme"
                raise Exception(msg)

    def get_cmp_binary_path(self):
        """ Returns the path to the binary files for the current platform
        and architecture

        Raises when no binaries are shipped for this platform.
        """
        if sys.platform == 'linux2':
            import platform as pf
            if '32' in pf.architecture()[0]:
                return op.join(op.dirname(__file__), "binary", "linux2",
                               "bit32")
            elif '64' in pf.architecture()[0]:
                return op.join(op.dirname(__file__), "binary", "linux2",
                               "bit64")
        else:
            # the original raised a bare string, which is itself a
            # TypeError at raise time; wrap it in Exception
            raise Exception('No binary files compiled for your platform!')

    def get_pipeline_status_file(self):
        """Returns the absolute path of the pipeline status file"""
        return op.join(self.get_subj_dir(), self.pipeline_status_file)

    def init_pipeline_status(self):
        """Create the 'cmp.status'.  The 'cmp.status' file contains
        information about the inputs/outputs of each pipeline stage"""
        status_file = op.join(self.get_subj_dir(), self.pipeline_status_file)
        self.pipeline_status.Pipeline.name = "cmp"
        self.pipeline_status.SaveToFile(status_file)

    def update_pipeline_status(self):
        """Update the pipeline status on disk with the current status
        in memory"""
        status_file = op.join(self.get_subj_dir(), self.pipeline_status_file)
        self.pipeline_status.SaveToFile(status_file)
class gtractCreateGuideFiberInputSpec(CommandLineInputSpec):
    """Input spec for gtractCreateGuideFiber (all options as ``--flag``)."""

    inputFiber = traits.Str(argstr="--inputFiber %s")
    numberOfPoints = traits.Int(argstr="--numberOfPoints %d")
    outputFiber = traits.Str(argstr="--outputFiber %s")
    # Bool flag: trailing space separates it from any following argument.
    writeXMLPolyDataFile = traits.Bool(argstr="--writeXMLPolyDataFile ")
class LiveTimestampModelerWithAnalogInput(LiveTimestampModeler):
    # Extends LiveTimestampModeler with decoding and display of the analog
    # input (AIN) wordstream coming from the trigger device.

    # button that opens the analog-input viewer UI
    view_AIN = traits.Button(label='view analog input (AIN)')
    viewer = traits.Instance(AnalogInputViewer)

    # the actual analog data (as a wordstream)
    ain_data_raw = traits.Array(dtype=np.uint16, transient=True)
    # leftover undecoded words from the previous update_analog_input() call
    old_data_raw = traits.Array(dtype=np.uint16, transient=True)

    timer3_top = traits.Property(
    )  # necessary to calculate precise timestamps for AIN data
    channel_names = traits.Property()
    Vcc = traits.Property(depends_on='_trigger_device')

    ain_overflowed = traits.Int(
        0, transient=True)  # integer for display (boolean readonly editor ugly)

    ain_wordstream_buffer = traits.Any()
    traits_view = View(
        Group(
            Item('synchronize', show_label=False),
            Item('view_time_model_plot', show_label=False),
            Item('ain_overflowed', style='readonly'),
            Item(
                name='gain',
                style='readonly',
                editor=TextEditor(evaluate=float, format_func=myformat),
            ),
            Item(
                name='offset',
                style='readonly',
                editor=TextEditor(evaluate=float, format_func=myformat2),
            ),
            Item(
                name='residual_error',
                style='readonly',
                editor=TextEditor(evaluate=float, format_func=myformat),
            ),
            Item('view_AIN', show_label=False),
        ),
        title='Timestamp modeler',
    )

    @traits.cached_property
    def _get_Vcc(self):
        # supply voltage reported by the trigger device
        return self._trigger_device.Vcc

    def _get_timer3_top(self):
        return self._trigger_device.timer3_top

    def _get_channel_names(self):
        return self._trigger_device.enabled_channel_names

    def update_analog_input(self):
        """call this function frequently to avoid overruns"""
        new_data_raw = self._trigger_device.get_analog_input_buffer_rawLE()
        # prepend words left over from the previous call so partial
        # packets can be completed
        data_raw = np.hstack((new_data_raw, self.old_data_raw))
        self.ain_data_raw = new_data_raw
        newdata_all = []
        chan_all = []
        any_overflow = False
        #cum_framestamps = []
        # decode as many complete packets as possible from the wordstream
        while len(data_raw):
            result = cDecode.process(data_raw)
            (N, samples, channels, did_overflow, framestamp) = result
            if N == 0:
                # no data was able to be processed
                break
            data_raw = data_raw[N:]
            newdata_all.append(samples)
            chan_all.append(channels)
            if did_overflow:
                any_overflow = True
            # Save framestamp data.
            # This is not done yet:
            ## if framestamp is not None:
            ##     cum_framestamps.append( framestamp )
        self.old_data_raw = data_raw  # save unprocessed data for next run

        if any_overflow:
            # XXX should move to logging the error.
            self.ain_overflowed = 1
            raise AnalogDataOverflowedError()

        if len(chan_all) == 0:
            # no data
            return
        chan_all = np.hstack(chan_all)
        newdata_all = np.hstack(newdata_all)
        USB_channel_numbers = np.unique(chan_all)
        #print len(newdata_all),'new samples on channels',USB_channel_numbers

        ## F_OSC = 8000000.0 # 8 MHz
        ## adc_prescaler = 128
        ## downsample = 20 # maybe 21?
        ## n_chan = 3
        ## F_samp = F_OSC/adc_prescaler/downsample/n_chan
        ## dt=1.0/F_samp
        ## ## print '%.1f Hz sampling. %.3f msec dt'%(F_samp,dt*1e3)
        ## MAXLEN_SEC=0.3
        ## #MAXLEN = int(MAXLEN_SEC/dt)
        MAXLEN = 5000  #int(MAXLEN_SEC/dt)

        # demultiplex the decoded samples per USB channel and append them
        # to the viewer's rolling buffers, clipped to the last MAXLEN points
        for USB_chan in USB_channel_numbers:
            vi = self.viewer.usb_device_number2index[USB_chan]
            cond = chan_all == USB_chan
            newdata = newdata_all[cond]

            oldidx = self.viewer.channels[vi].index
            olddata = self.viewer.channels[vi].data

            if len(oldidx):
                # continue the running sample index from the last point
                baseidx = oldidx[-1] + 1
            else:
                baseidx = 0.0
            # NOTE(review): np.float is a removed alias in NumPy >= 1.20 --
            # works only on old NumPy; would need np.float64 on modern versions
            newidx = np.arange(len(newdata), dtype=np.float) + baseidx

            tmpidx = np.hstack((oldidx, newidx))
            tmpdata = np.hstack((olddata, newdata))

            if len(tmpidx) > MAXLEN:
                # clip to MAXLEN
                self.viewer.channels[vi].index = tmpidx[-MAXLEN:]
                self.viewer.channels[vi].data = tmpdata[-MAXLEN:]
            else:
                self.viewer.channels[vi].index = tmpidx
                self.viewer.channels[vi].data = tmpdata

    def _view_AIN_fired(self):
        # open the analog-input viewer window
        self.viewer.edit_traits()
from nipype.interfaces.base import CommandLine, CommandLineInputSpec, TraitedSpec
import enthought.traits.api as traits
import os
from nipype.interfaces.traits import File
from nipype.interfaces.traits import Directory
from nipype.utils.misc import isdefined


# NOTE(review): the generator emitted ``class test.xmlInputSpec`` -- a dot is
# not legal in a Python identifier, so the original could never be imported.
# Renamed to ``testxmlInputSpec`` (the dot simply dropped); also fixed
# ``exists="True"`` (string) to ``exists=True`` (bool) and the trait type
# passed as a string in ``traits.List("traits.Int", ...)``.
class testxmlInputSpec(CommandLineInputSpec):
    """Input spec for the test.xml command line tool (all ``--flag`` options)."""

    houghEyeDetectorMode = traits.Int(argstr="--houghEyeDetectorMode %d")
    inputTemplateModel = File(exists=True, argstr="--inputTemplateModel %s")
    inputLLSModel = File(exists=True, argstr="--inputLLSModel %s")
    inputEPCAModelMat = File(exists=True, argstr="--inputEPCAModelMat %s")
    inputEPCAModelTxt = File(exists=True, argstr="--inputEPCAModelTxt %s")
    inputVolume = File(exists=True, argstr="--inputVolume %s")
    # traits.Either(Bool, File): True requests a default output name,
    # a File gives it explicitly
    outputVolume = traits.Either(traits.Bool, File,
                                 argstr="--outputVolume %s")
    outputResampledVolume = traits.Either(traits.Bool, File,
                                          argstr="--outputResampledVolume %s")
    outputTransform = traits.Either(traits.Bool, File,
                                    argstr="--outputTransform %s")
    outputLandmarksInInputSpace = traits.Either(
        traits.Bool, File, argstr="--outputLandmarksInInputSpace %s")
    outputLandmarksInACPCAlignedSpace = traits.Either(
        traits.Bool, File, argstr="--outputLandmarksInACPCAlignedSpace %s")
    inputLandmarksPaired = File(exists=True,
                                argstr="--inputLandmarksPaired %s")
    outputLandmarksPaired = traits.Either(traits.Bool, File,
                                          argstr="--outputLandmarksPaired %s")
    outputMRML = traits.Either(traits.Bool, File, argstr="--outputMRML %s")
    outputVerificationScript = traits.Either(
        traits.Bool, File, argstr="--outputVerificationScript %s")
    mspQualityLevel = traits.Int(argstr="--mspQualityLevel %d")
    otsuPercentileThreshold = traits.Float(
        argstr="--otsuPercentileThreshold %f")
    acLowerBound = traits.Float(argstr="--acLowerBound %f")
    cutOutHeadInOutputVolume = traits.Bool(
        argstr="--cutOutHeadInOutputVolume ")
    outputUntransformedClippedVolume = traits.Either(
        traits.Bool, File, argstr="--outputUntransformedClippedVolume %s")
    rescaleIntensities = traits.Bool(argstr="--rescaleIntensities ")
    trimRescaledIntensities = traits.Float(
        argstr="--trimRescaledIntensities %f")
    rescaleIntensitiesOutputRange = traits.List(
        traits.Int, sep=",", argstr="--rescaleIntensitiesOutputRange %d")
class Protocol(TConfig):
    """Specify the Protocol"""
    # transfer protocol; default 'http', allowed values: 'http', 'ftp', 'ssh'
    ptype = T.Trait('http', 'ftp', 'ssh')
    # maximum number of concurrent users (default 1)
    max_users = T.Int(1)