class BRAINSClassPlugsInputSpec(CommandLineInputSpec):
    t1Volume = File(exists=True, argstr="--t1Volume %s")
    t2Volume = File(exists=True, argstr="--t2Volume %s")
    pdVolume = File(exists=True, argstr="--pdVolume %s")
    searchVolume = File(exists=True, argstr="--searchVolume %s")
    gmPlugs = traits.Either(traits.Bool, File, argstr="--gmPlugs %s")
    wmPlugs = traits.Either(traits.Bool, File, argstr="--wmPlugs %s")
    csfPlugs = traits.Either(traits.Bool, File, argstr="--csfPlugs %s")
    plugClassNames = traits.List(traits.Str, sep=",", argstr="--plugClassNames %s")
    t1ClassMeans = traits.List(traits.Float, sep=",", argstr="--t1ClassMeans %f")
    t2ClassMeans = traits.List(traits.Float, sep=",", argstr="--t2ClassMeans %f")
    pdClassMeans = traits.List(traits.Float, sep=",", argstr="--pdClassMeans %f")
    randomSeed = traits.Int(argstr="--randomSeed %d")
    numberOfPlugs = traits.Int(argstr="--numberOfPlugs %d")
    coverage = traits.Float(argstr="--coverage %f")
    permissiveness = traits.Float(argstr="--permissiveness %f")
    meanOutlier = traits.Float(argstr="--meanOutlier %f")
    varOutlier = traits.Float(argstr="--varOutlier %f")
    plugSize = traits.Float(argstr="--plugSize %f")
    partitions = traits.List(traits.Int, sep=",", argstr="--partitions %d")
    numberOfClassPlugs = traits.List(traits.Int, sep=",", argstr="--numberOfClassPlugs %d")
    bloodMode = traits.Enum("Manual", "Top", "Bottom", argstr="--bloodMode %s")
    bloodImage = traits.Enum("T1", "T2", "PD", argstr="--bloodImage %s")
    vbPlugs = File(exists=True, argstr="--vbPlugs %s")
class gtractFiberTrackingInputSpec(CommandLineInputSpec):
    inputTensorVolume = File(exists=True, argstr="--inputTensorVolume %s")
    inputAnisotropyVolume = File(exists=True, argstr="--inputAnisotropyVolume %s")
    inputStartingSeedsLabelMapVolume = File(exists=True, argstr="--inputStartingSeedsLabelMapVolume %s")
    startingSeedsLabel = traits.Int(argstr="--startingSeedsLabel %d")
    inputEndingSeedsLabelMapVolume = File(exists=True, argstr="--inputEndingSeedsLabelMapVolume %s")
    endingSeedsLabel = traits.Int(argstr="--endingSeedsLabel %d")
    inputTract = traits.Str(argstr="--inputTract %s")
    outputTract = traits.Str(argstr="--outputTract %s")
    writeXMLPolyDataFile = traits.Bool(argstr="--writeXMLPolyDataFile ")
    trackingMethod = traits.Enum("Guided", "Free", "Streamline", "GraphSearch", argstr="--trackingMethod %s")
    guidedCurvatureThreshold = traits.Float(argstr="--guidedCurvatureThreshold %f")
    maximumGuideDistance = traits.Float(argstr="--maximumGuideDistance %f")
    seedThreshold = traits.Float(argstr="--seedThreshold %f")
    trackingThreshold = traits.Float(argstr="--trackingThreshold %f")
    curvatureThreshold = traits.Float(argstr="--curvatureThreshold %f")
    branchingThreshold = traits.Float(argstr="--branchingThreshold %f")
    maximumBranchPoints = traits.Int(argstr="--maximumBranchPoints %d")
    useRandomWalk = traits.Bool(argstr="--useRandomWalk ")
    randomSeed = traits.Int(argstr="--randomSeed %d")
    branchingAngle = traits.Float(argstr="--branchingAngle %f")
    minimumLength = traits.Float(argstr="--minimumLength %f")
    maximumLength = traits.Float(argstr="--maximumLength %f")
    stepSize = traits.Float(argstr="--stepSize %f")
    useLoopDetection = traits.Bool(argstr="--useLoopDetection ")
    useTend = traits.Bool(argstr="--useTend ")
    tendF = traits.Float(argstr="--tendF %f")
    tendG = traits.Float(argstr="--tendG %f")
class BRAINSTalairachMaskInputSpec(CommandLineInputSpec):
    inputVolume = File(exists=True, argstr="--inputVolume %s")
    talairachParameters = File(exists=True, argstr="--talairachParameters %s")
    talairachBox = File(exists=True, argstr="--talairachBox %s")
    hemisphereMode = traits.Enum("left", "right", "both", argstr="--hemisphereMode %s")
    expand = traits.Bool(argstr="--expand ")
    outputVolume = traits.Either(traits.Bool, File, argstr="--outputVolume %s")
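# The specs above follow Nipype's CommandLine pattern: each trait's argstr is
# formatted into the command line when the corresponding input is set.  Below
# is a minimal sketch of how such a spec could be wired into a runnable
# interface; the output spec, the executable name, and the example paths are
# assumptions for illustration, not taken from the snippets above.
from nipype.interfaces.base import CommandLine, TraitedSpec, File


class BRAINSTalairachMaskOutputSpec(TraitedSpec):
    outputVolume = File(exists=True)


class BRAINSTalairachMask(CommandLine):
    input_spec = BRAINSTalairachMaskInputSpec
    output_spec = BRAINSTalairachMaskOutputSpec
    _cmd = "BRAINSTalairachMask"  # assumed executable name


if __name__ == "__main__":
    node = BRAINSTalairachMask()
    # exists=True means the path must point to a file that already exists.
    node.inputs.inputVolume = "subject_T1.nii.gz"
    node.inputs.hemisphereMode = "both"
    node.inputs.expand = True
    # Build (without running) the command line Nipype would execute; prints
    # something like:
    #   BRAINSTalairachMask --expand --hemisphereMode both --inputVolume subject_T1.nii.gz
    print(node.cmdline)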
class parameters(trait.HasTraits):
    number_of_B = trait.Int
    number_of_X = trait.Int
    number_of_E = trait.Int
    m = trait.Int
    n = trait.Int
    resource_rate = trait.Float
    imitate_rate = trait.Float
    battle_cost = trait.Float
    endowment_effect = trait.Float
    selection_strength = trait.Float
    real_trans = trait.Bool
    profit_function = trait.Str
    batch_mode = trait.Bool
    update_method = trait.Enum("death-birth", "birth-death", "genetic pool")

    view = View(
        Group(
            Group(Item('number_of_B', label="Number of B"),
                  Item('number_of_X', label="Number of X"),
                  Item('number_of_E', label="Number of E"),
                  show_border=True, label="Population settings"),
            Group(Item('profit_function', label="Profit function f(s)"),
                  Item('endowment_effect', label="Endowment effect (α)"),
                  Item('battle_cost', label="Battle cost (c)"),
                  Item('resource_rate', label="Resource ratio (g)"),
                  show_border=True, label="Game parameter settings"),
            Group(Item('n', label="Simulation generations (N)"),
                  Item('m', label="Repeated rounds (n)"),
                  Item('selection_strength', label="Selection strength (ω)"),
                  Item('imitate_rate', label="Mutation rate (μ)"),
                  Item('update_method', label="Update method"),
                  show_border=True, label="Evolutionary environment settings"),
            Group(Item('batch_mode', label="Batch mode"),
                  show_border=True, label="Observer mode settings")),
        buttons=["OK"], width=280, height=500,
        title="Property rights evolution parameter settings")
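# A short usage sketch for the dialog above (assumes TraitsUI and a working
# GUI backend are installed; the initial values are made up for illustration).
if __name__ == "__main__":
    params = parameters(number_of_B=50, number_of_X=50, number_of_E=0,
                        n=1000, m=10, update_method="death-birth")
    # Open the modal dialog described by the `view` attribute defined above.
    params.configure_traits(view="view")
    print(params.selection_strength, params.battle_cost)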
outputLandmarksInInputSpace = traits.Either(traits.Bool, File, argstr="--outputLandmarksInInputSpace %s")
outputLandmarksInACPCAlignedSpace = traits.Either(traits.Bool, File, argstr="--outputLandmarksInACPCAlignedSpace %s")
inputLandmarksPaired = File(exists=True, argstr="--inputLandmarksPaired %s")
outputLandmarksPaired = traits.Either(traits.Bool, File, argstr="--outputLandmarksPaired %s")
outputMRML = traits.Either(traits.Bool, File, argstr="--outputMRML %s")
outputVerificationScript = traits.Either(traits.Bool, File, argstr="--outputVerificationScript %s")
mspQualityLevel = traits.Int(argstr="--mspQualityLevel %d")
otsuPercentileThreshold = traits.Float(argstr="--otsuPercentileThreshold %f")
acLowerBound = traits.Float(argstr="--acLowerBound %f")
cutOutHeadInOutputVolume = traits.Bool(argstr="--cutOutHeadInOutputVolume ")
outputUntransformedClippedVolume = traits.Either(traits.Bool, File, argstr="--outputUntransformedClippedVolume %s")
rescaleIntensities = traits.Bool(argstr="--rescaleIntensities ")
trimRescaledIntensities = traits.Float(argstr="--trimRescaledIntensities %f")
rescaleIntensitiesOutputRange = traits.List(traits.Int, sep=",", argstr="--rescaleIntensitiesOutputRange %d")
backgroundFillValueString = traits.Str(argstr="--BackgroundFillValue %s")
interpolationMode = traits.Enum("NearestNeighbor", "Linear", "ResampleInPlace", "BSpline", "WindowedSinc", argstr="--interpolationMode %s")
forceACPoint = traits.List(traits.Float, sep=",", argstr="--forceACPoint %f")
forcePCPoint = traits.List(traits.Float, sep=",", argstr="--forcePCPoint %f")
forceVN4Point = traits.List(traits.Float, sep=",", argstr="--forceVN4Point %f")
forceRPPoint = traits.List(traits.Float, sep=",", argstr="--forceRPPoint %f")
inputLandmarksEMSP = File(exists=True, argstr="--inputLandmarksEMSP %s")
forceHoughEyeDetectorReportFailure = traits.Bool(argstr="--forceHoughEyeDetectorReportFailure ")
radiusMPJ = traits.Float(argstr="--rmpj %f")
radiusAC = traits.Float(argstr="--rac %f")
radiusPC = traits.Float(argstr="--rpc %f")
radiusVN4 = traits.Float(argstr="--rVN4 %f")
debug = traits.Bool(argstr="--debug ")
verbose = traits.Bool(argstr="--verbose ")
writeBranded2DImage = traits.Either(traits.Bool, File, argstr="--writeBranded2DImage %s")
resultsDir = traits.Either(traits.Bool, File, argstr="--resultsDir %s")
writedebuggingImagesLevel = traits.Int(argstr="--writedebuggingImagesLevel %d")
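# Several arguments above are declared as traits.Either(traits.Bool, File),
# i.e. the trait accepts either a boolean "produce this output" flag or an
# explicit filename.  A standalone illustration of that pattern (class and
# attribute names here are hypothetical):
from traits.api import Bool, Either, File, HasTraits


class OutputOptions(HasTraits):
    outputMRML = Either(Bool, File)


opts = OutputOptions()
opts.outputMRML = True           # request the output, leaving the name to the tool
opts.outputMRML = "scene.mrml"   # or supply an explicit filename
print(opts.outputMRML)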
class SplineExplorer(traits.HasTraits):
    """A simple UI to adjust the parameters and view the resulting splines."""
    v_min = traits.Float(0)
    v_max = traits.Float(15)
    a_min = traits.Float(-5)
    a_max = traits.Float(5)
    j_min = traits.Float(-2.5)
    j_max = traits.Float(2.5)
    mass = traits.Float(400)

    q_i = traits.Float
    v_i = traits.Float
    a_i = traits.Float
    t_i = traits.Float

    q_f = traits.Float(100)
    v_f = traits.Float(0)
    a_f = traits.Float(0)
    t_f = traits.Float(18)

    plot_names = traits.List(
        ["Position", "Jerk", "Velocity", "Power", "Acceleration"])
    active_plots = traits.List
    target_type = traits.Enum(('Position', 'Velocity', 'Acceleration', 'Time'))
    plot_container = traits.Instance(Container)

    recalculate = menu.Action(name="Recalculate", action="recalc")
    dump = menu.Action(name="Print", action="dump")
    save = menu.Action(name="Save", action="save")

    trait_view = ui.View(
        ui.HGroup(
            ui.VGroup(
                ui.Item(name='target_type', label='Target'),
                ui.VGroup(
                    ui.Item(name='active_plots', show_label=False,
                            editor=ui.CheckListEditor(cols=3, name='plot_names'),
                            style='custom'),
                    label='Show Plots', show_border=True),
                ui.VGroup(
                    ui.Item(name='q_i', label='Position'),
                    ui.Item(name='v_i', label='Velocity'),
                    ui.Item(name='a_i', label='Acceleration'),
                    ui.Item(name='t_i', label='Time'),
                    label='Initial Conditions', show_border=True),
                ui.VGroup(
                    ui.Item(name='q_f', label='Position',
                            enabled_when="target_type not in ('Velocity', 'Acceleration')"),
                    ui.Item(name='v_f', label='Velocity',
                            enabled_when="target_type != 'Acceleration'"),
                    ui.Item(name='a_f', label='Acceleration'),
                    ui.Item(name='t_f', label='Time',
                            enabled_when="target_type == 'Time'"),
                    label='Final Conditions:', show_border=True),
                ui.VGroup(
                    ui.Item(name='v_min', label='Min Velocity'),
                    ui.Item(name='v_max', label='Max Velocity'),
                    ui.Item(name='a_min', label='Min Acceleration'),
                    ui.Item(name='a_max', label='Max Acceleration'),
                    ui.Item(name='j_min', label='Min Jerk'),
                    ui.Item(name='j_max', label='Max Jerk'),
                    ui.Item(name='mass', label='Vehicle Mass'),
                    label='Constraints', show_border=True)),
            ui.Item('plot_container', editor=ComponentEditor(), show_label=False)),
        title='Cubic Spline Explorer',
        handler=SEButtonHandler(),
        buttons=[recalculate, dump, save],
        resizable=True,
        width=1000)

    def __init__(self):
        super(SplineExplorer, self).__init__()
        self.active_plots = self.plot_names[:]
        self.active_plots.remove("Power")
        self.calc()

    def calc(self):
        try:
            self.solver = TrajectorySolver(self.v_max, self.a_max, self.j_max,
                                           self.v_min, self.a_min, self.j_min)
            self.initial = Knot(self.q_i, self.v_i, self.a_i, self.t_i)
            self.final = Knot(self.q_f, self.v_f, self.a_f, self.t_f)

            if self.target_type == 'Position':
                self.spline = self.solver.target_position(self.initial, self.final)
            elif self.target_type == 'Velocity':
                self.spline = self.solver.target_velocity(self.initial, self.final)
            elif self.target_type == 'Acceleration':
                self.spline = self.solver.target_acceleration(self.initial, self.final)
            elif self.target_type == 'Time':
                self.spline = self.solver.target_time(self.initial, self.final)

            pos = vel = accel = jerk = power = False
            if "Position" in self.active_plots:
                pos = True
            if "Velocity" in self.active_plots:
                vel = True
            if "Acceleration" in self.active_plots:
                accel = True
            if "Jerk" in self.active_plots:
                jerk = True
            if "Power" in self.active_plots:
                power = True

            self.plotter = CSplinePlotter(self.spline,
                                          self.v_max, self.a_max, self.j_max,
                                          self.v_min, self.a_min, self.j_min,
                                          mass=self.mass,
                                          plot_pos=pos, plot_vel=vel,
                                          plot_accel=accel, plot_jerk=jerk,
                                          plot_power=power)
            self.plot_container = self.plotter.container
        except:
            self.initial = None
            self.final = None
            self.spline = None
            self.plot_container = Container()

    def display(self):
        self.configure_traits()

    def get_save_filename(self):
        """Get a filename from the user via a FileDialog. Returns the filename."""
        dialog = FileDialog(action="save as",
                            default_filename="spline_00",
                            wildcard="*.png")
        dialog.open()
        if dialog.return_code == OK:
            return dialog.path

    def save(self, path):
        """Save an image of the plot. Does not catch any exceptions."""
        # Create a graphics context of the right size
        win_size = self.plot_container.outer_bounds
        plot_gc = chaco.PlotGraphicsContext(win_size)
        #plot_gc.set_fill_color("transparent")
        # Place the plot component into it
        plot_gc.render_component(self.plot_container)
        # Save out to the user supplied filename
        plot_gc.save(path)

    def _active_plots_changed(self):
        self.calc()

    def _target_type_changed(self):
        self.calc()
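# Assumed usage of the explorer above (requires that TrajectorySolver, Knot,
# CSplinePlotter, and Container are importable and a GUI backend is available):
if __name__ == "__main__":
    explorer = SplineExplorer()
    explorer.display()  # calls configure_traits() and blocks until the window closes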
class PipelineConfiguration(traits.HasTraits):
    # project settings
    project_dir = traits.Directory(
        exists=False, desc="data path to where the project is stored")

    # project metadata (for connectome file)
    project_metadata = traits.Dict(
        desc="project metadata to be stored in the connectome file")

    # DEPRECATED: this field is deprecated after version >1.0.2
    generator = traits.Str()

    # parcellation scheme
    parcellation_scheme = traits.Enum("NativeFreesurfer",
                                      ["Lausanne2008", "NativeFreesurfer"],
                                      desc="used parcellation scheme")

    # choose between 'L' (linear) and 'N' (non-linear) and 'B' (bbregister)
    registration_mode = traits.Enum(
        "Linear", ["Linear", "Nonlinear", "BBregister"],
        desc="registration mode: linear or non-linear or bbregister")

    # choose between 'L' (linear) and 'B' (bbregister)
    rsfmri_registration_mode = traits.Enum(
        "Linear", ["Linear", "BBregister"],
        desc="registration mode: linear or bbregister")

    diffusion_imaging_model = traits.Enum("DSI", ["DSI", "DTI", "QBALL"])

    # DSI
    nr_of_gradient_directions = traits.Str('515')
    nr_of_sampling_directions = traits.Str('181')
    odf_recon_param = traits.Str('-b0 1 -dsi -p 4 -sn 0')
    hardi_recon_param = traits.Str('-b0 1 -p 3 -sn 0')

    # DTI
    gradient_table_file = traits.File(exists=False)
    gradient_table = traits.Enum('siemens_64', [
        'custom', 'mgh_dti_006', 'mgh_dti_018', 'mgh_dti_030', 'mgh_dti_042',
        'mgh_dti_060', 'mgh_dti_072', 'mgh_dti_090', 'mgh_dti_120',
        'mgh_dti_144', 'siemens_06', 'siemens_12', 'siemens_20',
        'siemens_256', 'siemens_30', 'siemens_64'
    ])
    nr_of_b0 = traits.Str('1')
    max_b0_val = traits.Str('1000')
    dti_recon_param = traits.Str('')
    dtb_dtk2dir_param = traits.Str('')

    # tractography
    streamline_param = traits.Str('--angle 60 --seeds 32')

    # registration
    lin_reg_param = traits.Str('-usesqform -nosearch -dof 6 -cost mutualinfo')
    nlin_reg_bet_T2_param = traits.Str('-f 0.35 -g 0.15')
    nlin_reg_bet_b0_param = traits.Str('-f 0.2 -g 0.2')
    nlin_reg_fnirt_param = traits.Str(
        '--subsamp=8,4,2,2 --miter=5,5,5,5 --lambda=240,120,90,30 '
        '--splineorder=3 --applyinmask=0,0,1,1 --applyrefmask=0,0,1,1')
    bb_reg_param = traits.Str('--init-header --dti')

    # dicom converter
    do_convert_diffusion = traits.Bool(True)
    do_convert_T1 = traits.Bool(True)
    do_convert_T2 = traits.Bool(False)
    do_convert_fMRI = traits.Bool(False)

    # rsfmri
    rsfmri_lin_reg_param = traits.Str('-usesqform -nosearch -dof 6 -cost mutualinfo')
    rsfmri_bb_reg_param = traits.Str('--init-header --dti')
    do_save_mat = traits.Bool(True)

    # DEPRECATED:
    subject_raw_glob_diffusion = traits.Str("*.*")
    subject_raw_glob_T1 = traits.Str("*.*")
    subject_raw_glob_T2 = traits.Str("*.*")
    extract_diffusion_metadata = traits.Bool(False)

    # subject
    subject_name = traits.Str()
    subject_timepoint = traits.Str()
    subject_workingdir = traits.Directory()
    subject_logger = None
    subject_metadata = [
        KeyValue(key='description', value=''),
        KeyValue(key='', value=''),
        KeyValue(key='', value=''),
        KeyValue(key='', value=''),
        KeyValue(key='', value=''),
        KeyValue(key='', value=''),
    ]

    active_createfolder = traits.Bool(True)
    active_dicomconverter = traits.Bool(False)
    active_registration = traits.Bool(False)
    active_segmentation = traits.Bool(False)
    active_parcellation = traits.Bool(False)
    active_applyregistration = traits.Bool(False)
    active_reconstruction = traits.Bool(False)
    active_tractography = traits.Bool(False)
    active_fiberfilter = traits.Bool(False)
    active_connectome = traits.Bool(False)
    active_statistics = traits.Bool(False)
    active_rsfmri = traits.Bool(False)
    active_cffconverter = traits.Bool(False)
    skip_completed_stages = traits.Bool(False)

    # metadata
    creator = traits.Str()
    email = traits.Str()
    publisher = traits.Str()
    created = traits.Date()
    modified = traits.Date()
    license = traits.Str()
    # rights = traits.Str()
    reference = traits.Str()
    # relation = traits.Str()
    species = traits.Str('Homo sapiens')
    description = traits.Str()

    # segmentation
    recon_all_param = traits.Str('-all -no-isrunning')

    # parcellation
    custompar_nrroi = traits.Int()
    custompar_nodeinfo = traits.File()
    custompar_volumeparcell = traits.File()

    # fiber filtering
    apply_splinefilter = traits.Bool(
        True, desc='apply the spline filtering from diffusion toolkit')
    apply_fiberlength = traits.Bool(True, desc='apply cutoff to fiber lengths')
    fiber_cutoff_lower = traits.Float(
        20.0, desc='cut fibers that are shorter in length than given length in mm')
    fiber_cutoff_upper = traits.Float(
        500.0, desc='cut fibers that are longer in length than given length in mm')

    # measures
    connection_P0 = traits.Bool(False)
    connection_gfa = traits.Bool(False)
    connection_kurtosis = traits.Bool(False)
    connection_skewness = traits.Bool(False)
    connection_adc = traits.Bool(False)
    connection_fa = traits.Bool(False)

    # cff converter
    cff_fullnetworkpickle = traits.Bool(
        True, desc='stores the full network pickle generated by connectome creation')
    cff_cmatpickle = traits.Bool(True)
    cff_originalfibers = traits.Bool(True, desc='stores original fibers')
    cff_filteredfibers = traits.Bool(True, desc='stores filtered fibers')
    cff_finalfiberlabels = traits.Bool(
        True, desc='stores final fibers and their labelarrays')
    cff_fiberarr = traits.Bool(True)
    cff_rawdiffusion = traits.Bool(True)
    cff_scalars = traits.Bool(True)
    cff_rawT1 = traits.Bool(True)
    cff_rawT2 = traits.Bool(True)
    cff_roisegmentation = traits.Bool(
        True, desc='stores multi-resolution parcellation volumes')
    cff_surfaces = traits.Bool(True, desc='stores individually generated surfaces')
    cff_surfacelabels = traits.Bool(True, desc='stores individually generated surfaces')

    # do you want to do manual white matter mask correction?
    wm_handling = traits.Enum(
        1, [1, 2, 3], desc="in what state should the freesurfer step be processed")

    # custom parcellation
    parcellation = traits.Dict(desc="provide the dictionary with your parcellation.")

    # start up fslview
    inspect_registration = traits.Bool(
        False, desc='start fslview to inspect the registration results')
    fsloutputtype = traits.Enum('NIFTI', ['NIFTI'])

    # connectome creation
    compute_curvature = traits.Bool(False)

    # email notification, needs a local smtp server
    # sudo apt-get install postfix
    emailnotify = traits.ListStr(
        [], desc='the email address to send stage completion status message')

    freesurfer_home = traits.Directory(exists=False, desc="path to Freesurfer")
    fsl_home = traits.Directory(exists=False, desc="path to FSL")
    dtk_home = traits.Directory(exists=False, desc="path to diffusion toolkit")

    # This file stores descriptions of the inputs/outputs to each stage of the
    # CMP pipeline.  It can be queried using the PipelineStatus python object.
    pipeline_status_file = traits.Str("cmp.status")

    # Pipeline status object
    pipeline_status = pipeline_status.PipelineStatus()

    def _get_lausanne_parcellation(self, parcel="NativeFreesurfer"):
        if parcel == "Lausanne2008":
            return {
                'scale33': {
                    'number_of_regions': 83,
                    # contains name, url, color, freesurfer_label, etc. used for connection matrix
                    'node_information_graphml': op.join(
                        self.get_lausanne_parcellation_path('resolution83'),
                        'resolution83.graphml'),
                    # scalar node values on fsaverage? or atlas?
                    'surface_parcellation': None,
                    # scalar node values in fsaverage volume?
                    'volume_parcellation': None,
                    # the subdirectory name from where to copy parcellations, with hemispheric wildcard
                    'fs_label_subdir_name': 'regenerated_%s_36',
                    # should we subtract the cortical rois for the white matter mask?
                    'subtract_from_wm_mask': 1,
                },
                'scale60': {
                    'number_of_regions': 129,
                    'node_information_graphml': op.join(
                        self.get_lausanne_parcellation_path('resolution150'),
                        'resolution150.graphml'),
                    'surface_parcellation': None,
                    'volume_parcellation': None,
                    'fs_label_subdir_name': 'regenerated_%s_60',
                    'subtract_from_wm_mask': 1,
                },
                'scale125': {
                    'number_of_regions': 234,
                    'node_information_graphml': op.join(
                        self.get_lausanne_parcellation_path('resolution258'),
                        'resolution258.graphml'),
                    'surface_parcellation': None,
                    'volume_parcellation': None,
                    'fs_label_subdir_name': 'regenerated_%s_125',
                    'subtract_from_wm_mask': 1,
                },
                'scale250': {
                    'number_of_regions': 463,
                    'node_information_graphml': op.join(
                        self.get_lausanne_parcellation_path('resolution500'),
                        'resolution500.graphml'),
                    'surface_parcellation': None,
                    'volume_parcellation': None,
                    'fs_label_subdir_name': 'regenerated_%s_250',
                    'subtract_from_wm_mask': 1,
                },
                'scale500': {
                    'number_of_regions': 1015,
                    'node_information_graphml': op.join(
                        self.get_lausanne_parcellation_path('resolution1015'),
                        'resolution1015.graphml'),
                    'surface_parcellation': None,
                    'volume_parcellation': None,
                    'fs_label_subdir_name': 'regenerated_%s_500',
                    'subtract_from_wm_mask': 1,
                },
            }
        else:
            return {
                'freesurferaparc': {
                    'number_of_regions': 83,
                    # contains name, url, color, freesurfer_label, etc. used for connection matrix
                    'node_information_graphml': op.join(
                        self.get_lausanne_parcellation_path('freesurferaparc'),
                        'resolution83.graphml'),
                    # scalar node values on fsaverage? or atlas?
                    'surface_parcellation': None,
                    # scalar node values in fsaverage volume?
                    'volume_parcellation': None,
                }
            }

    def __init__(self, **kwargs):
        # NOTE: In python 2.6, object.__init__ no longer accepts input
        # arguments.  HasTraits does not define an __init__ and therefore
        # these args were being ignored.
        super(PipelineConfiguration, self).__init__(**kwargs)

        # the default parcellation provided
        self.parcellation = self._get_lausanne_parcellation(parcel="NativeFreesurfer")

        self.can_use_dipy = dipy_here

        # no email notify
        self.emailnotify = []

        # default gradient table for DTI
        self.gradient_table_file = self.get_cmp_gradient_table('siemens_64')

        # try to discover paths from environment variables
        try:
            self.freesurfer_home = op.join(os.environ['FREESURFER_HOME'])
            self.fsl_home = op.join(os.environ['FSLDIR'])
            self.dtk_home = os.environ['DTDIR']
            self.dtk_matrices = op.join(self.dtk_home, 'matrices')
        except KeyError:
            pass

        self.fsloutputtype = 'NIFTI'
        os.environ['FSLOUTPUTTYPE'] = self.fsloutputtype

    def consistency_check(self):
        """ Provides a checking facility for configuration objects """

        # project name not empty
        if not op.exists(self.project_dir):
            msg = 'Your project directory does not exist!'
            raise Exception(msg)

        # check metadata
        if self.creator == '':
            raise Exception('You need to enter creator metadata!')
        if self.publisher == '':
            raise Exception('You need to enter publisher metadata!')
        if self.email == '':
            raise Exception('You need to enter email of a contact person!')

        # check if software paths exist
        pas = {
            'configuration.freesurfer_home': self.freesurfer_home,
            'configuration.fsl_home': self.fsl_home,
            'configuration.dtk_home': self.dtk_home,
            'configuration.dtk_matrices': self.dtk_matrices
        }
        for k, p in pas.items():
            if not op.exists(p):
                msg = 'Required software path for %s does not exist: %s' % (k, p)
                raise Exception(msg)

        if self.subject_workingdir == '':
            msg = 'No working directory defined for subject'
            raise Exception(msg)
        # else:
        #     wdir = self.get_subj_dir()
        #     if not op.exists(wdir):
        #         msg = 'Working directory %s does not exist for subject' % (wdir)
        #         raise Exception(msg)
        #     else:
        #         wdiff = op.join(self.get_raw_diffusion())
        #         print wdiff
        #         if not op.exists(wdiff):
        #             msg = 'Diffusion MRI subdirectory %s does not exist for the subject' % wdiff
        #             raise Exception(msg)
        #         wt1 = op.join(self.get_rawt1())
        #         if not op.exists(wt1):
        #             msg = 'Structural MRI subdirectory %s T1 does not exist in RAWDATA' % wt1
        #             raise Exception(msg)

    def get_cmp_home(self):
        """ Return the cmp home path """
        return op.dirname(__file__)

    def get_rawdata(self):
        """ Return raw data path for the subject """
        return op.join(self.get_subj_dir(), 'RAWDATA')

    def get_log(self):
        """ Get subject log dir """
        return op.join(self.get_subj_dir(), 'LOG')

    def get_logname(self, suffix='.log'):
        """ Get a generic name for the log and pickle files """
        a = dt.datetime.now()
        return 'pipeline-%s-%02i%02i-%s-%s%s' % (
            a.date().isoformat(), a.time().hour, a.time().minute,
            self.subject_name, self.subject_timepoint, suffix)

    def get_logger(self):
        """ Get the logger instance created """
        if self.subject_logger is None:
            # setup logger for the subject
            self.subject_logger = \
                getLog(os.path.join(self.get_log(), self.get_logname()))
            return self.subject_logger
        else:
            return self.subject_logger

    def get_rawglob(self, modality):
        """ DEPRECATED: Get the file name endings for modality """
        if modality == 'diffusion':
            if not self.subject_raw_glob_diffusion == '':
                return self.subject_raw_glob_diffusion
            else:
                raise Exception('No raw_glob_diffusion defined for subject')
        elif modality == 'T1':
            if not self.subject_raw_glob_T1 == '':
                return self.subject_raw_glob_T1
            else:
                raise Exception('No raw_glob_T1 defined for subject')
        elif modality == 'T2':
            if not self.subject_raw_glob_T2 == '':
                return self.subject_raw_glob_T2
            else:
                raise Exception('No raw_glob_T2 defined for subject')

    def get_dicomfiles(self, modality):
        """ Get a list of dicom files for the requested modality.
        Tries to discover them automatically """
        from glob import glob

        if modality == 'diffusion':
            pat = self.get_raw_diffusion()
        elif modality == 'T1':
            pat = self.get_rawt1()
        elif modality == 'T2':
            pat = self.get_rawt2()
        elif modality == 'fMRI':
            pat = self.get_rawrsfmri()

        # discover files with *.* and *
        difiles = sorted(glob(op.join(pat, '*.*')) + glob(op.join(pat, '*')))
        # exclude potential .nii and .nii.gz files
        difiles = [
            e for e in difiles
            if not e.endswith('.nii') and not e.endswith('.nii.gz')
        ]

        # check if no files and throw exception
        if len(difiles) == 0:
            raise Exception('Could not find any DICOM files in folder %s' % pat)

        return difiles

    def get_rawrsfmri(self):
        """ Get raw functional MRI path for subject """
        return op.join(self.get_rawdata(), 'fMRI')

    def get_rawt1(self):
        """ Get raw structural MRI T1 path for subject """
        return op.join(self.get_rawdata(), 'T1')

    def get_rawt2(self):
        """ Get raw structural MRI T2 path for subject """
        return op.join(self.get_rawdata(), 'T2')

    def get_subj_dir(self):
        return self.subject_workingdir

    def get_raw_diffusion(self):
        """ Get the raw diffusion path for subject """
        if self.diffusion_imaging_model == 'DSI':
            return op.join(self.get_subj_dir(), 'RAWDATA', 'DSI')
        elif self.diffusion_imaging_model == 'DTI':
            return op.join(self.get_subj_dir(), 'RAWDATA', 'DTI')
        elif self.diffusion_imaging_model == 'QBALL':
            return op.join(self.get_subj_dir(), 'RAWDATA', 'QBALL')

    def get_fs(self):
        """ Returns the subject root folder path for freesurfer files """
        return op.join(self.get_subj_dir(), 'FREESURFER')

    def get_stats(self):
        """ Return statistic output path """
        return op.join(self.get_subj_dir(), 'STATS')

    def get_cffdir(self):
        """ Returns path to store connectome file """
        return op.join(self.get_cmp(), 'cff')

    def get_nifti(self):
        """ Returns the subject root folder path for nifti files """
        return op.join(self.get_subj_dir(), 'NIFTI')

    def get_nifti_trafo(self):
        """ Returns the path to the subjects transformation / registration matrices """
        return op.join(self.get_nifti(), 'transformations')

    def get_nifti_bbregister(self):
        """ Returns the path to the subjects transformation / registration matrices, bbregister mode """
        return op.join(self.get_nifti(), 'bbregister')

    def get_diffusion_metadata(self):
        """ Diffusion metadata, i.e. where gradient_table.txt is stored """
        return op.join(self.get_nifti(), 'diffusion_metadata')

    def get_nifti_wm_correction(self):
        """ Returns the path to the subjects wm_correction path """
        return op.join(self.get_nifti(), 'wm_correction')

    def get_cmp(self):
        return op.join(self.get_subj_dir(), 'CMP')

    def get_cmp_rawdiff(self):
        return op.join(self.get_cmp(), 'raw_diffusion')

    def get_cmp_rawdiff_reconout(self):
        """ Returns the output path for diffusion reconstruction without prefix """
        if self.diffusion_imaging_model == 'DSI':
            return op.join(self.get_cmp(), 'raw_diffusion', 'odf_0')
        elif self.diffusion_imaging_model == 'DTI':
            return op.join(self.get_cmp(), 'raw_diffusion', 'dti_0')
        elif self.diffusion_imaging_model == 'QBALL':
            return op.join(self.get_cmp(), 'raw_diffusion', 'qball_0')

    def get_cmp_rawdiff_resampled(self):
        return op.join(self.get_cmp_rawdiff(), '2x2x2')

    def get_cmp_fsout(self):
        return op.join(self.get_cmp(), 'fs_output')

    def get_cmp_fibers(self):
        return op.join(self.get_cmp(), 'fibers')

    def get_cmp_scalars(self):
        return op.join(self.get_cmp(), 'scalars')

    def get_cmp_matrices(self):
        return op.join(self.get_cmp_fibers(), 'matrices')

    def get_cmp_fmri(self):
        return op.join(self.get_cmp(), 'fMRI')

    def get_cmp_tracto_mask(self):
        return op.join(self.get_cmp_fsout(), 'HR')

    def get_cmp_tracto_mask_tob0(self):
        return op.join(self.get_cmp_fsout(), 'HR__registered-TO-b0')

    def get_custom_gradient_table(self):
        """ Returns the absolute path to the custom gradient table
        with optional b-values in the 4th row """
        return self.gradient_table_file

    def get_cmp_gradient_table(self, name):
        """ Return default gradient tables shipped with CMP. These are mainly
        derived from Diffusion Toolkit """
        cmp_path = op.dirname(__file__)
        return op.join(cmp_path, 'data', 'diffusion', 'gradient_tables', name + '.txt')

    def get_dtb_streamline_vecs_file(self, as_text=False):
        """ Returns the odf directions file used for DTB_streamline """
        cmp_path = op.dirname(__file__)
        if as_text:
            return op.join(cmp_path, 'data', 'diffusion', 'odf_directions', '181_vecs.txt')
        else:
            return op.join(cmp_path, 'data', 'diffusion', 'odf_directions', '181_vecs.dat')

    # XXX
    def get_cmp_scalarfields(self):
        """ Returns a list with tuples with the scalar field name and the
        absolute path to its nifti file """
        ret = []
        if self.diffusion_imaging_model == 'DSI':
            # add gfa per default
            ret.append(('gfa', op.join(self.get_cmp_scalars(), 'dsi_gfa.nii.gz')))
            # XXX: add adc per default
        elif self.diffusion_imaging_model == 'DTI':
            # nothing to add yet for DTI
            pass
        return ret

    def get_dtk_dsi_matrix(self):
        """ Returns the DSI matrix from Diffusion Toolkit

        The parameters have to be set in the configuration object with keys:
        1. number of gradient directions : 'nr_of_gradient_directions'
        2. number of sampling directions : 'nr_of_sampling_directions'

        Example
        -------
        confobj.nr_of_gradient_directions = 515
        confobj.nr_of_sampling_directions = 181

        Returns matrix including absolute path to DSI_matrix_515x181.dat
        """
        grad = self.nr_of_gradient_directions
        samp = self.nr_of_sampling_directions
        fpath = op.join(self.dtk_matrices, "DSI_matrix_%sx%s.dat" % (grad, samp))
        if not op.exists(fpath):
            msg = "DSI matrix does not exist: %s" % fpath
            raise Exception(msg)
        return fpath

    def get_lausanne_atlas(self, name=None):
        """ Return the absolute path to the lausanne parcellation atlas
        for the resolution name """
        cmp_path = op.dirname(__file__)
        provided_atlases = [
            'myatlas_36_rh.gcs', 'myatlasP1_16_rh.gcs', 'myatlasP17_28_rh.gcs',
            'myatlasP29_36_rh.gcs', 'myatlas_60_rh.gcs', 'myatlas_125_rh.gcs',
            'myatlas_250_rh.gcs', 'myatlas_36_lh.gcs', 'myatlasP1_16_lh.gcs',
            'myatlasP17_28_lh.gcs', 'myatlasP29_36_lh.gcs', 'myatlas_60_lh.gcs',
            'myatlas_125_lh.gcs', 'myatlas_250_lh.gcs'
        ]
        if name in provided_atlases:
            return op.join(cmp_path, 'data', 'colortable_and_gcs', 'my_atlas_gcs', name)
        else:
            msg = "Atlas %s does not exist" % name
            raise Exception(msg)

    def get_freeview_lut(self, name):
        """ Returns the Look-Up-Table as text file for a given parcellation scheme
        in a dictionary """
        cmp_path = op.dirname(__file__)
        if name == "NativeFreesurfer":
            return {
                'freesurferaparc':
                op.join(cmp_path, 'data', 'parcellation', 'nativefreesurfer',
                        'freesurferaparc', 'FreeSurferColorLUT_adapted.txt')
            }
        else:
            return ""

    def get_lausanne_parcellation_path(self, parcellationname):
        cmp_path = op.dirname(__file__)

        if self.parcellation_scheme == "Lausanne2008":
            allowed_default_parcel = [
                'resolution83', 'resolution150', 'resolution258',
                'resolution500', 'resolution1015'
            ]
            if parcellationname in allowed_default_parcel:
                return op.join(cmp_path, 'data', 'parcellation', 'lausanne2008',
                               parcellationname)
            else:
                msg = "Not a valid default parcellation name for the lausanne2008 parcellation scheme"
                raise Exception(msg)
        else:
            allowed_default_parcel = ['freesurferaparc']
            if parcellationname in allowed_default_parcel:
                return op.join(cmp_path, 'data', 'parcellation', 'nativefreesurfer',
                               parcellationname)
            else:
                msg = "Not a valid default parcellation name for the NativeFreesurfer parcellation scheme"
                raise Exception(msg)

    def get_cmp_binary_path(self):
        """ Returns the path to the binary files for the current platform
        and architecture """
        if sys.platform == 'linux2':
            import platform as pf
            if '32' in pf.architecture()[0]:
                return op.join(op.dirname(__file__), "binary", "linux2", "bit32")
            elif '64' in pf.architecture()[0]:
                return op.join(op.dirname(__file__), "binary", "linux2", "bit64")
        else:
            raise Exception('No binary files compiled for your platform!')

    def get_pipeline_status_file(self):
        """ Returns the absolute path of the pipeline status file """
        return op.join(self.get_subj_dir(), self.pipeline_status_file)

    def init_pipeline_status(self):
        """ Create the 'cmp.status'.  The 'cmp.status' file contains information
        about the inputs/outputs of each pipeline stage """
        status_file = op.join(self.get_subj_dir(), self.pipeline_status_file)
        self.pipeline_status.Pipeline.name = "cmp"
        self.pipeline_status.SaveToFile(status_file)

    def update_pipeline_status(self):
        """ Update the pipeline status on disk with the current status in memory """
        status_file = op.join(self.get_subj_dir(), self.pipeline_status_file)
        self.pipeline_status.SaveToFile(status_file)
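# Hypothetical setup of a configuration object.  The attribute and method
# names come from the class above; the concrete paths and subject values are
# invented, and the module's own dependencies (KeyValue, pipeline_status,
# dipy_here, ...) are assumed to be importable.
conf = PipelineConfiguration()
conf.project_dir = "/data/my_project"
conf.subject_name = "sub01"
conf.subject_timepoint = "tp1"
conf.subject_workingdir = "/data/my_project/sub01/tp1"
conf.creator = "Jane Doe"
conf.publisher = "My Lab"
conf.email = "jane@example.org"
conf.diffusion_imaging_model = "DTI"
conf.active_dicomconverter = True
conf.active_registration = True

conf.consistency_check()       # raises if required paths or metadata are missing
print(conf.get_logname())      # e.g. pipeline-2024-01-01-0930-sub01-tp1.log
conf.init_pipeline_status()    # writes cmp.status into the subject working dir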
class Berth(Sim.Process, traits.HasTraits):
    ID = traits.Int
    platform = traits.Instance('Platform')
    station = traits.Instance('Station')
    start_pos = traits.Float  # The 'tail' end of the berth
    end_pos = traits.Float    # The 'nose' end of the berth
    unloading = traits.Bool(False)
    loading = traits.Bool(False)
    storage_entrance = traits.Bool(False)
    storage_exit = traits.Bool(False)

    DISEMBARK = "DISEMBARK"
    EMBARK = "EMBARK"
    ENTER_STORAGE = "ENTER_STORAGE"
    EXIT_STORAGE = "EXIT_STORAGE"

    _action = traits.Enum(None, DISEMBARK, EMBARK, ENTER_STORAGE, EXIT_STORAGE)
    _error_continue = traits.Bool(False)

##    traits_view = ui.View(ui.HGroup(ui.Item(name='vehicle',
##                                            editor=ui.TextEditor()),
##                                    ui.Item('busy')))

    def __init__(self, ID, station, platform, start_pos, end_pos, unloading,
                 loading, storage_entrance, storage_exit):
        Sim.Process.__init__(self, name='berth_' + str(ID))
        traits.HasTraits.__init__(self)
        self.ID = ID
        self.station = station
        self.platform = platform
        self.start_pos = start_pos
        self.end_pos = end_pos
        self.unloading = unloading
        self.loading = loading
        self.storage_entrance = storage_entrance
        self.storage_exit = storage_exit

        # Record keeping for statistics
##        self._occupied_times = [Sim.now(), self._vehicles[:]]  # elements are (time, list_of_occupying_vehicle_refs)
        self._busy_times = []  # elements are: (time, busy_state)
        self._all_passengers = []  # record of all passengers, including those who have departed

        # Control flags/settings for the run loop
        self._busy = False  # use the self._busy property to enable record gathering
        self._action = None
        self._fnc_args = None
        self._error_continue = False

    def __str__(self):
        return self.name

##    def is_empty(self):
##        """Returns True if the berth is not occupied by a vehicle."""
##        return False if self.vehicle else True

    def disembark(self, vehicle, passengers, cmd_msg, cmd_msg_id):
        """If ordering matters, note that passengers at the end of the list are
        serviced first."""
        assert not self._busy
        self._action = Berth.DISEMBARK
        self._fnc_args = (vehicle, passengers, cmd_msg, cmd_msg_id)
        if self.passive:
            Sim.reactivate(self, prior=True)

    def embark(self, vehicle, passengers, cmd_msg, cmd_msg_id):
        """If ordering matters, note that passengers at the end of the list are
        serviced first."""
        assert not self._busy
        self._action = Berth.EMBARK
        self._fnc_args = (vehicle, passengers, cmd_msg, cmd_msg_id)
        if self.passive:
            Sim.reactivate(self, prior=True)

    def enter_storage(self, vehicle, cmd_msg, cmd_msg_id):
        assert not self._busy
        self._action = Berth.ENTER_STORAGE
        self._fnc_args = (vehicle, cmd_msg, cmd_msg_id)
        if self.passive:
            Sim.reactivate(self, prior=True)

    def exit_storage(self, position, model_name, cmd_msg, cmd_msg_id):
        assert not self._busy
        self._action = Berth.EXIT_STORAGE
        self._fnc_args = (position, model_name, cmd_msg, cmd_msg_id)
        if self.passive:
            Sim.reactivate(self, prior=True)

    def get_busy(self):
        return self.__busy

    def set_busy(self, value):
        self._busy_times.append((Sim.now(), value))
        self.__busy = value

    _busy = property(get_busy, set_busy)

    def is_busy(self):
        return self.__busy

    def run(self):
        """ The main loop for the Berth."""
        # A Berth has four different tasks to accomplish but only one active loop.
        while True:
            try:
                if self._action is Berth.DISEMBARK:
                    for disembark_delay in self._do_disembark(*self._fnc_args):
                        yield Sim.hold, self, disembark_delay  # Wait while passenger disembarks
                elif self._action is Berth.EMBARK:
                    for embark_delay in self._do_embark(*self._fnc_args):
                        yield Sim.hold, self, embark_delay
                elif self._action is Berth.ENTER_STORAGE:
                    for enter_delay in self._do_enter_storage(*self._fnc_args):
                        yield Sim.hold, self, enter_delay
                elif self._action is Berth.EXIT_STORAGE:
                    for exit_delay in self._do_exit_storage(*self._fnc_args):
                        yield Sim.hold, self, exit_delay

            except VehicleOutOfPositionError as err:
                nose_pos, tail_pos = err.vehicle.get_positions()
                logging.info(
                    "T=%4.3f Vehicle not in berth for attempted %s. Vehicle: %s, Berth: %s, Platform: %s, Station: %s, DisembarkCmdId: %s, vNosePos: %s, vNoseLoc %s, vTailPos: %s, vTailLoc: %s, berth.start_pos: %s, berth.end_pos: %s",
                    Sim.now(), self._action, err.vehicle.ID, self.ID,
                    self.platform.ID, self.station.ID, err.msg_id, nose_pos,
                    err.vehicle.loc, tail_pos, err.vehicle.tail_loc,
                    self.start_pos, self.end_pos)
                error_msg = api.SimMsgBodyInvalidId()
                error_msg.id_type = api.VEHICLE
                error_msg.msgID = err.msg_id
                error_msg.ID = err.vehicle.ID
                common.interface.send(api.SIM_MSG_BODY_INVALID_ID, error_msg)
                self._busy = False

            except PassengerNotAvailableError as err:
                logging.info(
                    "T=%4.3f Passenger not available for attempted %s. Vehicle: %s, Berth: %s, Platform: %s, Station: %s, DisembarkCmdId: %s, Passenger: %s",
                    Sim.now(), self._action, err.vehicle.ID, self.ID,
                    self.platform.ID, self.station.ID, err.msg_id, err.pax.ID)
                error_msg = api.SimMsgBodyInvalidId()
                error_msg.msgID = err.msg_id
                error_msg.id_type = api.PASSENGER
                error_msg.ID = err.pax.ID
                common.interface.send(api.SIM_MSG_BODY_INVALID_ID, error_msg)
                self._error_continue = True  # process other passengers

            except VehicleFullError as err:
                logging.info(
                    "T=%4.3f Action %s failed since vehicle is at max passenger capacity. Vehicle: %s, Berth: %s, Platform: %s, Station: %s, EmbarkCmdId: %s, Passenger: %s",
                    Sim.now(), self._action, err.vehicle.ID, self.ID,
                    self.platform.ID, self.station.ID, err.msg_id, err.pax.ID)
                error_msg = api.SimMsgBodyInvalidId()
                error_msg.msgID = err.msg_id
                error_msg.id_type = api.PASSENGER
                error_msg.ID = err.pax.ID
                common.interface.send(api.SIM_MSG_BODY_INVALID_ID, error_msg)
                self._error_continue = True  # process other passengers

            if not self._error_continue:
                # Reset state
                self._action = None
                self._fnc_args = None
                assert not self._busy
                yield Sim.passivate, self
            else:
                # Go through the loop again
                self._error_continue = False

    def _do_disembark(self, vehicle, passengers, cmd_msg, cmd_msg_id):
        self._busy = True

        while passengers:
            pax = passengers.pop()
            self._do_disembark_pax_start(pax, vehicle, cmd_msg_id)
            yield pax.unload_delay  # Wait while passenger disembarks
            self._do_disembark_pax_finish(pax, vehicle, cmd_msg_id)

        self._busy = False

        # Notify controller that all passenger disembarkments are done.
        cmd_complete = api.SimCompletePassengersDisembark()
        cmd_complete.msgID = cmd_msg_id
        cmd_complete.cmd.CopyFrom(cmd_msg)
        cmd_complete.time = Sim.now()
        common.interface.send(api.SIM_COMPLETE_PASSENGERS_DISEMBARK, cmd_complete)

    def _do_disembark_pax_start(self, pax, vehicle, cmd_msg_id):
        # Error if vehicle not parked in berth
        if not vehicle.is_parked_between(self.start_pos, self.end_pos,
                                         self.platform.track_segment):
            raise VehicleOutOfPositionError(vehicle, cmd_msg_id)

        # Error if pax not in the vehicle
        if pax not in vehicle.passengers:
            raise PassengerNotAvailableError(pax, vehicle, cmd_msg_id)

        # Notify controller that disembark of this passenger is starting
        start_msg = api.SimNotifyPassengerDisembarkStart()
        start_msg.vID = vehicle.ID
        start_msg.sID = self.station.ID
        start_msg.platformID = self.platform.ID
        start_msg.pID = pax.ID
        start_msg.berthID = self.ID
        start_msg.time = Sim.now()
        common.interface.send(api.SIM_NOTIFY_PASSENGER_DISEMBARK_START, start_msg)

    def _do_disembark_pax_finish(self, pax, vehicle, cmd_msg_id):
        # Error if vehicle is not still parked in berth
        if not vehicle.is_parked_between(self.start_pos, self.end_pos,
                                         self.platform.track_segment):
            raise VehicleOutOfPositionError(vehicle, cmd_msg_id)

        # Move the passenger from the vehicle to the station
        vehicle.disembark(pax)
        pax.loc = self.station

        # Note if the passenger has arrived at final dest (may not be
        # the case with non-PRT systems)
        if self.station.ID == pax.dest_station.ID:
            pax.trip_end = Sim.now()
            pax.trip_success = True
            common.delivered_pax.add(pax)
            self.station._pax_arrivals_count += 1
            self.station._all_passengers.append(pax)
            logging.info(
                "T=%4.3f %s delivered to platform %s in %s by %s (%d out of %d), disembarked in berth %s",
                Sim.now(), pax, self.platform.ID, self.station.ID, vehicle.ID,
                vehicle.get_pax_count(), vehicle.max_pax_capacity, self.ID)
        else:
            self.station.add_passenger(pax)
            self.station._arrivals_count += 1

        # Notify that disembark of this passenger is complete
        end_msg = api.SimNotifyPassengerDisembarkEnd()
        end_msg.vID = vehicle.ID
        end_msg.sID = self.station.ID
        end_msg.platformID = self.platform.ID
        end_msg.pID = pax.ID
        end_msg.berthID = self.ID
        end_msg.time = Sim.now()
        common.interface.send(api.SIM_NOTIFY_PASSENGER_DISEMBARK_END, end_msg)

    def _do_embark(self, vehicle, passengers, cmd_msg, cmd_msg_id):
        self._busy = True

        while passengers:
            pax = passengers.pop()
            self._do_embark_pax_start(pax, vehicle, cmd_msg_id)
            yield pax.load_delay
            self._do_embark_pax_finish(pax, vehicle, cmd_msg_id)

        self._busy = False

        # Notify controller that all passenger embarkments are done.
        cmd_complete = api.SimCompletePassengersEmbark()
        cmd_complete.msgID = cmd_msg_id
        cmd_complete.cmd.CopyFrom(cmd_msg)
        cmd_complete.time = Sim.now()
        common.interface.send(api.SIM_COMPLETE_PASSENGERS_EMBARK, cmd_complete)

    def _do_embark_pax_start(self, pax, vehicle, cmd_msg_id):
        # Error if vehicle not parked in berth
        if not vehicle.is_parked_between(self.start_pos, self.end_pos,
                                         self.platform.track_segment):
            raise VehicleOutOfPositionError(vehicle, cmd_msg_id)

        # Error if pax not at the station
        if pax not in self.station._passengers:
            raise PassengerNotAvailableError(pax, vehicle, cmd_msg_id)

        # Error if the vehicle is at full capacity
        if vehicle.get_pax_count() >= vehicle.max_pax_capacity:
            raise VehicleFullError(pax, vehicle, cmd_msg_id)

        # Notify controller that embark of this passenger is starting
        start_msg = api.SimNotifyPassengerEmbarkStart()
        start_msg.vID = vehicle.ID
        start_msg.sID = self.station.ID
        start_msg.platformID = self.platform.ID
        start_msg.pID = pax.ID
        start_msg.berthID = self.ID
        start_msg.time = Sim.now()
        common.interface.send(api.SIM_NOTIFY_PASSENGER_EMBARK_START, start_msg)

    def _do_embark_pax_finish(self, pax, vehicle, cmd_msg_id):
        # Error if vehicle is not still parked in berth
        if not vehicle.is_parked_between(self.start_pos, self.end_pos,
                                         self.platform.track_segment):
            raise VehicleOutOfPositionError(vehicle, cmd_msg_id)

        # Move passenger's location to the vehicle
        vehicle.embark(pax)
        pax.loc = vehicle
        self.station._pax_departures_count += 1
        self.station.remove_passenger(pax)
        pax.trip_boarded = Sim.now()
        logging.info(
            "T=%4.3f %s loaded into Vehicle %s (%d out of %d) at station %s, platform %s, berth %s ",
            Sim.now(), pax, vehicle.ID, vehicle.get_pax_count(),
            vehicle.max_pax_capacity, self.station.ID, self.platform.ID, self.ID)

        # Notify that embark of this passenger is complete
        end_msg = api.SimNotifyPassengerEmbarkEnd()
        end_msg.vID = vehicle.ID
        end_msg.sID = self.station.ID
        end_msg.platformID = self.platform.ID
        end_msg.pID = pax.ID
        end_msg.berthID = self.ID
        end_msg.time = Sim.now()
        common.interface.send(api.SIM_NOTIFY_PASSENGER_EMBARK_END, end_msg)

    def _do_enter_storage(self, vehicle, cmd_msg, cmd_msg_id):
        if not vehicle.is_parked_between(self.start_pos, self.end_pos,
                                         self.platform.track_segment):
            raise VehicleOutOfPositionError(vehicle, cmd_msg_id)

        storage = self.station._storage_dict[vehicle.model_name]
        storage._reserve_slot()

        self._busy = True
        yield self.station.storage_entrance_delay

        if not vehicle.is_parked_between(self.start_pos, self.end_pos,
                                         self.platform.track_segment):
            raise VehicleOutOfPositionError(vehicle, cmd_msg_id)

        storage._store_vehicle(vehicle)
        self._busy = False

        # Notify controller that vehicle entering storage is done.
        cmd_complete = api.SimCompleteStorageEnter()
        cmd_complete.msgID = cmd_msg_id
        cmd_complete.cmd.CopyFrom(cmd_msg)
        cmd_complete.time = Sim.now()
        common.interface.send(api.SIM_COMPLETE_STORAGE_ENTER, cmd_complete)

    def _do_exit_storage(self, position, model_name, cmd_msg, cmd_msg_id):
        storage = self.station._storage_dict[model_name]
        storage._reserve_vehicle()

        self._busy = True
        yield self.station.storage_exit_delay

        vehicle = storage._request_vehicle(position, self.platform.track_segment)
        self._busy = False

        # Notify controller that vehicle exiting storage is done.
        cmd_complete = api.SimCompleteStorageExit()
        cmd_complete.msgID = cmd_msg_id
        cmd_complete.cmd.CopyFrom(cmd_msg)
        cmd_complete.time = Sim.now()
        vehicle.fill_VehicleStatus(cmd_complete.v_status)
        common.interface.send(api.SIM_COMPLETE_STORAGE_EXIT, cmd_complete)

        logging.info(
            "T=%4.3f Exit from Storage: Vehicle: %s, Berth: %s, Platform: %s, Station: %s",
            Sim.now(), vehicle.ID, self.ID, self.platform.ID, self.station.ID)
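# Standalone sketch of the busy-time bookkeeping pattern used by Berth above:
# every transition of the private busy flag is timestamped so berth
# utilization can be computed afterwards.  `now()` is a stand-in for
# Sim.now(); the class and names here are hypothetical.
import itertools

_clock = itertools.count()


def now():
    return next(_clock)  # fake simulation clock for the sketch


class BusyRecorder(object):
    def __init__(self):
        self._busy_times = []  # (time, busy_state) pairs
        self._busy = False     # goes through the property setter below

    def get_busy(self):
        return self.__busy

    def set_busy(self, value):
        self._busy_times.append((now(), value))
        self.__busy = value

    _busy = property(get_busy, set_busy)


recorder = BusyRecorder()
recorder._busy = True
recorder._busy = False
print(recorder._busy_times)  # [(0, False), (1, True), (2, False)]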