class CombinePandasDfsInputSpec(BaseInterfaceInputSpec):
    """Declarative input traits for the CombinePandasDfs interface."""

    ID = traits.Any(mandatory=True)
    network = traits.Any(mandatory=False)
    net_mets_csv_list = traits.List(mandatory=True)
    plot_switch = traits.Bool(False, usedefault=True)
    multi_nets = traits.Any(mandatory=False)
    multimodal = traits.Bool(False, usedefault=True)
class Export2PandasInputSpec(BaseInterfaceInputSpec):
    """Declarative input traits for the Export2Pandas interface."""

    csv_loc = File(exists=True, mandatory=True, desc="")
    ID = traits.Any(mandatory=True)
    network = traits.Any(mandatory=False)
    roi = traits.Any(mandatory=False)
class NetworkAnalysisInputSpec(BaseInterfaceInputSpec):
    """Declarative input traits for the NetworkAnalysis interface."""

    ID = traits.Any(mandatory=True)
    est_path = File(exists=True, mandatory=True)
    prune = traits.Any(mandatory=False)
    norm = traits.Any(mandatory=False)
    binary = traits.Bool(False, usedefault=True)
class FilterResultdictsInputSpec(BaseInterfaceInputSpec):
    """Inputs for filtering a list of result dictionaries."""

    # NOTE(review): another class with this exact name appears later in this
    # file (the filterobjs/variableobjs variant); if both live in one module
    # the later definition shadows this one — confirm both are intended.
    indicts = traits.List(traits.Dict(traits.Str(), traits.Any()), mandatory=True)
    filterdicts = traits.List(traits.Any(), desc="filter list")
    variabledicts = traits.List(traits.Any(), desc="variable list")
    spreadsheet = File(desc="spreadsheet", exists=True)
    requireoneofimages = traits.List(
        traits.Str(),
        desc="only keep resultdicts that have at least one of these keys",
    )
    excludefiles = traits.Str()
class BoldFileReportMetadataInputSpec(TraitedSpec):
    """Inputs required to assemble report metadata for one BOLD file."""

    basedict = traits.Dict(traits.Str(), traits.Any())
    confounds = File(exists=True, mandatory=True)
    tsnr_file = File(exists=True, mandatory=True)
    dseg = File(exists=True, mandatory=True)
    # Fix: dropped `exists=True` — that keyword is File/Directory metadata
    # (path-existence checking) and is inert and misleading on a Dict trait.
    aroma_metadata = traits.Dict(traits.Str(), traits.Any(), mandatory=True)
class PerformMLInputSpec(BaseInterfaceInputSpec):
    """Declarative input traits for the PerformML interface."""

    ds_file = File(desc='dataset file for ML to be performed on',
                   exists=True, mandatory=True)
    # TODO: make this a classifier object
    classifier = traits.Any(None)
    # TODO: make this a scoring object
    scoring = traits.Any(None)
    # TODO: make this a string
    targets = traits.Any(None)
    learning_curve = traits.Bool(
        False, desc='whether training curve analysis will be performed')
class CollectPandasDfsInputSpec(BaseInterfaceInputSpec):
    """Declarative input traits for the CollectPandasDfs interface."""

    ID = traits.Any(mandatory=True)
    network = traits.Any(mandatory=True)
    net_pickle_mt_list = traits.List(mandatory=True)
    plot_switch = traits.Any(mandatory=True)
    multi_nets = traits.Any(mandatory=True)
    multimodal = traits.Any(mandatory=True)
class _IndividualClusteringInputSpec(BaseInterfaceInputSpec):
    """Input traits for IndividualClustering (short variant)."""

    # NOTE(review): an extended class with this same name appears later in
    # this file; if both live in one module the later one wins — confirm.
    func_file = File(exists=True, mandatory=True)
    conf = traits.Any(mandatory=False)
    clust_mask = File(exists=True, mandatory=True)
    ID = traits.Any(mandatory=True)
    k = traits.Any(mandatory=True)
    clust_type = traits.Str(mandatory=True)
    vox_size = traits.Str('2mm', mandatory=True, usedefault=True)
    local_corr = traits.Str('allcorr', mandatory=True, usedefault=True)
    mask = traits.Any(mandatory=False)
class _FetchNodesLabelsInputSpec(BaseInterfaceInputSpec):
    """Declarative input traits for the FetchNodesLabels interface."""

    atlas = traits.Any(mandatory=False)
    parcellation = traits.Any(mandatory=False)
    ref_txt = traits.Any()
    in_file = traits.Any(mandatory=True)
    parc = traits.Bool(mandatory=True)
    use_parcel_naming = traits.Bool(False, usedefault=True)
    outdir = traits.Str(mandatory=True)
    vox_size = traits.Str("2mm", mandatory=True, usedefault=True)
    clustering = traits.Bool(False, usedefault=True)
class _RegisterDWIInputSpec(BaseInterfaceInputSpec):
    """Declarative input traits for the RegisterDWI interface."""

    fa_path = File(exists=True, mandatory=True)
    ap_path = File(exists=True, mandatory=True)
    B0_mask = File(exists=True, mandatory=True)
    anat_file = File(exists=True, mandatory=True)
    gtab_file = File(exists=True, mandatory=True)
    dwi_file = File(exists=True, mandatory=True)
    vox_size = traits.Str('2mm', mandatory=True, usedefault=True)
    waymask = traits.Any(mandatory=False)
    mask = traits.Any(mandatory=False)
    simple = traits.Bool(False, usedefault=True)
    overwrite = traits.Bool(True, usedefault=True)
class _ExtractTimeseriesOutputSpec(TraitedSpec):
    """Output traits for ExtractTimeseries (node_size/c_boot variant)."""

    # NOTE(review): a second class with this same name (node_radius variant)
    # appears later in this file; in one module the later one shadows this —
    # confirm they originate from different files.
    ts_within_nodes = traits.Any(mandatory=True)
    node_size = traits.Any(mandatory=True)
    smooth = traits.Any(mandatory=True)
    dir_path = traits.Str(mandatory=True)
    atlas = traits.Any(mandatory=True)
    uatlas = traits.Any(mandatory=True)
    labels = traits.Any(mandatory=True)
    coords = traits.Any(mandatory=True)
    c_boot = traits.Any(mandatory=True)
    hpass = traits.Any(mandatory=True)
    roi = traits.Any(mandatory=True)
class _ExtractTimeseriesOutputSpec(TraitedSpec):
    """Output traits for ExtractTimeseries (node_radius/signal variant)."""

    ts_within_nodes = traits.Any(mandatory=True)
    node_radius = traits.Any(mandatory=True)
    smooth = traits.Any(mandatory=True)
    dir_path = Directory(exists=True, mandatory=True)
    atlas = traits.Any(mandatory=False)
    parcellation = traits.Any(mandatory=False)
    labels = traits.Any(mandatory=True)
    coords = traits.Any(mandatory=True)
    hpass = traits.Any(mandatory=True)
    roi = traits.Any(mandatory=True)
    signal = traits.Any(mandatory=False)
class ReportNodeInputSpec(BaseInterfaceInputSpec):
    """Input interface for reporting."""

    output_dir = traits.Directory(
        argstr='%s',
        exists=False,
        resolve=True,
        desc='Output directory to write results and BIDS derivatives to write.',
        mandatory=True,
        position=1)
    fms = traits.Any()
    df_features = traits.Any()
    _fit_args = traits.Any()
class RegressOutputSpec(TraitedSpec):
    """Output traits produced by confound regression."""

    cleaned_path = File(exists=True, mandatory=True,
                        desc="Cleaned timeseries.")
    VE_file_path = File(exists=True, mandatory=True,
                        desc="Variance explained map from confound regression.")
    # NOTE(review): STD and CR_STD share the same desc text in the original —
    # confirm whether CR_STD should read "after standardization" instead.
    STD_file_path = File(exists=True, mandatory=True,
                         desc="Temporal standard deviation map after confound correction, prior to standardization.")
    CR_STD_file_path = File(exists=True, mandatory=True,
                            desc="Temporal standard deviation map after confound correction, prior to standardization.")
    frame_mask_file = File(exists=True, mandatory=True,
                           desc="Frame mask from temporal censoring.")
    data_dict = traits.Any(desc="A dictionary with key outputs.")
    aroma_out = traits.Any(desc="Output directory from ICA-AROMA.")
class LFComputationConnInputSpec(BaseInterfaceInputSpec):
    """Inputs for lead-field computation."""

    sbj_id = traits.String(desc='subject id', mandatory=True)
    # Fix: dropped inert `exists=True` — path-existence checking is
    # File/Directory trait metadata and has no effect on a plain String.
    # (The value is a Freesurfer directory path — presumably should be a
    # Directory trait; left as String to preserve the interface.)
    sbj_dir = traits.String(desc='Freesurfer main directory',
                            mandatory=True)
    raw_info = traits.Any(desc='raw info', mandatory=True)
    raw_fname = traits.String(desc='raw file name', mandatory=True)
    spacing = traits.String(desc='spacing to use to setup a source space',
                            mandatory=False)
    aseg = traits.Bool(desc='if true sub structures will be considered',
                       mandatory=False)
    aseg_labels = traits.List(desc='list of substructures in the src space',
                              mandatory=False)
    save_mixed_src_space = traits.Bool(False, desc='if true save src space',
                                       usedefault=True, mandatory=False)
class _RegisterFuncInputSpec(BaseInterfaceInputSpec):
    """Declarative input traits for the RegisterFunc interface."""

    anat_file = File(exists=True, mandatory=True)
    mask = traits.Any(mandatory=False)
    vox_size = traits.Str('2mm', mandatory=True, usedefault=True)
    simple = traits.Bool(False, usedefault=True)
    overwrite = traits.Bool(True, usedefault=True)
class _PlotMatricesInputSpec(BaseInterfaceInputSpec):
    """Inputs for plotting design/contrast matrices from Generate Model."""

    run_info = traits.Any(desc="List of regressors of no interest")
    mat_file = File(exists=True, desc="Matrix File produced by Generate Model")
    # Fix: desc typo "Produces by" -> "Produced by".
    con_file = File(exists=True, desc="Contrast File Produced by Generate Model")
    database_path = Directory(exists=True, desc="Database path for current model")
    entities = traits.Dict(desc="Dictionary containing BIDS file entities")
    output_dir = Directory(desc="Directory for Output")
class CalcMeanInputSpec(TraitedSpec):
    """Declarative input traits for the CalcMean interface."""

    in_file = File(exists=True, mandatory=True)
    mask = File(exists=True)
    parcellation = File(exists=True)
    dseg = File(exists=True)
    vals = traits.Dict(traits.Str(), traits.Any())
    key = traits.Str()
class _IndividualClusteringOutputSpec(TraitedSpec):
    """Declarative output traits for the IndividualClustering interface."""

    uatlas = File(exists=True)
    atlas = traits.Str(mandatory=True)
    clustering = traits.Bool(True, usedefault=True)
    clust_mask = File(exists=True, mandatory=True)
    k = traits.Any(mandatory=True)
    clust_type = traits.Str(mandatory=True)
class AtlasTransformInputSpec(TraitedSpec):
    """Inputs for transforming an atlas onto an input NIfTI image."""

    nifti = traits.Any(mandatory=True, desc='input nifti')
    atlas_name = traits.String(mandatory=True, desc='atlas name')
    # Fix: desc was 'atlas name' — a copy-paste of the field above.
    bids_dir = traits.String(mandatory=True, desc='bids directory')
    resolution = traits.Int(mandatory=False,
                            desc='resolution (for shen atlas)')
    number_of_clusters = traits.Int(mandatory=False, desc='for craddock')
    similarity_measure = traits.String(mandatory=False, desc='for craddock')
    algorithm = traits.String(mandatory=False, desc='for craddock')
class FilterResultdictsInputSpec(BaseInterfaceInputSpec):
    """Inputs for filtering result dictionaries (filterobjs variant)."""

    indicts = traits.List(traits.Dict(traits.Str(), traits.Any()), mandatory=True)
    filterobjs = traits.List(desc="filter list", mandatory=True)
    variableobjs = traits.List(desc="variable list")
    spreadsheet = traits.File(desc="spreadsheet")
    requireoneofkeys = traits.List(
        traits.Str(),
        desc="only keep resultdicts that have at least one of these keys",
    )
    qualitycheckfile = traits.File()
class ExtractNetStatsInputSpec(BaseInterfaceInputSpec):
    """Declarative input traits for the ExtractNetStats interface."""

    ID = traits.Any(mandatory=True)
    network = traits.Any(mandatory=False)
    thr = traits.Any(mandatory=True)
    conn_model = traits.Str(mandatory=True)
    est_path = File(exists=True, mandatory=True, desc="")
    roi = traits.Any(mandatory=False)
    prune = traits.Any(mandatory=False)
    node_size = traits.Any(mandatory=False)
    smooth = traits.Any(mandatory=False)
    c_boot = traits.Any(mandatory=False)
class _FetchNodesLabelsOutputSpec(TraitedSpec):
    """Declarative output traits for the FetchNodesLabels interface."""

    labels = traits.Any(mandatory=True)
    coords = traits.Any(mandatory=True)
    atlas = traits.Any()
    networks_list = traits.Any()
    parcels_4d = traits.Any()
    par_max = traits.Any()
    parcellation = traits.Any()
    dir_path = traits.Any()
class CoherenceAnalyzerInputSpec(BaseInterfaceInputSpec):
    """Inputs for nitime-based coherence analysis.

    Takes either a CSV of ROI time-series (``in_file``, which then requires
    ``TR``) or an in-memory nitime TimeSeries object (``in_TS``); the two are
    mutually exclusive via ``_xor_inputs``.
    """

    # Input either csv file, or time-series object and use _xor_inputs to
    # discriminate
    _xor_inputs = ('in_file', 'in_TS')
    in_file = File(
        desc=('csv file with ROIs on the columns and '
              'time-points on the rows. ROI names at the top row'),
        exists=True,
        requires=('TR', ))

    # If you gave just a file name, you need to specify the sampling_rate:
    # NOTE(review): several desc strings below are built by implicit
    # concatenation and are missing separating spaces (e.g. "the data" +
    # "in your csv") — help text renders with words run together; left
    # unchanged here since desc is runtime metadata.
    TR = traits.Float(desc=('The TR used to collect the data'
                            'in your csv file <in_file>'))
    in_TS = traits.Any(desc='a nitime TimeSeries object')
    NFFT = traits.Range(
        low=32,
        value=64,
        usedefault=True,
        desc=('This is the size of the window used for '
              'the spectral estimation. Use values between '
              '32 and the number of samples in your time-series.'
              '(Defaults to 64.)'))
    n_overlap = traits.Range(
        low=0,
        value=0,
        usedefault=True,
        desc=('The number of samples which overlap'
              'between subsequent windows.(Defaults to 0)'))
    frequency_range = traits.List(
        value=[0.02, 0.15],
        usedefault=True,
        minlen=2,
        maxlen=2,
        desc=('The range of frequencies over'
              'which the analysis will average.'
              '[low,high] (Default [0.02,0.15]'))
    output_csv_file = File(
        desc=
        'File to write outputs (coherence,time-delay) with file-names: file_name_ {coherence,timedelay}'
    )
    output_figure_file = File(
        desc=
        'File to write output figures (coherence,time-delay) with file-names: file_name_{coherence,timedelay}. Possible formats: .png,.svg,.pdf,.jpg,...'
    )
    figure_type = traits.Enum(
        'matrix',
        'network',
        usedefault=True,
        desc=("The type of plot to generate, where "
              "'matrix' denotes a matrix image and"
              "'network' denotes a graph representation."
              " Default: 'matrix'"))
class AvScaleOutputSpec(TraitedSpec):
    """Declarative output traits for the AvScale interface."""

    rotation_translation_matrix = traits.Any(
        desc='Rotation and Translation Matrix')
    scales = traits.Any(desc='Scales (x,y,z)')
    skews = traits.Any(desc='Skews')
    average_scaling = traits.Any(desc='Average Scaling')
    determinant = traits.Any(desc='Determinant')
    forward_half_transform = traits.Any(desc='Forward Half Transform')
    backward_half_transform = traits.Any(desc='Backwards Half Transform')
    left_right_orientation_preserved = traits.Bool(
        desc='True if LR orientation preserved')
class _IndividualClusteringInputSpec(BaseInterfaceInputSpec):
    """Input traits for IndividualClustering (extended variant)."""

    func_file = File(exists=True, mandatory=True)
    conf = traits.Any(mandatory=False)
    clust_mask = File(exists=True, mandatory=True)
    ID = traits.Any(mandatory=True)
    k = traits.Any(mandatory=True)
    clust_type = traits.Str(mandatory=True)
    vox_size = traits.Str("2mm", mandatory=True, usedefault=True)
    local_corr = traits.Str("allcorr", mandatory=True, usedefault=True)
    mask = traits.Any(mandatory=False)
    outdir = traits.Str(mandatory=True)
    basedir_path = Directory(exists=True, mandatory=True)
    anat_file = File(exists=True, mandatory=True)
    t1w_brain = File(exists=True, mandatory=True)
    mni2t1w_warp = File(exists=True, mandatory=True)
    mni2t1_xfm = File(exists=True, mandatory=True)
    template_name = traits.Str("MNI152_T1", mandatory=True, usedefault=True)
    simple = traits.Bool(False, usedefault=True)
class _GetRunModelInfoInputSpec(BaseInterfaceInputSpec):
    """Inputs for collecting run-level model information."""

    metadata_file = File()
    regressor_file = File()
    events_file = File()
    entities = traits.Dict(mandatory=True)
    model = traits.Dict(mandatory=True)
    # Fix 1: `default=` is not a recognized traits keyword — it was stored as
    # inert metadata rather than setting the default value; pass the default
    # positionally instead (None is also Any's implicit default, so callers
    # see the same value).
    # Fix 2: added the missing space in the concatenated desc, which
    # previously rendered as "regress outfor temporal filtering".
    detrend_poly = traits.Any(
        None,
        desc=("Legendre polynomials to regress out "
              "for temporal filtering"),
    )
class BIDSDataGraberInputSpec(BaseInterfaceInputSpec):
    """Declarative input traits for the BIDSDataGraber interface."""

    # NOTE(review): `exists=True` on Str/List/Dict/Any traits is inert
    # metadata (only File/Directory check existence) — kept to preserve the
    # interface byte-for-byte.
    bids_dir = traits.Str(exists=True, mandatory=True,
                          desc="BIDS data directory")
    suffix = traits.List(exists=True, mandatory=True,
                         desc="Suffix to search for")
    scan_info = traits.Dict(exists=True, mandatory=True,
                            desc="Info required to find the scan")
    run = traits.Any(exists=True, desc="Run number")
class RegressInputSpec(BaseInterfaceInputSpec):
    """Declarative input traits for confound regression."""

    bold_file = File(exists=True, mandatory=True,
                     desc="Timeseries to denoise.")
    data_dict = traits.Dict(exists=True, mandatory=True,
                            desc="Dictionary with extra inputs.")
    brain_mask_file = File(exists=True, mandatory=True, desc="Brain mask.")
    CSF_mask_file = File(exists=True, mandatory=True, desc="CSF mask.")
    cr_opts = traits.Any(exists=True, mandatory=True,
                         desc="Processing specs.")
class PassMetaOutsInputSpec(BaseInterfaceInputSpec):
    """Inputs for passing through per-iteration metadata lists."""

    conn_model_iterlist = traits.Any(mandatory=True)
    est_path_iterlist = traits.Any(mandatory=False)
    node_size_iterlist = traits.Any(mandatory=True)
    # NOTE(review): thr_iterlist is a Str while the sibling *_iterlist fields
    # are Any — confirm a single threshold string (not a list) is intended.
    thr_iterlist = traits.Str(mandatory=True)
    # NOTE(review): prune_iterlist is declared as an existing File, unlike
    # every other *_iterlist field — looks like a possible copy-paste; verify
    # against the upstream node that feeds this interface.
    prune_iterlist = File(exists=True, mandatory=True, desc="")
    ID_iterlist = traits.Any(mandatory=False)
    roi_iterlist = traits.Any(mandatory=False)
    norm_iterlist = traits.Any(mandatory=False)
    binary_iterlist = traits.Any(mandatory=False)