# Trait-metadata tests for the SPM ThresholdStatistics interface, written in
# the yield-style convention of older nipype releases.
from nipype.testing import assert_equal
from nipype.interfaces.spm.model import ThresholdStatistics

def test_ThresholdStatistics_inputs():
    input_map = dict(
        contrast_index=dict(mandatory=True, ),
        extent_threshold=dict(usedefault=True, ),
        height_threshold=dict(mandatory=True, ),
        ignore_exception=dict(
            nohash=True,
            usedefault=True,
        ),
        matlab_cmd=dict(),
        mfile=dict(usedefault=True, ),
        paths=dict(),
        spm_mat_file=dict(
            copyfile=True,
            mandatory=True,
        ),
        stat_image=dict(
            copyfile=False,
            mandatory=True,
        ),
        use_mcr=dict(),
        use_v8struct=dict(
            min_ver='8',
            usedefault=True,
        ),
    )
    inputs = ThresholdStatistics.input_spec()

    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(inputs.traits()[key], metakey), value
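
# A minimal usage sketch of the interface exercised above (not part of the
# test): the file names are hypothetical placeholders, and an actual run
# requires a working MATLAB/SPM installation.
def _example_threshold_statistics():
    ts = ThresholdStatistics()
    ts.inputs.spm_mat_file = 'SPM.mat'      # hypothetical path to the SPM.mat
    ts.inputs.stat_image = 'spmT_0001.nii'  # hypothetical statistic image
    ts.inputs.contrast_index = 1            # first contrast in the SPM.mat
    ts.inputs.height_threshold = 4.56       # voxel-level statistic threshold
    ts.inputs.extent_threshold = 10         # minimum cluster size in voxels
    return ts                               # ts.run() would dispatch to SPM
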
def test_ThresholdStatistics_outputs():
    output_map = dict(
        clusterwise_P_FDR=dict(),
        clusterwise_P_RF=dict(),
        voxelwise_P_Bonf=dict(),
        voxelwise_P_FDR=dict(),
        voxelwise_P_RF=dict(),
        voxelwise_P_uncor=dict(),
    )
    outputs = ThresholdStatistics.output_spec()

    for key, metadata in output_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(outputs.traits()[key], metakey), value
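
# The outputs listed above are the theoretical false-positive probabilities
# that SPM reports for the chosen thresholds. A hedged sketch of reading them
# after a successful run (again requires MATLAB/SPM):
def _example_read_threshold_statistics(ts):
    res = ts.run()
    return {
        'voxelwise_P_FDR': res.outputs.voxelwise_P_FDR,
        'voxelwise_P_Bonf': res.outputs.voxelwise_P_Bonf,
        'clusterwise_P_FDR': res.outputs.clusterwise_P_FDR,
    }
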
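# ======================================================================
# The fragment below configures nodes of a first-level SPM pipeline and
# depends on context that is not shown here. A minimal sketch of that
# context follows: the use of the SPM Threshold interface and the node
# name "thresh" are inferred from the inputs set below, while `path_root`
# is assumed to be defined elsewhere.
# ======================================================================
from os.path import join as opj
from nipype.pipeline.engine import Node
from nipype.interfaces.io import DataSink
from nipype.interfaces.spm import Threshold, ThresholdStatistics

# assumed definition of the thresholding node configured below:
thresh = Node(Threshold(), name="thresh")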
thresh.inputs.height_threshold_type = 'p-value'
# input: which contrast in the SPM.mat to use (an integer):
thresh.inputs.contrast_index = 1
# input: p threshold on FDR corrected cluster size probabilities (float):
thresh.inputs.extent_fdr_p_threshold = 0.05
# input: minimum cluster size in voxels (an integer, default = 0):
thresh.inputs.extent_threshold = 0
# set expected thread and memory usage for the node:
thresh.interface.num_threads = 1
thresh.interface.mem_gb = 0.2
# ======================================================================
# DEFINE NODE: THRESHOLD STATISTICS
# ======================================================================
# function: given height and cluster size thresholds, calculate the
# theoretical probabilities concerning false positives
thresh_stat = Node(ThresholdStatistics(), name="thresh_stat")
# input: which contrast in the SPM.mat to use (an integer):
thresh_stat.inputs.contrast_index = 1
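# How these nodes are wired together is not part of this fragment. A hedged
# sketch of one plausible connection, assuming a workflow and a contrast-
# estimation node (the names `l1analysis` and `l1contrasts` are hypothetical):
from nipype.pipeline.engine import Workflow
from nipype.interfaces.spm import EstimateContrast

l1contrasts = Node(EstimateContrast(), name='l1contrasts')  # hypothetical node
l1analysis = Workflow(name='l1analysis')                    # hypothetical workflow
# the SPM.mat produced by contrast estimation feeds the statistics node:
l1analysis.connect(l1contrasts, 'spm_mat_file', thresh_stat, 'spm_mat_file')
# stat_image and height_threshold would be set analogously, e.g. from the
# corresponding spmT image and the voxel-level threshold chosen above.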
# ======================================================================
# CREATE DATASINK NODE (OUTPUT STREAM):
# ======================================================================
# create a node of the DataSink interface:
l1datasink = Node(DataSink(), name='datasink')
# assign the path to the base directory:
l1datasink.inputs.base_directory = opj(path_root, 'l1pipeline')
# create a list of substitutions to adjust the file paths of datasink:
substitutions = [('_subject_id_', '')]
# assign the substitutions to the datasink node:
l1datasink.inputs.substitutions = substitutions
# determine whether to store output in parameterized form:
l1datasink.inputs.parameterization = True
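# A hedged sketch of routing results into the datasink, assuming the
# `l1analysis` workflow sketched above; the text after the '@' only names
# the stored output and does not create an extra subfolder:
l1analysis.connect(thresh, 'thresholded_map', l1datasink, 'thresh.@threshold_map')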