Example #1
def test_iap():
    sys.argv = [sys.argv[0]]
    pos_keys = ['positional_str', 'positional_bool', 'positional_int',
                'positional_float']

    opt_keys = ['optional_str', 'optional_bool', 'optional_int',
                'optional_float']

    pos_results = ['test', 0, 10, 10.2]
    opt_results = ['opt_test', 1, 20, 20.2]

    inputs = inputs_from_results(opt_results, opt_keys)
    inputs.extend(inputs_from_results(pos_results))

    sys.argv.extend(inputs)
    parser = IntrospectiveArgumentParser()
    parser.add_workflow(dummy_flow)
    args = parser.get_flow_args()
    all_keys = pos_keys + opt_keys
    all_results = pos_results + opt_results

    # Test if types and order are respected
    for k, v in zip(all_keys, all_results):
        npt.assert_equal(args[k], v)

    # Test if **args really fits dummy_flow's arguments
    return_values = dummy_flow(**args)
    npt.assert_array_equal(return_values, all_results + [2.0])
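
A note on the helper: inputs_from_results is defined elsewhere in the test module. A plausible reconstruction, inferred only from how it is called above (hypothetical, not the verbatim DIPY helper): with keys it emits '--key value' token pairs for optionals; without keys it emits bare positional values.

def inputs_from_results(results, keys=None):
    # Hypothetical reconstruction of the helper used in test_iap above.
    inputs = []
    for idx, result in enumerate(results):
        if keys is not None:
            # Optional parameters become '--name value' pairs.
            inputs.append('--' + keys[idx])
        # sys.argv entries must be strings.
        inputs.append(str(result))
    return inputs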
Example #2
def dipy_to_nipype_interface(cls_name, dipy_flow, BaseClass=DipyBaseInterface):
    """Construct a class in order to respect nipype interface specifications.

    This convenient class factory convert a DIPY Workflow to a nipype
    interface.

    Parameters
    ----------
    cls_name: string
        new class name
    dipy_flow: Workflow class type.
        It should be any children class of `dipy.workflows.workflow.Worflow`
    BaseClass: object
        nipype instance object

    Returns
    -------
    newclass: object
        new nipype interface specification class

    """
    parser = IntrospectiveArgumentParser()
    flow = dipy_flow()
    parser.add_workflow(flow)
    default_values = inspect.getfullargspec(flow.run).defaults
    optional_params = [args + (val,) for args, val in
                       zip(parser.optional_parameters, default_values)]
    start = len(parser.optional_parameters) - len(parser.output_parameters)

    output_parameters = [args + (val,) for args, val in
                         zip(parser.output_parameters, default_values[start:])]
    input_parameters = parser.positional_parameters + optional_params

    input_spec = create_interface_specs("{}InputSpec".format(cls_name),
                                        input_parameters,
                                        BaseClass=BaseInterfaceInputSpec)

    output_spec = create_interface_specs("{}OutputSpec".format(cls_name),
                                         output_parameters,
                                         BaseClass=TraitedSpec)

    def _run_interface(self, runtime):
        flow = dipy_flow()
        args = self.inputs.get()
        flow.run(**args)
        # nipype expects _run_interface to return the runtime object.
        return runtime

    def _list_outputs(self):
        outputs = self._outputs().get()
        out_dir = outputs.get("out_dir", ".")
        for key, values in outputs.items():
            outputs[key] = op.join(out_dir, values)

        return outputs

    newclass = type(str(cls_name), (BaseClass, ),
                    {"input_spec": input_spec,
                     "output_spec": output_spec,
                     "_run_interface": _run_interface,
                     "_list_outputs:": _list_outputs})
    return newclass
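
A usage sketch for the factory above. MedianOtsuFlow is a real DIPY workflow, chosen here purely for illustration; any Workflow subclass should work the same way.

from dipy.workflows.segment import MedianOtsuFlow

# Generate a nipype interface class from the DIPY workflow.
MedianOtsuInterface = dipy_to_nipype_interface("MedianOtsu", MedianOtsuFlow)

# The generated class behaves like any other nipype interface and can be
# instantiated directly or wrapped in a pipeline node.
med_otsu = MedianOtsuInterface()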
Example #3
def test_nargs():
    sys.argv = [sys.argv[0]]
    var_args = ['1', '2', '3', '4', '5', '6', '7', '8']
    optionals = ['--optional_int', '2']
    sys.argv.extend(var_args + optionals)

    parser = IntrospectiveArgumentParser()
    parser.add_workflow(nargs_flow)
    args = parser.get_flow_args()
    var_ints, opt_int = nargs_flow(**args)
    npt.assert_equal(len(var_ints), len(var_args))
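
nargs_flow is a test fixture defined elsewhere. A hedged sketch of what its signature plausibly looks like, given that the parser above collects eight positional tokens into one list and that the flow returns two values (parameter names are assumptions):

def nargs_flow(variable_ints, optional_int=1):
    """Hypothetical fixture with a variable-length int parameter.

    Parameters
    ----------
    variable_ints : variable int
        Any number of integer values, gathered via nargs.
    optional_int : int, optional
        A plain optional value.
    """
    return variable_ints, optional_int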
Example #4
def run_flow(flow):
    """ Wraps the process of building an argparser that reflects the workflow
    that we want to run along with some generic parameters like logging,
    force and output strategies. The resulting parameters are then fed to
    the workflow's run method.
    """
    parser = IntrospectiveArgumentParser()
    sub_flows_dicts = parser.add_workflow(flow)

    # Common workflow arguments
    parser.add_argument('--force', dest='force',
                        action='store_true', default=False,
                        help='Force overwriting output files.')

    parser.add_argument('--version', action='version',
                        version='DIPY {}'.format(dipy_version))

    parser.add_argument('--out_strat', action='store', dest='out_strat',
                        metavar='string', required=False, default='absolute',
                        help='Strategy to manage output creation.')

    parser.add_argument('--mix_names', dest='mix_names',
                        action='store_true', default=False,
                        help='Prepend mixed input names to output names.')

    # Add logging parameters common to all workflows
    msg = 'Log messages display level. Accepted options include CRITICAL,'
    msg += ' ERROR, WARNING, INFO, DEBUG and NOTSET (default INFO).'
    parser.add_argument('--log_level', action='store', dest='log_level',
                        metavar='string', required=False, default='INFO',
                        help=msg)

    parser.add_argument('--log_file', action='store', dest='log_file',
                        metavar='string', required=False, default='',
                        help='Log file to be saved.')

    args = parser.get_flow_args()

    logging.basicConfig(filename=args['log_file'],
                        format='%(levelname)s:%(message)s',
                        level=get_level(args['log_level']))

    # Output management parameters
    flow._force_overwrite = args['force']
    flow._output_strategy = args['out_strat']
    flow._mix_names = args['mix_names']

    # Keep only workflow related parameters
    del args['force']
    del args['log_level']
    del args['log_file']
    del args['out_strat']
    del args['mix_names']

    # Remove subflow-related params
    for sub_flow, params_dict in iteritems(sub_flows_dicts):
        # Snapshot the keys: params_dict is mutated inside the loop.
        for key in list(params_dict.keys()):
            if key in args:
                params_dict[key] = args.pop(key)

                # Rename dictionary key to the original param name
                params_dict[key.split('.')[1]] = params_dict.pop(key)

    if sub_flows_dicts:
        flow.set_sub_flows_optionals(sub_flows_dicts)

    return flow.run(**args)
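
In DIPY's command-line scripts, run_flow is typically the entire entry point. A minimal sketch, assuming DIPY's usual module layout and using MedianOtsuFlow only as an example:

from dipy.workflows.flow_runner import run_flow
from dipy.workflows.segment import MedianOtsuFlow

if __name__ == '__main__':
    # Builds the argparser from the flow's run() signature, parses
    # sys.argv, and forwards the parsed values to MedianOtsuFlow.run().
    run_flow(MedianOtsuFlow())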
Example #5
            if hasattr(tractography_data, 'signal'):
                signal = tractography_data.signal.get_fdata()
                data = signal[:, :, :, 0]
                affine = np.eye(4)

        if f.endswith('.nii.gz') or f.endswith('.nii'):

            img = nib.load(f)
            data = img.get_fdata()
            affine = img.affine
            if verbose:
                print(affine)

    # tmp save
    # tractogram = nib.streamlines.Tractogram(tractograms[0])
    # tractogram.apply_affine(img.affine)
    # nib.streamlines.save(tractogram, "tmp.tck")
    # exit()

    horizon(tractograms, data, affine, cluster, cluster_thr, random_colors,
            length_lt, length_gt, clusters_lt, clusters_gt)


parser = IntrospectiveArgumentParser()
parser.add_workflow(horizon_flow)

if __name__ == '__main__':
    args = parser.get_flow_args()
    horizon_flow(**args)
Example #6
def run_flow(flow):
    """ Wraps the process of building an argparser that reflects the workflow
    that we want to run along with some generic parameters like logging,
    force and output strategies. The resulting parameters are then fed to
    the workflow's run method.
    """
    parser = IntrospectiveArgumentParser()
    sub_flows_dicts = parser.add_workflow(flow)

    # Common workflow arguments
    parser.add_argument('--force',
                        dest='force',
                        action='store_true',
                        default=False,
                        help='Force overwriting output files.')

    parser.add_argument('--version',
                        action='version',
                        version='DIPY {}'.format(dipy_version))

    parser.add_argument('--out_strat',
                        action='store',
                        dest='out_strat',
                        metavar='string',
                        required=False,
                        default='absolute',
                        help='Strategy to manage output creation.')

    parser.add_argument('--mix_names',
                        dest='mix_names',
                        action='store_true',
                        default=False,
                        help='Prepend mixed input names to output names.')

    # Add logging parameters common to all workflows
    msg = 'Log messages display level. Accepted options include CRITICAL,'
    msg += ' ERROR, WARNING, INFO, DEBUG and NOTSET (default INFO).'
    parser.add_argument('--log_level',
                        action='store',
                        dest='log_level',
                        metavar='string',
                        required=False,
                        default='INFO',
                        help=msg)

    parser.add_argument('--log_file',
                        action='store',
                        dest='log_file',
                        metavar='string',
                        required=False,
                        default='',
                        help='Log file to be saved.')

    args = parser.get_flow_args()

    logging.basicConfig(filename=args['log_file'],
                        format='%(levelname)s:%(message)s',
                        level=get_level(args['log_level']))

    # Output management parameters
    flow._force_overwrite = args['force']
    flow._output_strategy = args['out_strat']
    flow._mix_names = args['mix_names']

    # Keep only workflow related parameters
    del args['force']
    del args['log_level']
    del args['log_file']
    del args['out_strat']
    del args['mix_names']

    # Remove subflow-related params
    for params_dict in list(sub_flows_dicts.values()):
        for key in list(params_dict.keys()):
            if key in args:
                params_dict[key] = args.pop(key)

                # Rename dictionary key to the original param name
                params_dict[key.split('.')[1]] = params_dict.pop(key)

    if sub_flows_dicts:
        flow.set_sub_flows_optionals(sub_flows_dicts)

    return flow.run(**args)
Example #7
def dipy_to_nipype_interface(cls_name, dipy_flow, BaseClass=DipyBaseInterface):
    """Construct a class in order to respect nipype interface specifications.

    This convenient class factory convert a DIPY Workflow to a nipype
    interface.

    Parameters
    ----------
    cls_name: string
        new class name
    dipy_flow: Workflow class type.
        It should be any children class of `dipy.workflows.workflow.Worflow`
    BaseClass: object
        nipype instance object

    Returns
    -------
    newclass: object
        new nipype interface specification class

    """
    parser = IntrospectiveArgumentParser()
    flow = dipy_flow()
    parser.add_workflow(flow)
    default_values = inspect.getfullargspec(flow.run).defaults
    optional_params = [
        args + (val, )
        for args, val in zip(parser.optional_parameters, default_values)
    ]
    start = len(parser.optional_parameters) - len(parser.output_parameters)

    output_parameters = [
        args + (val, )
        for args, val in zip(parser.output_parameters, default_values[start:])
    ]
    input_parameters = parser.positional_parameters + optional_params

    input_spec = create_interface_specs(
        "{}InputSpec".format(cls_name),
        input_parameters,
        BaseClass=BaseInterfaceInputSpec,
    )

    output_spec = create_interface_specs("{}OutputSpec".format(cls_name),
                                         output_parameters,
                                         BaseClass=TraitedSpec)

    def _run_interface(self, runtime):
        flow = dipy_flow()
        args = self.inputs.get()
        flow.run(**args)
        # nipype expects _run_interface to return the runtime object.
        return runtime

    def _list_outputs(self):
        outputs = self._outputs().get()
        out_dir = outputs.get("out_dir", ".")
        for key, values in outputs.items():
            outputs[key] = op.join(out_dir, values)

        return outputs

    newclass = type(
        str(cls_name),
        (BaseClass, ),
        {
            "input_spec": input_spec,
            "output_spec": output_spec,
            "_run_interface": _run_interface,
            "_list_outputs:": _list_outputs,
        },
    )
    return newclass
Example #8
def test_iap_epilog_and_description():
    parser = IntrospectiveArgumentParser()
    dummy_flow = DummyWorkflow1()
    parser.add_workflow(dummy_flow)
    assert "dummy references" in parser.epilog
    assert "Workflow used to test combined" in parser.description
Example #9
def test_iap_epilog():
    parser = IntrospectiveArgumentParser()
    dummy_flow = DummyWorkflow1()
    parser.add_workflow(dummy_flow)
    assert "dummy references" in parser.epilog
Example #10
def test_optional_str():
    # Test an optional, variable str argument passed without a value
    sys.argv = [sys.argv[0]]
    inputs = ['--optional_str_1']
    sys.argv.extend(inputs)
    parser = IntrospectiveArgumentParser()
    dummy_flow = DummyWorkflowOptionalStr()
    parser.add_workflow(dummy_flow)
    args = parser.get_flow_args()
    all_keys = ['optional_str_1']
    all_results = [[]]
    # Test if types and order are respected
    for k, v in zip(all_keys, all_results):
        npt.assert_equal(args[k], v)
    # Test if **args really fits dummy_flow's arguments
    return_values = dummy_flow.run(**args)
    npt.assert_array_equal(return_values, all_results + ['default'])

    # Test an optional, variable str argument passed with a value
    sys.argv = [sys.argv[0]]
    inputs = ['--optional_str_1', 'test']
    sys.argv.extend(inputs)
    parser = IntrospectiveArgumentParser()
    dummy_flow = DummyWorkflowOptionalStr()
    parser.add_workflow(dummy_flow)
    args = parser.get_flow_args()
    all_keys = ['optional_str_1']
    all_results = [['test']]
    # Test if types and order are respected
    for k, v in zip(all_keys, all_results):
        npt.assert_equal(args[k], v)
    # Test if **args really fits dummy_flow's arguments
    return_values = dummy_flow.run(**args)
    npt.assert_array_equal(return_values, all_results + ['default'])

    # Test that a plain optional str flag without a value is rejected
    sys.argv = [sys.argv[0]]
    inputs = ['--optional_str_2']
    sys.argv.extend(inputs)
    parser = IntrospectiveArgumentParser()
    dummy_flow = DummyWorkflowOptionalStr()
    parser.add_workflow(dummy_flow)
    with npt.assert_raises(SystemExit) as cm:
        parser.get_flow_args()
    npt.assert_equal(cm.exception.code, 2)
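
A hedged sketch of the fixture this test assumes: optional_str_1 must be declared as a variable-length string (so a bare flag parses to an empty list), while optional_str_2 takes exactly one value (so a bare flag makes argparse exit with code 2). Names beyond those used in the test are assumptions.

from dipy.workflows.workflow import Workflow

class DummyWorkflowOptionalStr(Workflow):
    """Workflow used to test optional string parameters.

    Parameters
    ----------
    optional_str_1 : variable string, optional
        Zero or more values; passing the bare flag yields [].
    optional_str_2 : string, optional
        A single value; the bare flag triggers an argparse error.
    """

    def run(self, optional_str_1=None, optional_str_2='default'):
        return optional_str_1, optional_str_2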