Example #1
def wrap_cunn(thcunn_h_path, install_dir, template_path):
    # Generate a THCUNN.cwrap declaration file covering every THCUNN
    # function for each supported CUDA scalar type, then run cwrap over it
    # to produce the binding code for torch._C._THCUNN. Assumes os,
    # thnn_utils, wrap_function, cwrap, and the cwrap plugins (NNExtension,
    # NullableArguments, AutoGPU) are imported at module scope.
    wrapper = '#include <TH/TH.h>\n'
    wrapper += '#include <THC/THC.h>\n\n\n'
    cunn_functions = thnn_utils.parse_header(thcunn_h_path or thnn_utils.THCUNN_H_PATH)
    for fn in cunn_functions:
        for t in ['CudaHalf', 'Cuda', 'CudaDouble']:
            wrapper += wrap_function(fn.name, t, fn.arguments)
    install_dir = install_dir or 'torch/csrc/nn'
    # Note: unlike wrap_nn, this variant does not create install_dir; it
    # assumes the directory already exists (e.g. wrap_nn ran first).
    with open(os.path.join(install_dir, 'THCUNN.cwrap'), 'w') as f:
        f.write(wrapper)
    cwrap(os.path.join(install_dir, 'THCUNN.cwrap'),
          plugins=[NNExtension('torch._C._THCUNN'), NullableArguments(), AutoGPU(has_self=False)],
          template_path=template_path)
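
The inner loop instantiates each parsed THNN function once per CUDA scalar type. A small runnable illustration of that expansion, under the assumption (not shown in this excerpt) that wrap_function targets the THNN_<Type><Name> C naming scheme used by THCUNN:

for t in ['CudaHalf', 'Cuda', 'CudaDouble']:
    # 'Abs_updateOutput' stands in for a parsed fn.name
    print('THNN_{}{}'.format(t, 'Abs_updateOutput'))
# THNN_CudaHalfAbs_updateOutput
# THNN_CudaAbs_updateOutput
# THNN_CudaDoubleAbs_updateOutput
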
Example #2
def wrap_nn(thnn_h_path, install_dir, template_path):
    # CPU counterpart of wrap_cunn: emits THNN.cwrap for the Float and
    # Double variants of every THNN function and runs cwrap to generate
    # the torch._C._THNN bindings. Creates install_dir if needed.
    wrapper = '#include <TH/TH.h>\n\n\n'
    nn_functions = thnn_utils.parse_header(thnn_h_path or thnn_utils.THNN_H_PATH)
    for fn in nn_functions:
        for t in ['Float', 'Double']:
            wrapper += wrap_function(fn.name, t, fn.arguments)
    install_dir = install_dir or 'torch/csrc/nn'
    try:
        os.makedirs(install_dir)
    except OSError:
        # directory may already exist (pre-exist_ok idiom)
        pass
    with open(os.path.join(install_dir, 'THNN.cwrap'), 'w') as f:
        f.write(wrapper)
    cwrap(os.path.join(install_dir, 'THNN.cwrap'),
          plugins=[NNExtension('torch._C._THNN'), NullableArguments()],
          template_path=template_path)
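
A minimal hedged driver showing how the two generators above might be invoked together; the CLI flags here are illustrative assumptions, not the real build interface:

import argparse

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Generate THNN/THCUNN cwrap bindings')
    parser.add_argument('--thnn-h-path', default=None)    # None: use bundled header
    parser.add_argument('--thcunn-h-path', default=None)
    parser.add_argument('--install-dir', default=None)    # None: torch/csrc/nn
    parser.add_argument('--template-path', required=True)
    args = parser.parse_args()
    # wrap_nn creates install_dir, so it must run before wrap_cunn,
    # which writes into the directory without creating it.
    wrap_nn(args.thnn_h_path, args.install_dir, args.template_path)
    wrap_cunn(args.thcunn_h_path, args.install_dir, args.template_path)
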
Example #3
def _generate_function_classes(scope_dict):
    # Parse THNN.h, derive the set of module prefixes, drop the ones
    # implemented by hand (the `exceptions` set), and synthesize an
    # autograd Function class for each remaining module, installing it
    # into scope_dict under its remapped Python name.
    global function_list, function_by_name
    function_list = parse_header(THNN_H_PATH)
    function_by_name = {fn.name: fn for fn in function_list}
    classes_to_generate = {fn.name.partition('_')[0] for fn in function_list}
    exceptions = {
        'Linear',
        'IndexLinear',
        'SpatialFullConvolution',
        'SpatialConvolutionMM',
        'SparseLinear',
        'TemporalConvolution',
        'SpatialAveragePooling',
        'SpatialMaxPooling',
        'SpatialDilatedMaxPooling',
        'SpatialMaxUnpooling',
        'SpatialAdaptiveMaxPooling',
        'SpatialAdaptiveAveragePooling',
        'VolumetricAveragePooling',
        'VolumetricMaxPooling',
        'VolumetricMaxUnpooling',
        'VolumetricConvolution',
        'VolumetricFullConvolution',
        'VolumetricConvolutionMM',
        'TemporalMaxPooling',
        'BatchNormalization',
        'LookupTable',
        'PReLU',
        'RReLU',
        'Threshold',
        'LeakyReLU',
        'GRUFused',
        'LSTMFused',
        'unfolded',
    }
    name_remap = {
        'TemporalConvolution': 'Conv1d',
        'SpatialDilatedConvolution': 'DilatedConv2d',
        'SpatialMaxUnpooling': 'MaxUnpool2d',
        'SpatialReflectionPadding': 'ReflectionPad2d',
        'SpatialReplicationPadding': 'ReplicationPad2d',
        'VolumetricReplicationPadding': 'ReplicationPad3d',
        'VolumetricMaxUnpooling': 'MaxUnpool3d',
        'SoftMax': 'Softmax',
        'LogSoftMax': 'LogSoftmax',
        'HardTanh': 'Hardtanh',
        'HardShrink': 'Hardshrink',
        'SoftPlus': 'Softplus',
        'SoftShrink': 'Softshrink',
        'MSECriterion': 'MSELoss',
        'AbsCriterion': 'L1Loss',
        'BCECriterion': '_BCELoss',  # TODO: move the glue code into THNN
        'ClassNLLCriterion': 'NLLLoss',
        'DistKLDivCriterion': 'KLDivLoss',
        'SpatialClassNLLCriterion': 'NLLLoss2d',
        'MultiLabelMarginCriterion': 'MultiLabelMarginLoss',
        'MultiMarginCriterion': 'MultiMarginLoss',
        'SmoothL1Criterion': 'SmoothL1Loss',
        'SoftMarginCriterion': 'SoftMarginLoss',
    }

    classes_to_generate -= exceptions
    for fn in classes_to_generate:
        update_output = function_by_name[fn + '_updateOutput']
        update_grad_input = function_by_name[fn + '_updateGradInput']
        acc_grad_parameters = function_by_name.get(fn + '_accGradParameters')
        class_name = name_remap.get(fn, fn)
        # Build the class inside a helper function so each generated class
        # closes over its own THNN function references (avoids late binding)
        if 'Criterion' in fn:
            cls = _make_function_class_criterion(class_name, update_output,
                                                 update_grad_input,
                                                 acc_grad_parameters)
        else:
            cls = _make_function_class(class_name, update_output,
                                       update_grad_input, acc_grad_parameters)
        scope_dict[class_name] = cls
        if not class_name.startswith('_'):
            _all_functions.append(cls)
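
Each THNN entry point is named <Module>_<method>, so fn.name.partition('_')[0] recovers the module prefix that keys both the exceptions set and name_remap. A self-contained illustration using real THNN function names:

names = ['MSECriterion_updateOutput', 'MSECriterion_updateGradInput',
         'Threshold_updateOutput', 'SpatialConvolutionMM_accGradParameters']
prefixes = {n.partition('_')[0] for n in names}
print(sorted(prefixes))
# ['MSECriterion', 'SpatialConvolutionMM', 'Threshold']
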
Example #4
def _generate_function_classes(scope_dict):
    # Later revision of the generator from Example #3: each generated
    # class additionally receives a double-backwards function (defaulting
    # to one that raises for once-differentiable ops) and an optional
    # symbolic function, and the backward class is exported alongside
    # the forward one.
    global function_list, function_by_name
    function_list = parse_header(THNN_H_PATH)
    function_by_name = {fn.name: fn for fn in function_list}
    classes_to_generate = {fn.name.partition('_')[0] for fn in function_list}
    exceptions = {
        'Linear',
        'IndexLinear',
        'SpatialFullConvolution',
        'SpatialConvolutionMM',
        'SparseLinear',
        'TemporalConvolution',
        'SpatialAveragePooling',
        'SpatialMaxPooling',
        'SpatialDilatedMaxPooling',
        'SpatialMaxUnpooling',
        'SpatialAdaptiveMaxPooling',
        'SpatialAdaptiveAveragePooling',
        'VolumetricAveragePooling',
        'VolumetricMaxPooling',
        'VolumetricMaxUnpooling',
        'VolumetricAdaptiveAveragePooling',
        'VolumetricAdaptiveMaxPooling',
        'VolumetricConvolution',
        'VolumetricFullConvolution',
        'VolumetricConvolutionMM',
        'TemporalMaxPooling',
        'BatchNormalization',
        'LookupTable',
        'LookupTableBag',
        'PReLU',
        'RReLU',
        'SoftMax',
        'LogSoftMax',
        'GRUFused',
        'LSTMFused',
        'unfolded',
    }
    name_remap = {
        'TemporalConvolution': 'Conv1d',
        'TemporalReflectionPadding': 'ReflectionPad1d',
        'TemporalReplicationPadding': 'ReplicationPad1d',
        'SpatialDilatedConvolution': 'DilatedConv2d',
        'SpatialMaxUnpooling': 'MaxUnpool2d',
        'SpatialReflectionPadding': 'ReflectionPad2d',
        'SpatialReplicationPadding': 'ReplicationPad2d',
        'VolumetricReplicationPadding': 'ReplicationPad3d',
        'VolumetricMaxUnpooling': 'MaxUnpool3d',
        'HardTanh': 'Hardtanh',
        'HardShrink': 'Hardshrink',
        'SoftPlus': 'Softplus',
        'SoftShrink': 'Softshrink',
        'MSECriterion': 'MSELoss',
        'AbsCriterion': 'L1Loss',
        'BCECriterion': 'BCELoss',
        'ClassNLLCriterion': 'NLLLoss',
        'DistKLDivCriterion': 'KLDivLoss',
        'SpatialClassNLLCriterion': 'NLLLoss2d',
        'MultiLabelMarginCriterion': 'MultiLabelMarginLoss',
        'MultiMarginCriterion': 'MultiMarginLoss',
        'SmoothL1Criterion': 'SmoothL1Loss',
        'SoftMarginCriterion': 'SoftMarginLoss',
    }

    classes_to_generate -= exceptions
    for fn in classes_to_generate:
        update_output = function_by_name[fn + '_updateOutput']
        update_grad_input = function_by_name[fn + '_updateGradInput']
        acc_grad_parameters = function_by_name.get(fn + '_accGradParameters')
        class_name = name_remap.get(fn, fn)
        double_backwards_fn = double_backwards_fns.get(class_name)
        if double_backwards_fn is None:
            def make_default_double_backwards_fn(class_name):
                def default_double_backwards_fn(ctx, *grad_params):
                    raise ValueError(class_name +
                                     " can only be differentiated once.")
                return default_double_backwards_fn
            double_backwards_fn = make_default_double_backwards_fn(class_name)
        symbolic_fn = symbolic_fns.get(class_name)
        # Build the class inside a helper function so each generated class
        # closes over its own THNN function references (avoids late binding)
        is_criterion_fn = 'Criterion' in fn
        if is_criterion_fn:
            cls, backward_cls = _make_function_class_criterion(
                class_name, update_output, update_grad_input,
                acc_grad_parameters, double_backwards_fn, symbolic_fn)
        else:
            cls, backward_cls = _make_function_class(class_name, update_output,
                                                     update_grad_input,
                                                     acc_grad_parameters,
                                                     double_backwards_fn,
                                                     symbolic_fn)
        scope_dict[class_name] = cls
        scope_dict[backward_cls.__name__] = backward_cls
        if not class_name.startswith('_'):
            _all_functions.append(cls)
            _all_functions.append(backward_cls)
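
The factory around default_double_backwards_fn (and the "closes over its own references" comment above) guards against Python's late-binding closures: a function defined directly in the loop body would see whatever class_name holds after the last iteration. A runnable demonstration of the difference:

def broken():
    fns = []
    for name in ['ReLU', 'Tanh']:
        def f():
            return name  # late binding: `name` resolves when f is called
        fns.append(f)
    return [g() for g in fns]

def fixed():
    def make(name):  # factory call freezes `name` per iteration
        def f():
            return name
        return f
    return [make(n)() for n in ['ReLU', 'Tanh']]

print(broken())  # ['Tanh', 'Tanh']
print(fixed())   # ['ReLU', 'Tanh']
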