Example #1
def build_ftrl(model, engine="SIMD", **kwargs):
    if engine == "SIMD":
        assert core.IsOperator('Ftrl_ENGINE_SIMD')
        assert core.IsOperator('SparseFtrl_ENGINE_SIMD')
    ftrl_optimizer = FtrlOptimizer(engine=engine, **kwargs)
    for param, grad in model.GetOptimizationPairs().items():
        ftrl_optimizer(model.net, model.param_init_net, param, grad)
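A minimal usage sketch for build_ftrl above. The model object and the FTRL hyperparameters are assumptions (any Caffe2 model helper exposing GetOptimizationPairs()), not part of the original snippet:

from caffe2.python import core

# Hypothetical call site: only request the SIMD engine when the specialized
# FTRL operators were compiled into this build, otherwise use the default.
engine = "SIMD" if core.IsOperator('Ftrl_ENGINE_SIMD') else ""
build_ftrl(model, engine=engine, alpha=0.01, beta=1.0, lambda1=0.001, lambda2=0.001)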
Example #2
    def __getattr__(self, layer):
        # TODO(amalevich): Add support for ifbpy inline documentation
        if layers.layer_exists(layer):
            def wrapper(*args, **kwargs):
                return self.add_layer(
                    layers.create_layer(layer, self, *args, **kwargs))
            return wrapper
        elif core.IsOperator(layer):
            def wrapper(*args, **kwargs):
                def apply_operator(net, in_record, out_record):
                    # core.Net will throw an exception if output_dtypes is set
                    # in a Functional layer because MakeArgument() cannot
                    # recognize it. Just remove it from kwargs.
                    clean_kwargs = dict(kwargs)
                    if 'output_dtypes' in clean_kwargs:
                        del clean_kwargs['output_dtypes']

                    # TODO(amalevich): Switch to net.operator as soon as it gets
                    # landed
                    net.__getattr__(layer)(in_record.field_blobs(),
                                           out_record.field_blobs(),
                                           **clean_kwargs)
                if 'name' not in kwargs:
                    kwargs['name'] = layer
                return self.add_layer(
                    layers.create_layer('Functional',
                                        self, *args, function=apply_operator,
                                        **kwargs))
            return wrapper
        else:
            raise ValueError(
                "Trying to create non-registered layer: {0}".format(layer))
Example #3
 def is_functional_layer(layer):
     if core.IsOperator(layer):
         return True
     elif layer.startswith('FunctionalLayer'):
         return True
     else:
         return False
Example #4
    def _common_onnx_node_to_caffe2_op(cls, onnx_node):
        """
        This translator performs the basic translation of ONNX nodes into
        Caffe2 operators.  Besides doing a straightforward marshalling from
        one format to another, it also does these extra things:

          - Renames operators based on '_renamed_operators'
          - Renames attributes based on '_global_renamed_attrs' and
            '_per_op_renamed_attrs'

        If you're writing a custom translator, consider calling this first,
        and then fixing things up further.
        """
        c2_op = caffe2_pb2.OperatorDef()

        c2_op.input.extend(onnx_node.inputs)
        c2_op.output.extend(onnx_node.outputs)
        c2_op.name = onnx_node.name

        onnx_op_type = onnx_node.op_type
        c2_op.type = cls._renamed_operators.get(onnx_op_type, onnx_op_type)
        if not core.IsOperator(c2_op.type):
            raise ValueError(
                "Don't know how to translate op {}".format(onnx_op_type))

        def kmap(k):
            if (onnx_op_type in cls._per_op_renamed_attrs and
                    k in cls._per_op_renamed_attrs[onnx_op_type]):
                return cls._per_op_renamed_attrs[onnx_op_type][k]
            if k in cls._global_renamed_attrs:
                return cls._global_renamed_attrs[k]
            return k
        c2_op.arg.extend(onnx_node.attrs.caffe2(kmap=kmap))

        return c2_op
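An illustrative walk through the renaming step above; the two tables are placeholders standing in for the backend's real _renamed_operators and _global_renamed_attrs:

from caffe2.python import core

_renamed_operators = {'GlobalMaxPool': 'MaxPool'}       # placeholder contents
_global_renamed_attrs = {'kernel_shape': 'kernels'}     # placeholder contents

onnx_op_type = 'GlobalMaxPool'
c2_type = _renamed_operators.get(onnx_op_type, onnx_op_type)   # -> 'MaxPool'
# The translator refuses to emit an OperatorDef whose type is not registered.
assert core.IsOperator(c2_type)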
Example #5
 def __getattr__(self, op_type):
     """Catch-all for all other operators, mostly those without params."""
     if not core.IsOperator(op_type):
         raise RuntimeError('Method ' + op_type +
                            ' is not a registered operator.')
     # known_working_ops are operators that do not need special care.
     known_working_ops = [
         "Accuracy",
         "Adam",
         "AveragedLoss",
         "Cast",
         "LabelCrossEntropy",
         "LearningRate",
         "Print",
         "Sigmoid",
         "Scale",
         "Snapshot",
         "Softmax",
         "StopGradient",
         "Summarize",
         "Tanh",
         "WeightedSum",
     ]
     if op_type not in known_working_ops:
         logging.warning("You are creating an op that the CNNModelHelper "
                         "does not recognize: {}.".format(op_type))
     return self.net.__getattr__(op_type)
Example #6
 def initializer(self, op):
     assert op is None or core.IsOperator(
         getattr(op, "type", None)
     ), "initializer expects an operator, got type: {}".format(type(op))
     self._initializer = op
     if op is not None:
         self.shape = self._infer_shape_from_initializer()
Example #7
    def __getattr__(self, layer):
        if layer.startswith('__'):
            raise AttributeError(layer)

        # TODO(amalevich): Add support for ifbpy inline documentation
        if layers.layer_exists(layer):

            def wrapper(*args, **kwargs):
                return self.add_layer(
                    layers.create_layer(layer, self, *args, **kwargs))

            return wrapper
        elif core.IsOperator(layer):

            def wrapper(*args, **kwargs):
                def apply_operator(net, in_record, out_record, **kwargs):
                    # TODO(amalevich): Switch to net.operator as soon as it gets
                    # landed
                    net.__getattr__(layer)(in_record.field_blobs(),
                                           out_record.field_blobs(), **kwargs)

                if 'name' not in kwargs:
                    kwargs['name'] = layer
                return self.add_layer(
                    layers.create_layer('Functional',
                                        self,
                                        *args,
                                        function=apply_operator,
                                        **kwargs))

            return wrapper
        else:
            raise ValueError(
                "Trying to create non-registered layer: {}".format(layer))
Example #8
    def __getattr__(self, op_type):
        """Catch-all for all other operators, mostly those without params."""
        if op_type.startswith('__'):
            raise AttributeError(op_type)

        if not core.IsOperator(op_type):
            raise RuntimeError('Method ' + op_type +
                               ' is not a registered operator.')
        # known_working_ops are operators that do not need special care.
        known_working_ops = [
            "Accuracy",
            "Adam",
            "Add",
            "Adagrad",
            "SparseAdagrad",
            "AveragedLoss",
            "Cast",
            "Checkpoint",
            "ConstantFill",
            "Copy",
            "CopyGPUToCPU",
            "CopyCPUToGPU",
            "DequeueBlobs",
            "EnsureCPUOutput",
            "Flatten",
            "FlattenToVec",
            "LabelCrossEntropy",
            "LearningRate",
            "MakeTwoClass",
            "MatMul",
            "NCCLAllreduce",
            "NHWC2NCHW",
            "PackSegments",
            "Print",
            "PRelu",
            "Scale",
            "ScatterWeightedSum",
            "Sigmoid",
            "SortedSegmentSum",
            "Snapshot",  # Note: snapshot is deprecated, use Checkpoint
            "Softmax",
            "SoftmaxWithLoss",
            "SquaredL2Distance",
            "Squeeze",
            "StopGradient",
            "Summarize",
            "Tanh",
            "UnpackSegments",
            "WeightedSum",
            "ReduceFrontSum",
        ]
        if op_type not in known_working_ops:
            if not self.allow_not_known_ops:
                raise RuntimeError(
                    "Operator {} is not known to be safe".format(op_type))

            logging.warning("You are creating an op that the ModelHelperBase "
                            "does not recognize: {}.".format(op_type))
        return self.net.__getattr__(op_type)
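The catch-all above ultimately forwards to the underlying net, so the equivalent direct call is a plain core.Net operator creation; a runnable sketch:

from caffe2.python import core

net = core.Net("demo")
if core.IsOperator('Softmax'):
    # This is what self.net.__getattr__('Softmax')(...) expands to.
    net.Softmax(['pred'], ['softmax'])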
Example #9
 def resolve_functional_layer(layer):
     if core.IsOperator(layer):
         return layer
     elif layer.startswith('FunctionalLayer'):
         return layer[len('FunctionalLayer'):]
     else:
         raise ValueError('%s cannot be resolved as functional layer' %
                          layer)
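How the resolver above behaves, assuming the function is in scope and 'Relu' is a registered operator:

assert resolve_functional_layer('Relu') == 'Relu'                  # already an operator
assert resolve_functional_layer('FunctionalLayerRelu') == 'Relu'   # prefix stripped
# Any other name raises ValueError("... cannot be resolved as functional layer").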
Example #10
 def __getattr__(self, op_type):
     """Catch-all for all other operators, mostly those without params."""
     if not core.IsOperator(op_type):
         raise RuntimeError('Method ' + op_type +
                            ' is not a registered operator.')
     logging.warning("You are creating an op that the CNNModelHelper "
                     "does not recognize: {}.".format(op_type))
     return self.net.__getattr__(op_type)
Example #11
def build_ftrl(model, dedup_indices=False, engine="SIMD", **params):
    if engine == "SIMD":
        assert core.IsOperator('Ftrl_ENGINE_SIMD')
        assert core.IsOperator('SparseFtrl_ENGINE_SIMD')
    for param, grad in model.GetOptimizationPairs().items():
        # allocate additional args of the same shape as main weights
        nz = model.param_init_net.ConstantFill(
            [param],
            param + "_ftrl_nz",
            extra_shape=[2],
            value=0.0
        )
        if isinstance(grad, core.GradientSlice):
            g = _dedup(model, dedup_indices, grad)
            model.SparseFtrl([param, nz, g.indices, g.values],
                             [param, nz], engine=engine, **params)
        else:
            model.Ftrl([param, nz, grad], [param, nz], engine=engine, **params)
Example #12
 def initializer(self, op):
     assert op is None or core.IsOperator(getattr(op, 'type', None)), \
         "initializer expects an operator, got type: {}".format(type(op))
     self._initializer = op
     if op is not None:
         shape = self._infer_shape_from_initializer()
         assert self.shape is None or self.shape == shape, \
             "inconsistent shape for layer parameter:"\
             " {}, expect: {}, but got {}".format(self, self.shape, shape)
         self._shape = shape
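A minimal sketch of what the assertion above accepts: any OperatorDef-like object whose .type names a registered operator. The ConstantFill initializer below is just an illustration, not the layer's actual default:

from caffe2.python import core

op = core.CreateOperator('ConstantFill', [], ['w'], shape=[4, 8], value=0.0)
assert core.IsOperator(getattr(op, 'type', None))   # 'ConstantFill' is registered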
Example #13
    def _common_onnx_node_to_caffe2_op(cls, init_model, pred_model, onnx_node,
                                       opset_version):
        """
        This translator performs the basic translation of ONNX nodes into
        Caffe2 operators.  Besides doing a straightforward marshalling from
        one format to another, it also does these extra things:

          - Renames operators based on '_renamed_operators'
          - Renames attributes based on '_global_renamed_attrs' and
            '_per_op_renamed_attrs'

        If you're writing a custom translator, consider calling this first,
        and then fixing things up further.
        """
        c2_op = caffe2_pb2.OperatorDef()

        c2_op.input.extend(onnx_node.inputs)
        c2_op.output.extend(onnx_node.outputs)
        c2_op.name = onnx_node.name

        onnx_op_type = onnx_node.op_type
        broken_version = cls._broken_operators.get(onnx_op_type, float('Inf'))
        if broken_version <= opset_version:
            raise ValueError(
                "Don't know how to translate op {} in ONNX operator set v{} (I only support prior to v{})"
                .format(onnx_op_type, opset_version, broken_version))
        c2_op.type = cls._renamed_operators.get(onnx_op_type, onnx_op_type)
        if not core.IsOperator(c2_op.type):
            raise ValueError(
                "Don't know how to translate op {}".format(onnx_op_type))

        def kmap(k):
            if (onnx_op_type in cls._per_op_renamed_attrs
                    and k in cls._per_op_renamed_attrs[onnx_op_type]):
                return cls._per_op_renamed_attrs[onnx_op_type][k]
            if k in cls._global_renamed_attrs:
                return cls._global_renamed_attrs[k]
            return k

        c2_op.arg.extend(onnx_node.attrs.caffe2(kmap=kmap))

        if opset_version < 7:
            # onnx opset 7 and newest caffe2 have adopted full onnx broadcast semantics
            # so we don't need this hack anymore
            if c2_op.type in cls._broadcast_operators:
                already_broadcast = False
                for arg in c2_op.arg:
                    if arg.name == 'broadcast':
                        already_broadcast = True
                if not already_broadcast:
                    c2_op.arg.extend(
                        [caffe2.python.utils.MakeArgument('broadcast', 1)])

        return c2_op
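A runnable fragment of the pre-opset-7 broadcast fix above, isolated from the translator state (only the public MakeArgument helper from caffe2.python.utils is assumed):

from caffe2.proto import caffe2_pb2
from caffe2.python import utils

c2_op = caffe2_pb2.OperatorDef()
c2_op.type = 'Add'
# Mirror of the loop above: append 'broadcast' only if it is not already set.
if not any(arg.name == 'broadcast' for arg in c2_op.arg):
    c2_op.arg.extend([utils.MakeArgument('broadcast', 1)])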
Example #14
    def __getattr__(self, op_type):
        """Catch-all for all other operators, mostly those without params."""
        if op_type.startswith('__'):
            raise AttributeError(op_type)

        if not core.IsOperator(op_type):
            raise AttributeError(
                'Method ' + op_type + ' is not a registered operator.' +
                ' Did you mean: [' +
                ','.join(workspace.C.nearby_opnames(op_type)) + ']')
        if op_type not in _known_working_ops:
            if not self.allow_not_known_ops:
                raise AttributeError(
                    "Operator {} is not known to be safe".format(op_type))

            logging.warning("You are creating an op that the ModelHelper "
                            "does not recognize: {}.".format(op_type))
        return self.net.__getattr__(op_type)
Example #15
 def __getattr__(self, op_type):
     """Catch-all for all other operators, mostly those without params."""
     if not core.IsOperator(op_type):
         raise RuntimeError('Method ' + op_type +
                            ' is not a registered operator.')
     # known_working_ops are operators that do not need special care.
     known_working_ops = [
         "Accuracy",
         "Adam",
         "AveragedLoss",
         "Cast",
         "EnsureCPUOutput",
         "LabelCrossEntropy",
         "LearningRate",
         "Print",
         "Sigmoid",
         "Scale",
         "Snapshot",
         "Softmax",
         "StopGradient",
         "Summarize",
         "Tanh",
         "WeightedSum",
         "SquaredL2Distance",
         "FlattenToVec",
         "NHWC2NCHW",
         "ScatterWeightedSum",
         "Squeeze",
         "NCCLAllreduce",
         "ConstantFill",
         "Add",
         "DequeueBlobs",
     ]
     if op_type not in known_working_ops:
         assert self.allow_not_known_ops
         logging.warning("You are creating an op that the ModelHelperBase "
                         "does not recognize: {}.".format(op_type))
     return self.net.__getattr__(op_type)
Example #16
        m2 = net.Mul([m1, three], "mul_2")
        grad_map = net.AddGradientOperators([m2, m1])
        workspace.ResetWorkspace()
        workspace.blobs[input] = np.array([1]).astype(np.float32)
        workspace.blobs[two] = np.array([2]).astype(np.float32)
        workspace.blobs[three] = np.array([3]).astype(np.float32)
        workspace.RunNetOnce(net)
        print(net.Proto())
        for blob in workspace.blobs:
            print(blob, workspace.blobs[blob])
        print("Input grad: ", workspace.blobs[grad_map[str(input)]])
        assert workspace.blobs[grad_map[str(input)]] == 8.0


# Skip if sparse operators are not available
@unittest.skipIf(not core.IsOperator('SparseFunHash'),
                 'Sparse operators not available')
class TestSparseGradientsAccumulation(test_util.TestCase):
    def testSparseAccumulationWithValues(self):
        # The gradient for "Gather" only computes values. Indices are directly
        # passed from the input.
        #
        # x1-->Gather-->x4-->
        #        |          |
        # x2-----+     DotProduct-->x6
        #        |          |
        # x3-->Gather-->x5-->
        net = core.Net("test_net")
        net.Gather(["x2", "x1"], "x4")
        net.Gather(["x2", "x3"], "x5")
        net.DotProduct(["x4", "x5"], "x6")
Example #17
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

import unittest
import hypothesis.strategies as st
from hypothesis import given
import numpy as np
from caffe2.python import core
import caffe2.python.hypothesis_test_util as hu


@unittest.skipIf(not core.IsOperator("PackedFC"),
                 "PackedFC is not supported in this caffe2 build.")
class PackedFCTest(hu.HypothesisTestCase):
    @given(seed=st.integers(0, 65536),
           M=st.integers(16, 32),
           K=st.integers(128, 1024),
           N=st.integers(128, 1024),
           **hu.gcs_cpu_only)
    @unittest.skipIf(not core.C.builtin_cpu_supports_avx2(),
                     "Intel MKL sgemm_pack has a known numerical issue with "
                     "non-avx2 machines that will be fixed in a later build.")
    def test_packed_fc(self, seed, M, K, N, gc, dc):
        np.random.seed(seed)
        X = np.random.rand(M, K).astype(np.float32) - 0.5
        W = np.random.rand(N, K).astype(np.float32) - 0.5
        b = np.random.rand(N).astype(np.float32) - 0.5

        # If you are debugging, the following hard-coded ones might help.
Example #18
 def test_is_operator(self):
     self.assertTrue(core.IsOperator('Relu'))
     self.assertFalse(core.IsOperator('NOEXIST'))
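Outside of unit tests, the same check is commonly used as a feature guard; a minimal standalone sketch (the operator name is only an illustration):

from caffe2.python import core

op_name = 'SparseFunHash'   # an operator that may or may not be compiled in
if core.IsOperator(op_name):
    print('{} is registered in this build'.format(op_name))
else:
    print('{} is not registered; falling back'.format(op_name))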
Example #19
def build_ftrl(model, engine="SIMD", **kwargs):
    if engine == "SIMD":
        assert core.IsOperator('Ftrl_ENGINE_SIMD')
        assert core.IsOperator('SparseFtrl_ENGINE_SIMD')
    ftrl_optimizer = FtrlOptimizer(engine=engine, **kwargs)
    return _build(model, ftrl_optimizer)
Example #20
 def __getattr__(self, op_type):
   """Catch-all for all other operators, mostly those without params."""
   if not core.IsOperator(op_type):
     raise RuntimeError(
         'Method ' + op_type + ' is not a registered operator.')
   return self.net.__getattr__(op_type)