Exemplo n.º 1
0
    def test_def_function(self):
        """Test ``dragon.function`` with positional/keyword args and defaults.

        Exercises the cached ``self.func1`` graph function with several
        calling conventions, then checks that signature violations raise.
        """
        @dragon.function(input_signature=[dragon.Tensor(None)])
        def func2(a, b):
            return a + b

        self.assertEqual(
            self.func1([1, 2], [3, 4]).get_value().tolist(), [4, 6])
        self.assertEqual(
            self.func1([1, 2], b=[3, 4]).get_value().tolist(), [4, 6])
        self.assertEqual(
            self.func1([1, 2], b=[3, 4], c=1).get_value().tolist(), [5, 7])
        self.assertEqual(
            self.func1([1, 2], b=[3, 4], c=1).get_value().tolist(), [5, 7])
        self.assertEqual(
            self.func1([1, 2], [3, 4],
                       executing_stage='forward').get_value().tolist(), [4, 6])
        dragon.function(func=lambda: dragon.optimizers.SGD())()
        # The original ``try/except ValueError: pass`` stanzas passed
        # silently even when no exception was raised; assertRaises makes
        # the expectation explicit and fails if nothing is raised.
        with self.assertRaises(ValueError):
            # Too many positional arguments for the traced signature.
            self.func1(1, 2, 3, 4)
        with self.assertRaises(ValueError):
            # input_signature declares one tensor but two args are given.
            func2(1, 2)
Exemplo n.º 2
0
    def function(self):
        """Return the compiled ``ForwardBackward`` function.

        The function is built lazily on the first call and cached on the
        instance afterwards.

        Returns
        -------
        lambda
            The function.

        See Also
        --------
        `theano.function(*args, **kwargs)`_ - How to make a graph. [**Theano Style**]

        References
        ----------
        The implementation of `ForwardBackward(net.cpp, L85)`_.

        """
        # EAFP: reuse the cached function if it was compiled already.
        try:
            return self._function
        except AttributeError:
            pass

        # Request gradients of every trainable variable w.r.t. each loss.
        for cost in self.losses:
            for weight in self.trainable_variables:
                dragon.grad(cost, weight)

        self._function = dragon.function(
            outputs=[self.blobs[name].data for name in self.outputs])

        # Restore pre-trained weights when a model file was attached.
        if hasattr(self, '_model'):
            dragon.workspace.Restore(self._model, format='caffe')

        return self._function
Exemplo n.º 3
0
    def BuildOptimizer(self):
        """Build the optimizer.

        Collects the parameter blobs from every layer, registers the
        trainable ones with the optimizer, and compiles the update
        function.

        Returns
        -------
        None

        """
        # Collect the parameter blobs of each layer.
        # (Only the blobs are needed; the layer keys were unused.)
        for blobs in self.net.params.values():
            self._layer_blobs.extend(blobs)

        # Push: register only blobs that are actually trained, i.e. with
        # a positive LR multiplier and an allocated gradient buffer.
        # (The enumerate index in the original loop was never used.)
        for blob in self._layer_blobs:
            if blob.lr_multiplier > 0 and blob.diff is not None:
                self.optimizer.append((blob.data, blob.diff),
                                      blob.lr_multiplier,
                                      blob.decay_multiplier)

        # Compile the update function.
        self.update = dragon.function(updater=self.optimizer)
Exemplo n.º 4
0
        self._fetcher = Fetcher(self._queue)
        self._fetcher.start()

    def run(self, inputs, outputs):
        """Run method, i.e., forward pass.

        Parameters
        ----------
        inputs : list of str
            Indicating the name of input tensors.
        outputs : list of str
            Indicating the name of output tensors.

        Returns
        -------
        None

        """
        # NOTE(review): ``inputs`` is unused here — the data comes from
        # the prefetch queue, not from the op's input tensors.
        batch = self._queue.get()
        dg.workspace.FeedTensor(outputs[0], batch)


if __name__ == '__main__':
    # Define a graph containing the custom run op.
    output = dg.ops.Run([], module=__name__, op='DataProcessOp', nout=1)
    graph_fn = dg.function(outputs=output)

    # Execute the graph once.
    graph_fn()

    # Fetch and print the produced value.
    print(output.get_value())
Exemplo n.º 5
0
 def run(self):
     """Run the variable initializer, building it lazily on first use."""
     if not hasattr(self, '_init_func'):
         # Nothing to initialize -> cache None so later calls are no-ops.
         if self.var_list:
             self._init_func = dragon.function(outputs=self.var_list)
         else:
             self._init_func = None
     if self._init_func is not None:
         self._init_func()
Exemplo n.º 6
0
    def _run(self, fetches, feed_dict):
        """Execute the requested fetches against the graph workspace.

        Parameters
        ----------
        fetches : list
            Tensors, optimizers or variable initializers to execute.
        feed_dict : dict or None
            Mapping from input Tensors to the values fed for this run.

        Returns
        -------
        The fetched value, or a list of values when there are multiple
        fetches (``None`` entries for optimizers/initializers).

        Raises
        ------
        RuntimeError
            If the session is closed or a fed value has a wrong shape.
        TypeError
            If a key of ``feed_dict`` is not a Tensor.

        """
        if self._closed:
            raise RuntimeError('Attempted to use a closed Session.')

        # Unpack opts and tensors
        tensors, optimizers = [], []
        for e in fetches:
            if isinstance(e, Optimizer):
                optimizers.append(e)
            elif isinstance(e, VariablesInitializer):
                tensors.extend(e.var_list)
            elif isinstance(e, dragon.Tensor):
                tensors.append(e)

        # Find the minimum set of solving targets.
        targets = set(tensors)
        for optimizer in optimizers:
            targets.update(optimizer._targets)

        targets = list(targets)
        gen_flow_key = tuple(e.name for e in targets)

        # Reuse an existing data flow for this target set if present.
        data_flow = _DataFlow.try_get(self._graph._workspace, gen_flow_key)

        # Validate the fed values before running.
        if feed_dict is not None:
            for key, value in feed_dict.items():
                if not isinstance(key, dragon.Tensor):
                    raise TypeError(
                        'The key of feed_dict key should be a Tensor.')
                if key.shape is not None:
                    # Align the number of dimensions.
                    # BUG FIX: the original message was a backslash-continued
                    # string literal that embedded a long run of spaces into
                    # the runtime message; implicit concatenation fixes it.
                    if len(key.shape) != len(value.shape):
                        raise RuntimeError(
                            'The Tensor({}) was limited to {} dimensions, '
                            'while feed a value with {} dimensions.'.format(
                                key.name, len(key.shape), len(value.shape)))
                    # Verify each non-dynamic (non-None) dimension.
                    for i in range(len(key.shape)):
                        if key.shape[i] is None:
                            continue
                        if key.shape[i] != value.shape[i]:
                            raise RuntimeError(
                                'The shape of Tensor({}) was limited as ('.
                                format(key.name) +
                                ','.join([str(dim) for dim in key.shape]) +
                                '), ' + 'while feed a value with (' +
                                ','.join([str(dim)
                                          for dim in value.shape]) + ').')

        # Create a new data flow if necessary
        if data_flow is None:
            functions = [dragon.function(outputs=targets)]
            for optimizer in optimizers:
                functions.append(dragon.function(updater=optimizer.updater))
            data_flow = _DataFlow(functions)
            # BUG FIX: was ``self.graph._workspace`` — no such attribute;
            # the lookup above uses ``self._graph._workspace``.
            _DataFlow.try_add(self._graph._workspace, gen_flow_key, data_flow)

        # Run this data flow
        data_flow.run(feed_dict)

        # Fetch after running
        returns = []
        for e in fetches:
            if isinstance(e, Optimizer):
                e._inc_global_step()
                returns.append(None)
            elif isinstance(e, VariablesInitializer):
                returns.append(None)
            else:
                np_target = e.get_value()
                # Unpack the scalar if necessary
                if np_target.size == 1:
                    returns.append(np_target.flatten()[0])
                else:
                    returns.append(np_target)

        # Unpack the returns if necessary
        return returns[0] if len(returns) == 1 else returns