Example #1
    def test_transfer(self):
        # Skip the test when no CUDA device is available.
        if not torch.cuda.is_available():
            return

        # Instantiate every registered ``Nodes`` subclass with 10 neurons and
        # move it onto the first CUDA device.
        for nodes in Nodes.__subclasses__():
            layer = nodes(10)

            layer.to(torch.device("cuda:0"))

            # Gather every tensor stored in the state dict and record the
            # device each one currently lives on.
            layer_tensors = [
                k for k, v in layer.state_dict().items() if isinstance(v, torch.Tensor)
            ]

            tensor_devs = [getattr(layer, k).device for k in layer_tensors]

            print("State dict in {} : {}".format(nodes, layer.state_dict().keys()))
            print("__dict__ in {} : {}".format(nodes, layer.__dict__.keys()))
            print("Tensors in {} : {}".format(nodes, layer_tensors))
            print("Tensor devices {}".format(list(zip(layer_tensors, tensor_devs))))

            # Every tensor should report the CUDA device after the transfer.
            for d in tensor_devs:
                print(d, d == torch.device("cuda:0"))
                assert d == torch.device("cuda:0")

            print("Reset layer")
            layer.reset_state_variables()
            layer_tensors = [
                k for k, v in layer.state_dict().items() if isinstance(v, torch.Tensor)
            ]

            tensor_devs = [getattr(layer, k).device for k in layer_tensors]

            for d in tensor_devs:
                print(d, d == torch.device("cuda:0"))
                assert d == torch.device("cuda:0")
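
This test assumes ``torch`` and the ``Nodes`` base class are already imported in the test module. As a minimal standalone sketch of the same transfer-and-reset check, assuming BindsNET's ``LIFNodes`` as a concrete ``Nodes`` subclass:

import torch

from bindsnet.network.nodes import LIFNodes  # assumed concrete Nodes subclass

if torch.cuda.is_available():
    device = torch.device("cuda:0")
    layer = LIFNodes(n=10)  # layer of 10 neurons
    layer.to(device)

    # All tensors registered in the state dict should now be on the GPU.
    for name, value in layer.state_dict().items():
        if isinstance(value, torch.Tensor):
            assert getattr(layer, name).device == device

    # Resetting state variables should keep the tensors on the same device.
    layer.reset_state_variables()
    for name, value in layer.state_dict().items():
        if isinstance(value, torch.Tensor):
            assert getattr(layer, name).device == device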
Example #2
    def add_layer(self, layer: Nodes, name: str) -> None:
        # language=rst
        """
        Adds a layer of nodes to the network.

        :param layer: An instance of a ``Nodes`` subclass.
        :param name: Logical name of the layer.
        """
        # Register the layer both in the ``layers`` dict and as a submodule.
        self.layers[name] = layer
        self.add_module(name, layer)

        # Bring the new layer in line with the network's current settings.
        layer.train(self.learning)
        layer.compute_decays(self.dt)
        layer.set_batch_size(self.batch_size)
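
A short usage sketch of ``add_layer``, assuming BindsNET's ``Network``, ``Input``, and ``LIFNodes`` classes and hypothetical layer names ``"X"`` and ``"Y"``:

from bindsnet.network import Network
from bindsnet.network.nodes import Input, LIFNodes  # assumed Nodes subclasses

# Build a small network and register two layers under logical names.
network = Network(dt=1.0)

input_layer = Input(n=100)     # 100 input neurons
output_layer = LIFNodes(n=10)  # 10 leaky integrate-and-fire neurons

network.add_layer(input_layer, name="X")
network.add_layer(output_layer, name="Y")

# Each layer is now reachable both as a submodule and via the layers dict.
assert network.layers["X"] is input_layer

Because ``add_layer`` calls ``train``, ``compute_decays``, and ``set_batch_size`` on the new layer, it immediately inherits the network's learning mode, time step, and batch size.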