Example 1
def tensor2tensor(tensor, *, device: Optional[Device] = None):
    """Convert a TensorFLow tensor to PyTorch Tensor, or vice versa.
    """
    if tensorflow.is_tensor(tensor):
        m = tensoras(tensor)
        device = gf.device(device, backend="torch")
        return astensor(m, device=device, backend="torch")
    elif pytorch.is_tensor(tensor):
        m = tensoras(tensor)
        device = gf.device(device, backend="tensorflow")
        return astensor(m, device=device, backend="tensorflow")
    else:
        raise ValueError(
            f"The input must be a TensorFlow or PyTorch Tensor, buf got {type(tensor).__name__}"
        )
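A minimal usage sketch (assumptions: `tensor2tensor` is exposed through graphgallery's functional module, imported here as `gf`, and both TensorFlow and PyTorch are installed):

# Hedged sketch -- the import path is an assumption, not part of the example above.
import tensorflow as tf
import graphgallery.functional as gf

tf_tensor = tf.constant([[1.0, 2.0], [3.0, 4.0]])
torch_tensor = gf.tensor2tensor(tf_tensor)   # TensorFlow -> PyTorch
tf_back = gf.tensor2tensor(torch_tensor)     # PyTorch -> TensorFlow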
Example 2
def astensors(*xs, dtype=None, device=None, backend=None, escape=None):
    """Convert input matrices to Tensor(s) or SparseTensor(s).

    Parameters:
    ----------
    xs: one or more Python objects to convert.
    dtype: The data type of the returned tensor(s). If not specified,
        an appropriate data type is inferred automatically.
        See 'graphgallery.infer_type'.
    device: tf.device or torch.device, optional. The desired device of the
        returned tensor(s). Default: if 'None', the CPU device is used for
        the default tensor type.
    backend: String or BackendModule, optional.
        'tensorflow', 'torch', TensorFlowBackend, PyTorchBackend, etc.
        If not specified, the current default backend module is used.
    escape: a class or a tuple of classes. Conversion is skipped for any
        input 'x' where `isinstance(x, escape)`.

    Returns:
    -------
    Tensor(s) or SparseTensor(s) with the given dtype. If dtype is 'None',
    dtype will be one of the following:
        1. 'graphgallery.floatx()' if 'x' is floating point.
        2. 'graphgallery.intx()' if 'x' is an integer.
        3. 'graphgallery.boolx()' if 'x' is boolean.
    """
    backend = gg.backend(backend)
    device = gf.device(device, backend)
    return _astensors_fn(*xs,
                         dtype=dtype,
                         device=device,
                         backend=backend,
                         escape=escape)
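A short usage sketch for `astensors` (assumption: it is exported from graphgallery's functional module, matching the `gf`/`gg` aliases used in these snippets):

# Hedged sketch; the import path and the torch backend choice are assumptions.
import numpy as np
import scipy.sparse as sp
import graphgallery.functional as gf

attr = np.random.rand(5, 16)          # dense attribute matrix
adj = sp.eye(5, format="csr")         # sparse adjacency matrix
attr_t, adj_t = gf.astensors(attr, adj, backend="torch")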
Example 3
    def make_data(self, graph, graph_transform=None, device=None, **kwargs):
        """This method is used for process your inputs, which accepts
        only keyword arguments in your defined method 'data_step'.
        This method will process the inputs, and transform them into tensors.

        Commonly used keyword arguments:
        --------------------------------
        graph: graphgallery graph classes.
        graph_transform: string, Callable function,
            or a tuple with function and dict arguments.
            transform for the entire graph, it is used first.
        device: device for preparing data, if None, it defaults to `self.device`
        adj_transform: string, Callable function,
            or a tuple with function and dict arguments.
            transform for adjacency matrix.
        attr_transform: string, Callable function,
            or a tuple with function and dict arguments.
            transform for attribute matrix.
        other arguments (if have) will be passed into method 'data_step'.
        """
        self.graph = gf.get(graph_transform)(graph)
        cfg = self.cfg.data
        if device is not None:
            self.data_device = gf.device(device, self.backend)
        else:
            self.data_device = self.device
        cfg.device = device
        _, kwargs = gf.wrapper(self.data_step)(**kwargs)
        kwargs['graph_transform'] = graph_transform
        cfg.merge_from_dict(kwargs)

        for k, v in kwargs.items():
            if k.endswith("transform"):
                setattr(self.transform, k, gf.get(v))
        return self
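For context, a hedged sketch of how `make_data` might be called on a trainer built around this method; the trainer class `MyTrainer` and the transform names are placeholders, not part of the snippet:

# Placeholder names throughout -- illustrative only.
trainer = MyTrainer(device="cpu", seed=42)
trainer.make_data(graph,
                  graph_transform=None,
                  adj_transform="normalize_adj",   # forwarded to 'data_step' via **kwargs
                  attr_transform=None)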
Example 4
    def __init__(self, *, device="cpu", seed=None, name=None, **kwargs):
        """
        Parameters:
        ----------
        device: string, optional
            The device on which the model runs.
        seed: integer scalar, optional
            Used to create a reproducible sequence of tensors
            across multiple calls.
        name: string, optional
            Specified name for the model. (default: :str: `class name`)
        kwargs: other custom keyword arguments.
        """
        # if graph is not None and not isinstance(graph, gg.data.BaseGraph):
        #     raise ValueError(f"Unrecognized graph: {graph}.")

        kwargs.pop("self", None)
        kwargs.pop("__class__", None)

        cfg = gg.CfgNode()
        cfg.merge_from_dict(kwargs)
        cfg.intx = self.intx = gg.intx()
        cfg.floatx = self.floatx = gg.floatx()
        cfg.boolx = self.boolx = gg.boolx()
        cfg.seed = self.seed = seed
        cfg.name = self.name = name or self.__class__.__name__
        cfg.device = device
        _backend = gg.backend()
        cfg.backend = getattr(_backend, "name", None)

        if seed:
            gf.random_seed(seed, _backend)

        self.device = gf.device(device, _backend)
        self.data_device = self.device
        self.backend = _backend

        # containers for cached data and input transforms
        self._cache = gf.BunchDict()
        self.transform = gf.BunchDict()

        self._model = None
        self._graph = None
        self.cfg = cfg
        self.setup_cfg()
        self.custom_setup()
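A minimal subclass sketch showing where the `setup_cfg` and `custom_setup` hooks fit; `Base` is a placeholder for the class this `__init__` belongs to:

# Placeholder class names; only the hooks invoked at the end of __init__ above are assumed.
class MyModel(Base):
    def setup_cfg(self):
        # adjust self.cfg after the defaults set in __init__
        pass

    def custom_setup(self):
        # extra per-model initialization (layers, caches, ...)
        pass

model = MyModel(device="cpu", seed=42, name="demo")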
Example 5
    def __init__(self, graph, device="cpu", seed=None, name=None, **kwargs):
        """

        Parameters:
        ----------
        graph: Graph or MultiGraph.
        device: string, optional
            The device on which the model runs.
        seed: integer scalar, optional
            Used in combination with `tf.random.set_seed` & `np.random.seed`
            & `random.seed` to create a reproducible sequence of tensors
            across multiple calls.
        name: string, optional
            Specified name for the model. (default: :str: `class.__name__`)
        kwargs: other custom keyword arguments.
        """
        if not isinstance(graph, gg.data.BaseGraph):
            raise ValueError(f"Unrecognized graph: {graph}.")

        _backend = gg.backend()

        # It currently takes no keyword arguments
        gg.utils.raise_error.raise_if_kwargs(kwargs)

        if seed:
            gf.random_seed(seed, _backend)

        if name is None:
            name = self.__class__.__name__

        self.seed = seed
        self.name = name
        self.graph = graph.copy()
        self.device = gf.device(device, _backend)
        self.backend = _backend

        # data types, default: `float32`,`int32` and `bool`
        self.floatx = gg.floatx()
        self.intx = gg.intx()
        self.boolx = gg.boolx()
        self._cache = gf.BunchDict()
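A hedged instantiation sketch; `SomeModel` stands in for a concrete subclass of the class above, and the Planetoid loader is assumed from graphgallery's dataset API:

# Illustrative only -- SomeModel is a placeholder subclass.
from graphgallery.datasets import Planetoid

data = Planetoid("cora", verbose=False)
model = SomeModel(data.graph, device="cpu", seed=42)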
Example 6
def test_device():
    # how about other backends?

    # tf
    assert isinstance(device("cpu", "tf"), str)
    assert device() == 'cpu'
    assert device("cpu", "tf") == 'CPU'
    assert device("cpu", "tf") == 'cpu'
    assert device("device/cpu", "tf") == 'cpu'

    try:
        assert device("gpu", "tf") == 'GPU'
        assert device("cuda", "tf") == 'GPU'
    except RuntimeError:
        pass
    device = tf.device("cpu")
    assert device(device, "tf") == device._device_name

    # torch
    torch_device = device("cpu", "torch")
    assert isinstance(torch_device, torch.device) and 'cpu' in str(torch_device)
    torch_device = device(backend="torch")
    assert isinstance(torch_device, torch.device) and 'cpu' in str(torch_device)

    try:
        assert 'cuda' in str(device("gpu", "torch"))
        assert 'cuda' in str(device("cuda", "torch"))
    except RuntimeError:
        pass
    device = torch.device("cpu")
    assert device(device, "torch") == device