Example #1
import numpy as np
from rational.torch import Rational
# `inp` and `expected_res` are module-level fixtures defined elsewhere in the test module.

def test_conversion_gpu_to_cpuC():
    # Instantiate a version-C Rational on the GPU, then move it back to the CPU.
    rational = Rational(version='C', cuda=True)
    rational.cpu()
    # All parameters should now live on the CPU, and the backend should have
    # switched to the PyTorch (CPU) implementation of version C.
    params = np.all([str(para.device) == 'cpu' for para in rational.parameters()])
    cpu_f = "PYTORCH_C" in rational.activation_function.__qualname__
    # The CPU forward pass should match the reference output within tolerance.
    new_res = rational(inp).detach().numpy()
    coherent_compute = np.all(np.isclose(new_res, expected_res, atol=5e-02))
    assert params and cpu_f and coherent_compute
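
The fixtures `inp` and `expected_res` used above are not shown on this page. A minimal sketch of how such fixtures could be set up, assuming CUDA is available; the input range is illustrative, not the library's actual test values:

import torch
from rational.torch import Rational

# Hypothetical fixtures: a small input tensor and a reference output computed
# on the GPU before the GPU-to-CPU conversion is exercised.
inp = torch.arange(-2.0, 2.0, 0.1)
expected_res = Rational(version='C', cuda=True)(inp.cuda()).cpu().detach().numpy()

With fixtures like these, the assertion checks that the result computed on the CPU stays within atol=5e-02 of the GPU reference.
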
Example #2
from rational.torch import Rational

rational_function = Rational()  # Initialized close to Leaky ReLU
print(rational_function)
#    Pade Activation Unit (version A) of degrees (5, 4) running on cuda:0
# or Pade Activation Unit (version A) of degrees (5, 4) running on cpu

rational_function.cpu()   # move the function and its parameters to CPU
rational_function.cuda()  # ... and back to GPU

print(rational_function.degrees)
# (5, 4)
print(rational_function.version)
# A
print(rational_function.training)
# True
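
Because Rational is a regular torch.nn.Module, it can also be dropped into a model in place of a fixed activation such as nn.ReLU. A minimal sketch, where the layer sizes and the device handling are illustrative assumptions rather than part of the example above:

import torch
import torch.nn as nn
from rational.torch import Rational

use_cuda = torch.cuda.is_available()
device = "cuda" if use_cuda else "cpu"

# Trainable rational activation used as a drop-in replacement for nn.ReLU().
model = nn.Sequential(
    nn.Linear(128, 64),
    Rational(cuda=use_cuda),
    nn.Linear(64, 10),
).to(device)

out = model(torch.randn(8, 128, device=device))
print(out.shape)  # torch.Size([8, 10])

Since the rational coefficients are learnable parameters, they are updated by the optimizer together with the linear layers.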

import torch
import torch.nn as nn


class RationalNetwork(nn.Module):
    n_features = 512

    def __init__(self,
                 input_shape,
                 output_shape,
                 recurrent=False,
                 cuda=False,
                 **kwargs):
        super().__init__()