import numpy as np
from rational.torch import Rational

# `cuda_inp` and `expected_res` are test fixtures defined at module level.
def test_conversion_cpu_to_gpuD():
    rational = Rational(version='D', cuda=False, trainable=False)
    rational.cuda()  # move the rational function to the GPU
    # Every parameter should now live on a CUDA device ...
    params = np.all(['cuda' in str(para.device) for para in rational.parameters()])
    # ... and the forward pass should use the CUDA kernel of version D.
    gpu_f = "CUDA_D" in rational.activation_function.__qualname__
    new_res = rational(cuda_inp).clone().detach().cpu().numpy()
    coherent_compute = np.all(np.isclose(new_res, expected_res, atol=5e-02))
    assert params and gpu_f and coherent_compute
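
The test relies on two fixtures, cuda_inp and expected_res, that its module defines elsewhere. A minimal sketch of what they could look like, assuming the reference output is the same version-D function evaluated on the CPU (the names mirror the test; the values are purely illustrative):

import torch
from rational.torch import Rational

# Illustrative fixtures only; the real test module defines its own.
inp = torch.arange(-2.0, 2.0, 0.1)
cuda_inp = inp.cuda()
# Reference values: the same function evaluated on the CPU.
expected_res = Rational(version='D', cuda=False, trainable=False)(inp).detach().numpy()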
Example #2
from rational.torch import Rational

rational_function = Rational()  # Initialized close to Leaky ReLU
print(rational_function)
#    Pade Activation Unit (version A) of degrees (5, 4) running on cuda:0
# or Pade Activation Unit (version A) of degrees (5, 4) running on cpu

rational_function.cpu()   # move the function to the CPU
rational_function.cuda()  # move it back to the GPU

print(rational_function.degrees)
# (5, 4)
print(rational_function.version)
# A
print(rational_function.training)
# True
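
Because Rational is an ordinary torch.nn.Module, it can replace a fixed activation such as nn.ReLU anywhere in a model. A minimal sketch, with illustrative layer sizes:

import torch
import torch.nn as nn
from rational.torch import Rational

# One Rational instance per layer, so each activation can learn its own shape.
# cuda=False keeps the activation on the CPU alongside the Linear layers.
model = nn.Sequential(
    nn.Linear(784, 512),
    Rational(cuda=False),
    nn.Linear(512, 10),
)
out = model(torch.randn(8, 784))  # out has shape (8, 10)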

import torch
import torch.nn as nn


class RationalNetwork(nn.Module):
    n_features = 512  # class-level default for the hidden feature size

    def __init__(self,
                 input_shape,
                 output_shape,
                 recurrent=False,
                 cuda=False,
                 **kwargs):
        super().__init__()