Example #1
    def __init__(self, width, ker_width, depth, ker_in, in_width=1, out_width=1):
        super(KernelNNBoundary, self).__init__()
        self.depth = depth

        # Lift the in_width-dimensional node features to `width` channels.
        self.fc1 = torch.nn.Linear(in_width, width)

        # Each kernel is an MLP mapping ker_in edge features to a width x width
        # weight matrix for the edge-conditioned convolution.
        kernel = DenseNet([ker_in, ker_width // 2, ker_width, width**2], torch.nn.ReLU)
        self.conv1 = NNConv_old(width, width, kernel, aggr='mean')
        kernel2 = DenseNet([ker_in, ker_width // 2, ker_width, width**2], torch.nn.ReLU)
        self.conv2 = NNConv_old(width, width, kernel2, aggr='mean')

        # Project back to a scalar output per node via a ker_width-wide hidden layer.
        self.fc2 = torch.nn.Linear(width, ker_width)
        self.fc3 = torch.nn.Linear(ker_width, 1)
Example #2
    def __init__(self):
        super(Net_MP, self).__init__()
        # `width` is not a constructor argument; it is expected to be defined at module scope.
        self.fc1 = torch.nn.Linear(3, width)

        # Edge-conditioned kernel: 3 edge features -> width x width weight matrix.
        kernel = nn.Sequential(nn.Linear(3, width), nn.ReLU(), nn.Linear(width, width**2))
        self.conv1 = NNConv_old(width, width, kernel, aggr='mean')

        self.fc2 = torch.nn.Linear(width, 1)
Example #3
    def __init__(self, width_node, width_kernel, depth, ker_in, in_width=1, out_width=1):
        super(KernelNN3, self).__init__()
        # `depth` presumably sets how many times conv1 is applied in the forward pass (not shown).
        self.depth = depth

        # Lift node features to width_node channels.
        self.fc1 = torch.nn.Linear(in_width, width_node)

        # Single shared kernel: ker_in edge features -> width_node x width_node weight matrix.
        kernel = DenseNet([ker_in, width_kernel // 2, width_kernel, width_node**2], torch.nn.ReLU)
        self.conv1 = NNConv_old(width_node, width_node, kernel, aggr='mean')

        # Project back to a scalar output per node.
        self.fc2 = torch.nn.Linear(width_node, 1)
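
The listing shows constructors only; the forward passes are not included. Below is a minimal, self-contained sketch in the spirit of Example #3, using torch_geometric's maintained NNConv layer as a stand-in for NNConv_old and a plain nn.Sequential MLP as a stand-in for DenseNet. The class name, the weight-shared depth loop, and the toy data are illustrative assumptions, not the original implementation.

import torch
import torch.nn.functional as F
from torch_geometric.nn import NNConv  # stand-in for NNConv_old (assumption)

class KernelNN3Sketch(torch.nn.Module):
    """Hypothetical Example #3-style model: lift, repeated shared kernel
    convolution, project to a scalar per node."""
    def __init__(self, width_node, width_kernel, depth, ker_in, in_width=1):
        super().__init__()
        self.depth = depth
        self.fc1 = torch.nn.Linear(in_width, width_node)
        # Kernel MLP: ker_in edge features -> width_node x width_node matrix.
        kernel = torch.nn.Sequential(
            torch.nn.Linear(ker_in, width_kernel // 2), torch.nn.ReLU(),
            torch.nn.Linear(width_kernel // 2, width_kernel), torch.nn.ReLU(),
            torch.nn.Linear(width_kernel, width_node ** 2),
        )
        self.conv1 = NNConv(width_node, width_node, kernel, aggr='mean')
        self.fc2 = torch.nn.Linear(width_node, 1)

    def forward(self, x, edge_index, edge_attr):
        x = self.fc1(x)
        for _ in range(self.depth):  # assumed weight-shared message-passing loop
            x = F.relu(self.conv1(x, edge_index, edge_attr))
        return self.fc2(x)

# Usage on a random graph: 10 nodes, 40 directed edges, 6-dim edge features.
model = KernelNN3Sketch(width_node=32, width_kernel=64, depth=4, ker_in=6)
x = torch.rand(10, 1)
edge_index = torch.randint(0, 10, (2, 40))
edge_attr = torch.rand(40, 6)
print(model(x, edge_index, edge_attr).shape)  # torch.Size([10, 1])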
Example #4
    def __init__(self,
                 width,
                 width_mid,
                 ker_width,
                 depth,
                 ker_in,
                 in_width=1,
                 out_width=1):
        super(KernelNN, self).__init__()
        self.depth = depth

        # Lift node features to `width` channels.
        self.fc1 = torch.nn.Linear(in_width, width)

        # Convolutions at a fixed channel width: conv1 and conv3 at `width`, conv2 at `width_mid`.
        # Each kernel MLP maps ker_in edge features to the corresponding weight matrix.
        kernel1 = DenseNet([ker_in, ker_width, ker_width, width**2],
                           torch.nn.ReLU)
        self.conv1 = NNConv_old(width, width, kernel1, aggr='mean')
        kernel2 = DenseNet([ker_in, ker_width, ker_width, width_mid**2],
                           torch.nn.ReLU)
        self.conv2 = NNConv_old(width_mid, width_mid, kernel2, aggr='mean')
        kernel3 = DenseNet([ker_in, ker_width, ker_width, width**2],
                           torch.nn.ReLU)
        self.conv3 = NNConv_old(width, width, kernel3, aggr='mean')

        # Transition convolutions between the two widths:
        # conv12 and conv32 map width -> width_mid, conv21 and conv23 map width_mid -> width.
        kernel12 = DenseNet([ker_in, ker_width, ker_width, width * width_mid],
                            torch.nn.ReLU)
        self.conv12 = NNConv_old(width, width_mid, kernel12, aggr='mean')

        kernel23 = DenseNet([ker_in, ker_width, ker_width, width * width_mid],
                            torch.nn.ReLU)
        self.conv23 = NNConv_old(width_mid, width, kernel23, aggr='mean')

        kernel32 = DenseNet([ker_in, ker_width, ker_width, width * width_mid],
                            torch.nn.ReLU)
        self.conv32 = NNConv_old(width, width_mid, kernel32, aggr='mean')

        kernel21 = DenseNet([ker_in, ker_width, ker_width, width * width_mid],
                            torch.nn.ReLU)
        self.conv21 = NNConv_old(width_mid, width, kernel21, aggr='mean')

        # Project back to a scalar output per node.
        self.fc2 = torch.nn.Linear(width, 1)
Example #5
    def __init__(self):
        super(Net_MP_one, self).__init__()

        # Single edge-conditioned convolution on scalar node features (1 -> 1 channel).
        # `width` is expected to be defined at module scope; it only sizes the kernel MLP.
        kernel = nn.Sequential(nn.Linear(3, width), nn.ReLU(), nn.Linear(width, 1))
        self.conv1 = NNConv_old(1, 1, kernel, aggr='mean')
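
Example #5 is the minimal case: scalar node features and a single edge-conditioned convolution whose kernel MLP consumes 3-dimensional edge features. A self-contained equivalent, again assuming torch_geometric's NNConv as a stand-in for NNConv_old and turning the module-level `width` into an explicit argument (names and data here are illustrative, not from the original source):

import torch
from torch_geometric.nn import NNConv

class NetMPOneSketch(torch.nn.Module):
    def __init__(self, width=64):
        super().__init__()
        # Kernel MLP: 3-dim edge features -> a single 1x1 weight per edge.
        kernel = torch.nn.Sequential(
            torch.nn.Linear(3, width), torch.nn.ReLU(), torch.nn.Linear(width, 1))
        self.conv1 = NNConv(1, 1, kernel, aggr='mean')

    def forward(self, x, edge_index, edge_attr):
        return self.conv1(x, edge_index, edge_attr)

model = NetMPOneSketch(width=64)
x = torch.rand(8, 1)                          # scalar feature per node
edge_index = torch.randint(0, 8, (2, 20))     # 20 random directed edges
edge_attr = torch.rand(20, 3)                 # 3-dim edge features
print(model(x, edge_index, edge_attr).shape)  # torch.Size([8, 1])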