def __init__(self, width, ker_width, depth, ker_in, in_width=1, out_width=1):
    super(KernelNN, self).__init__()
    self.depth = depth

    # lift the in_width input features to a width-dimensional node embedding
    self.fc1 = torch.nn.Linear(in_width, width)

    # edge-conditioned kernel: maps ker_in edge attributes to a flattened width x width matrix
    kernel = DenseNet([ker_in, ker_width // 2, ker_width, width**2], torch.nn.ReLU)
    self.conv1 = NNConv_old(width, width, kernel, aggr='mean')

    # project the node embedding back to a scalar output
    self.fc2 = torch.nn.Linear(width, 1)
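# A hedged sketch (assumed, not taken from this file) of the forward pass these layers imply:
# lift with fc1, apply the edge-conditioned convolution `depth` times with ReLU, project with fc2.
# Assumes torch_geometric-style batches carrying x, edge_index and edge_attr.
def forward(self, data):
    x, edge_index, edge_attr = data.x, data.edge_index, data.edge_attr
    x = self.fc1(x)                                      # in_width -> width
    for _ in range(self.depth):
        x = torch.nn.functional.relu(self.conv1(x, edge_index, edge_attr))  # kernel integration step
    return self.fc2(x)                                   # width -> 1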
def __init__(self, width_node, width_kernel, depth, ker_in, in_width=1, out_width=1):
    super(KernelNN3, self).__init__()
    self.depth = depth

    # lift the in_width input features to a width_node-dimensional node embedding
    self.fc1 = torch.nn.Linear(in_width, width_node)

    # deeper edge-conditioned kernel: maps ker_in edge attributes to a flattened
    # width_node x width_node matrix
    kernel = DenseNet([ker_in, width_kernel // 4, width_kernel // 2, width_kernel,
                       width_kernel, width_node**2], torch.nn.ReLU)
    # shallower kernel variants, kept for reference:
    # kernel = DenseNet([ker_in, width_kernel // 2, width_kernel, width_node**2], torch.nn.ReLU)
    # kernel = DenseNet([ker_in, width_kernel, width_node**2], torch.nn.ReLU)
    self.conv1 = NNConv_old(width_node, width_node, kernel, aggr='mean')

    # project the node embedding back to a scalar output
    self.fc2 = torch.nn.Linear(width_node, 1)
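# Illustrative sketch (hypothetical helper with toy sizes, assuming `torch` is imported) of why
# the kernel's last layer has width_node**2 units: NNConv-style layers reshape each per-edge
# output into a (width_node, width_node) matrix that transforms the gathered neighbour feature
# before mean aggregation.
def _edge_kernel_shape_demo(num_edges=10, width_node=4):
    import torch
    flat_weights = torch.randn(num_edges, width_node ** 2)    # stand-in for kernel(edge_attr)
    x_j = torch.randn(num_edges, width_node)                   # neighbour features gathered per edge
    return torch.einsum('eij,ej->ei',                          # one matrix-vector product per edge
                        flat_weights.view(num_edges, width_node, width_node), x_j)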
def __init__(self, width, width_mid, ker_width, depth, ker_in, in_width=1, out_width=1):
    super(KernelNN, self).__init__()
    self.depth = depth

    # lift the in_width input features to a width-dimensional node embedding
    self.fc1 = torch.nn.Linear(in_width, width)

    # convolutions acting within each channel width (width -> width, width_mid -> width_mid)
    kernel1 = DenseNet([ker_in, ker_width, ker_width, width**2], torch.nn.ReLU)
    self.conv1 = NNConv_old(width, width, kernel1, aggr='mean')
    kernel2 = DenseNet([ker_in, ker_width, ker_width, width_mid**2], torch.nn.ReLU)
    self.conv2 = NNConv_old(width_mid, width_mid, kernel2, aggr='mean')
    kernel3 = DenseNet([ker_in, ker_width, ker_width, width**2], torch.nn.ReLU)
    self.conv3 = NNConv_old(width, width, kernel3, aggr='mean')

    # transition convolutions between the two channel widths (width <-> width_mid)
    kernel12 = DenseNet([ker_in, ker_width, ker_width, width * width_mid], torch.nn.ReLU)
    self.conv12 = NNConv_old(width, width_mid, kernel12, aggr='mean')
    kernel23 = DenseNet([ker_in, ker_width, ker_width, width * width_mid], torch.nn.ReLU)
    self.conv23 = NNConv_old(width_mid, width, kernel23, aggr='mean')
    kernel32 = DenseNet([ker_in, ker_width, ker_width, width * width_mid], torch.nn.ReLU)
    self.conv32 = NNConv_old(width, width_mid, kernel32, aggr='mean')
    kernel21 = DenseNet([ker_in, ker_width, ker_width, width * width_mid], torch.nn.ReLU)
    self.conv21 = NNConv_old(width_mid, width, kernel21, aggr='mean')

    # project the node embedding back to a scalar output
    self.fc2 = torch.nn.Linear(width, 1)
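# A small sanity-check sketch (hypothetical helper, not part of the source): every edge kernel's
# last layer must emit in_channels * out_channels values, because NNConv-style layers reshape it
# into one (in_channels, out_channels) matrix per edge. The sizes above follow this rule, e.g.
# kernel12 ends in width * width_mid because conv12 maps width -> width_mid channels, while
# kernel1 ends in width**2 because conv1 keeps the channel count at width.
def _expected_kernel_output_size(in_channels, out_channels):
    """Flattened per-edge weight-matrix size an NNConv-style layer expects from its kernel."""
    return in_channels * out_channels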
def __init__(self, width):
    # `width` was previously read from an enclosing/global scope; it is now an explicit argument.
    super(Net_MP_one, self).__init__()

    # edge-conditioned kernel on 3-dimensional edge attributes, producing one scalar weight per edge
    kernel = nn.Sequential(nn.Linear(3, width), nn.ReLU(), nn.Linear(width, 1))
    self.conv1 = NNConv_old(1, 1, kernel, aggr='mean')
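# Self-contained sketch (hypothetical helper and toy sizes, assuming `torch` and `torch.nn as nn`
# are available) of what the 1 -> 1 edge-conditioned layer above computes: the Sequential kernel
# turns each 3-dimensional edge attribute into one scalar, which scales the single-channel
# neighbour feature before mean aggregation over each node's edges.
def _scalar_edge_weight_demo(num_edges=8, width=32):
    import torch
    from torch import nn
    toy_kernel = nn.Sequential(nn.Linear(3, width), nn.ReLU(), nn.Linear(width, 1))
    edge_attr = torch.randn(num_edges, 3)          # e.g. coordinates / coefficients per edge
    x_j = torch.randn(num_edges, 1)                # 1-channel feature of each edge's source node
    return toy_kernel(edge_attr) * x_j             # (num_edges, 1) per-edge messages, mean-pooled by NNConv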