Example #1
 def __init__(self,
              node_dim: int,
              cond_dim: int,
              out_dim: int,
              method: str,
              dropout: float = 0.,
              use_gin: bool = False,
              ):
     super().__init__()
     self.feat_method, self.agg_method = str_split(method, '^')
     if self.feat_method == 'share':
         self.feat_l = None
     elif self.feat_method == 'film':
         self.feat_l = FilmFusion(node_dim, cond_dim, out_dim, act_type='relu')
     elif self.feat_method == 'linear':
         self.feat_l = nn.Sequential(
             Linear(node_dim, out_dim // 2, orders=('linear', 'act')),
             Linear(out_dim // 2, out_dim, orders=('linear', 'act'))
         )
     else:
         raise NotImplementedError()
     self.node_dim = node_dim
     self.cond_dim = cond_dim
     self.out_dim = out_dim
     self.drop_l = nn.Dropout(dropout)
     self.act_l = nn.ReLU()
     self.use_gin = use_gin
     self.norm_l = nn.Identity()  # no-op normalization by default
     if self.use_gin:
         self.eps = nn.Parameter(torch.zeros(out_dim))  # learnable GIN epsilon
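All of these snippets lean on a str_split helper that is never shown. A minimal sketch consistent with the call sites (split on the delimiter, return the parts as a tuple so they can be unpacked directly; tokens stay strings, which is why Example #5 casts one with int() itself):

from typing import Tuple

def str_split(s: str, sep: str) -> Tuple[str, ...]:
    # Assumed behavior, inferred from the call sites above, e.g.
    # str_split('a^b', '^') -> ('a', 'b'). Tokens are returned as strings.
    return tuple(s.split(sep))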
Example #2
    def __init__(
        self,
        node_dim: int,
        cond_dim: int,
        edge_dim: int,
        out_dim: int,
        param: str,
        dropout: float = 0.,
    ):
        """

        :param node_dim:
        :param cond_dim:
        :param edge_dim:
        :param out_dim:
        :param param: cgs:x_cond:x_x
        :param dropout:
        """
        super().__init__()
        self.graph_learner_l, self.graph_conv_l = None, None
        edge_method, conv_method, self.pool_method = str_split(param, '_')
        edge_method, method_param = str_split(edge_method, ':')
        if edge_method == 'cgs':
            self.graph_learner_l = g_modules.CgsGraphLearner(
                node_dim, cond_dim, method_param, 512, dropout)
        elif edge_method == 'cond':
            self.graph_learner_l = g_modules.CondGraphLearner(
                node_dim, cond_dim, edge_dim, method_param, dropout)
        else:
            raise NotImplementedError()
        conv_method, method_param = str_split(conv_method, ':')
        if conv_method == 'cgs':
            self.graph_conv_l = nn.Sequential(
                g_modules.CgsGraphConv(node_dim,
                                       out_dim * 2,
                                       method_param,
                                       dropout=dropout),
                g_modules.CgsGraphConv(out_dim * 2,
                                       out_dim,
                                       method_param,
                                       use_graph_weights=False,
                                       dropout=dropout))
        elif conv_method == 'cond':
            self.graph_conv_l = g_modules.CondGraphConv(
                node_dim, cond_dim, edge_dim, out_dim, method_param, dropout)
        else:
            raise NotImplementedError()
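To make the param grammar concrete, this is how the docstring's example string 'cgs:x_cond:x_x' unpacks through the two str_split passes above (the 'x' tokens stand in for whatever method_param each sub-module expects):

param = 'cgs:x_cond:x_x'
edge_method, conv_method, pool_method = str_split(param, '_')
# -> 'cgs:x', 'cond:x', 'x'
edge_method, edge_param = str_split(edge_method, ':')  # -> 'cgs', 'x'
conv_method, conv_param = str_split(conv_method, ':')  # -> 'cond', 'x'
# so: CgsGraphLearner for the edges, CondGraphConv for the convolution,
# and pool_method 'x' is kept on self for later pooling.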
Example #3
 def __init__(
     self,
     node_dim: int,
     cond_dim: int,
     edge_dim: int,
     params: str,
     dropout: float = 0.,
 ):
     super().__init__()
     weight_params, reduce_size = str_split(params, '_')
     self.node_weight_l = g_modules.NodeWeightLayer(node_dim, edge_dim,
                                                    weight_params, dropout)
     self.node_pool_l = g_modules.NodePoolLayer(reduce_size)
Example #4
 def __init__(self,
              node_dim: int,
              cond_dim: int,
              edge_dim: int,
              method: str,
              dropout: float = 0.):
     super().__init__()
     self.node_dim = node_dim
     self.edge_dim = edge_dim
     if '_' in method:
         self.method, method = str_split(method, '_')
     else:
         self.method = 'cond'
     self.method_param, self.norm_method, self.reduce_size = str_split(
         method, '^')
     if self.method == 'cond':
         if self.method_param == 'share':
             self.logit_l = None
         else:
             self.logit_l = nn.Sequential(
                 nn.Dropout(dropout),
                 nn.utils.weight_norm(nn.Linear(edge_dim, edge_dim // 2)),
                 nn.ReLU(),
                 nn.utils.weight_norm(nn.Linear(edge_dim // 2, 1)))
     elif self.method == 'cgs':
         if self.method_param == 'share':
             self.logit_l = None
         else:
             self.logit_l = nn.Sequential(
                 nn.utils.weight_norm(
                     nn.Linear(node_dim + cond_dim, edge_dim)), nn.ReLU(),
                 nn.utils.weight_norm(nn.Linear(edge_dim, edge_dim)),
                 nn.ReLU())
     else:
         raise NotImplementedError()
     self.drop_l = nn.Dropout(dropout)
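The method grammar here has an optional leading family tag. A hypothetical string shows both branches (only 'cond', 'cgs', and 'share' appear in the code above; the other tokens are placeholders):

# With an explicit family tag:
method = 'cgs_linear^softmax^4'   # tokens after 'cgs' are hypothetical
# '_' present -> self.method = 'cgs', remainder = 'linear^softmax^4'
# then: method_param='linear', norm_method='softmax', reduce_size='4'

# Without a tag, the family defaults to 'cond':
method = 'share^softmax^4'
# -> self.method = 'cond', method_param = 'share', so logit_l stays None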
Example #5
 def __init__(self,
              edge_dim: int,
              method: str,
              dropout: float = 0.
              ):
     super().__init__()
     self.edge_dim = edge_dim
     self.norm_method, kernel_size, self.reduce_size = str_split(method, '^')
     if kernel_size == 'share':
         self.logit_l = None
     else:
         self.logit_l = nn.Sequential(
             nn.Dropout(dropout),
             nn.utils.weight_norm(nn.Linear(edge_dim, edge_dim//2)),
             nn.ReLU(),
             nn.utils.weight_norm(nn.Linear(edge_dim//2, int(kernel_size)))
         )
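When kernel_size is numeric, logit_l maps each edge feature vector to one logit per kernel. A quick shape check under that reading (the sizes are illustrative, not from the source):

import torch
import torch.nn as nn

edge_dim, kernel_size = 64, 4      # illustrative sizes
logit_l = nn.Sequential(
    nn.Dropout(0.1),
    nn.utils.weight_norm(nn.Linear(edge_dim, edge_dim // 2)),
    nn.ReLU(),
    nn.utils.weight_norm(nn.Linear(edge_dim // 2, kernel_size)),
)
edges = torch.randn(10, edge_dim)  # 10 edges
print(logit_l(edges).shape)        # torch.Size([10, 4]): one logit per kernel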
Example #6
 def __init__(self,
              edge_dim: int,
              method: str,
              params=None,
              ):
     super().__init__()
     self.edge_dim = edge_dim
     self.params = params
     self.score_method, self.norm_method, self.reduce_size = pt.str_split(
         method, '_')
     if self.score_method == 'share':
         self.score_l = None
     elif self.score_method == 'linear':
         self.score_l = nn.Sequential(
             pt.PtLinear(edge_dim, edge_dim // 2, norm=params.e_w_norm,
                         drop=params.e_w_drop, orders=params.e_w_orders),
             # orders='ln' (linear -> norm): weight norm is used here
             # because layer norm performed poorly
             pt.PtLinear(edge_dim // 2, 1, norm='weight', orders='ln')
         )
     else:
         raise NotImplementedError()
Example #7
 def __init__(self,
              node_dim: int,
              edge_dim: int,
              method: str,
              dropout: float = 0.):
     super().__init__()
     self.node_dim = node_dim
     self.edge_dim = edge_dim
     self.method = method
     if method == 'none':
         self.node_logit_l = None
         return
     self.weight_method, self.node_method, self.norm_method = str_split(
         method, '^')
     if self.node_method == 'linear':
         self.node_logit_l = nn.Sequential(
             nn.Dropout(dropout),
             nn.utils.weight_norm(nn.Linear(node_dim, node_dim // 2)),
             nn.ReLU(), nn.utils.weight_norm(nn.Linear(node_dim // 2, 1)))
     else:
         self.node_logit_l = None
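Examples #3 and #7 compose: the weight_params string that Example #3 extracts is the '^'-joined method string that this constructor splits again. A consistent, hypothetical params string for the pair (only 'linear' is a token taken from the code):

params = 'hard^linear^softmax_6'   # tokens other than 'linear' are made up
weight_params, reduce_size = str_split(params, '_')
# weight_params = 'hard^linear^softmax' -> NodeWeightLayer (this example)
# reduce_size   = '6'                   -> NodePoolLayer (Example #3)
weight_method, node_method, norm_method = str_split(weight_params, '^')
# node_method == 'linear' selects the two-layer node_logit_l above.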
Example #8
 def __init__(
     self,
     node_dim: int,
     cond_dim: int,
     out_dim: int,
     params: str,
     dropout: float = 0.,
 ):
     super().__init__()
     weight_method, conv_method = str_split(params, '_')
     self.edge_learner_l = g_modules.CgsGraphLearner(node_dim,
                                                     cond_dim,
                                                     weight_method,
                                                     hid_dim=512)
     self.conv_layers = nn.ModuleList([
         g_modules.CgsGraphConv(node_dim, out_dim * 2, conv_method),
         g_modules.CgsGraphConv(out_dim * 2,
                                out_dim,
                                conv_method,
                                use_graph_weights=False)
     ])
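A note on the design choice: Example #2 wraps its two CgsGraphConv layers in nn.Sequential, which only works if each conv consumes and returns a single object, while this example uses nn.ModuleList, leaving the forward loop to the caller so extra arguments such as the learned graph can be threaded through. (Note also that dropout is accepted here but, unlike in Example #2, never forwarded.) A generic sketch of the ModuleList pattern; the (x, graph) signature is an assumption, not the actual CgsGraphConv API:

import torch
import torch.nn as nn

class ConvStack(nn.Module):
    # Minimal sketch, assuming each layer takes node features plus
    # graph weights -- a hypothetical signature.
    def __init__(self, layers):
        super().__init__()
        self.conv_layers = nn.ModuleList(layers)

    def forward(self, x: torch.Tensor, graph: torch.Tensor) -> torch.Tensor:
        for conv in self.conv_layers:
            x = conv(x, graph)  # pass the shared graph to every layer
        return x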
Example #9
 def __init__(
     self,
     node_dim: int,
     cond_dim: int,
     out_dim: int,
     method: str,
     dropout: float = 0.,
 ):
     super().__init__()
     self.method, self.method_param = str_split(method, '_')
     if self.method == 'cond':
         self.node_feat_l = CondNodeFeat(node_dim, cond_dim, out_dim,
                                         self.method_param, dropout)
     elif self.method == 'cgs':
         self.node_feat_l = CgsNodeFeat(node_dim,
                                        out_dim,
                                        self.method_param,
                                        dropout=dropout)
     else:
         raise NotImplementedError()
Example #10
    def __init__(self,
                 node_dim: int,
                 out_dim: int,
                 method: str,
                 use_graph_weights: bool = True,
                 dropout: float = 0.):
        super().__init__()
        self.node_dim = node_dim
        self.agg_method, kernel_size = str_split(method, '^')
        kernel_size = int(kernel_size)  # str_split yields strings; cast before use as a size
        # Gaussian-kernel parameters, filled in by reset_parameters()
        self.mean_rho = nn.Parameter(torch.empty(1, kernel_size))
        self.mean_theta = nn.Parameter(torch.empty(1, kernel_size))
        self.precision_rho = nn.Parameter(torch.empty(1, kernel_size))
        self.precision_theta = nn.Parameter(torch.empty(1, kernel_size))

        self.conv_layers = nn.ModuleList([
            nn.Linear(node_dim, out_dim // kernel_size, bias=False)
            for _ in range(kernel_size)
        ])
        self.relu_l = nn.ReLU()
        self.use_graph_weights = use_graph_weights
        self.drop_l = nn.Dropout(dropout)
        self.reset_parameters()
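reset_parameters() is called above but never shown. A plausible stand-in that gives the four Gaussian-kernel parameters small random values and re-initializes the linear layers; the actual initialization scheme is not given in the source:

def reset_parameters(self):
    # Hypothetical initialization -- the original method body is not shown.
    for p in (self.mean_rho, self.mean_theta,
              self.precision_rho, self.precision_theta):
        nn.init.normal_(p, mean=0.0, std=0.1)
    for lin in self.conv_layers:
        nn.init.xavier_uniform_(lin.weight)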