Code Example #1
File: gat.py  Project: freebird3366/GraphGallery
    def __init__(self,
                 in_channels,
                 out_channels,
                 hids=[8],
                 num_heads=[8],
                 acts=['elu'],
                 dropout=0.6,
                 weight_decay=5e-4,
                 lr=0.01,
                 use_bias=True):

        super().__init__()

        layers = ModuleList()
        act_fns = []
        paras = []

        inc = in_channels
        pre_head = 1  # head count of the preceding layer (1 for the raw input features)
        for hid, num_head, act in zip(hids, num_heads, acts):
            layer = GATConv(inc * pre_head,
                            hid,
                            heads=num_head,
                            bias=use_bias,
                            dropout=dropout)
            layers.append(layer)
            act_fns.append(get_activation(act))
            paras.append(
                dict(params=layer.parameters(), weight_decay=weight_decay))
            inc = hid
            pre_head = num_head  # the next layer receives hid * num_head features after head concatenation

        layer = GATConv(inc * pre_head,
                        out_channels,
                        heads=1,
                        bias=use_bias,
                        concat=False,
                        dropout=dropout)
        layers.append(layer)
        # do not use weight_decay in the final layer
        paras.append(dict(params=layer.parameters(), weight_decay=0.))

        self.act_fns = act_fns
        self.layers = layers
        self.dropout = Dropout(dropout)
        self.compile(loss=torch.nn.CrossEntropyLoss(),
                     optimizer=optim.Adam(paras, lr=lr),
                     metrics=[Accuracy()])
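
Note on the dimension bookkeeping above: because GATConv concatenates the per-head outputs by default, each hidden layer emits hid * num_head features, which is what the inc and pre_head variables track; the final layer then uses heads=1 with concat=False. Below is a minimal standalone sketch of that flow with plain PyTorch Geometric; the node, feature, and class counts are illustrative and not taken from the project.

import torch
import torch.nn.functional as F
from torch_geometric.nn import GATConv

num_nodes, in_channels, out_channels = 2708, 1433, 7
conv1 = GATConv(in_channels, 8, heads=8, dropout=0.6)             # concat=True: emits 8 * 8 = 64 features
conv2 = GATConv(8 * 8, out_channels, heads=1, concat=False, dropout=0.6)

x = torch.randn(num_nodes, in_channels)
edge_index = torch.randint(0, num_nodes, (2, 10556))              # random edges, for illustration only
out = conv2(F.elu(conv1(x, edge_index)), edge_index)
print(out.shape)                                                  # torch.Size([2708, 7])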
Code Example #2
class NASOP(nn.Module):
    def __init__(self, in_features, out_features, aggregation, attention,
                 **kwargs):
        super().__init__()
        assert attention in ("constant", "gcn", "gat")
        if attention == "constant":
            self.op = ConstantConv(in_features, out_features)
        elif attention == "gcn":
            self.op = GCNConv(in_features, out_features)
        else:
            self.op = GATConv(in_features,
                              out_features,
                              dropout=config.DROPOUT)
        assert aggregation in ("add", "mean", "max")
        self.op.aggr = aggregation

    def forward(self, x, edge_index):
        if config.DROPOUT > 0:
            # pass the training flag explicitly; F.dropout defaults to training=True
            x = F.dropout(x, p=config.DROPOUT, training=self.training)
        x = self.op(x, edge_index)
        return x

    def quantize(self, int_bits, frac_bits):
        for p in self.op.parameters():
            p.data.copy_(quantize(p.data, int_bits, frac_bits))
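
The quantize() helper used in NASOP.quantize is defined elsewhere in the project and is not shown here. As a rough sketch only, a signed fixed-point quantizer consistent with the (int_bits, frac_bits) signature could look like the following; the project's actual implementation may differ.

import torch

def quantize(t, int_bits, frac_bits):
    # hypothetical signed Q(int_bits).(frac_bits) fixed-point rounding
    scale = 2.0 ** frac_bits
    max_val = 2.0 ** int_bits - 1.0 / scale
    min_val = -(2.0 ** int_bits)
    return torch.clamp(torch.round(t * scale) / scale, min_val, max_val)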
Code Example #3
File: old_model.py  Project: qinglu0330/FGNAS
class Attention(nn.Module):
    def __init__(self,
                 attention_type,
                 aggregation_type,
                 in_features,
                 out_features,
                 heads=1,
                 concat=True):
        super().__init__()
        assert attention_type in ("constant", "gcn", "gat")
        if attention_type == "constant":
            self.op = ConstantConv(in_features, out_features)
        elif attention_type == "gcn":
            self.op = GCNConv(in_features, out_features)
        else:
            self.op = GATConv(in_features,
                              out_features,
                              heads=heads,
                              concat=concat)
        assert aggregation_type in ("add", "mean", "max")
        self.op.aggr = aggregation_type

        self.heads = heads
        self.concat = concat

    def forward(self, x, edge_index):
        x = self.op(x, edge_index)
        if isinstance(self.op, GATConv):
            return x  # GATConv concatenates (or averages) its heads internally
        if self.concat is True:
            # emulate multi-head concatenation for the constant/GCN ops by tiling
            return x.repeat(1, self.heads)
        else:
            return x

    def quantize(self, int_bits=None, frac_bits=None):
        for params in self.op.parameters():
            params.data.copy_(quantize(params.data, int_bits, frac_bits))
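
A small shape illustration of the head emulation in forward() above: tiling a single-head output across heads gives it the same width as a concatenating GATConv would produce. The sizes below are made up.

import torch

out_features, heads = 16, 4
x = torch.randn(2708, out_features)   # single-head output of a GCN/constant op
x_tiled = x.repeat(1, heads)          # shape (2708, 64): same width as a concat=True GATConv with 4 heads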