# Example 1
class GNNp(nn.Module):
    """Two-layer GCN operating in label space: num_class -> hidden_dim -> num_class."""

    def __init__(self, opt, adj):
        super(GNNp, self).__init__()
        self.opt = opt
        self.adj = adj

        # First layer maps class-probability inputs to the hidden dimension,
        # second layer maps back to class logits.
        self.m1 = GraphConvolution(
            {'in': opt['num_class'], 'out': opt['hidden_dim']}, adj)
        self.m2 = GraphConvolution(
            {'in': opt['hidden_dim'], 'out': opt['num_class']}, adj)

        if opt['cuda']:
            self.cuda()

    def reset(self):
        """Re-initialize the parameters of both graph-convolution layers."""
        for layer in (self.m1, self.m2):
            layer.reset_parameters()

    def forward(self, x):
        """Dropout -> GC1 -> ReLU -> dropout -> GC2; returns class logits."""
        h = F.dropout(x, self.opt['input_dropout'], training=self.training)
        h = F.relu(self.m1(h))
        h = F.dropout(h, self.opt['dropout'], training=self.training)
        return self.m2(h)
# Example 2
class GNN(nn.Module):
    """Two-layer graph convolutional network: num_feature -> hidden_dim -> num_class.

    Forward variants:
      * ``forward``           -- standard two-layer pass over the full adjacency.
      * ``forward_partition`` -- pass over a caller-supplied sub-adjacency ``adj_ss``.
      * ``forward_mix``       -- pass with augmentation injected at a randomly
                                 chosen layer (0 = input, 1 = first hidden).
      * ``forward_aux``       -- auxiliary per-layer pass with optional mixup.
    """

    def __init__(self, opt, adj):
        """Build both GraphConvolution layers from the option dict.

        opt keys used here: 'num_feature', 'hidden_dim', 'num_class', 'cuda'
        (and 'input_dropout'/'dropout' in the forward passes).
        """
        super(GNN, self).__init__()
        self.opt = opt
        self.adj = adj

        # features -> hidden
        opt_ = dict([('in', opt['num_feature']), ('out', opt['hidden_dim'])])
        self.m1 = GraphConvolution(opt_, adj)

        # hidden -> class logits
        opt_ = dict([('in', opt['hidden_dim']), ('out', opt['num_class'])])
        self.m2 = GraphConvolution(opt_, adj)

        if opt['cuda']:
            self.cuda()

    def reset(self):
        """Re-initialize the parameters of both layers."""
        self.m1.reset_parameters()
        self.m2.reset_parameters()

    def forward(self, x):
        """Standard pass: dropout -> GC1 -> ReLU -> dropout -> GC2 (logits)."""
        x = F.dropout(x, self.opt['input_dropout'], training=self.training)
        x = self.m1(x)
        x = F.relu(x)
        x = F.dropout(x, self.opt['dropout'], training=self.training)
        x = self.m2(x)
        return x

    def forward_partition(self, x, adj_ss):
        """Like ``forward`` but propagates over the sub-adjacency ``adj_ss``."""
        x = F.dropout(x, self.opt['input_dropout'], training=self.training)
        x = self.m1(x, adj_ss)
        x = F.relu(x)
        x = F.dropout(x, self.opt['dropout'], training=self.training)
        x = self.m2.forward_partition(x, adj_ss)
        return x

    def forward_mix(self, x, target, target_discrete, idx, opt, mixup_layer):
        """Forward pass with augmentation applied at a randomly drawn layer.

        ``mixup_layer`` is a sequence of candidate layer indices; one is drawn
        uniformly. Index 0 augments the raw input, index 1 the first hidden
        representation. Returns ``(logits, possibly-augmented target, idx)``.
        """
        layer = random.choice(mixup_layer)
        if layer == 0:
            x, target, idx = get_augmented_network_input(
                self, x, target, target_discrete, idx, opt)
        x = F.dropout(x, self.opt['input_dropout'], training=self.training)
        x = self.m1(x)
        x = F.relu(x)
        if layer == 1:
            x, target, idx = get_augmented_network_input(
                self, x, target, target_discrete, idx, opt)
        x = F.dropout(x, self.opt['dropout'], training=self.training)
        x = self.m2(x)
        return x, target, idx

    def forward_aux(self,
                    x,
                    target=None,
                    train_idx=None,
                    mixup_input=False,
                    mixup_hidden=False,
                    mixup_alpha=0.0,
                    layer_mix=None):
        """Auxiliary pass (each layer's ``forward_aux``) with optional mixup.

        With ``mixup_hidden`` the mixing layer is drawn from the ``layer_mix``
        sequence; with only ``mixup_input`` the input (layer 0) is mixed.
        Returns ``(logits, target_a, target_b, lam)`` when mixup is enabled,
        else just ``logits``.

        NOTE(review): when mixup is enabled, ``layer_mix`` is assumed to
        resolve to 0 or 1 — any other value leaves target_a/target_b/lam
        unbound and raises NameError at the return. Confirm callers only pass
        candidates in {0, 1}.
        """
        # Idiom fix: `== True` comparisons replaced by plain truthiness
        # (PEP 8 E712); callers pass bools, so behavior is unchanged.
        if mixup_hidden or mixup_input:
            if mixup_hidden:
                layer_mix = random.choice(layer_mix)
            else:
                layer_mix = 0

            if layer_mix == 0:
                x, target_a, target_b, lam = mixup_gnn_hidden(
                    x, target, train_idx, mixup_alpha)

            x = F.dropout(x, self.opt['input_dropout'], training=self.training)
            x = self.m1.forward_aux(x)
            x = F.relu(x)
            if layer_mix == 1:
                x, target_a, target_b, lam = mixup_gnn_hidden(
                    x, target, train_idx, mixup_alpha)

            x = F.dropout(x, self.opt['dropout'], training=self.training)
            x = self.m2.forward_aux(x)

            return x, target_a, target_b, lam

        # No mixup requested: plain auxiliary two-layer pass.
        x = F.dropout(x, self.opt['input_dropout'], training=self.training)
        x = self.m1.forward_aux(x)
        x = F.relu(x)
        x = F.dropout(x, self.opt['dropout'], training=self.training)
        x = self.m2.forward_aux(x)
        return x
# Example 3
class GNN_mix(nn.Module):
    """Four-layer GCN (num_feature -> 1000 -> 500 -> 100 -> num_class) with
    optional manifold mixup at the input or after any of the three hidden
    layers (via ``mixup_gnn_hidden``).
    """

    def __init__(self, opt, adj):
        """Build the four GraphConvolution layers.

        opt keys used here: 'num_feature', 'num_class', 'cuda' (and
        'input_dropout'/'dropout' in ``forward``). Hidden widths are fixed at
        1000/500/100.
        """
        super(GNN_mix, self).__init__()
        self.opt = opt
        self.adj = adj

        self.m1 = GraphConvolution(
            dict([('in', opt['num_feature']), ('out', 1000)]), adj)
        self.m2 = GraphConvolution(dict([('in', 1000), ('out', 500)]), adj)
        self.m3 = GraphConvolution(dict([('in', 500), ('out', 100)]), adj)
        self.m4 = GraphConvolution(
            dict([('in', 100), ('out', opt['num_class'])]), adj)

        if opt['cuda']:
            self.cuda()

    def reset(self):
        """Re-initialize the parameters of all four layers.

        BUGFIX: the original only reset m1 and m2, leaving m3/m4 with stale
        parameters across resets.
        """
        for layer in (self.m1, self.m2, self.m3, self.m4):
            layer.reset_parameters()

    def forward(self,
                x,
                target=None,
                train_idx=None,
                mixup_input=False,
                mixup_hidden=False,
                mixup_alpha=0.0,
                layer_mix=None):
        """Forward pass, optionally mixing at the input or a hidden layer.

        With ``mixup_hidden``, ``layer_mix`` is an *integer upper bound* (unlike
        GNN.forward_aux, where it is a sequence): the mixing layer is drawn
        uniformly from [1, layer_mix]. With only ``mixup_input``, layer 0 (the
        raw input) is mixed. Returns ``(logits, target_a, target_b, lam)`` when
        mixup is enabled, else just ``logits``.

        NOTE(review): with mixup enabled, the drawn layer is assumed to land in
        {0, 1, 2, 3}; any other value leaves target_a/target_b/lam unbound.
        """
        # Idiom fix: `== True` comparisons replaced by truthiness (PEP 8 E712).
        if mixup_hidden or mixup_input:
            if mixup_hidden:
                layer_mix = random.randint(1, layer_mix)
            else:
                layer_mix = 0

            if layer_mix == 0:
                x, target_a, target_b, lam = mixup_gnn_hidden(
                    x, target, train_idx, mixup_alpha)

            x = F.dropout(x, self.opt['input_dropout'], training=self.training)
            # Hidden layers 1..3: GC -> ReLU -> (mixup if selected) -> dropout.
            for depth, gc in enumerate((self.m1, self.m2, self.m3), start=1):
                x = F.relu(gc(x))
                if layer_mix == depth:
                    x, target_a, target_b, lam = mixup_gnn_hidden(
                        x, target, train_idx, mixup_alpha)
                x = F.dropout(x, self.opt['dropout'], training=self.training)
            x = self.m4(x)

            return x, target_a, target_b, lam
        else:
            x = F.dropout(x, self.opt['input_dropout'], training=self.training)
            # BUGFIX: the original applied opt['input_dropout'] again after m2
            # in this branch — inconsistent with the mixup branch, which uses
            # opt['dropout'] for every post-input layer. All three hidden
            # dropouts now use opt['dropout'].
            for gc in (self.m1, self.m2, self.m3):
                x = F.relu(gc(x))
                x = F.dropout(x, self.opt['dropout'], training=self.training)
            return self.m4(x)

    """