Example #1
0
    def _loss(self):
        """Accumulate the classification loss and log it for TensorBoard.

        Adds the softmax cross-entropy between ``self.outputs`` and
        ``self.labels`` — optionally weighted per class via
        ``self.class_weights`` / ``self.use_class_weights`` — into the
        running ``self.loss``, then registers a scalar summary so the
        value shows up in TensorBoard.
        """
        # NOTE: a commented-out L2 weight-decay term was removed here;
        # reintroduce it deliberately (with a config flag) if regularization
        # is needed, rather than keeping dead code around.
        self.loss += softmax_cross_entropy(self.outputs, self.labels,
            self.class_weights, self.use_class_weights)

        tf.summary.scalar('loss', self.loss)
Example #2
0
    def forward(self, u, v, r_matrix):
        """Encode users/items with graph convolutions and decode ratings.

        Runs the two graph-convolution layers, fuses the node embeddings
        with dense-transformed side features, decodes rating logits with
        the bilinear decoder, and computes both the softmax cross-entropy
        and RMSE losses against the ratings selected by ``u`` and ``v``.

        Returns the decoder output, the cross-entropy loss, and the RMSE.
        """
        # Two-layer graph convolution: first over all nodes, then
        # restricted to the sampled user/item indices.
        user_emb, item_emb = self.gcl1(self.u_features, self.v_features,
                                       range(self.num_users),
                                       range(self.num_items), r_matrix)
        user_emb, item_emb = self.gcl2(user_emb, item_emb, u, v, r_matrix)

        # Dense transform of the side-information features.
        user_side = torch.relu(self.denseu1(self.u_features_side[u]))
        item_side = torch.relu(self.densev1(self.v_features_side[v]))

        # Fuse graph embeddings with side features (dropout in between).
        # Dropout call order (user first, then item) is kept as-is so the
        # RNG sequence matches the previous implementation.
        user_hidden = torch.cat((user_emb, user_side), 1)
        user_hidden = self.denseu2(F.dropout(user_hidden, self.dropout))
        item_hidden = torch.cat((item_emb, item_side), 1)
        item_hidden = self.densev2(F.dropout(item_hidden, self.dropout))

        output, m_hat = self.bilin_dec(user_hidden, item_hidden, u, v)

        # Ground-truth ratings restricted to the sampled users and items.
        target = r_matrix.index_select(1, u).index_select(2, v)
        loss = softmax_cross_entropy(output, target.float())
        rmse_loss = rmse(m_hat, target.float())

        return output, loss, rmse_loss
Example #3
0
    def _loss(self):
        """Add the (optionally class-weighted) softmax cross-entropy of
        ``self.outputs`` vs ``self.labels`` to ``self.loss`` and record it
        as a TensorBoard scalar summary.
        """
        weighted_loss = softmax_cross_entropy(self.outputs, self.labels,
                                              self.class_weights,
                                              self.use_class_weights)
        self.loss += weighted_loss

        tf.summary.scalar('loss', self.loss)
    def _loss(self):
        """Accumulate the (unweighted) softmax cross-entropy between
        ``self.outputs`` and ``self.labels`` into ``self.loss``, then
        publish the running total as a TensorBoard scalar.
        """
        batch_loss = softmax_cross_entropy(self.outputs, self.labels)
        self.loss = self.loss + batch_loss

        tf.summary.scalar('loss', self.loss)