def compute_view_1(self, X):
    """Compute the network's view-1 latent representation for a batch.

    Parameters
    ----------
    X : np.ndarray
        View-1 input; the first axis is assumed to be the sample
        dimension (TODO confirm with callers).

    Returns
    -------
    np.ndarray
        Output of ``compute_v1_latent`` evaluated batch-wise on ``X``.
    """
    # Defensive copy: prepare_view_1 (applied inside batch_compute2)
    # presumably normalizes in place -- verify against its definition.
    X = X.copy()
    # The compiled function needs a view-2 input even though only the
    # view-1 branch is evaluated; broadcast the stored dummy sample to
    # match the batch size.
    dummy_in_v2 = np.repeat(self.dummy_in_v2, X.shape[0], axis=0)
    return batch_compute2(X, dummy_in_v2, self.compute_v1_latent,
                          batch_size=min(10, X.shape[0]),
                          prepare1=self.prepare_view_1)
def compute_view_2(self, Z):
    """Compute the network's view-2 latent representation for a batch.

    Parameters
    ----------
    Z : np.ndarray
        View-2 input; the first axis is assumed to be the sample
        dimension (TODO confirm with callers).

    Returns
    -------
    np.ndarray
        Output of ``compute_v2_latent`` evaluated batch-wise on ``Z``.
    """
    # Defensive copy: prepare_view_2 (applied inside batch_compute2)
    # presumably normalizes in place -- verify against its definition.
    Z = Z.copy()
    # The compiled function needs a view-1 input even though only the
    # view-2 branch is evaluated; broadcast the stored dummy sample to
    # match the batch size.
    dummy_in_v1 = np.repeat(self.dummy_in_v1, Z.shape[0], axis=0)
    return batch_compute2(dummy_in_v1, Z, self.compute_v2_latent,
                          batch_size=min(10, Z.shape[0]),
                          prepare2=self.prepare_view_2)
l_v1latent, deterministic=True)) compute_v2_latent = theano.function(inputs=input_2, outputs=lasagne.layers.get_output( l_v2latent, deterministic=True)) print("Evaluating on test set...") # compute output on test set eval_set = 'test' n_test = args.n_test if args.n_test is not None else data[eval_set].shape[0] X1, X2 = data[eval_set][0:n_test] print("Computing embedding ...") lv1_latent = batch_compute2(X1, X2, compute_v1_latent, np.min([100, n_test]), prepare=model.prepare) lv2_latent = batch_compute2(X1, X2, compute_v2_latent, np.min([100, n_test]), prepare=model.prepare) if args.V2_to_V1: lv1_latent, lv2_latent = flip_variables(lv1_latent, lv2_latent) # reset n_test n_test = lv1_latent.shape[0] # evaluate retrieval result