# NOTE(review): this chunk is whitespace-mangled; it has been reflowed onto
# conventional lines with no token changes. The fragment directly below is the
# tail of a function whose `def` and the matching `try:` line lie OUTSIDE the
# visible region — the `except:` here pairs with that unseen `try:`.
        D = mahalanobis(xx, yy, IV)
    # NOTE(review): bare `except:` maps ANY failure (singular covariance, bad
    # shapes, even KeyboardInterrupt) to a distance of 0.0 — confirm this
    # best-effort behavior is intentional and consider narrowing the exception.
    except:
        D = 0.0
    results.append(D)
    return results


if __name__ == '__main__':
    # Evaluation script: loads a trained Generator checkpoint and iterates the
    # test loader, sampling a noise vector per batch.
    # Magic numbers 16/64/2 recur throughout (presumably features/instances/
    # classes — TODO confirm against the collectors' signatures).
    metaCollector = MetaFeaturesCollector(16, 64)
    metaCollector.train(f"../processed_data/processed_16_64_2/")
    lambdas = LambdaFeaturesCollector(16, 64)
    # train_meta=False: reuse the meta-feature statistics fitted above rather
    # than re-fitting them on the test split.
    loader = get_loader(f"../processed_data/test/", 16, 64, 2, metaCollector,
                        lambdas, 100, 5, train_meta=False)
    generator = Generator(16, 64, 2, metaCollector.getLength(), 100)
    # Hard-coded checkpoint at epoch 75; requires CUDA (generator.cuda()).
    generator.load_state_dict(torch.load(f'./models/generator-16_64_2-75.pkl'))
    generator.eval()
    generator.cuda()
    results = []
    for i, data in enumerate(loader):
        print(i)
        metas = to_variable(data[1])
        batch_size = data[0].size(0)
        # z_size = 100; reshape to (B, 100, 1, 1) as conv-generator input.
        noise = torch.randn(batch_size, 100)
        # NOTE(review): the loop body appears truncated here — the code that
        # consumes `noise`/`metas` is outside the visible region.
        noise = noise.view((noise.size(0), noise.size(1), 1, 1))
    # NOTE(review): whitespace-mangled chunk reflowed with no token changes.
    # The lines below are the tail of a function (its `def` is outside the
    # visible region) that computes the Mahalanobis distance between two
    # meta-feature vectors.
    from scipy.spatial.distance import mahalanobis
    # Covariance of the two stacked vectors; columns are variables after .T.
    V = np.cov(np.array([x_meta, y_meta]).T)
    # Ridge term on the diagonal to keep V invertible (regularization).
    V[np.diag_indices_from(V)] += 0.1
    IV = np.linalg.inv(V)
    D = mahalanobis(x_meta, y_meta, IV)
    return D


if __name__ == '__main__':
    # Script: walks the training loader and collects flattened meta/lambda
    # vectors (truncated below — the collection loop continues past this view).
    datasize = 64
    z_size = 100
    batch_size = 1
    workers = 5
    lambdas = LambdaFeaturesCollector(16, 64)
    metas = MetaFeaturesCollector(16, 64)
    dataloader = get_loader(f"../processed_data/processed_16_64_2/", 16, 64, 2,
                            metas, lambdas, batch_size, workers)
    # train_meta=False: test split reuses statistics fitted on the train split.
    datatest = get_loader(f"../processed_data/test/", 16, 64, 2, metas, lambdas,
                          batch_size, workers, train_meta=False)
    meta_list = []
    lambdas_list = []
    for i, (data, meta, lambda_l) in tqdm(enumerate(dataloader)):
        # Flatten the (1, k) meta tensor to a 1-D numpy vector.
        meta_o = meta[:, :].numpy()
        # NOTE(review): loop body truncated here — what happens to `meta_o`
        # afterwards is outside the visible region.
        meta_o = meta_o.ravel()
def __init__(self, num_epochs: int = 20, cuda: bool = True, continue_from: int = 0):
    """Set up the discriminator trainer: hyperparameters, data loaders, model,
    optimizer and losses.

    Args:
        num_epochs: number of training epochs to run.
        cuda: move model and losses to GPU when True.
        continue_from: epoch of a saved checkpoint to resume from;
            0 means start from a freshly initialized discriminator.
    """
    # Dataset geometry (presumably features x instances with `classes` labels
    # — TODO confirm against the collectors/loader).
    self.features = 16
    self.instances = 64
    self.classes = 2
    self.z_size = 100
    self.batch_size = 100
    self.workers = 4
    self.num_epochs = num_epochs
    self.cuda = cuda
    # Logging / checkpoint cadence (in steps and epochs respectively).
    self.log_step = 10
    self.log_step_print = 50
    self.save_period = 5
    self.continue_from = continue_from
    self.models_path = "./cnn1206"
    self.lambdas = LambdaFeaturesCollector(self.features, self.instances)
    self.metas = MetaZerosCollector(self.features, self.instances)
    self.data_loader = get_loader(
        f"../processed_data/processed_{self.features}_{self.instances}_{self.classes}/",
        self.features, self.instances, self.classes, self.metas, self.lambdas,
        self.batch_size, self.workers)
    # Was hard-coded 16/64/2; use the attributes (same values) for consistency.
    # 147 is the test-set batch size — presumably the full test split in one
    # batch; TODO confirm.
    self.test_loader = get_loader(
        f"../processed_data/test/", self.features, self.instances,
        self.classes, self.metas, self.lambdas, 147, self.workers,
        train_meta=False)
    # The two original branches built an identical Discriminator; construct it
    # once and only load checkpoint weights when resuming.
    self.discriminator = Discriminator(self.features, self.instances,
                                       self.classes, self.metas.getLength(),
                                       self.lambdas.getLength())
    if continue_from != 0:
        self.discriminator.load_state_dict(
            torch.load(
                f'{self.models_path}/discriminator-{self.features}_{self.instances}_{self.classes}-{continue_from}.pkl'
            ))
        # Matches original behavior: eval() only when resuming from a checkpoint.
        self.discriminator.eval()
    if self.cuda:
        self.discriminator.cuda()
    # Adam hyperparameters per the DCGAN recipe.
    self.lr = 0.0002
    self.beta1 = 0.5
    self.beta2 = 0.999
    self.d_optimizer = optim.Adam(self.discriminator.parameters(), self.lr,
                                  [self.beta1, self.beta2])
    self.cross_entropy = BCEWithLogitsLoss()
    if self.cuda:
        self.cross_entropy.cuda()
    self.mse = MSELoss()
    if self.cuda:
        self.mse.cuda()