def fit(self, X, y=None):
    """Train a VAE on the normal samples selected by *X* and score it on
    the remaining normal samples plus all abnormal samples.

    Parameters
    ----------
    X : np.ndarray
        Integer indices into ``self.normal_datas`` selecting the rows used
        for training; all other normal rows go to the validation set.
    y : ignored
        Present only for scikit-learn-style API compatibility.

    Side effects: stores the best validation F1 score in
    ``self.best_test_score`` and prints a summary of the run.
    """
    print("fit X: ", X.shape)
    print("fit y: ", y.shape if y is not None else '')

    # --- Split train / validation ------------------------------------
    # Train on the selected normal rows; validate on the left-out normal
    # rows (label 0) plus every abnormal row (label 1).
    train_datas = self.normal_datas[X]
    valid_datas = np.vstack([np.delete(self.normal_datas, X, axis=0),
                             self.abnormal_datas])
    valid_labels = np.hstack([np.zeros(self.normal_datas.shape[0] - X.shape[0]),
                              np.ones(self.abnormal_datas.shape[0])])

    # --- Rebuild VAE and train ---------------------------------------
    tf.reset_default_graph()  # drop any graph left over from a previous fit
    vae = VAE(**self.vae_params)
    vae.build()

    # Fit the scaler on the training split only, then transform both splits.
    vae.build_normalize(train_data=train_datas)
    norm_train_datas = vae.transform_raw_data(raw_data=train_datas)
    norm_valid_datas = vae.transform_raw_data(raw_data=valid_datas)

    # NOTE(review): `batch_size` is a free name resolved from an enclosing
    # scope or module global -- confirm it is defined wherever this class
    # is instantiated.
    mini_batchs = [norm_train_datas[i:min(i + batch_size, len(norm_train_datas))]
                   for i in range(0, len(norm_train_datas), batch_size)]

    self.best_test_score, _ = train_vae.train(vae=vae,
                                              mini_batchs=mini_batchs,
                                              valida_data=norm_valid_datas,
                                              valida_label=valid_labels,
                                              save_out_model=None,
                                              n_epoch=30)

    # --- Report ------------------------------------------------------
    print("Perform training with the below parameters: ")
    print("------------------------------------------- ")
    print(json.dumps(self.vae_params, indent=2))
    print("------------------------------------------- ")
    print("Result (F1): ", self.best_test_score)
""" Prepare data """ datas = np.vstack([normal_datas, bearing_datas, gear_datas]) labels = np.hstack([np.zeros(normal_datas.shape[0]), # 0 for inlier, 1 for outlier np.ones(bearing_datas.shape[0]), np.ones(gear_datas.shape[0])]) train_datas, test_datas, train_labels, test_labels = utils.split_train_test(datas=datas, labels=labels, frac=0.8) """ Mini-batchs & perform MinMaxScaler """ vae.build_normalize(train_data=train_datas) # 1 norm_datas = vae.transform_raw_data(raw_data=train_datas) test_norm_datas = vae.transform_raw_data(raw_data=test_datas) mini_batchs = [norm_datas[i:min(i + batch_size, len(norm_datas))] for i in range(0, len(norm_datas), batch_size)] """ Train """ save_out_model = os.path.join(save_vae_hyper_folder, 'vae_tensor.ckpt') _, threshold = train_vae.train(vae=vae, mini_batchs=mini_batchs, valida_data=test_norm_datas, valida_label=test_labels, save_out_model=save_out_model, n_epoch=200) # 1 """ Testing
init_keep_prob = 0.8
vae = VAE(input_dim=input_dim, enc_hid_dim=enc_hid_dim, n_enc_layer=n_enc_layer,
          latent_dim=latent_dim, dec_hid_dim=dec_hid_dim, n_dec_layer=n_dec_layer,
          init_lr=init_lr, n_sample=n_sample, beta=beta,
          use_batch_norm=use_batch_norm, init_keep_prob=init_keep_prob)

# Persist the class instance with cPickle so its hyper-parameters are saved
# alongside the model. Fix: the original passed a bare open() handle to
# cPickle.dump and never closed it -- use a context manager so the file is
# flushed and closed deterministically.
with open(os.path.join(save_out_folder, 'vae_class.pkl'), 'wb') as pkl_file:
    cPickle.dump(vae, pkl_file)
vae.build()

# --- Mini-batches & MinMaxScaler --------------------------------------
# Fit the scaler on the training split only (variant #1), then transform
# both the training and validation splits with it.
vae.build_normalize(train_data=train_datas)  # 1
norm_datas = vae.transform_raw_data(raw_data=train_datas)
valida_norm_datas = vae.transform_raw_data(raw_data=valida_datas)
mini_batchs = [norm_datas[i:min(i + batch_size, len(norm_datas))]
               for i in range(0, len(norm_datas), batch_size)]

# --- Training ----------------------------------------------------------
save_out_model = os.path.join(save_out_folder, 'vae_tensor.ckpt')