parameters['num_layers'] = 3
parameters['iterations'] = 2
parameters['batch_size'] = 128
parameters['module_name'] = 'gru'   # Other options: 'lstm' or 'lstmLN'
parameters['z_dim'] = len(dataX[0][0,:])

#%% Experiments

# Output Initialization
Discriminative_Score = list()
Predictive_Score = list()

# Each Iteration
for it in range(Iteration):

    # Synthetic Data Generation
    dataX_hat = tgan(dataX, dataXs, parameters)

    print('Finish Synthetic Data Generation')

    #%% Performance Metrics

    # 1. Discriminative Score
    Acc = list()
    for tt in range(Sub_Iteration):
        Temp_Disc = discriminative_score_metrics(dataX, dataX_hat)
        Acc.append(Temp_Disc)

    Discriminative_Score.append(np.mean(Acc))
    print('generated discriminative score')

    # 2. Predictive Performance
    MAE_All = list()
def fit(self, filename, logger=''):
    """Train TimeGAN on self.dataX and store the generated synthetic data in self.dataX_hat."""
    self.dataX_hat = tgan(self.dataX, self.parameters, self.noise_generator, logger, filename)
    print('Finish Synthetic Data Generation')
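# ---------------------------------------------------------------------------
# Usage sketch (illustrative only): fit() above belongs to a wrapper class
# around tgan(). The class name `Synthesizer` and the constructor arguments
# below are hypothetical placeholders for whatever class defines fit(); they
# are not part of the original code.
#
#   synth = Synthesizer(dataX, parameters, noise_generator)  # hypothetical constructor
#   synth.fit(filename='timegan_run', logger='')             # trains the model and generates data
#   dataX_hat = synth.dataX_hat                               # synthetic sequences produced by tgan
# ---------------------------------------------------------------------------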