def detect(self):
    self.type.detector()
    type1_result = self.type.type1_pairs
    type2_result = self.type.similar_pairs
    utility.write_result("./type1.dup", type1_result)
    utility.write_result("./type2.dup", type2_result)
else:
    data_files, add_to_name, data_config = common_code()
    algo_name = algo_core + algo_type + "MultiStep" + str(multistep) + "Window" + \
        str(window_size) + anomalyScore_func + anomalyScore_type + add_to_name
    model = predictionLstmStepAhead(input_shape, multistep)
    result_files = use_whole_data(data_files, input_shape,
                                  train_nStepPrediction_based_models_new, model,
                                  nStepAhead=multistep, nb_epoch=nb_epoch,
                                  anomaly_score=anomalyScore_type)
    print(algo_name)
    write_result(algorithm_name=algo_name, data_files=result_files,
                 results_path=cwd + '/results')
    store_param(window_size, nb_epoch, input_shape, algo_core, algo_type, algo_name,
                model, normalized_input, anomalyScore_func, anomalyScore_type, multistep)

# for i in range(len(df)):
#     a.append(al.anomalyProbability(df.value.values[i], df.anomaly_score.values[i], df.timestamp.values[i]))

# 1- Params of model
# 2- Params of training
# 3- Get model
# 4- Get type of training
# 5- Train and get result
# 6- Write output and params
from keras.layers import Conv1D, Flatten, Dropout, Dense
from utility import read_data, train_autoencoder_based_models, use_whole_data, write_result, common_code_normalized
from models import autoencoderLstm
import os
import pickle
from datetime import datetime

now = datetime.now()
cwd = os.getcwd()

# Read the raw data files.
path = cwd + "/data"
data_files = read_data(path)

# Model and training parameters.
window_size = 10
nb_epoch = 20
nb_features = 1
input_shape = (window_size, nb_features)

model = autoencoderLstm(input_shape)

# Normalized data replaces the raw files read above.
data_files, add_to_name, data_config = common_code_normalized()

# Train the LSTM autoencoder on all data and collect per-file results.
result_files = use_whole_data(data_files, input_shape, train_autoencoder_based_models, model)
write_result(algorithm_name='autoencoderLstm', data_files=result_files, results_path=cwd + '/results')

# Pickle the results under a timestamped name and write them again under that name.
algo_name = "autoencoderLstmOneEpoch{}{}{}{}".format(now.month, now.day, now.hour, now.minute)
with open("dump/" + algo_name + ".obj", 'wb') as f:
    pickle.dump(result_files, f)
write_result(algorithm_name=algo_name, data_files=result_files, results_path=cwd + '/results')
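# Minimal usage sketch: reload the pickled result files written above for later
# inspection. "reloaded_result_files" is an illustrative name, not part of the pipeline.
with open("dump/" + algo_name + ".obj", 'rb') as f:
    reloaded_result_files = pickle.load(f)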
def print_TP(self):
    utility.write_result('true_positive.result', list(self.TP))
def print_FP(self):
    print("False Positive pairs: %d" % len(self.FP))
    utility.write_result('false_positive.result', list(self.FP))
def detect_type3(self):
    self.type.detector_type3()
    type3_result = self.type.similar_pairs
    utility.write_result("./type3.dup", type3_result)