def main():
    """Visualise the loss surface around a single GTSRB test image.

    Loads one checkpoint, perturbs one test image over a 2-D grid, evaluates
    the model loss at each grid point, and renders the surface.
    """
    dataset = GtsrbModel(source_dir='GTSRB')
    # Alternative checkpoints tried previously:
    #   "models/gtsrbreplace_worst_of_10_model_False.hdf5"
    #   "models/gtsrbga_loss_model_False.hdf5"
    model_spec = (0, "models/gtsrb.oxford.original_model.hdf5")

    test_images, test_labels = dataset.load_original_test_data()
    net = dataset.load_model(model_spec[0], model_spec[1])

    # Other indices previously inspected: 50, 10, 20, 30.
    sample_idx = 10578
    save_img(test_images[sample_idx], "cur_image")
    base_image = copy.copy(test_images[sample_idx])
    base_label = test_labels[sample_idx]

    grid_images, grid_labels = generate_image(base_image, base_label)
    grid_copy = copy.copy(grid_images)
    loss_values = generate_value(
        net, dataset.preprocess_original_imgs(grid_copy), grid_labels)

    xs, ys, zs = prepare_data(loss_values)
    plot(xs, ys, zs, "Loss")
    # A coverage-based surface (generate_cov / generate_cov2 over the same
    # grid) was explored here before and can be reinstated if needed.
    plt.show()
def main():
    """Benchmark model accuracy/performance on slices of the GTSRB test set.

    First sanity-checks the ``cross_entropy`` helper on a tiny hand-built
    batch, then loads a checkpoint and repeatedly measures ``performance``
    on three fixed test-set slices.
    """
    # Sanity check: two 2-class predictions against one-hot targets.
    print(cross_entropy(np.array([[0.4, 0.6], [0.6, 0.4]]),
                        np.array([[1, 0], [0, 1]])))
    for i in range(1):
        md = GtsrbModel(source_dir='GTSRB')
        if i == 0:
            _model0 = (0, "models/gtsrbga_loss_model_False.hdf5")
        else:
            _model0 = (0, "models/gtsrbga_loss_model_False_q2.hdf5")
        # _model0 = (0, "models/gtsrbreplace_worst_of_10_model_False.hdf5")
        x_original_test, y_original_test = md.load_original_test_data()
        model = md.load_model(_model0[0], _model0[1])
        # BUGFIX: the inner loop originally reused `i`, shadowing the outer
        # loop variable; renamed to `_rep` so the outer loop index survives.
        # NOTE(review): the source was whitespace-mangled — all three
        # performance() slices are assumed to sit inside this repeat loop;
        # confirm against version history.
        for _rep in range(10):
            # Slice formerly toggled to x_original_test[1:128] for i != 0.
            x_part = x_original_test[0:64]
            y_part = y_original_test[0:64]
            performance(model, md, x_part, y_part)

            x_part = x_original_test[640:656]
            y_part = y_original_test[640:656]
            performance(model, md, x_part, y_part)

            x_part = x_original_test[640:2440]
            y_part = y_original_test[640:2440]
            performance(model, md, x_part, y_part)
def main():
    """Generate misclassified perturbations for a slice of the GTSRB test set."""
    dataset = GtsrbModel(source_dir='GTSRB')
    # Alternatives tried before:
    #   GTSRB: "models/gtsrbga_loss_model_False.hdf5"
    #   CIFAR-10: Cifar10Model() with
    #             "models/cifar10replace_worst_of_10_model_False.hdf5"
    checkpoint = (0, "models/gtsrbga_loss_model_False_q2.hdf5")

    test_images, test_labels = dataset.load_original_test_data()
    net = dataset.load_model(checkpoint[0], checkpoint[1])

    image_slice = test_images[1:100]
    label_slice = test_labels[1:100]
    generate_misclassified_perturbations(net, dataset, image_slice, label_slice)
def main2():
    """Run worst-of-10 augmentation on 1000 test images and rank by coverage."""
    dataset = GtsrbModel(source_dir='GTSRB')
    checkpoint = (0, "models/gtsrbga_loss_model_False.hdf5")

    test_images, test_labels = dataset.load_original_test_data()
    net = dataset.load_model(checkpoint[0], checkpoint[1])

    image_slice = test_images[0:1000]
    label_slice = test_labels[0:1000]

    augmenter = Augmenter()
    # Deep copy so the augmenter cannot mutate the original slice.
    originals = copy.deepcopy(image_slice)
    worst_images, worst_labels = augmenter.worst_of_10(originals, label_slice)

    select_worst_cov(dataset, net,
                     dataset.preprocess_original_imgs(originals),
                     worst_images, worst_labels)
def main():
    """Perturbation sweep plus ground-truth dump over the full GTSRB test set."""
    dataset = GtsrbModel(source_dir='GTSRB')
    # Alternatives tried before:
    #   "models/gtsrb.oxford.original_model.hdf5"
    #   CIFAR-10: Cifar10Model() with
    #             "models/cifar10replace_worst_of_10_model_False.hdf5"
    checkpoint = (0, "models/gtsrbreplace_worst_of_10_model_False.hdf5")

    test_images, test_labels = dataset.load_original_test_data()
    net = dataset.load_model(checkpoint[0], checkpoint[1])

    # Whole test set; a rule_out_misclassified_images() pre-filter was
    # explored here previously and left disabled.
    images = test_images
    labels = test_labels
    generate_misclassified_perturbations(net, dataset, images, labels)
    ground_truth(images, labels)
def main():
    """Compare the model on 2000 original vs randomly-replaced test images."""
    dataset = GtsrbModel(source_dir='GTSRB')
    checkpoint = (0, "models/gtsrbga_loss_model_False.hdf5")

    test_images, test_labels = dataset.load_original_test_data()
    net = dataset.load_model(checkpoint[0], checkpoint[1])

    originals = test_images[0:2000]
    labels = test_labels[0:2000]

    augmenter = Augmenter()
    # Deep copy so random_replace works on its own buffer while `originals`
    # keeps the untouched images for the side-by-side comparison.
    scratch = copy.deepcopy(originals)
    replaced, labels = augmenter.random_replace(scratch, labels)

    originals = dataset.preprocess_original_imgs(originals)
    replaced = dataset.preprocess_original_imgs(replaced)
    function_test(net, originals, replaced, labels)
def main3():
    """Time worst-of-10 augmentation on 32 images and run the memory test."""
    dataset = GtsrbModel(source_dir='GTSRB')
    checkpoint = (0, "models/gtsrbreplace_worst_of_10_cov_model_False.hdf5")

    test_images, test_labels = dataset.load_original_test_data()
    net = dataset.load_model(checkpoint[0], checkpoint[1])

    image_slice = test_images[0:32]
    label_slice = test_labels[0:32]

    augmenter = Augmenter()
    started = time.time()
    # Deep copy so augmentation cannot mutate the slice passed to memory_test.
    scratch = copy.deepcopy(image_slice)
    augmented, _aug_labels = augmenter.worst_of_10(scratch, label_slice)
    print("augmentation done")
    print("--- %s seconds ---" % (time.time() - started))

    memory_test(dataset, net, image_slice, augmented)
    print("--- %s seconds ---" % (time.time() - started))
    gc.collect()
# target dataset dataset = args.dataset if dataset not in DATASET.list(): print("unsupported dataset, please use --help to find supported ones") exit(1) config = ExperimentalConfig.gen_config() config.enable_filters = args.enable_filter config.enable_optimize = args.enable_optimize model_index = int(args.model) # initialize dataset dat = DATASET.get_name(dataset) if dat.value == DATASET.gtsrb.value: target0 = GtsrbModel(source_dir='GTSRB') elif dat.value == DATASET.cifar10.value: target0 = Cifar10Model() elif dat.value == DATASET.fashionmnist.value: target0 = FashionMnist() elif dat.value == DATASET.svhn.value: target0 = SVHN() config.brightness_range = [0] config.contrast_range = [1] elif dat.value == DATASET.imdb.value: target0 = IMDBModel("dataset") elif dat.value == DATASET.utk.value: target0 = UTKModel("dataset") elif dat.value == DATASET.kvasir.value: target0 = KvasirModel()
return coverage_diff def generate_layer_output(model, input_data): layer_names = [layer.name for layer in model.layers if 'flatten' not in layer.name and 'input' not in layer.name] intermediate_layer_model = Model(inputs=model.input, outputs=[model.get_layer(layer_name).output for layer_name in layer_names]) intermediate_layer_outputs = intermediate_layer_model.predict(input_data) return intermediate_layer_outputs """ if __name__ == '__main__': md = GtsrbModel(source_dir='GTSRB') # Load data x_original_test, y_original_test = md.load_original_test_data() # x_original_test = x_original_test[0:10] # y_original_test = y_original_test[0:10] x_original_train, y_original_train = md.load_original_data('train') # perturb data temp_x_test = copy.deepcopy(x_original_test) pt = Perturbator() x_perturbed_test, y_perturbed_test = pt.random_perturb( temp_x_test, y_original_test) # Load model
config.queue_len = int(0.4 * args.queue) if config.queue_len < 2: config.queue_len = 2 config.popsize = args.queue - config.queue_len config.enable_filters = args.enable_filter config.enable_optimize = args.enable_optimize start_point = args.start_point epoch = args.epoch model_index = int(args.model) config.robust_threshold = 0.1**int(args.threshold) # initialize dataset dat = DATASET.get_name(dataset) if dat.value == DATASET.gtsrb.value: target0 = GtsrbModel('GTSRB', start_point, epoch) elif dat.value == DATASET.cifar10.value: target0 = Cifar10Model(start_point, epoch) elif dat.value == DATASET.fashionmnist.value: target0 = FashionMnist(start_point, epoch) elif dat.value == DATASET.svhn.value: target0 = SVHN("data", start_point, epoch) config.brightness_range = [0] config.contrast_range = [1] elif dat.value == DATASET.imdb.value: target0 = IMDBModel("dataset", start_point, epoch) elif dat.value == DATASET.utk.value: target0 = UTKModel("dataset", start_point, epoch) elif dat.value == DATASET.kvasir.value: target0 = KvasirModel("kvasir-dataset", start_point, epoch) else: