def get_estimator(epochs=10, batch_size=50, epsilon=0.04, save_dir=None):
    """Build an estimator that trains a clean CIFAR-10 model while monitoring adversarial robustness.

    The model is updated only on the clean cross-entropy loss; FGSM adversarial
    examples are generated outside of training (``mode="!train"``) purely to
    report adversarial accuracy.

    Args:
        epochs: Number of training epochs.
        batch_size: Pipeline batch size.
        epsilon: FGSM perturbation magnitude for the adversarial evaluation branch.
        save_dir: Directory for best-model checkpoints. When None, a fresh
            temporary directory is created per call. (The previous default of
            ``tempfile.mkdtemp()`` in the signature was evaluated once at import
            time, so every invocation silently shared one directory.)

    Returns:
        A configured ``fe.Estimator``.
    """
    if save_dir is None:
        save_dir = tempfile.mkdtemp()
    train_data, eval_data = cifar10.load_data()
    test_data = eval_data.split(0.5)
    pipeline = fe.Pipeline(
        train_data=train_data,
        eval_data=eval_data,
        test_data=test_data,
        batch_size=batch_size,
        ops=[
            Normalize(inputs="x",
                      outputs="x",
                      mean=(0.4914, 0.4822, 0.4465),
                      std=(0.2471, 0.2435, 0.2616))
        ])
    clean_model = fe.build(model_fn=lambda: LeNet(input_shape=(32, 32, 3)),
                           optimizer_fn="adam",
                           model_name="clean_model")
    clean_network = fe.Network(ops=[
        Watch(inputs="x"),
        ModelOp(model=clean_model, inputs="x", outputs="y_pred"),
        CrossEntropy(inputs=("y_pred", "y"), outputs="base_ce"),
        # Adversarial branch only runs in eval/test modes -- training stays clean
        FGSM(data="x", loss="base_ce", outputs="x_adverse", epsilon=epsilon, mode="!train"),
        ModelOp(model=clean_model, inputs="x_adverse", outputs="y_pred_adv", mode="!train"),
        UpdateOp(model=clean_model, loss_name="base_ce")
    ])
    clean_traces = [
        Accuracy(true_key="y", pred_key="y_pred", output_name="clean_accuracy"),
        Accuracy(true_key="y", pred_key="y_pred_adv", output_name="adversarial_accuracy"),
        BestModelSaver(model=clean_model, save_dir=save_dir, metric="base_ce", save_best_mode="min"),
    ]
    clean_estimator = fe.Estimator(pipeline=pipeline,
                                   network=clean_network,
                                   epochs=epochs,
                                   traces=clean_traces,
                                   log_steps=1000)
    return clean_estimator
def get_estimator(epochs=10, batch_size=32, train_steps_per_epoch=None):
    """Assemble a basic LeNet classifier on the ciFAIR-10 dataset.

    Args:
        epochs: Number of training epochs.
        batch_size: Pipeline batch size.
        train_steps_per_epoch: Optional cap on training steps per epoch.

    Returns:
        A configured ``fe.Estimator``.
    """
    # step 1: data pipeline with per-channel normalization
    train_data, eval_data = cifair10.load_data()
    pipeline = fe.Pipeline(train_data=train_data,
                           eval_data=eval_data,
                           batch_size=batch_size,
                           ops=[Normalize(inputs="x", outputs="x")])
    # step 2: model + forward/backward graph
    model = fe.build(model_fn=lambda: LeNet(input_shape=(32, 32, 3)),
                     optimizer_fn="adam")
    network = fe.Network(ops=[
        ModelOp(model=model, inputs="x", outputs="y_pred"),
        CrossEntropy(inputs=("y_pred", "y"), outputs="ce"),
        UpdateOp(model=model, loss_name="ce")
    ])
    # step 3: training loop driver with accuracy reporting
    traces = [Accuracy(true_key="y", pred_key="y_pred")]
    return fe.Estimator(pipeline=pipeline,
                        network=network,
                        epochs=epochs,
                        traces=traces,
                        train_steps_per_epoch=train_steps_per_epoch)
def get_estimator(epochs=10, batch_size=32, extend_ds=False):
    """Train LeNet on ciFAIR-10, optionally with a length-spoofed dataset.

    Args:
        epochs: Number of training epochs. Halved (integer division) when
            ``extend_ds`` is True, since each spoofed epoch covers the data twice.
        batch_size: Pipeline batch size.
        extend_ds: When True, wrap the training set in an ``ExtendDataset``
            that reports double its true length.

    Returns:
        A configured ``fe.Estimator``.
    """
    # step 1: data pipeline
    train_data, eval_data = cifair10.load_data()
    if extend_ds:
        # Each "epoch" now iterates the data twice, so run half as many of them.
        train_data = ExtendDataset(dataset=train_data,
                                   spoof_length=len(train_data) * 2)
        epochs //= 2
    pipeline = fe.Pipeline(train_data=train_data,
                           eval_data=eval_data,
                           batch_size=batch_size,
                           ops=[Normalize(inputs="x", outputs="x")])
    # step 2: model + forward/backward graph
    model = fe.build(model_fn=lambda: LeNet(input_shape=(32, 32, 3)),
                     optimizer_fn="adam")
    network = fe.Network(ops=[
        ModelOp(model=model, inputs="x", outputs="y_pred"),
        CrossEntropy(inputs=("y_pred", "y"), outputs="ce"),
        UpdateOp(model=model, loss_name="ce")
    ])
    # step 3: training loop driver
    traces = [Accuracy(true_key="y", pred_key="y_pred")]
    return fe.Estimator(pipeline=pipeline,
                        network=network,
                        epochs=epochs,
                        traces=traces)
def test_saliency(self):
    """Regression-test the Saliency trace against a stored reference figure.

    Runs a deterministic test pass with pretrained LeNet weights, saves the
    generated saliency figure, and compares it pixel-wise to the checked-in
    reference image.
    """
    fe.estimator.enable_deterministic(200)
    label_mapping = {
        'airplane': 0,
        'automobile': 1,
        'bird': 2,
        'cat': 3,
        'deer': 4,
        'dog': 5,
        'frog': 6,
        'horse': 7,
        'ship': 8,
        'truck': 9
    }
    batch_size = 32
    train_data, eval_data = cifar10.load_data()
    # The training split doubles as the test split here -- we only need
    # deterministic sample images, not held-out data.
    pipeline = fe.Pipeline(test_data=train_data,
                           batch_size=batch_size,
                           ops=[Normalize(inputs="x", outputs="x")],
                           num_process=0)
    weight_path = os.path.abspath(
        os.path.join(__file__, "..", "resources", "lenet_cifar10_tf.h5"))
    model = fe.build(model_fn=lambda: LeNet(input_shape=(32, 32, 3)),
                     optimizer_fn="adam",
                     weights_path=weight_path)
    network = fe.Network(
        ops=[ModelOp(model=model, inputs="x", outputs="y_pred")])
    output_dir = tempfile.mkdtemp()
    traces = [
        Saliency(model=model,
                 model_inputs="x",
                 class_key="y",
                 model_outputs="y_pred",
                 samples=5,
                 label_mapping=label_mapping),
        ImageSaver(inputs="saliency", save_dir=output_dir)
    ]
    estimator = fe.Estimator(pipeline=pipeline,
                             network=network,
                             epochs=5,
                             traces=traces,
                             log_steps=1000)
    estimator.test()
    # Compare the freshly generated figure against the stored reference.
    expected = img_to_rgb_array(
        os.path.abspath(
            os.path.join(__file__, "..", "resources", "saliency_figure.png")))
    produced = img_to_rgb_array(
        os.path.join(output_dir, "saliency_test_epoch_5.png"))
    self.assertTrue(check_img_similar(produced, expected))
def get_estimator(epsilon=0.04,
                  epochs=10,
                  batch_size=32,
                  max_train_steps_per_epoch=None,
                  max_eval_steps_per_epoch=None,
                  save_dir=None):
    """Build an adversarially-trained CIFAR-10 estimator.

    The model is updated on the average of the clean and FGSM-adversarial
    cross-entropy losses, so training itself is adversarial (unlike the
    clean-model variant above, which only evaluates on adversarial samples).

    Args:
        epsilon: FGSM perturbation magnitude.
        epochs: Number of training epochs.
        batch_size: Pipeline batch size.
        max_train_steps_per_epoch: Optional cap on training steps per epoch.
        max_eval_steps_per_epoch: Optional cap on evaluation steps per epoch.
        save_dir: Directory for best-model checkpoints. When None, a fresh
            temporary directory is created per call. (The previous default of
            ``tempfile.mkdtemp()`` in the signature was evaluated once at import
            time, so every invocation silently shared one directory.)

    Returns:
        A configured ``fe.Estimator``.
    """
    if save_dir is None:
        save_dir = tempfile.mkdtemp()
    # step 1
    train_data, eval_data = cifar10.load_data()
    test_data = eval_data.split(0.5)
    pipeline = fe.Pipeline(train_data=train_data,
                           eval_data=eval_data,
                           test_data=test_data,
                           batch_size=batch_size,
                           ops=[
                               Normalize(inputs="x",
                                         outputs="x",
                                         mean=(0.4914, 0.4822, 0.4465),
                                         std=(0.2471, 0.2435, 0.2616))
                           ])
    # step 2
    model = fe.build(model_fn=lambda: LeNet(input_shape=(32, 32, 3)),
                     optimizer_fn="adam")
    network = fe.Network(ops=[
        Watch(inputs="x"),
        ModelOp(model=model, inputs="x", outputs="y_pred"),
        CrossEntropy(inputs=("y_pred", "y"), outputs="base_ce"),
        # Craft adversarial samples in every mode and fold their loss into the update
        FGSM(data="x", loss="base_ce", outputs="x_adverse", epsilon=epsilon),
        ModelOp(model=model, inputs="x_adverse", outputs="y_pred_adv"),
        CrossEntropy(inputs=("y_pred_adv", "y"), outputs="adv_ce"),
        Average(inputs=("base_ce", "adv_ce"), outputs="avg_ce"),
        UpdateOp(model=model, loss_name="avg_ce")
    ])
    # step 3
    traces = [
        Accuracy(true_key="y", pred_key="y_pred", output_name="base accuracy"),
        Accuracy(true_key="y", pred_key="y_pred_adv", output_name="adversarial accuracy"),
        BestModelSaver(model=model, save_dir=save_dir, metric="adv_ce", save_best_mode="min"),
    ]
    estimator = fe.Estimator(
        pipeline=pipeline,
        network=network,
        epochs=epochs,
        traces=traces,
        max_train_steps_per_epoch=max_train_steps_per_epoch,
        max_eval_steps_per_epoch=max_eval_steps_per_epoch,
        monitor_names=["base_ce", "adv_ce"])
    return estimator
def build_model(self):
    """Define the FastEstimator model architecture.

    Args:
        None

    Returns:
        model: Union[tf.keras.sequential, nn.module]
    """
    return fe.build(model_fn=lambda: LeNet(input_shape=(32, 32, 3)),
                    optimizer_fn="adam",
                    model_name="adv_model")
def get_model():
    """Return a fresh LeNet for 32x32 RGB input.

    Clears the Keras session first so repeated builds don't produce
    mismatching auto-suffixed layer names.
    """
    tf.keras.backend.clear_session()
    return LeNet(input_shape=(32, 32, 3))
def test_lenet_class(self):
    """A LeNet built with a custom class count should emit (batch, classes) logits."""
    sample = tf.constant(np.ones((1, 28, 28, 1)))
    model = LeNet(classes=5)
    result_shape = model(sample).numpy().shape
    self.assertEqual(result_shape, (1, 5))