Example #1
def _check_and_download_file(uri: str, basename: str, *paths: str) -> str:
    """Check for the file locally and download it from the given URI if missing."""
    dir_path = os.path.join(*paths)
    file_path = os.path.join(dir_path, basename)
    if not os.path.isdir(dir_path):
        make_directory(dir_path)
    if not os.path.isfile(file_path):
        logger.info("Could not find %s. Downloading it now...", basename)
        get_file(basename, uri, path=dir_path)
    return file_path
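Every snippet on this page relies on a make_directory helper whose definition is not shown. A minimal sketch of what such a helper might look like (the implementation below is an assumption, not the library's actual code):

import os

def make_directory(dir_path):
    """Create dir_path, including missing parents, if it does not exist yet."""
    if not os.path.exists(dir_path):
        os.makedirs(dir_path)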
Example #2
def save_classifier(classifier, file_path="./model/"):
    """Save the classifier to the given location.

    :param classifier: Model to save
    :param str file_path: Folder in which the model files are written
    """
    make_directory(file_path.rsplit('/', 1)[0])
    # Save classifier params
    with open(os.path.join(file_path, 'params.json'), 'w') as fp:
        params = {
            "class_name": type(classifier).__name__,
            "defences": classifier.defences
        }
        json.dump(params, fp)

    # Serialize model to JSON
    with open(os.path.join(file_path, "model.json"), "w") as json_file:
        model_json = classifier.model.to_json()
        json_file.write(model_json)

    # Serialize weights to HDF5
    classifier.model.save_weights(os.path.join(file_path, "weights.h5"))

    # Save compilation params to json
    if classifier.comp_param:
        with open(os.path.join(file_path, 'comp_par.json'), 'w') as fp:
            try:
                json.dump(classifier.comp_param, fp)
            except (TypeError, ValueError):
                # comp_param contains values json cannot serialize;
                # fall back to a default compilation setup.
                fp.seek(0)
                json.dump(
                    {
                        "loss": 'categorical_crossentropy',
                        "optimizer": "sgd",
                        "metrics": ['accuracy']
                    }, fp)
                fp.truncate()
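A hypothetical call for the snippet above, showing the files it writes (the classifier object and the target folder are assumptions for illustration):

save_classifier(classifier, file_path="./model/mnist-cnn/")
# After the call the folder contains:
#   params.json    - class name and defences
#   model.json     - Keras architecture from classifier.model.to_json()
#   weights.h5     - HDF5 weights
#   comp_par.json  - compilation parameters, written only when comp_param is set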
Example #3
v_print = get_verbose_print(args.verbose)
alpha = 0.05  # constant for random perturbation

# get dataset
(X_train, Y_train), (X_test, Y_test), min_, max_ = load_dataset(args.dataset)

session = tf.Session()
k.set_session(session)

# Load classification model
MODEL_PATH = os.path.join(os.path.abspath(args.load), "")
classifier = load_classifier(MODEL_PATH, "best-weights.h5")

if args.save:
    SAVE_ADV = os.path.join(os.path.abspath(args.save), args.adv_method)
    make_directory(SAVE_ADV)

    with open(os.path.join(SAVE_ADV, "readme.txt"), "w") as wfile:
        wfile.write("Model used for crafting the adversarial examples is in " +
                    MODEL_PATH)

    v_print("Adversarials crafted with", args.adv_method, "on", MODEL_PATH,
            "will be saved in", SAVE_ADV)

if args.adv_method in ['fgsm', 'vat', 'rnd_fgsm']:
    eps_ranges = {
        'fgsm': [e / 10 for e in range(1, 11)],
        'rnd_fgsm': [e / 10 for e in range(1, 11)],
        'vat': [1.5, 2.1, 5, 7, 10]
    }
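The snippet ends right after the eps table. A minimal sketch of how such a table is typically consumed, shown only for illustration (craft_adversarial and the output file naming are hypothetical, not taken from the original script):

import numpy as np

for eps in eps_ranges[args.adv_method]:
    v_print("Crafting adversarial examples with eps =", eps)
    # craft_adversarial is a hypothetical helper standing in for the actual attack call.
    X_adv = craft_adversarial(classifier, X_test, method=args.adv_method, eps=eps)
    if args.save:
        np.save(os.path.join(SAVE_ADV, "eps%.2f.npy" % eps), X_adv)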
Example #4
def setUp(self):
    make_directory("./tests/")
Example #5
classifier.compile(comp_params)

if args.save is not False:
    if args.save:
        MODEL_PATH = os.path.abspath(args.save)
    else:
        if args.defences:
            defences = "-".join(args.defences)
        else:
            defences = ""
        MODEL_PATH = os.path.join(os.path.abspath(DATA_PATH), "classifiers",
                                  args.dataset, args.classifier, args.act,
                                  defences)

    v_print("Classifier saved in", MODEL_PATH)
    make_directory(MODEL_PATH)

    # Save best classifier weights
    # checkpoint = ModelCheckpoint(os.path.join(FILEPATH,"best-weights.{epoch:02d}-{val_acc:.2f}.h5"),
    #                              monitor='val_acc', verbose=1, save_best_only=True, mode='max')
    checkpoint = ModelCheckpoint(os.path.join(MODEL_PATH, "best-weights.h5"),
                                 monitor='val_acc',
                                 verbose=1,
                                 save_best_only=True,
                                 mode='max')

    # Remote monitor
    monitor = TensorBoard(log_dir=os.path.join(MODEL_PATH, 'logs'),
                          write_graph=False)
    callbacks_list = [checkpoint, monitor]
else:
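The snippet is cut off inside the if/else (the else branch presumably initializes callbacks_list to an empty list). For illustration, a minimal sketch of how the callbacks list would typically be passed to Keras training afterwards; the epoch count and batch size are assumed values, not taken from the script:

classifier.model.fit(X_train, Y_train,
                     validation_data=(X_test, Y_test),
                     epochs=20,          # assumed value
                     batch_size=128,     # assumed value
                     callbacks=callbacks_list)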