Ejemplo n.º 1
0
def get_ge(net_name, model_parameters, load_parameters):
    """Load the guessing-entropy (GE) curve of every run of a trained model.

    Args:
        net_name: network architecture name, used by get_save_name() to
            build the model file name.
        model_parameters: dict of hyper-parameters encoded in the file name.
        load_parameters: dict copied attribute-by-attribute onto an
            EmptySpace object to mimic the usual ``args`` namespace.

    Returns:
        Tuple ``(ge_x, ge_y, (lta, lva, ltl, lvl))`` where ``ge_x``/``ge_y``
        hold the x/y values of the GE curve per run and the last element
        holds the training/validation accuracy and loss histories per run
        (empty lists unless ``show_losses`` or ``show_acc`` is set).

    NOTE(review): relies on module-level globals ``runs``, ``show_losses``
    and ``show_acc`` — confirm they are defined before calling.
    """
    args = util.EmptySpace()
    for key, value in load_parameters.items():
        setattr(args, key, value)
    folder = "/media/rico/Data/TU/thesis/runs{}/{}".format(
        args.experiment, util.generate_folder_name(args))

    ge_x, ge_y = [], []
    lta, lva, ltl, lvl = [], [], [], []
    for run in runs:
        filename = '{}/model_r{}_{}'.format(
            folder, run, get_save_name(net_name, model_parameters))
        ge_path = '{}.exp'.format(filename)

        # FIX: np.float was a deprecated alias of the builtin float and was
        # removed in NumPy 1.24; the builtin is the documented replacement.
        y_r = util.load_csv(ge_path, delimiter=' ', dtype=float)
        x_r = range(len(y_r))
        ge_x.append(x_r)
        ge_y.append(y_r)

        if show_losses or show_acc:
            ta, va, tl, vl = util.load_loss_acc(filename)
            lta.append(ta)
            lva.append(va)
            ltl.append(tl)
            lvl.append(vl)

    return ge_x, ge_y, (lta, lva, ltl, lvl)
Ejemplo n.º 2
0
def get_ranks(args):
    """Compute the mean attack accuracy over all runs for every
    (channel_size, num_layers, kernel_size) combination and dump the
    results to a JSON file next to the models.

    Side effects:
        - Rebinds the module-level globals ``x_attack``, ``y_attack``,
          ``dk_plain`` and ``key_guesses`` via load_data().
        - Writes ``acc_<network>[_noise<level>].json`` into the model folder.
    """
    # Load the data and make it global
    global x_attack, y_attack, dk_plain, key_guesses
    x_attack, y_attack, key_guesses, real_key, dk_plain = load_data(args)

    model_params = {}
    map_accuracy = {}

    folder = "{}/{}/".format(args.models_path, generate_folder_name(args))
    model_params["max_pool"] = args.max_pool
    for channel_size in args.channels:
        model_params["channel_size"] = channel_size
        for layers in args.layers:
            model_params["num_layers"] = layers
            for kernel in args.kernels:
                model_params["kernel_size"] = kernel

                # Average the accuracy over all runs of this configuration;
                # skip the configuration entirely if any run is missing.
                mean_acc = 0.0
                no_data = False
                for run in range(args.runs):
                    model_path = '{}/model_r{}_{}.pt'.format(
                        folder,
                        run,
                        get_save_name(args.network_name, model_params))
                    if not os.path.exists(model_path):
                        # FIX: message previously read "does not exists"
                        print(util.BColors.WARNING + f"Path {model_path} does not exist" + util.BColors.ENDC)
                        no_data = True
                        break
                    print('path={}'.format(model_path))

                    model = load_model(args.network_name, model_path)
                    model.eval()
                    print("Using {}".format(model))
                    model.to(args.device)

                    # Domain-knowledge networks additionally take the
                    # plaintext input; others get None.
                    if require_domain_knowledge(args.network_name):
                        _, acc = accuracy2(model, x_attack, y_attack, dk_plain)
                    else:
                        _, acc = accuracy2(model, x_attack, y_attack, None)
                    print('Accuracy: {} - {}%'.format(acc, acc * 100))
                    # Accumulate as a percentage
                    mean_acc += acc * 100
                if not no_data:
                    mean_acc /= float(args.runs)
                    map_accuracy[f"c_{channel_size}_l{layers}_k{kernel}"] = mean_acc
                    print(util.BColors.WARNING + f"Mean accuracy {mean_acc}" + util.BColors.ENDC)

    # A negative noise level means "no noise" and is left out of the name
    if args.noise_level >= 0:
        acc_filename = f"{folder}/acc_{args.network_name}_noise{args.noise_level}.json"
    else:
        acc_filename = f"{folder}/acc_{args.network_name}.json"
    print(acc_filename)
    with open(acc_filename, "w") as acc_file:
        acc_file.write(json.dumps(map_accuracy))
Ejemplo n.º 3
0
def get_ranks(args, network_name, model_params):
    """Compute per-run attack predictions for a trained network and rank
    each run; with more than one run the ranking is fanned out to one
    process per run.

    Side effects: rebinds the module-level globals ``x_attack``,
    ``y_attack``, ``dk_plain`` and ``key_guesses`` via load_data().
    """
    # Load the data and make it global
    global x_attack, y_attack, dk_plain, key_guesses
    x_attack, y_attack, key_guesses, real_key, dk_plain = load_data(
        args, network_name)

    folder = "{}/{}/".format(args.models_path, generate_folder_name(args))

    # Pre-compute the predictions for every run up front
    predictions = []
    for run in args.runs:
        model_path = '{}/model_r{}_{}.pt'.format(
            folder, run, get_save_name(network_name, model_params))
        print('path={}'.format(model_path))

        model = load_model(network_name, model_path)
        model.eval()
        print("Using {}".format(model))
        model.to(args.device)

        # Domain-knowledge networks additionally receive the plaintext input
        plain = dk_plain if require_domain_knowledge(network_name) else None
        prediction = accuracy(model, x_attack, y_attack, plain)
        predictions.append(prediction.cpu().numpy())

    # Single run: rank directly in this process, no multiprocessing needed
    if len(args.runs) == 1:
        threaded_run_test(args, predictions[0], folder, args.runs[0],
                          network_name, model_params, real_key)
        return

    # Otherwise start one worker process per run and wait for all of them
    workers = []
    for prediction, run in zip(predictions, args.runs):
        worker = Process(target=threaded_run_test,
                         args=(args, prediction, folder, run, network_name,
                               model_params, real_key))
        workers.append(worker)
        worker.start()
    for worker in workers:
        worker.join()
        print('Joined process')
Ejemplo n.º 4
0
def get_ge(net_name, model_parameters):
    """Load the guessing-entropy (GE) curve of every run of a trained model.

    Args:
        net_name: network architecture name, used by get_save_name() to
            build the model file name.
        model_parameters: dict of hyper-parameters encoded in the file name.

    Returns:
        Tuple ``(ge_x, ge_y, (lta, lva, ltl, lvl))`` with the GE x/y values
        per run and the training/validation accuracy and loss histories
        (empty lists unless ``show_losses`` or ``show_acc`` is set).

    NOTE(review): relies on module-level globals ``args``, ``runs``,
    ``show_losses`` and ``show_acc`` — confirm they are defined.
    """
    folder = "{}/{}".format('/media/rico/Data/TU/thesis/runs/',
                            util.generate_folder_name(args))

    ge_x, ge_y = [], []
    lta, lva, ltl, lvl = [], [], [], []
    for run in runs:
        filename = '{}/model_r{}_{}'.format(
            folder, run, get_save_name(net_name, model_parameters))
        ge_path = '{}.exp'.format(filename)

        # FIX: np.float was a deprecated alias of the builtin float and was
        # removed in NumPy 1.24; the builtin is the documented replacement.
        y_r = util.load_csv(ge_path, delimiter=' ', dtype=float)
        x_r = range(len(y_r))
        ge_x.append(x_r)
        ge_y.append(y_r)

        if show_losses or show_acc:
            ta, va, tl, vl = util.load_loss_acc(filename)
            lta.append(ta)
            lva.append(va)
            ltl.append(tl)
            lvl.append(vl)

    return ge_x, ge_y, (lta, lva, ltl, lvl)
Ejemplo n.º 5
0
def get_ranks(args, network_name, model_params, edit_model=disable_filter):
    """Run the rank test for a single trained model, optionally editing the
    model first (default: disable_filter), caching predictions on disk.

    If ``<model_path>.predictions1.npy`` exists, the cached predictions are
    reloaded together with the real key and key guesses; otherwise the data
    is loaded, the model is edited via ``edit_model`` and the resulting
    predictions/indices are saved. The ranking itself is fanned out in
    groups of 7 worker processes, one per prediction set.

    Side effects: may rebind the module-level globals ``x_attack``,
    ``y_attack``, ``dk_plain`` and ``key_guesses``; writes three ``.npy``
    files next to the model.

    NOTE(review): the cached branch reads the module-level ``load_args`` —
    confirm it is defined before calling with a warm cache.
    """
    folder = "{}/{}/".format(args.models_path, generate_folder_name(args))

    # Calculate the predictions before hand
    # TODO: for multiple runs
    model_path = '{}/model_r{}_{}.pt'.format(
        folder, args.run, get_save_name(network_name, model_params))
    print('path={}'.format(model_path))

    if not os.path.exists(f"{model_path}.predictions1.npy"):

        # Load the data and make it global
        global x_attack, y_attack, dk_plain, key_guesses
        x_attack, y_attack, key_guesses, real_key, dk_plain = load_data(
            args, network_name)
        model = load_model(network_name, model_path)
        model.eval()
        model.to(args.device)

        predictions, correct_indices, sum_indices = edit_model(model)

        # Cache the results so subsequent invocations skip the model pass
        np_predictions = np.array(predictions)
        np_correct_indices = np.array(correct_indices)
        np_sum_indices = np.array(sum_indices)
        np.save(f"{model_path}.predictions1", np_predictions)
        np.save(f"{model_path}.correct_indices", np_correct_indices)
        np.save(f"{model_path}.sum_indices", np_sum_indices)
        print(sum_indices)
    else:
        predictions = np.load(f"{model_path}.predictions1.npy")
        # FIX: np.int was a deprecated alias of the builtin int and was
        # removed in NumPy 1.24; the builtin is the documented replacement.
        real_key = util.load_csv('{}/{}/secret_key.csv'.format(
            args.traces_path, str(load_args.data_set)),
                                 dtype=int)
        key_guesses = util.load_csv(
            '{}/{}/Value/key_guesses_ALL_transposed.csv'.format(
                args.traces_path, str(load_args.data_set)),
            delimiter=' ',
            dtype=int,
            start=load_args.train_size + load_args.validation_size,
            size=load_args.attack_size)

    # Start a thread for each prediction
    groups_of = 7
    for k in range(math.ceil(len(predictions) / float(groups_of))):

        # Start groups of processes
        processes = []
        for i in range(k * groups_of, (k + 1) * groups_of, 1):
            if i >= len(predictions):
                break
            print(f"i: {i}")

            p = Process(target=threaded_run_test,
                        args=(args, predictions[i], folder, args.run,
                              network_name, model_params, real_key, i))
            processes.append(p)
            p.start()

        # Wait for the processes to finish
        for p in processes:
            p.join()
            print('Joined process')
Ejemplo n.º 6
0
def run(args):
    """Train ``args.runs`` networks on the configured data set and save each
    trained model together with its loss/accuracy history.

    The data set is loaded once and split into train/validation slices; the
    same slices are reused for every run, so only the weight initialisation
    differs between runs.  Each run's model is saved under
    ``<model_save_path>/<generated folder>/model_r<i>_<network name>.pt``.
    """

    # Save the models to this folder
    dir_name = generate_folder_name(args)

    # Arguments for loading data
    load_args = {"unmask": args.unmask,
                 "use_hw": args.use_hw,
                 "traces_path": args.traces_path,
                 "sub_key_index": args.subkey_index,
                 "raw_traces": args.raw_traces,
                 "size": args.train_size + args.validation_size,
                 "train_size": args.train_size,
                 "validation_size": args.validation_size,
                 "domain_knowledge": True,
                 "desync": args.desync,
                 "use_noise_data": args.use_noise_data,
                 "start": 0,
                 "data_set": args.data_set}

    # Load data and chop into the desired sizes: the first train_size
    # samples train, the next validation_size samples validate.
    load_function = load_data_set(args.data_set)
    print(load_args)
    x_train, y_train, plain = load_function(load_args)
    x_validation = x_train[args.train_size:args.train_size + args.validation_size]
    y_validation = y_train[args.train_size:args.train_size + args.validation_size]
    x_train = x_train[0:args.train_size]
    y_train = y_train[0:args.train_size]
    # Plaintexts are optional; they are only sliced when the loader
    # returned them (used by the domain-knowledge training path).
    p_train = None
    p_validation = None
    if plain is not None:
        p_train = plain[0:args.train_size]
        p_validation = plain[args.train_size:args.train_size + args.validation_size]

    print('Shape x: {}'.format(np.shape(x_train)))

    # Arguments for initializing the model; 9 classes for Hamming-weight
    # labels, 256 for raw byte values.
    init_args = {"sf": args.spread_factor,
                 "input_shape": args.input_shape,
                 "n_classes": 9 if args.use_hw else 256,
                 "kernel_size": args.kernel_size,
                 "channel_size": args.channel_size,
                 "num_layers": args.num_layers,
                 "max_pool": args.max_pool
                 }

    # Do the runs
    for i in range(args.runs):
        # Initialize the network and the weights
        network = args.init(init_args)
        init_weights(network, args.init_weights)

        # Filename of the model + the folder
        filename = 'model_r{}_{}'.format(i, network.name())
        model_save_file = '{}/{}/{}.pt'.format(args.model_save_path, dir_name, filename)

        print('Training with learning rate: {}, desync {}'.format(args.lr, args.desync))

        # Domain-knowledge training additionally feeds the plaintexts;
        # NOTE(review): only the plain train() path passes args.optimizer —
        # confirm train_dk2 intentionally uses its default optimizer.
        if args.domain_knowledge:
            network, res = train_dk2(x_train, y_train, p_train,
                                     train_size=args.train_size,
                                     x_validation=x_validation,
                                     y_validation=y_validation,
                                     p_validation=p_validation,
                                     validation_size=args.validation_size,
                                     network=network,
                                     epochs=args.epochs,
                                     batch_size=args.batch_size,
                                     lr=args.lr,
                                     checkpoints=args.checkpoints,
                                     save_path=model_save_file,
                                     loss_function=args.loss_function,
                                     l2_penalty=args.l2_penalty,
                                     )
        else:
            network, res = train(x_train, y_train,
                                 train_size=args.train_size,
                                 x_validation=x_validation,
                                 y_validation=y_validation,
                                 validation_size=args.validation_size,
                                 network=network,
                                 epochs=args.epochs,
                                 batch_size=args.batch_size,
                                 lr=args.lr,
                                 checkpoints=args.checkpoints,
                                 save_path=model_save_file,
                                 loss_function=args.loss_function,
                                 l2_penalty=args.l2_penalty,
                                 optimizer=args.optimizer
                                 )
        # Save the results of the accuracy and loss during training
        save_loss_acc(model_save_file, filename, res)

        # Make sure don't mess with our min/max of the spread network
        if isinstance(network, SpreadNet):
            network.training = False

        # Save the final model
        save_model(network, model_save_file)
Ejemplo n.º 7
0
            "lr": '%.2E' % Decimal(0.0001),
            "l2_penalty": 0,
            "train_size": 1000,
            "kernel_size": 20,
            "num_layers": 2,
            "channel_size": 16,
            "network_name": "SpreadV3", #""DenseNorm",
            "init_weights": "",
            "run": 0
}

# Build an args namespace from the settings dict defined above
args = util.EmptySpace()
for key, value in settings.items():
    setattr(args, key, value)

# Resolve the saved model for the configured run and load it
folder = "/media/rico/Data/TU/thesis/runs{}/{}".format(args.experiment, util.generate_folder_name(args))
filename = folder + f"/model_r{args.run}_" + util_classes.get_save_name(args.network_name, settings) + ".pt"
model = load_model(args.network_name, filename)

print(model)

# Load the ASCAD attack traces and move them to the compute device.
# FIX: removed the no-op statement `x_test = x_test`.
x_test, _, _, _, _ = util.load_ascad_test_traces({
    "sub_key_index": 2,
    "desync": 0,
    "traces_path": "/media/rico/Data/TU/thesis/data",
    "unmask": args.unmask,
    "use_hw": args.use_hw
})
print(f"Shape x_test {np.shape(x_test)}")
x_test = torch.from_numpy(x_test.astype(np.float32)).to(util.device)
Ejemplo n.º 8
0
    "l2_penalty": 0.0005,
    "train_size": 40000,
    "kernel_size": 50,
    "num_layers": 1,
    "channel_size": 128,
    "max_pool": 64,
    "network_name": "SmallCNN",
    "init_weights": "kaiming",
    "run": 0
}
# Build an args namespace from the settings dict defined above
args = util.EmptySpace()
for key, value in settings.items():
    setattr(args, key, value)

# Resolve the saved model for the configured run and load it
folder = "/media/rico/Data/TU/thesis/runs3{}/{}".format(
    args.experiment, util.generate_folder_name(args))
filename = folder + f"/model_r{args.run}_" + util_classes.get_save_name(
    args.network_name, settings) + ".pt"
model = load_model(args.network_name, filename)
print(model)

# Inspect the first convolutional layer's weights and bias.
# NOTE(review): assumes model.cnn[0] is a Conv layer — confirm for SmallCNN.
c = model.cnn[0]
print(c)
# print(model.block1[0][0].weights)
w = c.weight
print(c.bias.size())
# exit()
shape_weight = w.size()
print(f"Shape weight: {shape_weight}")

for channel_index in range(shape_weight[1]):