Example #1
def train_speck_distinguisher(num_epochs, num_rounds=7, depth=1):
    #create the network
    net = make_resnet(depth=depth, reg_param=10**-5)
    net.compile(optimizer='adam', loss='mse', metrics=['acc'])
    #generate training and validation data
    X, Y = sp.make_train_data(10**7, num_rounds)
    X_eval, Y_eval = sp.make_train_data(10**6, num_rounds)
    #set up model checkpoint
    check = make_checkpoint(wdir + 'best' + str(num_rounds) + 'depth' +
                            str(depth) + '.h5')
    #create learnrate schedule
    lr = LearningRateScheduler(cyclic_lr(10, 0.002, 0.0001))
    #train and evaluate
    h = net.fit(X,
                Y,
                epochs=num_epochs,
                batch_size=bs,
                validation_data=(X_eval, Y_eval),
                callbacks=[lr, check])
    #save validation accuracy and loss under distinct filenames so neither overwrites the other
    np.save(wdir + 'h' + str(num_rounds) + 'r_depth' + str(depth) + '_acc.npy',
            h.history['val_acc'])
    np.save(wdir + 'h' + str(num_rounds) + 'r_depth' + str(depth) + '_loss.npy',
            h.history['val_loss'])
    dump(
        h.history,
        open(wdir + 'hist' + str(num_rounds) + 'r_depth' + str(depth) + '.p',
             'wb'))
    print("Best validation accuracy: ", np.max(h.history['val_acc']))
    return (net, h)
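
The example above assumes two helpers that are not shown here, cyclic_lr and make_checkpoint. A minimal sketch of what they could look like (the exact schedule shape and checkpoint criterion are assumptions, not taken from the example itself):

from keras.callbacks import ModelCheckpoint

def cyclic_lr(num_epochs, high_lr, low_lr):
    #cyclic learning-rate schedule: within each cycle of num_epochs epochs,
    #the rate decays linearly from high_lr to low_lr, then restarts
    def res(i):
        return low_lr + ((num_epochs - 1) - i % num_epochs) / (num_epochs - 1) * (high_lr - low_lr)
    return res

def make_checkpoint(datei):
    #keep only the weights with the best validation loss seen so far
    return ModelCheckpoint(datei, monitor='val_loss', save_best_only=True)
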
Example #2
def train_distinguisher(num_epochs, num_rounds=7, depth=1, neurons=1024,
                        data_train=2**18, data_test=2**16, cipher="speck",
                        difference=(0, 0x0020), start_round=1,
                        pre_trained_model="fresh"):
    if cipher == "speck":
        wdir = './speck_nets/'
        if pre_trained_model == "fresh":
            #word size decreased to 8 because the network is fed c0^c1
            net = make_resnet(depth=depth, reg_param=10**-5, d1=32, d2=neurons,
                              num_blocks=2, word_size=8)
            net.compile(optimizer='adam', loss='mse', metrics=['acc'])
        else:
            net = load_model(pre_trained_model)
        net.summary()
        X, Y = sp.make_train_data(data_train, num_rounds, diff=difference,
                                  r_start=start_round)
        X_eval, Y_eval = sp.make_train_data(data_test, num_rounds, diff=difference,
                                            r_start=start_round)
    elif cipher == "simon":
        wdir = './simon_nets/'
        if pre_trained_model == "fresh":
            #word size decreased to 8 because the network is fed c0^c1
            net = make_resnet(depth=depth, reg_param=10**-5, d1=32, d2=neurons,
                              num_blocks=2, word_size=8)
            net.compile(optimizer='adam', loss='mse', metrics=['acc'])
        else:
            net = load_model(pre_trained_model)
        net.summary()
        X, Y = si.make_train_data(data_train, num_rounds, diff=difference,
                                  r_start=start_round)
        X_eval, Y_eval = si.make_train_data(data_test, num_rounds, diff=difference,
                                            r_start=start_round)
    elif cipher == "GIFT_64":
        wdir = './gift_64_nets/'
        if pre_trained_model == "fresh":
            #word size decreased to 8 because the network is fed c0^c1
            net = make_resnet(depth=depth, d1=64, d2=neurons, num_blocks=4,
                              word_size=8)
            net.compile(optimizer='adam', loss='mse', metrics=['acc'])
        else:
            net = load_model(pre_trained_model)
        net.summary()
        X, Y = gift.make_train_data(data_train, num_rounds, diff=difference,
                                    r_start=start_round)
        X_eval, Y_eval = gift.make_train_data(data_test, num_rounds, diff=difference,
                                              r_start=start_round)
    else:
        raise ValueError("Unsupported cipher: " + str(cipher))

    print(difference)
    if not os.path.exists(wdir):
        os.makedirs(wdir)
    #set up model checkpoint
    if pre_trained_model == "fresh":
        base_name = (wdir + 'best_' + str(num_rounds) + '_start_' + str(start_round) +
                     '_depth_' + str(depth) + '_diff_' + str(difference) +
                     '_data_train_' + str(data_train) + '_data_test_' + str(data_test))
        check = make_checkpoint(base_name +
                                "_epoch-{epoch:02d}_val_acc-{val_acc:.2f}" + '.h5')
        print("Model will be stored in file: " + base_name + '.h5')
    else:
        base_name = pre_trained_model[:pre_trained_model.index("_epoch")]
        check = make_checkpoint(base_name +
                                "_epoch_imp-{epoch:02d}_val_acc_imp-{val_acc:.2f}" + '.h5')
        print("Model will be stored in file: " + base_name + "_epoch_imp---_val_acc_imp----.h5")
    
    lr = LearningRateScheduler(cyclic_lr(10, 0.002, 0.0001))
    h = net.fit(X, Y, epochs=num_epochs, batch_size=5000,
                validation_data=(X_eval, Y_eval), callbacks=[lr, check])
    #save validation accuracy and loss under distinct filenames so neither overwrites the other
    np.save(wdir + 'h' + str(num_rounds) + 'r_depth' + str(depth) + '_acc.npy',
            h.history['val_acc'])
    np.save(wdir + 'h' + str(num_rounds) + 'r_depth' + str(depth) + '_loss.npy',
            h.history['val_loss'])
    dump(h.history,
         open(wdir + 'hist' + str(num_rounds) + 'r_depth' + str(depth) + '.p', 'wb'))
    print("Best validation accuracy: ", np.max(h.history['val_acc']))
    return (net, h)
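
A minimal usage sketch for the function above; the epoch counts and the checkpoint path are illustrative assumptions, and the sp/si/gift cipher modules plus make_resnet, make_checkpoint and cyclic_lr are assumed to be imported as in the example:

#train a fresh 7-round SPECK distinguisher with the default input difference
net, h = train_distinguisher(num_epochs=20, num_rounds=7, depth=1,
                             cipher="speck", difference=(0, 0x0020))

#resume training from an earlier checkpoint instead of starting fresh
#(hypothetical path; it must contain the "_epoch" marker used by the naming scheme above)
net2, h2 = train_distinguisher(num_epochs=10, num_rounds=7, cipher="speck",
                               pre_trained_model="./speck_nets/best_7_epoch-05_val_acc-0.61.h5")
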
def train_preprocessor(n, nr, epochs):
    net = tn.make_resnet(depth=1)
    net.compile(optimizer='adam', loss='mse', metrics=['acc'])
    #create a random input difference
    diff_in = (randint(0, 2**16), randint(0, 2**16))
    X, Y = sp.make_train_data(n, nr, diff=diff_in)
    net.fit(X, Y, epochs=epochs, batch_size=5000, validation_split=0.1)
    #use the penultimate layer of the trained network as a feature extractor
    net_pp = Model(inputs=net.layers[0].input, outputs=net.layers[-2].output)
    return (net_pp)
def evaluate_diff(diff, net_pp, nr=3, n=1000):
    if (diff == 0): return (0.0)
    #split the 32-bit difference into two 16-bit words
    d = (diff >> 16, diff & 0xffff)
    X, Y = sp.make_train_data(2 * n, nr, diff=d)
    #extract penultimate-layer features, fit a linear model on one half of the data
    #and measure its accuracy on the other half
    Z = net_pp.predict(X, batch_size=5000)
    #perceptron.fit(Z[0:n],Y[0:n]);
    linear_model.fit(Z[0:n], Y[0:n])
    #val_acc = perceptron.score(Z[n:],Y[n:]);
    Y2 = linear_model.predict(Z[n:])
    Y2bin = (Y2 > 0.5)
    val_acc = float(np.sum(Y2bin == Y[n:])) / n
    return (val_acc)
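
A sketch of how the two functions above might be used together to rank candidate input differences; the candidate count, data sizes and round number are illustrative, and linear_model is assumed to be a global scikit-learn regressor (e.g. Ridge) defined elsewhere in the module:

from random import randint

#train one generic preprocessor, then score random nonzero 32-bit differences with it
net_pp = train_preprocessor(10**6, 3, 5)
candidates = [randint(1, 2**32 - 1) for _ in range(100)]
scores = [(evaluate_diff(d, net_pp, nr=3, n=1000), d) for d in candidates]
best_acc, best_diff = max(scores)
print("Best difference found:", hex(best_diff), "accuracy:", best_acc)
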
Example #5
net8.load_weights('net8_small.h5')

def evaluate(net, X, Y):
    Z = net.predict(X, batch_size=10000).flatten()
    Zbin = (Z > 0.5)
    diff = Y - Z
    mse = np.mean(diff * diff)
    n = len(Z)
    n0 = np.sum(Y == 0)
    n1 = np.sum(Y == 1)
    acc = np.sum(Zbin == Y) / n
    tpr = np.sum(Zbin[Y == 1]) / n1
    tnr = np.sum(Zbin[Y == 0] == 0) / n0
    mreal = np.median(Z[Y == 1])
    high_random = np.sum(Z[Y == 0] > mreal) / n0
    print("Accuracy: ", acc, "TPR: ", tpr, "TNR: ", tnr, "MSE:", mse)
    print("Percentage of random pairs with score higher than median of real pairs:",
          100 * high_random)

X5, Y5 = sp.make_train_data(10**6, 5)
X6, Y6 = sp.make_train_data(10**6, 6)
X7, Y7 = sp.make_train_data(10**6, 7)
X8, Y8 = sp.make_train_data(10**6, 8)

X5r, Y5r = sp.real_differences_data(10**6, 5)
X6r, Y6r = sp.real_differences_data(10**6, 6)
X7r, Y7r = sp.real_differences_data(10**6, 7)
X8r, Y8r = sp.real_differences_data(10**6, 8)

print('Testing neural distinguishers against 5 to 8 rounds in the ordinary real vs random setting')
print('5 rounds:')
evaluate(net5, X5, Y5)
print('6 rounds:')
evaluate(net6, X6, Y6)
print('7 rounds:')