Example No. 1 (score: 0)
    # NOTE(review): fragment of a larger training routine — the enclosing
    # function's `def` line and the definitions of `data`, `label`,
    # `batch_size`, `nb_epoch`, `sess`, `dnn_model`, and `shape` are outside
    # this excerpt. The trailing `with sess.as_default():` block is truncated.
    print("Input data shape", shape(data))  # presumably numpy's shape() via a star import — confirm

    # Shuffle data and labels with the SAME permutation so pairs stay aligned.
    index = np.arange(len(label))
    np.random.shuffle(index)
    label = label[index]
    data = data[index]

    # Hard-coded split: first 600 samples train, the rest test.
    trX=data[:600]
    trY=label[:600]
    teX=data[600:]
    teY=label[600:]

    # TF1-style graph placeholders: 1000-dim flat input, 4-class one-hot target.
    x = tf.placeholder(tf.float32, shape=(None, 1000))
    y = tf.placeholder(tf.float32, shape=(None, 4))

    model = dnn_model(input_dim=1000)
    predictions = model(x)
    # NOTE(review): `sgd` is built here but never used — compile() below uses
    # Adadelta instead. Dead code unless removing it was unintentional; confirm.
    sgd = SGD(lr=0.0001, decay=1e-6, momentum=0.9, nesterov=True)
    model.compile(loss=keras.losses.categorical_crossentropy,
                  optimizer=keras.optimizers.Adadelta(),
                  metrics=['accuracy'])

    model.fit(trX, trY, batch_size=batch_size, epochs=nb_epoch, shuffle=True)  # validation_split=0.1
    # model.save_weights('dnn_clean.h5')
    # evaluate() returns [loss, accuracy] because metrics=['accuracy'] above.
    score = model.evaluate(teX, teY, verbose=0)
    print('Test loss:', score[0])
    print('Test accuracy:', score[1])


    # Truncated here: adversarial-sample collection presumably follows — the
    # rest of the `with` block is outside this excerpt.
    with sess.as_default():
        adv_sample=[]
Example No. 2 (score: 0)
    # NOTE(review): fragment of a larger training routine — the enclosing
    # function's `def` line and the definitions of `labels`, `signals`,
    # `index`, `dnn_model`, and `shape` are outside this excerpt; the final
    # model.compile(...) call is cut off mid-argument-list.
    labels = labels[index]  # apply the same shuffle permutation as `signals` below
    # Keep the RAW (integer) test labels BEFORE one-hot encoding — taken here
    # deliberately, ahead of to_categorical() on the next line.
    teY_original = labels[230000:]
    labels = keras.utils.to_categorical(labels, num_classes=None)  # num_classes inferred from data
    signals = signals[index]
    signals = np.expand_dims(signals, axis=2)  # add channel axis: (N, 640) -> (N, 640, 1)

    # Hard-coded split: first 230000 samples train, the rest test.
    trX = signals[:230000]
    trY = labels[:230000]
    teX = signals[230000:]
    teY = labels[230000:]

    print("Input label shape", shape(labels))    # presumably numpy's shape() via a star import — confirm
    print("Input data shape", shape(signals))

    model = dnn_model(input_dim=640)

    # TF1-style graph placeholders: (640, 1) signal input, 17-class one-hot target.
    x = tf.placeholder(tf.float32, shape=(None, 640, 1))
    y = tf.placeholder(tf.float32, shape=(None, 17))

    #################################logits########################################
    predictions = model(x)  ###### after softmax
    # Reach back through the graph to the softmax op's input tensor, i.e. the
    # pre-softmax logits. Fragile: depends on softmax being the model's last op.
    predictions_logits = predictions.op.inputs[0]  ###logits, before softmax
    predictions = predictions_logits
    #################################logits#######################################

    #sgd = SGD(lr=0.0001, decay=1e-6, momentum=0.9, nesterov=True)
    # Truncated here: the Nadam argument list (and the rest of compile()) is
    # outside this excerpt.
    model.compile(loss=keras.losses.categorical_crossentropy,
                  optimizer=keras.optimizers.Nadam(lr=0.002,
                                                   beta_1=0.9,
                                                   beta_2=0.999,