# Compile the CNN, then either train it from scratch or restore trained
# weights, and finally compare test accuracy before vs. after fault injection.
# NOTE(review): relies on `model`, `tfi`, and the train/test arrays being
# defined earlier in this file — confirm against the surrounding code.
loss_fn = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)
model.compile(optimizer='adam', loss=loss_fn, metrics=['accuracy'])

# Change to True if you want to train from scratch
train = False
if not train:
    model.load_weights('h5/cnnc-trained.h5')
else:
    # Save the untrained weights for future training with modified dataset
    model.save_weights('h5/cnnc-untrained.h5')
    model.fit(train_images, train_labels,
              batch_size=100, epochs=5,
              validation_data=(test_images, test_labels))
    model.save_weights('h5/cnnc-trained.h5')

# Baseline accuracy on the clean (fault-free) model.
_, baseline_acc = model.evaluate(test_images, test_labels, verbose=2)
print("Accuracy before faults:", baseline_acc)

# Inject faults into the model as described by the YAML config, then re-measure.
tfi.inject(model=model, confFile="confFiles/sample.yaml")
_, faulty_acc = model.evaluate(test_images, test_labels, verbose=2)
print("Accuracy after faults:", faulty_acc)
# For each fault trial: pick test samples the trained LeNet classifies
# correctly, inject faults into a fresh copy of the training data, retrain
# from the untrained weights on the faulty data, and count how many of the
# previously-correct samples are now misclassified (silent data corruption).
# Per-trial and overall SDC rates are appended to `f`.
# NOTE(review): `f`, `num`, `offset`, `numFaults`, `numInjections`, `conf`,
# `model`, `tfi`, and the datasets are defined elsewhere in this file.
totsdc = 0.
start = time.time()
for fault_trial in range(numFaults):  # was `i`: renamed — the old name was
    # clobbered by both inner loops below, which also used `i`.
    # Deep-copy the training set so injection never mutates the originals.
    train_labels1 = copy.deepcopy(train_labels)
    train_images1 = copy.deepcopy(train_images)

    # Restore trained weights to select samples the clean model gets right.
    model.load_weights('h5/lenet-trained.h5')
    ind = []
    # Oversample by `offset` so we (usually) find numInjections correct ones.
    init = random.sample(range(num), numInjections + offset)
    for idx in init:
        test_loss, test_acc = model.evaluate(
            test_images[idx:idx + 1], test_labels[idx:idx + 1], verbose=0)
        if test_acc == 1.:
            ind.append(idx)
    # NOTE(review): if fewer than numInjections samples were correct, `ind`
    # is short and the SDC rate below is understated — confirm `offset` is
    # large enough in practice.
    ind = ind[:numInjections]

    # Retrain from scratch on the fault-injected training data.
    model.load_weights('h5/lenet-untrained.h5')
    train_images_, train_labels_ = tfi.inject(
        x_test=train_images1, y_test=train_labels1, confFile=conf)
    model.fit(train_images_, train_labels_,
              batch_size=100, epochs=15,
              validation_data=(test_images, test_labels), verbose=0)

    # Count previously-correct samples that are now misclassified.
    sdc = 0.
    for idx in ind:
        test_loss, test_acc = model.evaluate(
            test_images[idx:idx + 1], test_labels[idx:idx + 1], verbose=0)
        if test_acc == 0.:
            sdc = sdc + 1.
    f.write(str(sdc / numInjections))
    f.write("\n")
    totsdc = totsdc + sdc

# Overall SDC rate across all trials.
f.write("\n")
f.write(str(totsdc / (numFaults * numInjections)))
f.write("\n")
# Fault-injection accuracy count per class: for each of `numFaults` rounds,
# run 30 injected predictions per class and tally how often the injected
# model still predicts the correct class; averaged results go to res.csv.
# NOTE(review): `tesX`, `X_test`, `countX`, `model`, and `tfi` are defined
# elsewhere in this file — confirm against the surrounding code.
start = time.time()
conf = sys.argv[1]
filePath = sys.argv[2]
filePath = os.path.join(filePath, "res.csv")
f = open(filePath, "w")
numFaults = int(sys.argv[3])

for _round in range(numFaults):
    for cls in range(43):  # 43 traffic-sign classes
        hits = 0.
        cls_indices = tesX[cls]
        for sample in range(30):
            res = tfi.inject(model=model,
                             x_test=X_test[cls_indices[sample:sample + 1]],
                             confFile=conf)
            if res == cls:
                hits = hits + 1.
        countX[cls] = countX[cls] + hits

# Average the per-class hit counts over all fault rounds.
for cls in range(43):
    countX[cls] = countX[cls] / numFaults

f.write(str(countX))
f.write("\n")
f.write("Time for %d injections: %f seconds" % (numFaults, time.time() - start))
f.close()
''' conf = sys.argv[1] filePath = sys.argv[2] filePath = os.path.join(filePath, "res.csv") f = open(filePath, "w") numFaults = int(sys.argv[3]) numInjections = 10 #numInjections = int(sys.argv[4]) totsdc = 0.0 start = time.time() for i in range(numFaults): model.load_weights('h5/resnet-trained.h5') tfi.inject(model=model, confFile=conf) sdc = 0. for i in ind: image = load_img(images[i], target_size=(224, 224)) image = img_to_array(image) image = image.reshape( (1, image.shape[0], image.shape[1], image.shape[2])) image = preprocess_input(image) out = model.predict(image) label = decode_predictions(out) label = label[0][0] if (label[1] != img_labels[i]): sdc = sdc + 1. f.write(str(sdc / numInjections)) f.write("\n") totsdc = totsdc + sdc
# Measure SqueezeNet SDC under fault injection: for each trial, reload the
# trained weights, run every selected image through an injected forward
# pass, and count predictions whose top-1 label no longer matches the
# ground truth. Per-trial and overall SDC rates are written to `f`.
# NOTE(review): `numFaults`, `ind`, `images`, `img_labels`, `conf`, `f`,
# `model`, `tfi`, and the Keras image helpers come from earlier in the file.
numInjections = 10
#numInjections = int(sys.argv[4])
totsdc = 0.0
start = time.time()

for trial in range(numFaults):
    # Fresh weights each trial so injected faults never accumulate.
    model.load_weights('h5/sqznet-trained.h5')
    #tfi.inject(model=model, confFile=conf)
    sdc = 0.
    for i in ind:
        # Load and preprocess one image into a (1, 227, 227, C) batch.
        image = load_img(images[i], target_size=(227, 227))
        image = img_to_array(image)
        image = image.reshape((1,) + image.shape)
        image = preprocess_input(image)

        # Injected inference; compare top-1 label against ground truth.
        res = tfi.inject(model=model, x_test=image, confFile=conf)
        top = decode_predictions(res[0])[0][0]
        if top[1] != img_labels[i]:
            sdc = sdc + 1.
    f.write(str(sdc / numInjections))
    f.write("\n")
    totsdc = totsdc + sdc

# Overall SDC rate and wall-clock time for the whole experiment.
f.write("\n")
f.write(str(totsdc / (numFaults * numInjections)))
f.write("\n")
f.write("Time for %d injections: %f seconds"
        % (numFaults * numInjections, time.time() - start))
f.close()