def get_activation_lrpmodule(activation_layer):
    """Return the LRP module corresponding to a given activation function.

    Looks up the activation by its ``__name__`` attribute:

    * ``"linear"``  -> ``None`` (identity; no LRP module required)
    * ``"relu"``    -> a fresh ``modules.Rect()``
    * ``"softmax"`` -> a fresh ``modules.SoftMax()``

    Raises ``KeyError`` for any other activation name.
    """
    # Table is rebuilt on every call so each lookup hands back a
    # newly-constructed module instance (matching the original behavior).
    name_to_module = {
        "linear": None,
        "relu": modules.Rect(),
        "softmax": modules.SoftMax(),
    }
    key = activation_layer.__name__
    return name_to_module[key]
def roar_kar(keep, random=False, train_only=False):
    """Run a ROAR/KAR occlusion benchmark over several attribution methods.

    For each attribution method, (optionally) occludes the training data at a
    series of percentiles, retrains a LeNet-style network on each occluded
    dataset, and records test accuracy. The whole sweep is repeated 3 times
    and the per-method accuracy curves are averaged.

    Parameters
    ----------
    keep : bool
        True -> KAR (keep most relevant pixels), False -> ROAR (remove them).
        Also selects the save directory ('KAR/...' vs 'ROAR/...').
    random : bool, optional
        If True, pixels are occluded at random instead of by attribution
        ranking.  (NOTE(review): this shadows the stdlib ``random`` module
        name; kept for backward compatibility with existing callers.)
    train_only : bool, optional
        If True, skip the occlusion step and only run the retrain/evaluate
        loop on previously saved occluded datasets.

    Returns
    -------
    dict
        Mapping attribution-method name -> mean accuracy array (one entry
        per percentile, averaged over the 3 repetitions).
    """
    logdir = 'tf_logs/standard/'

    def get_savedir():
        # Derive the output directory from the base logdir; create lazily.
        savedir = logdir.replace('tf_logs', 'KAR' if keep else 'ROAR')
        if not os.path.exists(savedir):
            os.makedirs(savedir)
        return savedir

    percentiles = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1]
    attribution_methods = ['normal', 'LRP', 'proposed_method']

    if not train_only:
        # Occlusion phase: generate occluded datasets for each method.
        DNN = model_io.read('../models/MNIST/LeNet-5.nn')
        for v in attribution_methods:
            batch_size = 128
            print("{} Step is start".format(v))
            # BUGFIX: the two progress messages were swapped — the
            # random=True branch announced "percentile Remove" and vice versa.
            if random:
                print("{} Random Remove".format(v))
                occlude_dataset(DNN=DNN, attribution=v,
                                percentiles=percentiles, random=True,
                                keep=keep, batch_size=batch_size,
                                savedir=get_savedir())
            else:
                print("{} percentile Remove".format(v))
                occlude_dataset(DNN=DNN, attribution=v,
                                percentiles=percentiles, random=False,
                                keep=keep, batch_size=batch_size,
                                savedir=get_savedir())
            print("{} : occlude step is done".format(v))

    print("ress record")
    # One accuracy-curve list per attribution method, over 3 repetitions.
    ress = {k: [] for k in attribution_methods}
    for trial in range(3):
        for v in attribution_methods:
            res = []
            for p in percentiles:
                # '{}' placeholder is filled with the split name below.
                occdir = get_savedir() + '{}_{}_{}.pickle'.format('{}', v, p)
                occdir_y = get_savedir() + '{}_{}_{}_{}.pickle'.format(
                    '{}', v, p, 'label')

                data_train = unpickle(occdir.format('train'))
                Xtrain = np.array(data_train)
                Ytrain = unpickle(occdir_y.format('train'))
                Ytrain = np.array(Ytrain)

                # Test set is always the clean (non-occluded) MNIST data.
                Xtest = data_io.read('../data/MNIST/test_images.npy')
                Ytest = data_io.read('../data/MNIST/test_labels.npy')
                print("check : {}".format(Ytrain.shape))

                # Scale, reshape to NHWC, and pad 28x28 -> 32x32 with the
                # background value (-1) so the conv stack fits exactly.
                Xtest = scale(Xtest)
                Xtest = np.reshape(Xtest, [Xtest.shape[0], 28, 28, 1])
                Xtest = np.pad(Xtest, ((0, 0), (2, 2), (2, 2), (0, 0)),
                               'constant', constant_values=(-1., ))

                # One-hot encode the integer test labels.
                Ix = Ytest[:, 0].astype(int)
                Ytest = np.zeros([Xtest.shape[0], np.unique(Ytest).size])
                Ytest[np.arange(Ytest.shape[0]), Ix] = 1
                print(occdir)

                # Fresh LeNet-style network trained from scratch per setting.
                DNN = modules.Sequential([
                    modules.Convolution(filtersize=(5, 5, 1, 10), stride=(1, 1)),
                    modules.Rect(),
                    modules.SumPool(pool=(2, 2), stride=(2, 2)),
                    modules.Convolution(filtersize=(5, 5, 10, 25), stride=(1, 1)),
                    modules.Rect(),
                    modules.SumPool(pool=(2, 2), stride=(2, 2)),
                    modules.Convolution(filtersize=(4, 4, 25, 100), stride=(1, 1)),
                    modules.Rect(),
                    modules.SumPool(pool=(2, 2), stride=(2, 2)),
                    modules.Convolution(filtersize=(1, 1, 100, 10), stride=(1, 1)),
                    modules.Flatten()
                ])
                print("training...")
                DNN.train(X=Xtrain,
                          Y=Ytrain,
                          Xval=Xtest,
                          Yval=Ytest,
                          iters=10**5,
                          lrate=0.0001,
                          batchsize=128)

                acc = np.mean(
                    np.argmax(DNN.forward(Xtest), axis=1) == np.argmax(Ytest, axis=1))
                del DNN  # free the network before the next retraining run
                print('metric model test accuracy is: {:0.4f}'.format(acc))
                res.append(acc)
            print("End of {}:training, accuracy...".format(trial))
            ress[v].append(res)

    print("metric...")
    # BUGFIX: original read `{v: np.mean(v, axis=0) for v in ress.item()}` —
    # dicts have no .item(), and it averaged the key string rather than the
    # recorded accuracy lists. Average each method's 3 accuracy curves.
    res_mean = {k: np.mean(vals, axis=0) for k, vals in ress.items()}
    print(res_mean)
    return res_mean
import tools.model_io
# BUGFIX: the script calls tools.data_loader.load_data() below, but only
# tools.model_io was imported — the data_loader submodule must be imported
# explicitly (unless the package __init__ happens to pull it in).
import tools.data_loader
import modules
import shutil
import settings

# --- user configuration -----------------------------------------------------
batchsize = 10
numbIters = 1000

# --- load data --------------------------------------------------------------
# X/Y are expected to be dicts with 'train' and 'valid' splits
# (see tools.data_loader.load_data).
X, Y = tools.data_loader.load_data()

# --- set up neural network --------------------------------------------------
# Small fully-connected classifier: input pixels -> 4 hidden units -> 4 outputs.
nn = modules.Sequential([
    modules.Linear(settings.nrOfPixels, 4),
    modules.Rect(),
    modules.Linear(4, 4),
    modules.SoftMax()
])

# --- train ------------------------------------------------------------------
nn.train(X['train'],
         Y['train'],
         Xval=X['valid'],
         Yval=Y['valid'],
         batchsize=batchsize,
         iters=numbIters)

# Name under which this trained network is identified/saved.
nnName = 'nn_' + nn.name + ('_(batchsize_{}_number_iterations_{})'.format(
    batchsize, numbIters))