# Imports inferred from the calls below; the exact module layout of this
# fastnet fork is an assumption, and data_loader is taken to be a repo-local helper.
import sys

import numpy as np

import fastnet.net
from fastnet import parser

import data_loader

# mixture sizes: number of clean, noisy-labeled, and background-noise images
pure_sz = int(sys.argv[1])
noise_sz = int(sys.argv[2])
back_sz = int(sys.argv[3])

# setting
batch_size = 128
param_file = '/home/sainbar/fastnet-confussion-layer/config/cifar-10-18pct-confussion11x22.cfg'
learning_rate = 1
image_color = 3
image_size = 32
# fastnet batches are laid out as (channels, height, width, batch_size)
image_shape = (image_color, image_size, image_size, batch_size)
init_model = parser.parse_config_file(param_file)
net = fastnet.net.FastNet(learning_rate, image_shape, init_model)

# prepare data
train_data, train_labels, test_data, test_labels = data_loader.load_cifar10()
# per-pixel mean over the training set (one image per column)
data_mean = train_data.mean(axis=1, keepdims=True)
train_data = train_data - data_mean
test_data = test_data - data_mean

# noisy data
noisy_data, noisy_labels = data_loader.load_noisy_labeled()
noisy_data = noisy_data - data_mean
noisy_labels += 11  # shift noisy labels past the 10 clean classes and the background class (10)

# background noise
back_data = data_loader.load_noise()
back_data = back_data - data_mean
back_labels = np.ones(back_data.shape[1]) * 10  # background noise gets its own class, 10

# assemble the training set: pure_sz clean, noise_sz noisy-labeled, and back_sz background images
train_data = np.concatenate((train_data[:, 0:pure_sz], noisy_data[:, 0:noise_sz], back_data[:, 0:back_sz]), axis=1)
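
# The matching labels are not assembled in this excerpt; a minimal sketch of the
# corresponding concatenation (assuming the label arrays loaded above are 1-D,
# one entry per image column):
train_labels = np.concatenate((train_labels[0:pure_sz], noisy_labels[0:noise_sz], back_labels[0:back_sz]))

# Optional: shuffle images and labels together so minibatches mix all three sources.
perm = np.random.permutation(train_data.shape[1])
train_data = train_data[:, perm]
train_labels = train_labels[perm]
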
Example #4
# This excerpt starts mid-script: net, args, alpha, pure_sz, noisy_sz, and
# back_sz are defined by earlier code that is not shown here.
import os

import numpy as np

import data_loader

# The indentation of the original first line implies an enclosing guard that
# was cut off; `if args.model:` is a reconstruction, not the original condition.
if args.model:
    net.checkpoint_name += '_' + args.model
net.checkpoint_name += '_clean' + str(int(pure_sz / 1000)) + 'k'
net.checkpoint_name += '_noisy' + str(int(noisy_sz / 1000)) + 'k'
net.checkpoint_name += '_back' + str(int(back_sz / 1000)) + 'k'
net.checkpoint_name += '_alpha' + str(alpha)
if args.wdecayX != 1:
    net.checkpoint_name += '_wdX' + str(args.wdecayX)
    for l in net.layers:
        if hasattr(l, 'wc'):
            l.wc *= args.wdecayX
# expand '~' so the path is usable; makedirs also creates missing parent directories
net.output_dir = os.path.expanduser('~/data/outside-noise-results/results_BU_robust/') + net.checkpoint_name + '/'
if not os.path.exists(net.output_dir):
    os.makedirs(net.output_dir)
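
# For illustration (hypothetical values, not from the original): with
# args.model='base', pure_sz=50000, noisy_sz=20000, back_sz=10000, alpha=0.5,
# and args.wdecayX=2, the code above appends
# '_base_clean50k_noisy20k_back10k_alpha0.5_wdX2' to net.checkpoint_name,
# and results land in a matching directory under ~/data/outside-noise-results/results_BU_robust/.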

# prepare data
clean_data, clean_labels, test_data, test_labels = data_loader.load_cifar10()
data_mean = clean_data.mean(axis=1, keepdims=True)
clean_data = clean_data - data_mean
test_data = test_data - data_mean

# background noise
back_data = data_loader.load_noise()
back_data = back_data - data_mean
back_labels = np.ones(back_data.shape[1])
for i in range(back_sz):
    back_labels[i] = i % 10  # deterministic cyclic labels 0-9, easy to reproduce

# noisy data
noisy_data, noisy_labels = data_loader.load_noisy_labeled()
noisy_data = noisy_data - data_mean
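
# The excerpt ends here; by analogy with the script above, the mixed training
# set would be assembled the same way (a sketch under that assumption, not the
# original code):
train_data = np.concatenate((clean_data[:, 0:pure_sz], noisy_data[:, 0:noisy_sz], back_data[:, 0:back_sz]), axis=1)
train_labels = np.concatenate((clean_labels[0:pure_sz], noisy_labels[0:noisy_sz], back_labels[0:back_sz]))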