sys.path.insert(1, os.path.join(sys.path[0], '../src'))
import nn

# Command-line interface: copy selected layers from a source network to a
# target network, each given as a layout specification plus a weights file.
# Fixes: "sourcce" typo and missing "to" in the user-facing help strings.
parser = argparse.ArgumentParser()
parser.add_argument('source_layout', metavar='source-layout', help='Path to source network layout specification')
parser.add_argument('source_weights', metavar='source-weights', help='Path to source network weights')
parser.add_argument('target_layout', metavar='target-layout', help='Path to target network layout specification')
parser.add_argument('target_weights', metavar='target-weights', help='Path to target network weights')
parser.add_argument('layerspec', help='Which layers to copy. Format: a-b-...-z where a-z are 0-based layer numbers')
args = parser.parse_args()

#~ Load source model
# Build the source network from its layout file. nn.build_model_to_layout
# returns a (model, optimizer) pair — presumably a Keras-style model, since
# load_weights() is called on it below (TODO confirm against nn module).
print('Loading source model from {0}'.format(args.source_layout))
source_layout = nn.load_layout(args.source_layout)
source_model, source_optimizer = nn.build_model_to_layout(source_layout)

#~ Load source weights
# Source weights are mandatory: no existence check here, unlike the target
# weights below — a missing file will raise inside load_weights.
print('\tLoading source weights from {0}'.format(args.source_weights))
source_model.load_weights(args.source_weights)

#~ Load target model
# Build the target network the same way as the source network above.
print('Loading target model from {0}'.format(args.target_layout))
target_layout = nn.load_layout(args.target_layout)
target_model, target_optimizer = nn.build_model_to_layout(target_layout)

#~ Load target weights
# Target weights are optional: they are only loaded when the file already
# exists, so a brand-new target network keeps its freshly built weights.
if os.path.isfile(args.target_weights):
	print('\tLoading target weights from {0}'.format(args.target_weights))
	target_model.load_weights(args.target_weights)
else:
						# paste the filter into the filter collection
						filter_collection.paste(im_filter, (xpos, ypos))

				# save the filter collection of layer 'id'
				filter_collection.save(path + "weights_on_layer_" + str(layer_id) + "_" + filename + ".png")

# Entry point of the filter-visualization tool: parse CLI arguments, rebuild
# the model from its layout file, load trained weights, and render per-layer
# filter images via visualize_filters (defined earlier in this file).
if __name__ == "__main__":
	#~ Parse parameters
	parser = argparse.ArgumentParser()
	parser.add_argument('weights', help='Path to the weights which are to be loaded')
	parser.add_argument('layout', help='Path network layout specification')
	parser.add_argument('-s', '--savepath', help='Path to save location of the visualized filters', default='./')
	# Fixed user-facing typo: "programm" -> "program".
	parser.add_argument('-v', '--verbose', help='Determine whether the program shall print information in the terminal or not', action="store_true")
	parser.add_argument('-n', '--filename', help='Pass a string which is appended to the created image files.', default='')
	args = parser.parse_args()

	# Load model — nn.build_model_to_layout returns (model, optimizer).
	print('Loading model from {0}'.format(args.layout))
	layout = nn.load_layout(args.layout)
	model, optimizer = nn.build_model_to_layout(layout)

	#~ Load weights into the freshly built model
	print('Loading weights from \"{0}\"'.format(args.weights))
	model.load_weights(args.weights)

	# Visualize filters: writes one image per layer into args.savepath,
	# with args.filename appended to each image file name.
	print('Generating visualizations and storing to {0}'.format(args.savepath))
	visualize_filters(model, args.savepath, args.filename)

	print('Done')
# Beispiel #3 ("Example #3") — separator left over from the code-sample
# listing this file was aggregated from; the following "0" was its score,
# not code: 0
x_test, y_test = dataset_io.read_data(test_data_path, resolution, normalize=normalize, grayscale=grayscale) # change

# When output normalization is enabled, scale both label sets by the largest
# image dimension — presumably the labels are pixel coordinates mapped into
# [0, 1] (TODO confirm; see the inverse scaling passed to Distortions below).
if normalize_output:
	y_train /= max_dim
	y_test /= max_dim

# Collects one RecordLoss callback per grid-search configuration (appended
# inside the loop below) so losses can be compared after the search.
loss_callbacks = []

# do grid search
for m_idx, m in enumerate(momentums):
#for w_idx, w in enumerate(weightscales):
	for l_idx, l in enumerate(learningrates):	
		print("Learning rate: ", l)
		#print("weightscale: ", w)
		print("momentum: ", m)
		model, optimizer = nn.build_model_to_layout(layout, learningrate=l, momentum=m, decay=0.0) #change

		if normalize_output:
			callbacks = [custom_callbacks.StopEarly(3), custom_callbacks.Distortions(x_train, y_train, x_train.shape[0], (float(original_resolution[0]) / float(max_dim), float(original_resolution[1]) / float(max_dim)))]
		else:
			callbacks = [custom_callbacks.StopEarly(3), custom_callbacks.Distortions(x_train, y_train, x_train.shape[0], original_resolution)]
		loss_callback = custom_callbacks.RecordLoss(epochs, nb_labels, x_train, y_train, resolution, model, grayscale)
		callbacks.append(loss_callback)
		loss_callbacks.append(loss_callback)

		# scale weights
		#weights = model.get_weights()
		#helpers.mult_list(weights, w * 20)
		#model.set_weights(weights)

		# Train the model