Example 1
def check_model_list(model_list, datasets):
	"""Smoke-test: try to build every model in model_list for every dataset in datasets."""
	opts = default_opt_creator()
	for dataset_str in datasets:
		for model in model_list:
			set_dataset(opts, dataset=dataset_str)
			set_default_opts_based_on_model_dataset(opts)
			set_model_string(opts, model)
			get_model_from_db(model, opts)
Example 2
def check_model_list(model_list, datasets):
	"""Build every model in model_list for every dataset and print its summary."""
	opts = default_opt_creator()
	for dataset_str in datasets:
		for model in model_list:
			set_dataset(opts, dataset=dataset_str)
			set_default_opts_based_on_model_dataset(opts)
			set_model_string(opts, model)
			model_dict = get_model_from_db(model, opts)
			model = model_dict['model']
			model.summary()
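A minimal invocation sketch for the function above (the dataset strings are assumptions; 'nin_tree_berp_1' is only borrowed from the commented-out code in the later examples):

# Hedged usage sketch -- model and dataset names below are illustrative assumptions.
model_list = ['nin_tree_berp_1']          # hypothetical model identifier
datasets = ['cifar10', 'cifar100']        # hypothetical dataset strings
check_model_list(model_list, datasets)    # builds and summarizes each (model, dataset) pair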
Example 3
		print(100 * '*', 3 * '\n', model_str, '\n', dataset_str, 3 * '\n', 100 * '*')
		opts = default_opt_creator()
		opts['experiment_name'] = experiment_name
		opts['experiment_tag'] = experiment_name + '/' + dataset_str + '/' + weight_model_name + 'loaded_to_' + model_str
		set_dataset(opts, dataset=dataset_str)
		opts = set_model_string(opts, model_str)
		opts = set_default_opts_based_on_model_dataset(opts)
		input_shape = opts['training_opts']['dataset']['input_shape']
		nb_class = opts['training_opts']['dataset']['nb_classes']
		# opts = set_expand_rate(opts, param_expand_sel)
		# optimizer = optimizers.Nadam()
		optimizer = optimizers.SGD(lr=opts['optimizer_opts']['lr'], momentum=opts['optimizer_opts']['momentum'],
		                           decay=opts['optimizer_opts']['decay'], nesterov=opts['optimizer_opts']['nestrov'])
		# optimizer = optimizers.Adadelta()
		""" MODEL PREPARE """
		model = get_model_from_db(model_str, opts)
		weight_model = get_model_from_db(weight_model_name, opts)
		checkpoint_path = os.path.join(
			global_constant_var.get_experimentcase_abs_path(weight_model_experiment_name, dataset_str, weight_model_name),
			'checkpoint')
		block_indices = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
		model_modification_utils.load_weights_by_block_index_list(
			model, block_indices, checkpoint_path, model_constructor_utils.CONVSH_NAME)
		model_modification_utils.load_weights_by_block_index_list(
			weight_model, block_indices, checkpoint_path, model_constructor_utils.CONVSH_NAME)
		model.compile(loss=opt_utils.get_loss(opts), optimizer=optimizer, metrics=opt_utils.get_metrics(opts))
		weight_model.compile(loss=opt_utils.get_loss(opts), optimizer=optimizer, metrics=opt_utils.get_metrics(opts))
		method_names = find_key_value_to_str_recursive(opts, '', {'param_expand'})
		opts['experiment_name'] = method_names
		# LOAD DATA
		(data_train, label_train), (data_test, label_test) = load_data(dataset_str, opts)
		data_train, data_test = preprocess_data_phase(opts, data_train, data_test)
		data_gen = data_augmentation_phase(opts)
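The snippet ends once the augmentation generator is created. Assuming data_gen is a Keras ImageDataGenerator and that batch size and epoch count sit under opts['training_opts'] (both assumptions, not shown in the original), training could continue roughly as follows:

# Sketch only: data_gen is assumed to be a keras.preprocessing.image.ImageDataGenerator,
# and the 'batch_size' / 'epochs' keys under opts['training_opts'] are assumed defaults.
batch_size = opts['training_opts'].get('batch_size', 128)
epochs = opts['training_opts'].get('epochs', 1)
data_gen.fit(data_train)
model.fit_generator(data_gen.flow(data_train, label_train, batch_size=batch_size),
                    steps_per_epoch=len(data_train) // batch_size,
                    epochs=epochs,
                    validation_data=(data_test, label_test))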
Example 4
					opts = default_opt_creator()
					opts['experiment_name'] = experiment_name

					set_dataset(opts, dataset=dataset_str)
					opts = set_model_string(opts, model_str)
					opts = set_default_opts_based_on_model_dataset(opts)

					input_shape = opts['training_opts']['dataset']['input_shape']
					nb_class = opts['training_opts']['dataset']['nb_classes']
					# opts = set_expand_rate(opts, param_expand_sel)
					# optimizer = optimizers.Nadam()
					optimizer = optimizers.SGD(lr=opts['optimizer_opts']['lr'], momentum=opts['optimizer_opts']['momentum'],
					                           decay=opts['optimizer_opts']['decay'], nesterov=opts['optimizer_opts']['nestrov'])
					# optimizer = optimizers.Adadelta()
					""" MODEL PREPARE """
					model_dict = get_model_from_db(model_str, opts)
					model = model_dict['model']
					model_total_params = (model.count_params() // 100000) / 10
					if total_params != 0 and model_total_params != total_params:
						# Rescale the expand rate so the rebuilt model matches the target parameter count;
						# the sqrt reflects that the parameter count grows roughly quadratically with the expand rate.
						set_expand_rate(opts, np.sqrt(total_params / model_total_params) * get_expand_rate(opts))
						print('Expand Rate Changed')
						model_dict = get_model_from_db(model_str, opts)
						model = model_dict['model']
					if total_params == 0:
						total_params = model_total_params

					opts['experiment_tag'] = experiment_name + '/' + dataset_str + '/' + model_str + '/' + str((model.count_params()//100000)/10)+'M'
					# out_tensor_list  = model_dict['out']
					# output_num  = len(out_tensor_list)
					model.summary()
					# model_modification_utils.load_weights_by_block_index_list(model, [1, 2, 3, 4, 5, 6, 7, 8, 9], os.path.join(
					# 	global_constant_var.get_experimentcase_abs_path(experiment_name, dataset_str, 'nin_tree_berp_1'), 'checkpoint'),
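To make the expand-rate rescaling above concrete (an illustration with invented numbers, not values from the original run):

# Illustration only, with made-up numbers: if the first model fixed the target at
# total_params = 1.5 (1.5M parameters) and the current model comes out at 0.6M,
# the expand rate is multiplied by sqrt(1.5 / 0.6) ~= 1.58 before rebuilding.
import numpy as np
target_params, current_params = 1.5, 0.6
scale = np.sqrt(target_params / current_params)   # ~1.58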
Example 5
 opts['experiment_tag'] = experiment_name + '/' + dataset_str + '/' + model_str
 set_dataset(opts, dataset=dataset_str)
 opts = set_model_string(opts, model_str)
 opts = set_default_opts_based_on_model_dataset(opts)
 input_shape = opts['training_opts']['dataset']['input_shape']
 nb_class = opts['training_opts']['dataset']['nb_classes']
 # opts = set_expand_rate(opts, param_expand_sel)
 # optimizer = optimizers.Nadam()
 optimizer = optimizers.SGD(
     lr=opts['optimizer_opts']['lr'],
     momentum=opts['optimizer_opts']['momentum'],
     decay=opts['optimizer_opts']['decay'],
     nesterov=opts['optimizer_opts']['nestrov'])
 # optimizer = optimizers.Adadelta()
 """ MODEL PREPARE """
 model = get_model_from_db(model_str, opts)
 model.summary()
 # model_modification_utils.load_weights_by_block_index_list(model, [1, 2, 3, 4, 5, 6, 7, 8, 9], os.path.join(
 # 	global_constant_var.get_experimentcase_abs_path(experiment_name, dataset_str, 'nin_tree_berp_1'), 'checkpoint'),
 #                                                           model_constructor_utils.CONVSH_NAME)
 model.compile(loss=opt_utils.get_loss(opts),
               optimizer=optimizer,
               metrics=opt_utils.get_metrics(opts))
 method_names = find_key_value_to_str_recursive(
     opts, '', {'param_expand'})
 opts['experiment_name'] = method_names
 # LOAD DATA
 (data_train, label_train), (data_test, label_test) = load_data(dataset_str, opts)
 data_train, data_test = preprocess_data_phase(opts, data_train, data_test)