# Example no. 1 (votes: 0)
def exec(model_dir, chips_dir, eval_size):
    """Evaluate the model saved under ``model_dir`` on a held-out chip set.

    Restores the hyper-parameters persisted at training time; when the
    caller passes a non-positive ``eval_size`` or a ``None`` ``chips_dir``,
    the values recorded in those training parameters are used instead.
    NOTE: the name ``exec`` shadows the builtin; kept for interface parity.
    """
    tf.logging.set_verbosity(tf.logging.INFO)

    start_time = time.time()  # kept for parity with the original; not read here

    # Reload the parameters that were saved alongside the trained model.
    params = dl_utils.load_object(
        dl_utils.new_filepath('train_params.dat', directory=model_dir))
    tf.set_random_seed(params['seed'])

    # Fall back to the training-time settings when not supplied by the caller.
    eval_size = params['eval_size'] if eval_size <= 0 else eval_size
    chips_dir = params['chips_dir'] if chips_dir is None else chips_dir

    # Split the chip dataset; only the evaluation half is used below.
    dat_path, exp_path, mtd_path = dl_utils.chips_data_files(chips_dir)
    (train_data, eval_data, train_expect, eval_expect,
     chips_info) = dl_utils.train_test_split(dat_path, exp_path, mtd_path,
                                             eval_size)

    print("Evaluating the model stored into " + model_dir)

    estimator = tf.estimator.Estimator(model_fn=md.description,
                                       params=params,
                                       model_dir=model_dir)
    do_evaluation(estimator, eval_data, eval_expect, 'EVALUATING', params)
# Example no. 2 (votes: 0)
def exec(img_path,
         output_dir,
         chip_size,
         pad_size,
         flip,
         rotate,
         shuffle=True,
         offset_list=None,
         nodata_value=-50.0,
         discard_nodata=False):
    """Generate image chips from ``img_path`` into memmapped arrays.

    Computes the chip layout metadata, writes it to disk, fills two
    ``np.memmap`` arrays (data and expected/label chips), optionally
    shuffles them in place, and flushes both to ``output_dir``.

    Returns:
        (dat_ndarray, exp_ndarray): the flushed memmapped chip arrays.
    """
    # BUGFIX: the original default was the mutable literal [[0, 0]], which
    # is created once and shared across every call — any in-place mutation
    # (here or inside dl_utils) would leak between calls. Build a fresh
    # list per call instead; behavior for callers is unchanged.
    if offset_list is None:
        offset_list = [[0, 0]]

    print("Analyzing " + img_path + " image.")
    dat_path, exp_path, mtd_path = dl_utils.chips_data_files(output_dir)

    # Dry-run pass: derive shapes/dtypes and chip count without writing data.
    chips_info = dl_utils.chips_info(img_path, nodata_value, chip_size,
                                     pad_size, offset_list, rotate, flip,
                                     discard_nodata)

    # Persist the metadata first so readers can interpret the memmaps.
    dl_utils.save_object(mtd_path, chips_info)

    # Disk-backed arrays sized from the metadata ('w+' creates/overwrites).
    dat_ndarray = np.memmap(dat_path,
                            dtype=chips_info['dat_dtype'],
                            mode='w+',
                            shape=chips_info['dat_shape'])
    exp_ndarray = np.memmap(exp_path,
                            dtype=chips_info['exp_dtype'],
                            mode='w+',
                            shape=chips_info['exp_shape'])

    print("Generating " + str(chips_info['dat_shape'][0]) + " chips into " +
          output_dir + " directory.")
    dl_utils.generate_chips(img_path, dat_ndarray, exp_ndarray, nodata_value,
                            chip_size, pad_size, offset_list, rotate, flip,
                            discard_nodata)

    if shuffle:
        print("Shuffling generated chips.")
        # In-place shuffle keeps dat/exp rows paired.
        shuffle_chips(dat_ndarray, exp_ndarray, chips_info['n_chips'])

    # Force pending writes through to the backing files.
    dat_ndarray.flush()
    exp_ndarray.flush()

    return dat_ndarray, exp_ndarray
# Script entry point: training-time setup. Parses CLI arguments, splits the
# chip dataset, records the run parameters, and builds the estimator.
# NOTE(review): the estimator is assigned on the last visible line but never
# used here — this block presumably continues (training loop) beyond the
# excerpt shown; confirm against the full file.
if __name__ == "__main__":
	args = parse_args()

	# Individual options used below; `params` keeps the whole namespace as a
	# dict so it can be persisted and passed to the model_fn.
	chips_dir = args.chips_dir
	output_dir = args.output_dir
	eval_size = args.eval_size
	batch_size = args.batch_size
	epochs = args.epochs
	seed = args.seed
	params = vars(args)

	# Seed TF for reproducibility and enable INFO logging (TF1-style APIs).
	tf.set_random_seed(seed)
	tf.logging.set_verbosity(tf.logging.INFO)

	# Locate the chip files and split them into train/test partitions.
	dat_path, exp_path, mtd_path = dl_utils.chips_data_files(chips_dir)
	train_data, test_data, train_expect, test_expect, chips_info = dl_utils.train_test_split(dat_path, exp_path, mtd_path, eval_size)

	# Report the dataset footprint (bytes scaled to Mb) and shapes up front.
	print("Memory size: %d Mb" % ( ((train_data.size * train_data.itemsize) + (test_data.size * test_data.itemsize))*0.000001 ))
	print("Train data shape: " + str(train_data.shape))
	print("Train label shape: " + str(train_expect.shape))
	print("Train params: " + str(params))

	# Save the run parameters and chip metadata next to the model outputs so
	# a later evaluation run can reload them from output_dir.
	dl_utils.mkdirp(output_dir)
	param_path = dl_utils.new_filepath('train_params.dat', directory=output_dir)
	chips_info_path = dl_utils.new_filepath('chips_info.dat', directory=output_dir)
	
	dl_utils.save_object(param_path, params)
	dl_utils.save_object(chips_info_path, chips_info)

	# Estimator wired to the project model_fn; checkpoints go to output_dir.
	estimator = tf.estimator.Estimator(model_fn=md.description, params=params, model_dir=output_dir)