def load_dataset_from_config(self, config):
    """
    Loads the train and test datasets specified by the given config
    and constructs their DataLoaders.

    :param config: dict parsed from a YAML file
    :type config: dict
    :return: None
    :rtype: None
    """
    self._dataset = SpartanDataset(mode="train", config=config)
    self._dataset_test = SpartanDataset(mode="test", config=config)
    self.load_dataset()
def load_dataset(self):
    """
    Loads the dataset and constructs a DataLoader for training.
    If `compute_test_loss` is enabled in the config, additionally
    creates a test dataset and a DataLoader for the test data.

    :return: None
    :rtype: None
    """
    batch_size = self._config['training']['batch_size']
    num_workers = self._config['training']['num_workers']

    if self._dataset is None:
        self._dataset = SpartanDataset.make_default_10_scenes_drill()

    # self._dataset.load_all_pose_data()
    self._dataset.load_all_knots_info()
    self._dataset.set_parameters_from_training_config(self._config)

    self._data_loader = torch.utils.data.DataLoader(self._dataset,
                                                    batch_size=batch_size,
                                                    shuffle=True,
                                                    num_workers=num_workers,
                                                    drop_last=True)

    # create a test dataset
    if self._config["training"]["compute_test_loss"]:
        if self._dataset_test is None:
            self._dataset_test = SpartanDataset(mode="test",
                                                config=self._dataset.config)

        self._dataset_test.load_all_pose_data()
        self._dataset_test.set_parameters_from_training_config(self._config)
        self._data_loader_test = torch.utils.data.DataLoader(self._dataset_test,
                                                             batch_size=batch_size,
                                                             shuffle=True,
                                                             num_workers=2,
                                                             drop_last=True)
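# Usage sketch for the two methods above (not part of the original source):
# `trainer` stands in for an instance of the enclosing training class, and
# the config path follows the composite-dataset layout used in these scripts.
config_filename = os.path.join(utils.getDenseCorrespondenceSourceDir(),
                               'config', 'dense_correspondence', 'dataset',
                               'composite', 'caterpillar_only_9.yaml')
config = utils.getDictFromYamlFilename(config_filename)
trainer.load_dataset_from_config(config)  # builds both datasets and their DataLoaders
for batch in trainer._data_loader:
    break  # each batch is ready to feed a forward/backward pass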
def pdc_train(dataset_config, train_config, dataset_name, logging_dir,
              num_iterations, dimension):
    print("dataset_name: {}".format(dataset_name))

    dataset = SpartanDataset(config=dataset_config)

    d = dimension  # the descriptor dimension
    name = dataset_name.split('/')[-1] + "_%d" % d
    train_config["training"]["logging_dir_name"] = name
    print("logging dir name: {}".format(name))

    train_config["training"]["logging_dir"] = logging_dir
    train_config["dense_correspondence_network"]["descriptor_dimension"] = d
    train_config["training"]["num_iterations"] = num_iterations

    print("training descriptor of dimension %d" % d)
    start_time = time.time()
    train = DenseCorrespondenceTraining(dataset=dataset, config=train_config)
    train.run()
    end_time = time.time()
    print("finished training descriptor of dimension %d using time %.2f seconds"
          % (d, end_time - start_time))
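# A hedged example of calling pdc_train; the YAML filenames follow the config
# layout used elsewhere in these scripts, and the dataset name, logging dir,
# and iteration count are illustrative:
src_dir = utils.getDenseCorrespondenceSourceDir()
dataset_config = utils.getDictFromYamlFilename(
    os.path.join(src_dir, 'config', 'dense_correspondence', 'dataset',
                 'composite', 'caterpillar_only_9.yaml'))
train_config = utils.getDictFromYamlFilename(
    os.path.join(src_dir, 'config', 'dense_correspondence', 'training',
                 'training.yaml'))
pdc_train(dataset_config, train_config,
          dataset_name='composite/caterpillar_only_9',
          logging_dir='trained_models/tutorials',
          num_iterations=3500,
          dimension=3)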
def load_specific_dataset(self):
    dataset_config_filename = os.path.join(
        utils.getDenseCorrespondenceSourceDir(), 'config',
        'dense_correspondence', 'dataset', 'composite',
        'rope_nonrigid_412vert_only.yaml')
    dataset_config = utils.getDictFromYamlFilename(dataset_config_filename)
    self._dataset = SpartanDataset(config=dataset_config)
def load_specific_dataset(self):
    dataset_config_filename = os.path.join(
        utils.getDenseCorrespondenceSourceDir(), 'config',
        'dense_correspondence', 'dataset', 'composite',
        'caterpillar_only_9.yaml')
    dataset_config = utils.getDictFromYamlFilename(dataset_config_filename)
    self._dataset = SpartanDataset(config=dataset_config)
def load_training_dataset(self):
    """
    Loads the dataset that this network was trained on.

    :return: a dataset object, loaded with the config as set in dataset.yaml
    :rtype: SpartanDataset
    """
    network_params_folder = self.path_to_network_params_folder
    network_params_folder = utils.convert_to_absolute_path(network_params_folder)
    dataset_config_file = os.path.join(network_params_folder, 'dataset.yaml')
    config = utils.getDictFromYamlFilename(dataset_config_file)
    return SpartanDataset(config_expanded=config)
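# Usage sketch (an assumption, not from the original source): `dcn` stands in
# for the object that owns load_training_dataset, i.e. something exposing
# path_to_network_params_folder such as a loaded network wrapper.
dataset = dcn.load_training_dataset()
dataset.set_train_mode()  # SpartanDataset API used elsewhere in these scripts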
def evaluate_model(model_lst, output_dir=None, num_image_pairs=100,
                   gt_dataset_config=None):
    if gt_dataset_config is not None:
        gt_dataset = SpartanDataset(config_expanded=gt_dataset_config)
    else:
        gt_dataset = None

    DCE = DenseCorrespondenceEvaluation
    for subdir in model_lst:
        print("evaluating model {}".format(subdir))
        start_time = time.time()
        output_subdir = os.path.join(utils.get_data_dir(), output_dir,
                                     subdir.split('/')[-1])
        DCE.run_evaluation_on_network(model_folder=subdir,
                                      compute_descriptor_statistics=True,
                                      cross_scene=False,
                                      output_dir=output_subdir,
                                      num_image_pairs=num_image_pairs,
                                      dataset=gt_dataset)
        end_time = time.time()
        print("evaluation takes %.2f seconds" % (end_time - start_time))
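# Hedged example call; the model folder names are placeholders:
model_lst = ["trained_models/tutorials/caterpillar_3",
             "trained_models/tutorials/caterpillar_9"]
evaluate_model(model_lst, output_dir="evaluation_results", num_image_pairs=100)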
def load_configuration(self):
    config_filename = os.path.join(DIR_PROJ, 'config',
                                   'dense_correspondence', 'dataset',
                                   'composite', 'caterpillar_only_9.yaml')
    config = utils.getDictFromYamlFilename(config_filename)

    train_config_file = os.path.join(DIR_PROJ, 'config',
                                     'dense_correspondence', 'training',
                                     'training.yaml')
    self.train_config = utils.getDictFromYamlFilename(train_config_file)
    self.dataset = SpartanDataset(config=config)

    logging_dir = "code/data_volume/pdc/trained_models/tutorials"
    num_iterations = 3500
    descr_dim = 3  # the descriptor dimension

    self.train_config["training"]["logging_dir_name"] = "caterpillar_%d" % descr_dim
    self.train_config["training"]["logging_dir"] = logging_dir
    self.train_config["dense_correspondence_network"]["descriptor_dimension"] = descr_dim
    self.train_config["training"]["num_iterations"] = num_iterations
def __init__(self, config_filename='shoes_all.yaml'):
    with HiddenPrints():
        self.config_filename = os.path.join(
            utils.getDenseCorrespondenceSourceDir(), 'config',
            'dense_correspondence', 'dataset', 'composite', config_filename)
        self.train_config_filename = os.path.join(
            utils.getDenseCorrespondenceSourceDir(), 'config',
            'dense_correspondence', 'training', 'training.yaml')
        self.config = utils.getDictFromYamlFilename(self.config_filename)
        self.train_config = utils.getDictFromYamlFilename(
            self.train_config_filename)
        self.dataset = SpartanDataset(config=self.config)
        self.dataset.set_parameters_from_training_config(self.train_config)

    # Holds the centroid and radius for each scene. Currently these cover
    # only the min and max z values; x and y may be included in the future.
    # Access pattern: self.centroid_and_radius[scene_name]["centroid"] or
    # self.centroid_and_radius[scene_name]["radius"]
    self.centroid_and_radius = {}
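# Usage sketch; `SceneStatistics` is a placeholder name for the class whose
# __init__ is shown above (the real class name is not visible in this snippet):
stats = SceneStatistics(config_filename='caterpillar_only_9.yaml')
stats.dataset.set_train_mode()
print(stats.centroid_and_radius)  # empty until per-scene stats are computed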
import dense_correspondence_manipulation.utils.utils as utils

dc_source_dir = utils.getDenseCorrespondenceSourceDir()
sys.path.append(dc_source_dir)
sys.path.append(os.path.join(dc_source_dir, "dense_correspondence",
                             "correspondence_tools"))
from dense_correspondence.dataset.spartan_dataset_masked import SpartanDataset, ImageType
from dense_correspondence_manipulation.simple_pixel_correspondence_labeler.annotate_correspondences import \
    label_colors, draw_reticle, pil_image_to_cv2, drawing_scale_config

config_filename = os.path.join(utils.getDenseCorrespondenceSourceDir(),
                               'config', 'dense_correspondence', 'dataset',
                               'composite',
                               'caterpillar_baymax_starbot_onlymulti_front.yaml')
config = utils.getDictFromYamlFilename(config_filename)
sd = SpartanDataset(config=config)
sd.set_train_mode()

annotated_data_yaml_filename = os.path.join(os.getcwd(),
                                            "new_annotated_pairs.yaml")
annotated_data = utils.getDictFromYamlFilename(annotated_data_yaml_filename)

index_of_pair_to_display = 0


def draw_points(img, img_points_picked):
    for index, img_point in enumerate(img_points_picked):
        color = label_colors[index % len(label_colors)]
        draw_reticle(img, int(img_point["u"]), int(img_point["v"]), color)
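# Minimal self-contained example of draw_points; the pixel coordinates below
# are made up for illustration and do not come from new_annotated_pairs.yaml:
import numpy as np

example_img = np.zeros((480, 640, 3), dtype=np.uint8)  # blank BGR image
example_points = [{"u": 100, "v": 200}, {"u": 320, "v": 240}]
draw_points(example_img, example_points)  # draws one reticle per picked point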
print("descriptor_filename", descriptor_filename) print("processing image %d of %d" % (counter, num_images)) counter += 1 if __name__ == "__main__": dc_source_dir = utils.getDenseCorrespondenceSourceDir() config_filename = os.path.join(dc_source_dir, 'config', 'dense_correspondence', 'evaluation', 'lucas_evaluation.yaml') eval_config = utils.getDictFromYamlFilename(config_filename) default_config = utils.get_defaults_config() utils.set_cuda_visible_devices(default_config['cuda_visible_devices']) dce = DenseCorrespondenceEvaluation(eval_config) network_name = "caterpillar_M_background_0.500_3" dcn = dce.load_network_from_config(network_name) dataset_config_file = os.path.join(dc_source_dir, 'config', 'dense_correspondence', 'dataset', 'composite', 'caterpillar_only_9.yaml') dataset_config = utils.getDictFromYamlFilename(dataset_config_file) dataset = SpartanDataset(config=dataset_config) scene_name = SCENE_NAME save_dir = SAVE_DIR compute_descriptor_images_for_single_scene(dataset, scene_name, dcn, save_dir) print("finished cleanly")
parser.add_argument("--data_name", type=str, default="caterpillar_upright.yaml") parser.add_argument("--run_prefix", type=str, default="caterpillar") parser.add_argument("--training_yaml", type=str, default="training.yaml") args = parser.parse_args() config_filename = os.path.join(utils.getDenseCorrespondenceSourceDir(), 'config', 'dense_correspondence', 'dataset', 'composite', args.data_name) config = utils.getDictFromYamlFilename(config_filename) train_config_file = os.path.join(utils.getDenseCorrespondenceSourceDir(), 'config', 'dense_correspondence', 'training', args.training_yaml) train_config = utils.getDictFromYamlFilename(train_config_file) dataset = SpartanDataset(config=config) dataset_test = None if train_config["training"]["compute_test_loss"]: dataset_test=SpartanDataset(mode="test", config=config) logging_dir = "trained_models/tutorials" #num_iterations = 3500 d = 3 # the descriptor dimension name = f"{args.run_prefix}_%d" %(d) train_config["training"]["logging_dir_name"] = name train_config["training"]["logging_dir"] = logging_dir train_config["dense_correspondence_network"]["descriptor_dimension"] = d #train_config["training"]["num_iterations"] = num_iterations
from dense_correspondence.training.training import DenseCorrespondenceTraining
from dense_correspondence.dataset.spartan_dataset_masked import SpartanDataset

logging.basicConfig(level=logging.INFO)

from dense_correspondence.evaluation.evaluation import DenseCorrespondenceEvaluation

config_filename = os.path.join(utils.getDenseCorrespondenceSourceDir(),
                               'config', 'dense_correspondence', 'dataset',
                               'composite', 'caterpillar_only.yaml')
config = utils.getDictFromYamlFilename(config_filename)

train_config_file = os.path.join(utils.getDenseCorrespondenceSourceDir(),
                                 'config', 'dense_correspondence', 'training',
                                 'training.yaml')
train_config = utils.getDictFromYamlFilename(train_config_file)
dataset = SpartanDataset(config=config)

logging_dir = "code/data_volume/pdc/trained_models/2018-10-15/"
num_iterations = (1500 // 4) - 1  # integer division keeps the iteration count an int
d = 2  # the descriptor dimension
name = "shoes_progress_actually_iterative_%d" % d
train_config["training"]["logging_dir_name"] = name
train_config["training"]["logging_dir"] = logging_dir
train_config["dense_correspondence_network"]["descriptor_dimension"] = d
train_config["training"]["num_iterations"] = num_iterations

TRAIN = True
EVALUATE = True

# All of the saved data for this network will be located in the
# code/data_volume/pdc/trained_models/2018-10-15/shoes_progress_actually_iterative_2 folder
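# A hedged sketch of how the TRAIN / EVALUATE flags are typically consumed,
# following the other training scripts here; the evaluation call reuses
# run_evaluation_on_network with only arguments seen elsewhere in this file set:
if TRAIN:
    train = DenseCorrespondenceTraining(dataset=dataset, config=train_config)
    train.run()

if EVALUATE:
    model_folder = os.path.join(logging_dir, name)
    DenseCorrespondenceEvaluation.run_evaluation_on_network(
        model_folder=model_folder, num_image_pairs=100)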