import time

from dense_correspondence.dataset.spartan_dataset_masked import SpartanDataset
from dense_correspondence.training.training import DenseCorrespondenceTraining


def pdc_train(dataset_config, train_config, dataset_name, logging_dir, num_iterations, dimension):

    # print("training args")
    # print(dataset_config)
    # print(train_config)
    # print(dataset_name)
    # print(logging_dir)
    # print(num_iterations)
    # print(dimension)
    print("dataset_name: {}".format(dataset_name))

    dataset = SpartanDataset(config=dataset_config)

    d = dimension # the descriptor dimension
    name = dataset_name.split('/')[-1] + "_%d" %(d)
    train_config["training"]["logging_dir_name"] = name

    print("logging dir name: {}".format(name))

    train_config["training"]["logging_dir"] = logging_dir
    train_config["dense_correspondence_network"]["descriptor_dimension"] = d
    train_config["training"]["num_iterations"] = num_iterations
    print "training descriptor of dimension %d" %(d)
    start_time = time.time()
    train = DenseCorrespondenceTraining(dataset=dataset, config=train_config)
    train.run()
    end_time = time.time()
    print "finished training descriptor of dimension %d using time %.2f seconds" %(d, end_time-start_time)
    def train(self):
        # This should take about ~12-15 minutes with a GTX 1080 Ti

        # All of the saved data for this network will be located in the
        # code/data_volume/pdc/trained_models/tutorials/caterpillar_3 folder

        descr_dim = self.train_config["dense_correspondence_network"][
            "descriptor_dimension"]
        print("training descriptor of dimension %d" % (descr_dim))
        train = DenseCorrespondenceTraining(dataset=self.dataset,
                                            config=self.train_config)
        train.run()
        print("finished training descriptor of dimension %d" % (descr_dim))
Example #3
import os
import logging
import dense_correspondence_manipulation.utils.utils as utils
from dense_correspondence.dataset.spartan_dataset_masked import SpartanDataset
from dense_correspondence.training.training import DenseCorrespondenceTraining
from dense_correspondence.evaluation.evaluation import DenseCorrespondenceEvaluation

logging.basicConfig(level=logging.INFO)

config_filename = os.path.join(utils.getDenseCorrespondenceSourceDir(), 'config', 'dense_correspondence',
                               'dataset', 'composite', 'toy.yaml')
config = utils.getDictFromYamlFilename(config_filename)

train_config_file = os.path.join(utils.getDenseCorrespondenceSourceDir(), 'config', 'dense_correspondence',
                               'training', 'toy_training.yaml')

train_config = utils.getDictFromYamlFilename(train_config_file)
dataset = SpartanDataset(config=config)

logging_dir = "/home/zhouxian/git/pytorch-dense-correspondence/pdc/trained_models/tutorials"
d = 3 # the descriptor dimension
name = "toy_hacker_%d" %(d)
train_config["training"]["logging_dir_name"] = name
train_config["training"]["logging_dir"] = logging_dir
train_config["dense_correspondence_network"]["descriptor_dimension"] = d

TRAIN = True
EVALUATE = True

if TRAIN:
    print "training descriptor of dimension %d" %(d)
    train = DenseCorrespondenceTraining(dataset=dataset, config=train_config)
    train.run()
    # train.run_from_pretrained('/home/zhouxian/git/pytorch-dense-correspondence/pdc/trained_models/tutorials/backup/toy_hack_3')
    print "finished training descriptor of dimension %d" %(d)
#     train.run()
#     print "finished training descriptor of dimension %d" %(d)

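
# The EVALUATE flag above is never used in this snippet. A sketch of how it could
# be wired up, following the pattern in the project's training tutorial; the
# num_image_pairs value is an arbitrary choice, and run_evaluation_on_network is
# assumed to be available on DenseCorrespondenceEvaluation as in that tutorial.
if EVALUATE:
    model_folder = os.path.join(logging_dir, name)
    DenseCorrespondenceEvaluation.run_evaluation_on_network(model_folder,
                                                            num_image_pairs=100)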

### ITERATIVE


# First stage: train from scratch on the green Nike shoe dataset
config_filename = os.path.join(utils.getDenseCorrespondenceSourceDir(), 'config', 'dense_correspondence', 
                               'dataset', 'composite', 'shoe_train_1_green_nike.yaml')
config = utils.getDictFromYamlFilename(config_filename)
dataset = SpartanDataset(config=config)

print("training descriptor of dimension %d" %(d))
train = DenseCorrespondenceTraining(dataset=dataset, config=train_config)
train.run()
print("finished training descriptor of dimension %d" %(d))

# Second stage: fine-tune from the first-stage checkpoint on the gray Nike shoe dataset
config_filename = os.path.join(utils.getDenseCorrespondenceSourceDir(), 'config', 'dense_correspondence', 
                               'dataset', 'composite', 'shoe_train_1_gray_nike.yaml')
config = utils.getDictFromYamlFilename(config_filename)
dataset = SpartanDataset(config=config)

print("training descriptor of dimension %d" %(d))
train_config["training"]["logging_dir_name"] = name+"1"
train = DenseCorrespondenceTraining(dataset=dataset, config=train_config)
train.run_from_pretrained("2018-10-15/"+name)
print("finished training descriptor of dimension %d" %(d))

train_config_file = os.path.join(utils.getDenseCorrespondenceSourceDir(),
                                 'config', 'dense_correspondence', 'training',
                                 'training.yaml')

train_config = utils.getDictFromYamlFilename(train_config_file)
dataset = SpartanDataset(config=config)

logging_dir = "/home/priya/code/data_volume/pdc_synthetic_2/trained_models/tutorials"
num_iterations = args.iters
d = args.dim  # the descriptor dimension
name = args.name
train_config["training"]["logging_dir_name"] = name
train_config["training"]["logging_dir"] = logging_dir
train_config["dense_correspondence_network"]["descriptor_dimension"] = d
train_config["training"]["num_iterations"] = num_iterations
if args.normalization == "unit":
    train_config["dense_correspondence_network"]["normalize"] = True
    print("Using unit normalization")
else:
    assert args.normalization == "standard"  # By default, if "normalize" is not in the config, it defaults to False

if args.depth_invariant:
    train_config["dense_correspondence_network"]["depth_invariant"] = True
    print("Using depth-invariant descriptors")

train = DenseCorrespondenceTraining(dataset=dataset, config=train_config)
if args.resume:
    train.run_from_pretrained(
        "/home/priya/code/data_volume/pdc_synthetic_2/trained_models/tutorials/{}"
        .format(args.name))
else:
    train.run()
Example #6
train_config_file = os.path.join(utils.getDenseCorrespondenceSourceDir(),
                                 'config', 'dense_correspondence', 'training',
                                 'training.yaml')

train_config = utils.getDictFromYamlFilename(train_config_file)
dataset = SpartanDataset(config=config)  # dataset config loaded from a composite yaml, as above

logging_dir = "/home/priya/code/data_volume/pdc_synthetic_2/trained_models/tutorials"
num_iterations = args.iters
d = args.dim  # the descriptor dimension
name = args.name
train_config["training"]["logging_dir_name"] = name
train_config["training"]["logging_dir"] = logging_dir
train_config["dense_correspondence_network"]["descriptor_dimension"] = d
train_config["training"]["num_iterations"] = num_iterations
if args.normalization == "unit":
    train_config["dense_correspondence_network"]["normalize"] = True
    print("Using unit normalization")
else:
    assert args.normalization == "standard"  # By default, if "normalize" is not in the config, it defaults to False

if args.depth_invariant:
    train_config["dense_correspondence_network"]["depth_invariant"] = True
    print("Using depth-invariant descriptors")

train = DenseCorrespondenceTraining(dataset=dataset, config=train_config)
if args.resume:
    train.run_from_pretrained("simulated/{}".format(args.name))
else:
    train.run()
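
# After training, the saved network can be loaded back for inference. A sketch
# only: it assumes DenseCorrespondenceNetwork.from_model_folder is available, as
# used elsewhere in the project, and the model folder path is a placeholder.
from dense_correspondence.network.dense_correspondence_network import DenseCorrespondenceNetwork

model_folder = os.path.join(logging_dir, name)
dcn = DenseCorrespondenceNetwork.from_model_folder(model_folder)
dcn.eval()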