def __init__(self, db, config):
    # Build the feature database and cache the raw grasp database handle.
    self.feature_db = FeatureDatabase(config)
    self.db = db

    # Kernel used to compare grasps directly; hyperparameters come from config.
    self.grasp_kernel = kernels.SquaredExponentialKernel(
        sigma=config['kernel_sigma'], l=config['kernel_l'])

    # Kernel used to weight prior neighbors. The length scale is the inverse
    # of the configured neighbor weight: a larger weight -> shorter length
    # scale -> neighbors must be closer to contribute.
    # NOTE: use 1.0 (float) so Python 2 integer division cannot truncate the
    # length scale to 0 when 'prior_neighbor_weight' is an int.
    self.neighbor_kernel = kernels.SquaredExponentialKernel(
        sigma=1.0, l=(1.0 / config['prior_neighbor_weight']))

    # Neighbor-lookup parameters for the prior.
    self.neighbor_distance = config['prior_neighbor_distance']
    self.num_neighbors = config['prior_num_neighbors']

    # Keep the full config around for later lookups.
    self.config = config

    # Convergence / pruning tolerances for the two kernels.
    self.grasp_kernel_tolerance = config['kernel_tolerance']
    self.prior_kernel_tolerance = config['prior_kernel_tolerance']
training_datasets.append(all_training_subset_keys[:train_size]) else: training_datasets.append(all_training_subset_keys) if isinstance(val_size, (int)): val_datasets.append(all_val_keys[:val_size]) else: val_datasets.append(all_val_keys) if isinstance(test_size, (int)): test_datasets.append(all_test_keys[:test_size]) else: test_datasets.append(all_test_keys) # load all feature vectors feature_db = FeatureDatabase(config) all_feature_vectors = feature_db.feature_vectors() # add training datasets print print 'Creating training datasets' for training_dataset, training_size in zip(training_datasets, training_sizes): suffix = str(training_size) + '_train' print 'Creating set', suffix create_index_file(training_dataset, index_file_template % (suffix)) create_nn_with_keys(all_feature_vectors, training_dataset, suffix) # add val datasets print print 'Creating validation datasets' for val_dataset, training_size in zip(val_datasets, training_sizes):