# Highest grasp roll angle psi [deg] that is still kept in the stripped set.
highest_psi_angle = 20

# Paths to datasets
dataset = 'oblique-C-xyzq_unaligned_Testing'
testing_dir = DATA_PATH + dataset + '/tensors/'
stripped_dir = DATA_PATH + dataset + '_psi_%d/' % highest_psi_angle


# Initiate the stripped tensor dataset, reusing the original dataset config.
config_filename = testing_dir + '../config.json'
with open(config_filename, 'r') as myfile:
    data = myfile.read()
tensor_config = json.loads(data)
stripped_dset = TensorDataset(stripped_dir, tensor_config)
tensor_datapoint = stripped_dset.datapoint_template

# Collect the unique field prefixes ("filetypes") and the five-digit tensor
# indices (characters [-9:-4] of each filename) found in the original dataset
# directory.  'config.json' produces two artefacts that are filtered out:
# '' as a prefix ('config.json'.split('_')[:-1] is empty) and 'nfig.' as an
# index ('config.json'[-9:-4]).
files_in_dir = os.listdir(testing_dir)
filetypes = list({'_'.join(fname.split('_')[:-1]) for fname in files_in_dir})
for artefact in ('nfig.', ''):
    if artefact in filetypes:
        filetypes.remove(artefact)
tensors_in_dir = sorted({fname[-9:-4] for fname in files_in_dir})
if 'nfig.' in tensors_in_dir:
    tensors_in_dir.remove('nfig.')

# Load grasp roll angles
# ===== Example #2 (scraped-sample separator; original marker: "示例#2" / "0") =====
    goal_per_grid = min(phi_grasps)
    undersample_ratio = goal_per_grid / phi_grasps

    return undersample_ratio


# Paths to datasets
data_dir = DATA_PATH + 'oblique-C-xyzq_Testing_beta/tensors/'
output_dir = DATA_PATH + 'oblique-C-xyzq_Testing_beta_phi/'

# Initiate tensor datasets
config_filename = data_dir + '../config.json'
with open(config_filename, 'r') as myfile:
    data = myfile.read()
tensor_config = json.loads(data)
undersampled_dset = TensorDataset(output_dir, tensor_config)
tensor_datapoint = undersampled_dset.datapoint_template

# Load sampling distribution
bg = np.load(data_dir + '../phi_grasps.npy')
undersample_distribution = get_undersample_ratio(bg)
undersample_distribution = np.append(undersample_distribution, 1.0)

# Load tensor identifiers from dataset path
files_in_dir = os.listdir(data_dir)
filetypes = list(set(['_'.join(file.split('_')[:-1])
                      for file in files_in_dir]))
tensors_in_dir = list(set([split[-9:-4] for split in files_in_dir]))
tensors_in_dir.sort()

if tensors_in_dir[0] == '.DS_S':
# ===== Example #3 (scraped-sample separator; original marker: "示例#3" / "0") =====
# Paths to dataset
dset = 'oblique-C-xyzq_phi5_psi20'
data_dir = '{}{}/tensors/'.format(DATA_PATH, dset)
training_dir = '{}{}_Training/'.format(DATA_PATH, dset)
testing_dir = '{}{}_Testing/'.format(DATA_PATH, dset)
replication_test_dir = DATA_PATH + 'overhead-A-xyzr_Testing/tensors/'
# BUGFIX: default the flag to False.  Previously it was only assigned inside
# the 'images' check below, so a dataset without an images/ directory crashed
# with a NameError at the `if SINGLE_IMAGES:` test further down.  This matches
# the sibling validation-split script, which initialises it the same way.
SINGLE_IMAGES = False
if 'images' in os.listdir(data_dir + '../'):
    SINGLE_IMAGES = True

# Initiate tensor datasets, reusing the source dataset config.
config_filename = data_dir + '../config.json'
with open(config_filename, 'r') as myfile:
    data = myfile.read()
tensor_config = json.loads(data)
training_dset = TensorDataset(training_dir, tensor_config)
testing_dset = TensorDataset(testing_dir, tensor_config)
tensor_datapoint = testing_dset.datapoint_template

# Copy the config into both split directories; best-effort, a missing config
# only skips the copy.
try:
    copyfile(data_dir + 'config.json', training_dir + '/tensors/config.json')
    copyfile(data_dir + 'config.json', testing_dir + '/tensors/config.json')
except FileNotFoundError:
    print("Could not find config file. Skip copying.")

# Mirror the per-grasp image directory layout in both splits.
if SINGLE_IMAGES:
    os.mkdir(training_dir + '/images/')
    os.mkdir(testing_dir + '/images/')

# Get tensor identifiers of original dataset
files_in_dir = os.listdir(data_dir)
# ===== Example #4 (scraped-sample separator; original marker: "示例#4" / "0") =====
"""

# Default: assume no per-grasp images unless an images/ directory is found.
SINGLE_IMAGES = False

# Paths to dataset
data_dir = DATA_PATH + 'oblique-C-xyzq_Training/tensors/'
validation_dir = DATA_PATH + 'oblique-C-xyzq_Validation_full/'
if 'images' in os.listdir(data_dir + '../'):
    SINGLE_IMAGES = True

# Initiate tensor datasets, reusing the source dataset config.
config_filename = data_dir + '../config.json'
with open(config_filename, 'r') as myfile:
    data = myfile.read()
tensor_config = json.loads(data)
validation_dset = TensorDataset(validation_dir, tensor_config)
tensor_datapoint = validation_dset.datapoint_template

# Copy the dataset config alongside the validation tensors.  BUGFIX: only a
# missing file is tolerated now; the previous bare `except:` also swallowed
# unrelated errors (KeyboardInterrupt, SystemExit, permission problems, ...)
# and matched none of the sibling scripts, which catch FileNotFoundError.
try:
    copyfile(data_dir + 'config.json', validation_dir + '/tensors/config.json')
except FileNotFoundError:
    print("Cant find file {}".format(data_dir + 'config.json'))

# Mirror the per-grasp image directory layout in the validation split.
if SINGLE_IMAGES:
    os.mkdir(validation_dir + '/images/')

# Get tensor identifiers (field prefixes and five-digit tensor indices,
# filename chars [-9:-4]) of the original dataset.
files_in_dir = os.listdir(data_dir)
filetypes = list(set(['_'.join(file.split('_')[:-1])
                      for file in files_in_dir]))
tensors_in_dir = list(set([split[-9:-4] for split in files_in_dir]))
# ===== Example #5 (scraped-sample separator; original marker: "示例#5" / "0") =====
""" Script to combine two dataset to a single dataset."""

DSET_1 = DATA_PATH + 'Unrot_3DOF_Testing/tensors/'
DSET_2 = DATA_PATH + '20210702_Unrot_Testing/tensors/'
NEW = DATA_PATH + 'Unrot_Testing'

# FIX: makedirs(exist_ok=True) replaces the racy os.path.exists()/os.mkdir()
# check-then-create pair and additionally creates missing parent directories.
os.makedirs(NEW, exist_ok=True)

# Build the combined dataset from DSET_1's config, dropping the two grasp
# quality fields that are not carried over into the merged dataset.
config_filename = DSET_1 + '../config.json'
with open(config_filename, 'r') as myfile:
    data = myfile.read()
tensor_config = json.loads(data)
tensor_config['fields'].pop('ferrari_canny', None)
tensor_config['fields'].pop('force_closure', None)
new_dset = TensorDataset(NEW, tensor_config)
tensor_datapoint = new_dset.datapoint_template

# NOTE(review): presumably fields stored as indices into shared label arrays
# rather than per-datapoint values — confirm against the copy loop below.
pointers = ['pose_labels', 'image_labels']

# Add DSET_1
print("Copy dataset 1")
files_in_dir = os.listdir(DSET_1)
filetypes = list(set(['_'.join(file.split('_')[:-1])
                      for file in files_in_dir]))
# Drop the field prefixes that were removed from the config above.
unwanted_files = ['force_closure', 'ferrari_canny']
for unwanted in unwanted_files:
    if unwanted in filetypes:
        filetypes.remove(unwanted)
tensors_in_dir = list(set([split[-9:-4] for split in files_in_dir]))
tensors_in_dir.sort()