def load_test_dataset(N=100, NP=200, s=0, sp=4000):
    """Load N test environments starting at offset `s`.

    Returns:
        obc: (N, 7, 2) float32 — 7 obstacle shape points per environment,
             gathered from obs.dat via the permutation table.
        obs_rep: (N, 28) float32 — latent code of each environment's point
             cloud produced by the pretrained CAE encoder.
        paths: (N, NP, max_length, 2) float32 — zero-padded trajectories.
        path_lengths: (N, NP) — true (unpadded) waypoint count per path.
    """
    # 7-obstacle shape representation per environment.
    obc = np.zeros((N, 7, 2), dtype=np.float32)
    temp = np.fromfile('../../dataset/obs.dat')
    # fix: use integer division — float '/' makes reshape fail on Python 3
    obs = temp.reshape(len(temp) // 2, 2)
    temp = np.fromfile('../../dataset/obs_perm2.dat', np.int32)
    perm = temp.reshape(77520, 7)
    for i in range(0, N):
        # Vectorized row gather (replaces the original triple loop).
        obc[i] = obs[perm[i + s]]

    Q = Encoder()
    Q.load_state_dict(torch.load('../models/cae_encoder.pkl'))
    if torch.cuda.is_available():
        Q.cuda()

    # Encode each environment's raw 2800-float cloud to a 28-dim code.
    obs_rep = np.zeros((N, 28), dtype=np.float32)
    for k, i in enumerate(range(s, s + N)):
        temp = np.fromfile('../../dataset/obs_cloud/obc' + str(i) + '.dat')
        temp = temp.reshape(len(temp) // 2, 2)  # fix: integer division
        obstacles = np.zeros((1, 2800), dtype=np.float32)
        obstacles[0] = temp.flatten()
        inp = Variable(torch.from_numpy(obstacles)).cuda()
        output = Q(inp)
        obs_rep[k] = output.data.cpu().numpy()

    # First pass: longest trajectory sets the padded depth.
    # NOTE(review): int8 silently wraps for paths longer than 127 points —
    # confirm dataset path lengths stay below that bound.
    max_length = 0
    path_lengths = np.zeros((N, NP), dtype=np.int8)
    for i in range(0, N):
        for j in range(0, NP):
            fname = '../../dataset/e' + str(i + s) + '/path' + str(j + sp) + '.dat'
            if os.path.isfile(fname):
                path = np.fromfile(fname)
                path = path.reshape(len(path) // 2, 2)  # fix: integer division
                path_lengths[i][j] = len(path)
                if len(path) > max_length:
                    max_length = len(path)

    # Second pass: zero-padded path tensor.
    paths = np.zeros((N, NP, max_length, 2), dtype=np.float32)
    for i in range(0, N):
        for j in range(0, NP):
            fname = '../../dataset/e' + str(i + s) + '/path' + str(j + sp) + '.dat'
            if os.path.isfile(fname):
                path = np.fromfile(fname)
                path = path.reshape(len(path) // 2, 2)  # fix: integer division
                paths[i][j][:len(path)] = path
    return obc, obs_rep, paths, path_lengths
def load_train_dataset(N=100, NP=4000, folder='../data/simple/', s=0):
    """Build the training set for environments s .. s+N-1.

    Each sample pairs [current waypoint, goal waypoint] as input with the
    next waypoint as target, tagged with its environment index. Samples are
    shuffled before returning.

    Returns:
        obs: (N, cloud_size) array of raw obstacle point clouds.
        dataset, targets, env_indices: parallel shuffled lists.
    """
    # Raw obstacle point cloud per environment, one flat array each.
    clouds = [np.fromfile(folder + 'obs_cloud/obc' + str(s + idx) + '.dat')
              for idx in range(N)]
    obs = np.array(clouds)

    # First pass: per-path waypoint counts and the overall maximum.
    path_lengths = np.zeros((N, NP), dtype=np.int8)
    max_length = 0
    for env in range(N):
        for p in range(NP):
            fname = folder + 'env/' + 'e' + str(env + s) + '/path' + str(p) + '.dat'
            if not os.path.isfile(fname):
                continue
            n_pts = len(np.fromfile(fname)) // 2
            path_lengths[env][p] = n_pts
            max_length = max(max_length, n_pts)

    # Second pass: zero-padded waypoint tensor.
    paths = np.zeros((N, NP, max_length, 2), dtype=np.float32)
    for env in range(N):
        for p in range(NP):
            fname = folder + 'env/' + 'e' + str(env + s) + '/path' + str(p) + '.dat'
            if not os.path.isfile(fname):
                continue
            pts = np.fromfile(fname).reshape(-1, 2)
            paths[env][p][:len(pts)] = pts

    # Flatten every consecutive waypoint pair into a training sample.
    dataset, targets, env_indices = [], [], []
    for env in range(N):
        for p in range(NP):
            length = path_lengths[env][p]
            if length > 0:
                goal = paths[env][p][length - 1]
                for step in range(length - 1):
                    sample = np.concatenate(
                        (paths[env][p][step], goal)).astype(np.float32)
                    targets.append(paths[env][p][step + 1])
                    dataset.append(sample)
                    env_indices.append(env)

    # Shuffle all three lists in lockstep.
    combined = list(zip(dataset, targets, env_indices))
    random.shuffle(combined)
    dataset, targets, env_indices = (list(col) for col in zip(*combined))
    return obs, dataset, targets, env_indices
def load_test_dataset(N=100, NP=200, s=0, sp=4000, folder='../data/simple/'):
    """Load test environments: shape table, raw clouds, and padded paths.

    Returns:
        obc: (N, 7, 2) float32 obstacle shape points per environment.
        obs: (N, cloud_size) float32 raw point clouds.
        paths: (N, NP, max_length, 2) float32 zero-padded trajectories.
        path_lengths: (N, NP) true waypoint count per path.
    """
    # Shape representation: 7 obstacle points per environment, selected
    # through the permutation table.
    shape_pts = np.fromfile(folder + 'obs.dat').reshape(-1, 2)
    perm = np.fromfile(folder + 'obs_perm2.dat', np.int32).reshape(77520, 7)
    obc = np.zeros((N, 7, 2), dtype=np.float32)
    for env in range(N):
        for slot in range(7):
            for axis in range(2):
                obc[env][slot][axis] = shape_pts[perm[env + s][slot]][axis]

    # Point cloud representation, one flat array per environment.
    obs = np.array([np.fromfile(folder + 'obs_cloud/obc' + str(e) + '.dat')
                    for e in range(s, s + N)]).astype(np.float32)

    # First pass: per-path waypoint counts and the longest trajectory.
    path_lengths = np.zeros((N, NP), dtype=np.int8)
    max_length = 0
    for env in range(N):
        for p in range(NP):
            fname = folder + 'env/' + 'e' + str(env + s) + '/path' + str(p + sp) + '.dat'
            if not os.path.isfile(fname):
                continue
            n_pts = len(np.fromfile(fname)) // 2
            path_lengths[env][p] = n_pts
            max_length = max(max_length, n_pts)

    # Second pass: zero-padded path tensor.
    paths = np.zeros((N, NP, max_length, 2), dtype=np.float32)
    for env in range(N):
        for p in range(NP):
            fname = folder + 'env/' + 'e' + str(env + s) + '/path' + str(p + sp) + '.dat'
            if not os.path.isfile(fname):
                continue
            pts = np.fromfile(fname).reshape(-1, 2)
            paths[env][p][:len(pts)] = pts
    return obc, obs, paths, path_lengths
def calc_length(para):
    """Return the number of 2-D waypoints in path file (i, j), or 0 if absent.

    Args:
        para: (env_index, path_index) tuple selecting
              './dataset2/e<i>/path<j>.dat'.
    """
    i, j = para
    fname = './dataset2/e' + str(i) + '/path' + str(j) + '.dat'
    if not os.path.isfile(fname):
        return 0
    # File stores flat float64 pairs; waypoint count is half the float count.
    # (Idiom fix: floor division instead of int(len/2); no reshape needed
    # just to count rows.)
    return len(np.fromfile(fname)) // 2
def load_raw_dataset(N=100, NP=4000, s=0, sp=0, folder='../data/simple/'):
    """Load raw 3-D environments: shape table, clouds, and padded paths.

    Returns:
        obc: (N, 10, 3) float32 obstacle shape points per environment.
        obs: (N, cloud_size) float32 raw point clouds.
        paths: (N, NP, max_length, 3) float32 zero-padded trajectories.
        path_lengths: (N, NP) true waypoint count per path.
    """
    # Shape representation: 10 obstacle points per environment, selected
    # through the permutation table.
    shape_pts = np.fromfile(folder + 'obs.dat').reshape(-1, 3)
    perm = np.fromfile(folder + 'obs_perm2.dat', np.int32).reshape(184756, 10)
    obc = np.zeros((N, 10, 3), dtype=np.float32)
    for env in range(N):
        for slot in range(10):
            for axis in range(3):
                obc[env][slot][axis] = shape_pts[perm[env + s][slot]][axis]

    # Point cloud representation, one flat array per environment.
    obs = np.array([np.fromfile(folder + 'obs_cloud/obc' + str(e) + '.dat')
                    for e in range(s, s + N)]).astype(np.float32)

    # First pass: per-path waypoint counts and the longest trajectory.
    path_lengths = np.zeros((N, NP), dtype=np.int8)
    max_length = 0
    for env in range(N):
        for p in range(NP):
            fname = folder + 'e' + str(env + s) + '/path' + str(p + sp) + '.dat'
            if not os.path.isfile(fname):
                continue
            n_pts = len(np.fromfile(fname)) // 3
            path_lengths[env][p] = n_pts
            max_length = max(max_length, n_pts)

    # Second pass: zero-padded path tensor.
    paths = np.zeros((N, NP, max_length, 3), dtype=np.float32)
    for env in range(N):
        for p in range(NP):
            fname = folder + 'e' + str(env + s) + '/path' + str(p + sp) + '.dat'
            if not os.path.isfile(fname):
                continue
            pts = np.fromfile(fname).reshape(-1, 3)
            paths[env][p][:len(pts)] = pts
    return obc, obs, paths, path_lengths
def gen_path(para, max_len):
    """Return path (i, j) as a (max_len, 2) zero-padded array.

    Args:
        para: (env_index, path_index) tuple selecting
              './dataset2/e<i>/path<j>.dat'.
        max_len: number of rows in the returned array; missing files yield
              an all-zero array.
    """
    i, j = para
    fname = './dataset2/e' + str(i) + '/path' + str(j) + '.dat'
    paths = np.zeros((max_len, 2))
    if os.path.isfile(fname):
        # Idiom fix: reshape(-1, 2) infers the row count — no int(len/2).
        path = np.fromfile(fname).reshape(-1, 2)
        paths[:len(path)] = path
    return paths
def load_dataset(N=100, NP=4000):
    """Encode obstacle clouds and build shuffled (input, target) pairs.

    Inputs are 32-float vectors: [28-dim CAE obstacle code, current
    waypoint, goal waypoint]; targets are the next waypoint on the path.

    Returns:
        (dataset, targets) as shuffled parallel numpy arrays.
    """
    Q = Encoder()
    Q.load_state_dict(torch.load('AE/models/cae_encoder.pkl'))
    if torch.cuda.is_available():
        Q.cuda()

    # 28-dim latent code per environment from its 2800-float point cloud.
    obs_rep = np.zeros((N, 28), dtype=np.float32)
    for i in range(0, N):
        temp = np.fromfile(
            '/home/muhayyuddin/MPNet/MPNet/dataset/obs_cloud/obc' + str(i) + '.dat')
        temp = temp.reshape(len(temp) // 2, 2)
        obstacles = np.zeros((1, 2800), dtype=np.float32)
        obstacles[0] = temp.flatten()
        inp = Variable(torch.from_numpy(obstacles)).cuda()
        output = Q(inp)
        obs_rep[i] = output.data.cpu().numpy()

    # First pass: longest trajectory sets the padded depth.
    # NOTE(review): int8 silently wraps for paths longer than 127 points —
    # confirm dataset path lengths stay below that bound.
    max_length = 0
    path_lengths = np.zeros((N, NP), dtype=np.int8)
    for i in range(0, N):
        for j in range(0, NP):
            fname = 'dataset/e' + str(i) + '/path' + str(j) + '.dat'
            if os.path.isfile(fname):
                path = np.fromfile(fname)
                path = path.reshape(len(path) // 2, 2)
                path_lengths[i][j] = len(path)
                if len(path) > max_length:
                    max_length = len(path)

    # Second pass: zero-padded path tensor.
    paths = np.zeros((N, NP, max_length, 2), dtype=np.float32)
    for i in range(0, N):
        for j in range(0, NP):
            fname = 'dataset/e' + str(i) + '/path' + str(j) + '.dat'
            if os.path.isfile(fname):
                path = np.fromfile(fname)
                path = path.reshape(len(path) // 2, 2)
                paths[i][j][:len(path)] = path

    # Build (input, target) pairs for every consecutive waypoint pair.
    dataset = []
    targets = []
    for i in range(0, N):
        for j in range(0, NP):
            if path_lengths[i][j] > 0:
                goal = paths[i][j][path_lengths[i][j] - 1]
                for m in range(0, path_lengths[i][j] - 1):
                    # [obstacle code | current point | goal point]
                    # (replaces the original 32-element element-wise copy)
                    data = np.concatenate(
                        (obs_rep[i], paths[i][j][m], goal)).astype(np.float32)
                    targets.append(paths[i][j][m + 1])
                    dataset.append(data)

    data = list(zip(dataset, targets))
    random.shuffle(data)
    dataset, targets = zip(*data)
    return np.asarray(dataset), np.asarray(targets)
def load_dataset(N=100, NP=4000):
    """Encode obstacle clouds and build shuffled (input, target) pairs.

    Inputs are 32-float vectors: [28-dim CAE obstacle code, current
    waypoint, goal waypoint]; targets are the next waypoint on the path.

    Returns:
        (dataset, targets) as shuffled parallel numpy arrays.
    """
    Q = Encoder()
    Q.load_state_dict(torch.load('../models/cae_encoder.pkl'))
    if torch.cuda.is_available():
        Q.cuda()

    # 28-dim latent code per environment from its 2800-float point cloud.
    obs_rep = np.zeros((N, 28), dtype=np.float32)
    for i in range(0, N):
        temp = np.fromfile('../../dataset/obs_cloud/obc' + str(i) + '.dat')
        # fix: integer division — float '/' makes reshape fail on Python 3
        temp = temp.reshape(len(temp) // 2, 2)
        obstacles = np.zeros((1, 2800), dtype=np.float32)
        obstacles[0] = temp.flatten()
        inp = Variable(torch.from_numpy(obstacles)).cuda()
        output = Q(inp)
        obs_rep[i] = output.data.cpu().numpy()

    # First pass: longest trajectory sets the padded depth.
    max_length = 0
    path_lengths = np.zeros((N, NP), dtype=np.int8)
    for i in range(0, N):
        for j in range(0, NP):
            fname = '../../dataset/e' + str(i) + '/path' + str(j) + '.dat'
            if os.path.isfile(fname):
                path = np.fromfile(fname)
                path = path.reshape(len(path) // 2, 2)  # fix: integer division
                path_lengths[i][j] = len(path)
                if len(path) > max_length:
                    max_length = len(path)

    # Second pass: zero-padded path tensor.
    paths = np.zeros((N, NP, max_length, 2), dtype=np.float32)
    for i in range(0, N):
        for j in range(0, NP):
            fname = '../../dataset/e' + str(i) + '/path' + str(j) + '.dat'
            if os.path.isfile(fname):
                path = np.fromfile(fname)
                path = path.reshape(len(path) // 2, 2)  # fix: integer division
                paths[i][j][:len(path)] = path

    # Build (input, target) pairs for every consecutive waypoint pair.
    dataset = []
    targets = []
    for i in range(0, N):
        for j in range(0, NP):
            if path_lengths[i][j] > 0:
                goal = paths[i][j][path_lengths[i][j] - 1]
                for m in range(0, path_lengths[i][j] - 1):
                    # [obstacle code | current point | goal point]
                    data = np.concatenate(
                        (obs_rep[i], paths[i][j][m], goal)).astype(np.float32)
                    targets.append(paths[i][j][m + 1])
                    dataset.append(data)

    # fix: materialize the zip — random.shuffle needs a mutable sequence,
    # and a Py3 zip object would raise TypeError here.
    data = list(zip(dataset, targets))
    random.shuffle(data)
    dataset, targets = zip(*data)
    return np.asarray(dataset), np.asarray(targets)
def load_test_dataset(N=100, NP=200, s=0, sp=4000):
    """Load test maps via load_map_paths() and encode each obstacle cloud.

    Returns:
        obc: None on the (currently hard-wired) custom-dataset branch,
             otherwise the (N, 7, 2) legacy shape table.
        obs_rep: (N, 28) float32 CAE codes of the obstacle clouds.
        paths: (N, NP, max_length, 2) float32 zero-padded trajectories.
        path_lengths: (N, NP) true waypoint count per path.
        all_obstacles: list of (points, 2) raw obstacle arrays.
    """
    WTF = True  # hard-wired switch: skip the legacy 7-obstacle shape table
    if WTF:
        obc = None
    else:
        obc = np.zeros((N, 7, 2), dtype=np.float32)
        temp = np.fromfile(DATASET_ROOT + '/obs.dat')
        # fix: integer division — float '/' makes reshape fail on Python 3
        obs = temp.reshape(len(temp) // 2, 2)
        temp = np.fromfile(DATASET_ROOT + '/obs_perm2.dat', np.int32)
        perm = temp.reshape(77520, 7)
        for i in range(0, N):
            for j in range(0, 7):
                for k in range(0, 2):
                    obc[i][j][k] = obs[perm[i + s][j]][k]

    Q = Encoder()
    Q.load_state_dict(torch.load('AE/models/cae_encoder.pkl'))
    if torch.cuda.is_available():
        Q.cuda()

    obs_path_pair = load_map_paths()
    # Use every map in the custom dataset; otherwise cap at 100.
    if "traj-occ120" in DATASET_ROOT:
        N = len(obs_path_pair)
    else:
        N = 100

    # Encode each map's obstacle cloud to a 28-dim code.
    obs_rep = np.zeros((N, 28), dtype=np.float32)
    all_obstacles = []
    for k, (obs, _) in enumerate(obs_path_pair):
        obs = obs.reshape(-1, 2)
        all_obstacles.append(obs)
        # NOTE(review): unlike the other loaders this feeds a flat 1-D
        # tensor to the encoder — confirm Encoder accepts that shape.
        inp = Variable(torch.from_numpy(obs.copy().flatten())).cuda()
        output = Q(inp.float())
        obs_rep[k] = output.data.cpu().numpy()

    # First pass: longest trajectory sets the padded depth.
    max_length = 0
    path_lengths = np.zeros((N, NP), dtype=np.int8)
    for i, (_, env_paths) in enumerate(obs_path_pair):
        for j, path in enumerate(env_paths[:NP]):
            path = path.reshape(-1, 2)
            path_lengths[i][j] = len(path)
            if len(path) > max_length:
                max_length = len(path)

    # Second pass: zero-padded path tensor.
    paths = np.zeros((N, NP, max_length, 2), dtype=np.float32)
    for i, (_, env_paths) in enumerate(obs_path_pair):
        for j, path in enumerate(env_paths[:NP]):
            path = path.reshape(-1, 2)
            paths[i][j][:len(path)] = path

    print(path_lengths)  # debug output retained from original
    return obc, obs_rep, paths, path_lengths, all_obstacles
def load_test_dataset(N=100, NP=200, s=0, sp=4000):
    """Load test maps by globbing obstacle/path files under DATASET_ROOT.

    Returns:
        obc: None on the (currently hard-wired) custom-dataset branch,
             otherwise the (N, 7, 2) legacy shape table.
        obs_rep: (N, 28) float32 CAE codes of the obstacle clouds.
        paths: (N, NP, max_length, 2) float32 zero-padded trajectories.
        path_lengths: (N, NP) true waypoint count per path.
        all_obstacles: list of (points, 2) raw obstacle arrays.
    """
    WTF = True  # hard-wired switch: skip the legacy 7-obstacle shape table
    if WTF:
        obc = None
    else:
        obc = np.zeros((N, 7, 2), dtype=np.float32)
        temp = np.fromfile(DATASET_ROOT + '/obs.dat')
        # fix: integer division — float '/' makes reshape fail on Python 3
        obs = temp.reshape(len(temp) // 2, 2)
        temp = np.fromfile(DATASET_ROOT + '/obs_perm2.dat', np.int32)
        perm = temp.reshape(77520, 7)
        for i in range(0, N):
            for j in range(0, 7):
                for k in range(0, 2):
                    obc[i][j][k] = obs[perm[i + s][j]][k]

    Q = Encoder()
    Q.load_state_dict(torch.load('../models/cae_encoder.pkl'))
    if torch.cuda.is_available():
        Q.cuda()

    import glob
    import regex

    obc_filelist = glob.glob(DATASET_ROOT + '/obs_cloud/obc*.dat')
    # Use every map unless this is the traj-occ120 dataset (capped at 10).
    if "traj-occ120" not in DATASET_ROOT:
        N = len(obc_filelist)
    else:
        N = 10
    # NOTE(review): the list is truncated BEFORE sorting, so which 10 files
    # survive depends on glob's filesystem order — confirm this is intended.
    obc_filelist = obc_filelist[:N]
    print(len(obc_filelist))
    obc_filelist.sort(
        key=lambda x: int(regex.search('.*obc([0-9]+).dat', x).group(1)))

    # Encode each obstacle cloud to a 28-dim code.
    obs_rep = np.zeros((N, 28), dtype=np.float32)
    all_obstacles = []
    for k, fn in enumerate(obc_filelist):
        temp = np.fromfile(fn)
        temp = temp.reshape(len(temp) // 2, 2)  # fix: integer division
        all_obstacles.append(temp)
        obstacles = np.zeros((1, 2800), dtype=np.float32)
        obstacles[0] = temp.flatten()
        inp = Variable(torch.from_numpy(obstacles)).cuda()
        output = Q(inp)
        obs_rep[k] = output.data.cpu().numpy()

    # Collect up to NP path files per environment, sorted by path index.
    all_path_filelist = []
    for i in range(len(obc_filelist)):
        path_filelist = glob.glob(DATASET_ROOT + '/e' + str(i) + '/path*.dat')
        path_filelist.sort(
            key=lambda x: int(regex.search('.*path([0-9]+).dat', x).group(1)))
        all_path_filelist.append(path_filelist[:NP])

    # First pass: longest trajectory sets the padded depth.
    max_length = 0
    path_lengths = np.zeros((N, NP), dtype=np.int8)
    for i, path_filelist in enumerate(all_path_filelist):
        for j, fname in enumerate(path_filelist):
            path = np.fromfile(fname).reshape(-1, 2)
            path_lengths[i][j] = len(path)
            if len(path) > max_length:
                max_length = len(path)

    # Second pass: zero-padded path tensor.
    paths = np.zeros((N, NP, max_length, 2), dtype=np.float32)
    for i, path_filelist in enumerate(all_path_filelist):
        for j, fname in enumerate(path_filelist):
            path = np.fromfile(fname).reshape(-1, 2)
            paths[i][j][:len(path)] = path

        # Debug visualization: obstacles and trajectories per environment.
        # NOTE(review): nesting inferred from collapsed source — this pops a
        # plot window per environment; confirm placement and consider
        # removing for headless runs.
        import matplotlib.pyplot as plt
        plt.scatter(*all_obstacles[i].T)
        for j in range(len(paths[i])):
            p = paths[i][j][:path_lengths[i][j]]
            plt.plot(*p.T)
        plt.show()

    print(path_lengths)  # debug output retained from original
    return obc, obs_rep, paths, path_lengths, all_obstacles