def __init__(self, root, verbose=True):
    """ Set up the EPFL CampusSeq1 multi-camera dataset.

    Downloads and unzips the sequence on first use, stores the three
    hard-coded projection matrices and parses the ground-truth 3d poses
    from the bundled ``actorsGT.mat`` file into ``self.Y``.

    :param root: root location under which the data is stored
    :param verbose: {boolean} print progress information
    """
    data_root = join(root, 'epfl_campus')
    if not isdir(data_root):
        makedirs(data_root)
    self.verbose = verbose
    self.data_root = data_root

    # download data:
    seq_root = join(data_root, 'CampusSeq1')
    self.seq_root = seq_root
    if not isdir(seq_root):
        seq_zip = join(data_root, 'CampusSeq1.zip')
        if not isfile(seq_zip):
            url = 'http://188.138.127.15:81/Datasets/CampusSeq1.zip'
            if verbose:
                print('\ndownload ' + url)
            download.download(url, seq_zip)
        if verbose:
            print('\nunzip ' + seq_zip)
        unzip.unzip(seq_zip, data_root, verbose, del_after_unzip=True)

    # gt is taken from here: http://campar.in.tum.de/Chair/MultiHumanPose
    P0 = np.array([[439.06, 180.81, -26.946, 185.95],
                   [-5.3416, 88.523, -450.95, 1324],
                   [0.0060594, 0.99348, -0.11385, 5.227]])
    P1 = np.array([[162.36, -438.34, -17.508, 3347.4],
                   [73.3, -10.043, -443.34, 1373.5],
                   [0.99035, -0.047887, -0.13009, 6.6849]])
    P2 = np.array([[237.58, 679.93, -26.772, -1558.3],
                   [-43.114, 21.982, -713.6, 1962.8],
                   [-0.83557, 0.53325, -0.13216, 11.202]])
    self.Calib = [P0, P1, P2]

    # GT binary file
    actorsGTmat = join(seq_root, 'actorsGT.mat')
    assert isfile(actorsGTmat)
    M = loadmat(actorsGTmat)
    Actor3d = M['actor3D'][0]
    persons = []
    for pid in range(3):
        pts = []
        Person = Actor3d[pid]
        n = len(Person)
        for frame in range(n):
            pose = Person[frame][0]
            if len(pose) == 1:
                # length-1 entry is a placeholder: actor not visible
                pts.append(None)
            elif len(pose) == 14:
                pts.append(pose)
            else:
                # BUGFIX: report the offending length — the original
                # embedded the whole pose array instead of its length
                raise ValueError("Weird pose length:" + str(len(pose)))
        persons.append(pts)
    self.Y = persons
def __init__(self, data_root, z_is_up=True, store_binary=False):
    """ Prepare the CMU MoCap dataset, downloading it when missing.

    :param data_root: root location for data
    :param z_is_up: if True ensure that z points upwards
    :param store_binary: if True store the extracted video sequence as
        numpy binary for faster access
    """
    assert isdir(data_root)
    root = join(data_root, 'cmu_mocap')
    if not isdir(root):
        makedirs(root)

    subject_folder = join(root, 'all_asfamc/subjects')
    if not isdir(subject_folder):
        # data not extracted yet: fetch and unzip the archive(s)
        print("[CMU MoCap] download file")
        for archive_name in ['allasfamc.zip']:
            url = 'http://mocap.cs.cmu.edu/' + archive_name
            archive = join(root, archive_name)
            if not isfile(archive):
                print('\t[downloading] ', url)
                download.download(url, archive)
            print('\t[unzipping] ', archive)
            unzip.unzip(archive, root)

    self.subjects = sorted(listdir(subject_folder))
    self.subject_folder = subject_folder
    self.z_is_up = z_is_up
    self.store_binary = store_binary
def __init__(self, root, username, pw):
    """ Set up the 2d Cell Tracking Challenge data.

    :param root: root data directory
    :param username: to access the data a username and
    :param pw: password are required
    """
    data_root = join(root, 'celltrackingchallenge')
    self.data_root = data_root
    test_root = join(data_root, 'test')
    train_root = join(data_root, 'train')
    for split_root in (test_root, train_root):
        if not isdir(split_root):
            makedirs(split_root)

    train_urls = CellTrackingChallenge2D.get_training_dataset_urls()
    test_urls = CellTrackingChallenge2D.get_test_dataset_urls()
    for url_train, url_test in zip(train_urls, test_urls):
        fname = utils.get_filename_from_url(url_test)
        # train and test archives must share the same file name
        assert fname == utils.get_filename_from_url(url_train)
        fzip_train = join(train_root, fname)
        fzip_test = join(test_root, fname)
        if not isfile(fzip_train):
            utils.talk(
                "could not find train file:" + fname + ", .. downloading",
                True)
            download.download_with_login(url_train, train_root, username, pw)
        if not isfile(fzip_test):
            utils.talk(
                "could not find test file:" + fname + ", .. downloading",
                True)
            download.download_with_login(url_test, test_root, username, pw)

        data_name = fname[:-4]  # drop the '.zip' extension
        for fzip, split_root in ((fzip_train, train_root),
                                 (fzip_test, test_root)):
            dataset_dir = join(split_root, data_name)
            if not isdir(dataset_dir):
                utils.talk(
                    "Could not find folder " + dataset_dir + ', .. unzip',
                    True)
                unzip.unzip(fzip, split_root)
def __init__(self, root):
    """ Set up the CAD-120 activity dataset, downloading the per-subject
    annotation archives on first use.

    :param root: root data directory
    """
    assert isdir(root)
    data_root = join(root, 'CAD_120')
    if not isdir(data_root):
        makedirs(data_root)
    self.data_root = data_root
    self.actions = sorted([
        'arranging_objects',
        'cleaning_objects',
        'having_meal',
        'making_cereal',
        'microwaving_food',
        'picking_objects',
        'stacking_objects',
        'taking_food',
        'taking_medicine',
        'unstacking_objects'
    ])
    base_url = 'http://pr.cs.cornell.edu/humanactivities/data/'
    self.subjects = [1, 3, 4, 5]

    # map skeleton representation to joint 3d locs only
    joint_cols = [0] + [i + j
                        for i in range(11, 155, 14)
                        for j in range(4)]
    self.items = np.array(joint_cols + list(range(155, 171)))

    # map our reduced joint + conf to actual 3d data
    self.items3d = np.array([i + j
                             for i in range(1, 61, 4)
                             for j in range(3)])

    for pid in self.subjects:
        dir_name = 'Subject%01d_annotations' % pid
        dir_loc = join(data_root, dir_name)
        if isdir(dir_loc):
            continue  # already downloaded and extracted
        tar_loc = join(data_root, dir_name + '.tar.gz')
        if not isfile(tar_loc):
            print('download ' + dir_name)
            download.download(base_url + dir_name + '.tar.gz', tar_loc)
        # unzip folder
        print('unzip ', tar_loc)
        unzip.unzip(tar_loc, dir_loc)
def download_and_unzip(self, url, zipfile_name=None,
                       dest_folder=None, dest_force=True,
                       root_folder=None):
    """ Downloads and unzips a zipped data file.

    :param url: source url of the zip file
    :param zipfile_name: local name of the zip file; defaults to
        ``self.name + '.zip'``
    :param dest_folder: folder (under ``self.root``) whose existence marks
        the data as already extracted; defaults to ``self.name``
    :param dest_force: if True extract into ``dest``, otherwise into
        ``self.root_export``
    :param root_folder: sub-folder of ``self.root`` where the zip file is
        stored; defaults to ``self.root`` itself
    """
    if dest_folder is None:
        dest = join(self.root, self.name)
    else:
        dest = join(self.root, dest_folder)
    if dest_force:
        export_folder = dest
    else:
        # BUGFIX: the original branch evaluated ``self.root_export``
        # without assigning it, leaving ``export_folder`` undefined and
        # raising a NameError at the unzip call when dest_force is False
        export_folder = self.root_export
    if root_folder is None:
        root = self.root
    else:
        root = join(self.root, root_folder)
    if not exists(root):
        makedirs(root)
    if not exists(dest):
        utils.talk("could not find folder " + dest + "...", self.verbose)
        if zipfile_name is None:
            fzip = join(root, self.name + ".zip")
        else:
            fzip = join(root, zipfile_name)
        if isfile(fzip):
            utils.talk('found ' + fzip, self.verbose)
        else:
            utils.talk("could not find file " + fzip, self.verbose)
            utils.talk("download from " + url, self.verbose)
            # stream the download straight to disk
            with urllib.request.urlopen(url) as res, open(fzip, 'wb') as f:
                utils.talk(url + " downloaded..", self.verbose)
                shutil.copyfileobj(res, f)
        unzip.unzip(fzip, export_folder, self.verbose,
                    self.force_unzip_with_os_tools)
    else:
        utils.talk(dest + ' found :)', self.verbose)
def __init__(self, root):
    """ ctor. Verifies the manually downloaded archives are present and
    extracts any that have not been unzipped yet.

    :param root: location of the downloaded data
    """
    self.root = root
    for zip_name in Nuclei.get_required_zips():
        fzip = join(root, zip_name)
        assert isfile(
            fzip), '.zip file ' + fzip + ' must be downloaded by hand'
        target = zip_name[0:-4]  # get rid of .zip
        is_csv = target.endswith('.csv')
        target_path = join(root, target)
        # a csv unpacks to a single file, everything else to a folder
        already_unzipped = isfile(target_path) if is_csv \
            else isdir(target_path)
        if not already_unzipped:
            # csv archives unpack directly into root, the rest get a
            # dedicated sub-folder
            unzip.unzip(fzip, root if is_csv else target_path)
def __init__(self, root, username, password, verbose=True):
    """ Set up the UMPM dataset (downloads require a login).

    :param root: root data directory
    :param username: UMPM account name
    :param password: UMPM account password
    :param verbose: print progress information
    """
    utils.talk("UMPM", verbose)
    data_root = join(root, 'umpm')
    self.data_root = data_root
    root_url = 'http://umpm-mirror.cs.uu.nl/download/'
    if not isdir(data_root):
        makedirs(data_root)

    # camera calibration archive
    calib_zip = join(data_root, 'umpm_camcalib1.zip')
    if not isfile(calib_zip):
        calib_url = 'http://umpm-mirror.cs.uu.nl/download/umpm_camcalib1.zip'
        download.download_with_login(calib_url, data_root,
                                     username, password)
    assert isfile(calib_zip)
    calib_dir = join(data_root, 'Calib')
    if not isdir(calib_dir):
        unzip.unzip(calib_zip, data_root)
    assert isdir(calib_dir)

    # per-sequence archives
    for seq in UMPM.get_file_list():
        seq_dir = join(data_root, seq)
        seq_zip = join(seq_dir, seq + ".zip")
        seq_url = root_url + seq + '.zip'
        if not isdir(seq_dir):
            utils.talk("could not find location " + seq, verbose)
        if not isfile(seq_zip):
            utils.talk("could not find file " + seq + '.zip', verbose)
            download.download_with_login(seq_url, seq_dir,
                                         username, password)
        if not isdir(join(seq_dir, 'Groundtruth')):
            # is not unzipped
            utils.talk("unzipping " + seq_zip, verbose)
            unzip.unzip(seq_zip, seq_dir, del_after_unzip=True)
            # the videos are individually xz-compressed inside the archive
            video_dir = join(seq_dir, 'Video')
            xz_videos = [
                join(video_dir, f) for f in listdir(video_dir)
                if f.endswith('.xz')
            ]
            for xz_video in xz_videos:
                utils.talk('unzipping video ' + xz_video, verbose)
                unzip.unzip(xz_video, video_dir, del_after_unzip=True)
def get(data_root, frame):
    """ Load one frame of the KTH Multiview Football II data
    (player 2, sequence 1), downloading the sequence on first use.

    :param data_root: dataset root directory
    :param frame: starting at frame 0
    :return: (Im, pts3d, Calib) where Im is a (3 x h x w x 3) array with
        one RGB image per camera, pts3d is (n_joints x 3) and Calib holds
        one AffineCamera per view
    """
    seq_zipname = 'player2sequence1.zip'
    seq_dirname = 'Sequence 1'
    player = 2
    root = join(data_root, 'football2')
    root = join(root, 'player' + str(player))
    if not isdir(root):
        makedirs(root)
    seq_url = 'http://www.csc.kth.se/cvap/cvg/MultiViewFootballData/' \
        + seq_zipname
    seq_dir = join(root, seq_dirname)
    if not isdir(seq_dir):
        seq_zip = join(root, seq_zipname)
        if not isfile(seq_zip):
            print('downloading... ', seq_url)
            download(seq_url, seq_zip)
        print('unzipping... ', seq_zip)
        unzip(seq_zip, root)

    N = 14  # number joints
    C = 3  # number cameras

    # ~~~ pos2d ~~~
    pos2d = np.loadtxt(join(seq_dir, 'positions2d.txt'))
    T = len(pos2d) / 2 / N / C
    assert floor(T) == ceil(T)  # file length must factor exactly
    T = int(T)
    # flat file is ordered (t, c, n, i); a C-order reshape + transpose
    # replaces the original element-by-element copy loop
    pos2d = pos2d.reshape((T, C, N, 2)).transpose((3, 2, 1, 0))

    # ~~~ pos3d ~~~
    pos3d_file = join(seq_dir, 'positions3d.txt')
    assert isfile(pos3d_file)
    pos3d = np.loadtxt(pos3d_file)
    assert T == int(len(pos3d) / 3 / N)
    # flat file is ordered (t, n, i)
    pos3d = pos3d.reshape((T, N, 3)).transpose((2, 1, 0))

    # ~~~ Cameras ~~~
    cam_file = join(seq_dir, 'cameras.txt')
    assert isfile(cam_file)
    cams = np.loadtxt(cam_file)
    assert T == int(len(cams) / 2 / 4 / C)
    # flat file is ordered (t, c, j, i)
    cameras = cams.reshape((T, C, 4, 2)).transpose((3, 2, 1, 0))

    # ~~~ Images ~~~
    Im = []
    h = -1
    w = -1
    for cam in ['Camera 1', 'Camera 2', 'Camera 3']:
        im_dir = join(seq_dir, cam)
        assert isdir(im_dir)
        # image files are 1-based while frame is 0-based
        im_name = join(im_dir, "%05d.png" % (frame + 1))
        assert isfile(im_name)
        im = cv2.cvtColor(cv2.imread(im_name), cv2.COLOR_BGR2RGB)
        Im.append(im)
        if w == -1 or h == -1:
            assert h == -1 and w == -1
            h, w, _ = im.shape
        else:
            # all cameras must deliver images of the same size
            h_, w_, _ = im.shape
            assert h_ == h and w_ == w
    Im = np.array(Im)

    Calib = []
    for cid in [0, 1, 2]:
        cam = np.zeros((3, 4))
        cam[0:2, :] = cameras[:, :, cid, frame]
        cam[2, 3] = 1  # affine camera: last row is (0, 0, 0, 1)
        Calib.append(AffineCamera(cam, w, h))

    # NOTE: the original also assembled a per-camera list of 2d points
    # here but never returned or used it — dead code, removed
    d3d = pos3d[:, :, frame]
    return Im, np.transpose(d3d), Calib
def __init__(self, root, verbose=True):
    """ Set up the TUM Shelf multi-camera dataset.

    Downloads and unzips the sequence on first use, loads the five
    per-camera projection matrices from the Calibration folder and parses
    the ground-truth 3d poses from ``actorsGT.mat`` into ``self.Y``.

    :param root: root location under which the data is stored
    :param verbose: print progress information
    """
    if verbose:
        print('\n**Shelf dataset**')
    data_root = join(root, 'tum_shelf')
    if not isdir(data_root):
        makedirs(data_root)
    self.verbose = verbose
    self.data_root = data_root

    # download data
    url = 'http://campar.cs.tum.edu/files/belagian/multihuman/Shelf.tar.bz2'
    data_folder = join(data_root, 'Shelf')
    if not isdir(data_folder):
        zip_filename = join(data_root, 'Shelf.tar.bz2')
        if not isfile(zip_filename):
            if verbose:
                print('\tdownload ' + url)
            download.download(url, zip_filename)
        if verbose:
            print('\nunzip ' + zip_filename)
        unzip.unzip(zip_filename, data_root, verbose)
        if verbose:
            print('\n')

    # load Calibration data
    seq_root = join(data_root, 'Shelf')
    self.seq_root = seq_root
    calibration_dir = join(seq_root, 'Calibration')
    assert isdir(calibration_dir)
    self.Calib = []
    for cam in ['P0.txt', 'P1.txt', 'P2.txt', 'P3.txt', 'P4.txt']:
        fname = join(calibration_dir, cam)
        assert isfile(fname)
        P = np.loadtxt(fname, delimiter=',')
        self.Calib.append(P)

    # GT binary file
    actorsGTmat = join(seq_root, 'actorsGT.mat')
    assert isfile(actorsGTmat)
    M = loadmat(actorsGTmat)
    Actor3d = M['actor3D'][0]
    persons = []
    for pid in range(4):
        pts = []
        Person = Actor3d[pid]
        n = len(Person)
        for frame in range(n):
            pose = Person[frame][0]
            if len(pose) == 1:
                # length-1 entry is a placeholder: actor not visible
                pts.append(None)
            elif len(pose) == 14:
                pts.append(pose)
            else:
                # BUGFIX: report the offending length — the original
                # embedded the whole pose array instead of its length
                raise ValueError("Weird pose length:" + str(len(pose)))
        persons.append(pts)
    self.Y = persons
def __init__(self, root, verbose=True):
    """ Set up the cropped PennAction dataset.

    Downloads and unzips the data on first use, splits the video ids
    into train/validation sets and loads the per-video meta data
    (frame count, dimensions and 2d joint annotations).

    :param root: root location under which the data is stored
    :param verbose: print progress information
    """
    EXCLUDE_VID = {'1516'}  # these videos are 'broken'
    if verbose:
        print('\n**PennAction [cropped]**')
    data_root = join(root, 'pennaction_cropped')
    if not isdir(data_root):
        makedirs(data_root)

    url = 'http://188.138.127.15:81/Datasets/penn-crop.zip'
    data_folder = join(data_root, 'penn-crop')
    if not isdir(data_folder):
        zip_filename = join(data_root, 'penn-crop.zip')
        if not isfile(zip_filename):
            if verbose:
                print('\tdownload ', url)
            download.download(url, zip_filename)
        if verbose:
            print('\tunzip ', zip_filename)
        unzip.unzip(zip_filename, data_root, verbose=verbose)
    self.data_folder = data_folder
    if verbose:
        print('')

    self.frames_folder = join(data_folder, 'frames')
    labels_folder = join(data_folder, 'labels')
    self.labels_folder = labels_folder
    assert isdir(labels_folder)
    # video id = first four characters of each label file name
    ids = [name[0:4] for name in sorted(listdir(labels_folder))]
    self.ids = ids

    # split train/val
    validation_indices_file = join(data_folder, 'valid_ind.txt')
    assert isfile(validation_indices_file)
    lookup = {'%04d' % idx
              for idx in np.loadtxt(validation_indices_file)}
    self.train_ids = []
    self.val_ids = []
    for vid in ids:
        if vid in EXCLUDE_VID:
            continue
        if vid in lookup:
            self.val_ids.append(vid)
        else:
            self.train_ids.append(vid)

    # find the meta-data for each video id
    self.meta = dict()
    for vid in ids:
        L = loadmat(join(labels_folder, vid + '.mat'))
        # stack x, y and visibility into one (frames x joints x 3) array
        gt = np.concatenate([
            np.expand_dims(L['x'], axis=2),
            np.expand_dims(L['y'], axis=2),
            np.expand_dims(L['visibility'], axis=2)
        ], axis=2)
        self.meta[vid] = {
            'n_frames': L['nframes'][0][0],
            'dimensions': np.squeeze(L['dimensions']),
            'gt': gt
        }