def start(dset=None, frame_num=0):
    main.initialize()
    if not FOR_REAL:
        if dset is None:
            dataset.load_random_dataset()
        else:
            dataset.load_dataset(dset)
        while dataset.frame_num < frame_num:
            dataset.advance()

        name = dset
        name = os.path.split(name)[1]
        custom = os.path.join('data/sets/', name, 'gt.txt')
        if os.path.exists(custom):
            # Try dataset directory first
            fname = custom
        else:
            import re
            # Fall back on generic ground truth file
            match = re.match('.*_z(\d)m_(.*)', name)
            number = int(match.groups()[0])
            fname = 'data/experiments/gt/gt%d.txt' % number

        with open(fname) as f:
            GT = grid.gt2grid(f.read())
        grid.initialize_with_groundtruth(GT)
    else:
        config.load('data/newest_calibration')
        opennpy.align_depth_to_rgb()
    dataset.setup_opencl()
def run_grid():
    datasets = glob.glob('data/sets/study_*')

    # Start from a clean output directory
    try:
        shutil.rmtree(out_path)
    except OSError:
        pass
    os.mkdir(out_path)

    for name in datasets:
        #for name in ('data/sets/cube',):
        dataset.load_dataset(name)
        name = os.path.split(name)[1]
        d = dict(name=name)
        folder = os.path.join(out_path, name)
        os.mkdir(folder)

        global modelmat
        modelmat = None

        main.initialize()

        import re
        number = int(re.match('.*_z(\d)m_.*', name).groups()[0])
        with open('data/experiments/gt/gt%d.txt' % number) as f:
            GT = grid.gt2grid(f.read())
        grid.initialize_with_groundtruth(GT)

        total = 0
        output = []
        try:
            while 1:
                try:
                    dataset.advance()
                except (IOError, ValueError):
                    break
                if dataset.frame_num % 30 == 0:
                    print name, dataset.frame_num
                t1 = time.time()
                once()
                t2 = time.time()
                total += t2 - t1
                output.append((main.R_correct.copy(), grid.occ.copy()))
        except Exception as e:
            print e

        d['frames'] = dataset.frame_num
        d['time'] = total
        d['output'] = output

        with open(os.path.join(folder, 'output.pkl'), 'w') as f:
            pickle.dump(d, f)
        with open(os.path.join(folder, 'final_output.txt'), 'w') as f:
            f.write(grid.grid2gt(grid.occ))
def run_grid():
    datasets = glob.glob('data/sets/study_*')

    try:
        os.mkdir(out_path)
    except OSError:
        print "Couldn't create output directory [%s], it may already exist." % out_path
        print "Remove it and try again."
        return False

    for name in datasets:
        dataset.load_dataset(name)
        name = os.path.split(name)[1]
        d = dict(name=name)
        folder = os.path.join(out_path, name)
        os.mkdir(folder)

        main.initialize()

        import re
        number = int(re.match('.*_z(\d)m_.*', name).groups()[0])
        with open('data/experiments/gt/gt%d.txt' % number) as f:
            GT = grid.gt2grid(f.read())
        grid.initialize_with_groundtruth(GT)

        total = 0
        output = []
        try:
            while 1:
                try:
                    dataset.advance()
                except (IOError, ValueError):
                    break
                if dataset.frame_num % 30 == 0:
                    print name, dataset.frame_num
                t1 = time.time()
                once()
                t2 = time.time()
                total += t2 - t1
                output.append((main.R_correct.copy(), grid.occ.copy()))
        except Exception as e:
            print e

        d['frames'] = dataset.frame_num
        d['time'] = total
        d['output'] = output

        with open(os.path.join(folder, 'output.pkl'), 'w') as f:
            pickle.dump(d, f)
        with open(os.path.join(folder, 'final_output.txt'), 'w') as f:
            f.write(grid.grid2gt(grid.occ))
def start(dset=None, frame_num=0):
    main.initialize()
    if not FOR_REAL:
        if dset is None:
            dataset.load_random_dataset()
        else:
            dataset.load_dataset(dset)
        while dataset.frame_num < frame_num:
            dataset.advance()
    else:
        config.load('data/newest_calibration')
        opennpy.align_depth_to_rgb()
    dataset.setup_opencl()
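A minimal driver sketch for the recorded-dataset path above, assuming FOR_REAL is False, that the blockplayer modules (dataset, main, grid) are importable, and that once() processes the current frame as it does in run_grid(); the dataset path below is hypothetical:

if __name__ == '__main__':
    # Hypothetical recording name; any folder under data/sets/ would do
    start('data/sets/study_user1_z1m_add', frame_num=0)
    while True:
        try:
            dataset.advance()
        except (IOError, ValueError):
            # End of the recorded dataset
            break
        once()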
def block_setup(self):
    """Initialize blockplayer stuff"""
    glxcontext.makecurrent()
    main.initialize()
    config.load('data/newest_calibration')
    opennpy.align_depth_to_rgb()
    dataset.setup_opencl()
    self.blocks = None
    self.block_loop_quit = False
    self.block_initialized = True
    self.block_loop()
def start(dset=None, frame_num=0):
    main.initialize()

    # Load the target block model for the collaboration experiment
    #with open('data/experiments/collab/2011.txt') as f:
    global target_model
    with open('data/experiments/collab/block.txt') as f:
        target_model = grid.gt2grid(f.read())
    #grid.initialize_with_groundtruth(GT)

    if not FOR_REAL:
        if dset is None:
            dataset.load_random_dataset()
        else:
            dataset.load_dataset(dset)
        while dataset.frame_num < frame_num:
            dataset.advance()
    else:
        config.load('data/newest_calibration')
        opennpy.align_depth_to_rgb()
    dataset.setup_opencl()
def start():
    main.initialize()
    config.load('data/newest_calibration')
    opennpy.align_depth_to_rgb()
    dataset.setup_opencl()