def run(self):
    while not self.kill_received:
        # get a task
        try:
            job = self.work_queue.get_nowait()
        except Queue.Empty:
            break

        start_time = time.time()
        model_dir = job.model_dir
        model_name = job.model_name
        grey_offset = job.grey_offset

        print("Model dir:")
        print(model_dir)
        print("Model Name:")
        print(model_name)

        print("Creating a Scene")
        boxm_batch.init_process("boxmCreateSceneProcess")
        boxm_batch.set_input_string(0, model_dir + "/" + model_name + ".xml")
        boxm_batch.run_process()
        (scene_id, scene_type) = boxm_batch.commit_output(0)
        scene = dbvalue(scene_id, scene_type)

        print("Splitting the scene")
        boxm_batch.init_process("boxmSplitSceneProcess")
        boxm_batch.set_input_from_db(0, scene)
        boxm_batch.run_process()
        (scene_id, scene_type) = boxm_batch.commit_output(0)
        apm_scene = dbvalue(scene_id, scene_type)
        (scene_id, scene_type) = boxm_batch.commit_output(1)
        alpha_scene = dbvalue(scene_id, scene_type)

        print("Save Scene")
        boxm_batch.init_process("boxmSaveSceneRawProcess")
        boxm_batch.set_input_from_db(0, alpha_scene)
        boxm_batch.set_input_string(1, model_dir + "/drishti/alpha_scene")
        boxm_batch.set_input_unsigned(2, 0)
        boxm_batch.set_input_unsigned(3, 1)
        boxm_batch.run_process()

        # free memory
        boxm_batch.clear()

        print("Running time for worker:", self.name)
        print(time.time() - start_time)

        # output exit code in this case
        # important: having a result queue makes execute_jobs wait for all
        # jobs in the queue before exiting
        self.result_queue.put(0)
def run(self):
    while not self.kill_received:
        # get a task
        try:
            job = self.work_queue.get_nowait()
        except Queue.Empty:
            break

        start_time = time.time()
        model_dir = job.model_dir
        ply_file = job.ply_file
        grey_offset = job.grey_offset

        # send this worker's output to its own log file
        boxm_batch.set_stdout('logs/log_' + str(os.getpid()) + ".txt")

        boxm_batch.init_process("boxmCreateSceneProcess")
        boxm_batch.set_input_string(0, model_dir + "/pmvs_scene.xml")
        boxm_batch.run_process()
        (scene_id, scene_type) = boxm_batch.commit_output(0)
        scene = dbvalue(scene_id, scene_type)

        boxm_batch.init_process("boxm_create_scene_from_ply_process")
        boxm_batch.set_input_string(0, ply_file)
        boxm_batch.set_input_from_db(1, scene)
        boxm_batch.set_input_float(2, grey_offset)
        boxm_batch.run_process()
        (scene_id, scene_type) = boxm_batch.commit_output(0)
        scene = dbvalue(scene_id, scene_type)

        print("Save Scene")
        boxm_batch.init_process("boxmSaveSceneRawProcess")
        boxm_batch.set_input_from_db(0, scene)
        boxm_batch.set_input_string(1, model_dir + "/drishti/ply_scene")
        boxm_batch.set_input_unsigned(2, 0)
        boxm_batch.set_input_unsigned(3, 1)
        boxm_batch.run_process()

        # free memory
        boxm_batch.reset_stdout()
        boxm_batch.clear()

        print("Running time for worker:", self.name)
        print(time.time() - start_time)

        # output exit code in this case
        # important: having a result queue makes execute_jobs wait for all
        # jobs in the queue before exiting
        self.result_queue.put(0)
def run(self):
    while not self.kill_received:
        # get a task
        try:
            job = self.work_queue.get_nowait()
        except Queue.Empty:
            break

        start_time = time.time()

        print("Creating a Scene")
        boxm_batch.init_process("boxmCreateSceneProcess")
        boxm_batch.set_input_string(0, job.input_scene_path)
        boxm_batch.run_process()
        (scene_id, scene_type) = boxm_batch.commit_output(0)
        scene = dbvalue(scene_id, scene_type)

        print("Save Scene")
        boxm_batch.init_process("boxmSaveOccupancyRawProcess")
        boxm_batch.set_input_from_db(0, scene)
        boxm_batch.set_input_string(1, job.output_scene_path)
        boxm_batch.set_input_unsigned(2, 0)
        boxm_batch.set_input_unsigned(3, 1)
        boxm_batch.run_process()

        print("Running time for worker:", self.name)
        print(time.time() - start_time)
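# The run() methods above all assume the same job-queue scaffolding: a job
# object carrying the per-scene parameters, a threading.Thread subclass that
# owns work_queue, result_queue and kill_received, and a driver that fills
# the queue and then waits for one result per job.  The sketch below is
# illustrative only; the names scene_job, scene_worker and execute_jobs are
# placeholders, not taken from the original scripts.
import threading
import Queue  # Python 2 module, matching the Queue.Empty used above


class scene_job(object):
    def __init__(self, model_dir, model_name, grey_offset):
        self.model_dir = model_dir
        self.model_name = model_name
        self.grey_offset = grey_offset


class scene_worker(threading.Thread):
    def __init__(self, work_queue, result_queue):
        threading.Thread.__init__(self)
        self.work_queue = work_queue
        self.result_queue = result_queue
        self.kill_received = False
    # run() would be one of the methods shown above


def execute_jobs(jobs, num_procs=4):
    work_queue = Queue.Queue()
    result_queue = Queue.Queue()
    for job in jobs:
        work_queue.put(job)
    for _ in range(num_procs):
        scene_worker(work_queue, result_queue).start()
    # waiting on the result queue keeps the driver alive until every job has
    # reported back, which is why each run() ends with result_queue.put(0)
    results = []
    while len(results) < len(jobs):
        results.append(result_queue.get())
    return results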
def runprobe(event):
    posx = event.x
    posy = event.y
    array2d = list()
    plt.figure(1)
    plt.clf()

    print("Run ray tracing")
    boxm_batch.init_process("boxmOclRunRenderProbeProcess")
    boxm_batch.set_input_from_db(0, scene_mgr)
    boxm_batch.set_input_from_db(1, cam)
    boxm_batch.set_input_unsigned(2, posx)
    boxm_batch.set_input_unsigned(3, posy)
    boxm_batch.set_input_float(4, image2.getpixel((posx, posy)) / 255.0)
    boxm_batch.run_process()

    for i in range(0, 10):
        (scene_id, scene_type) = boxm_batch.commit_output(i)
        array_1d = dbvalue(scene_id, scene_type)
        vallist = boxm_batch.get_bbas_1d_array_float(scene_id)
        array2d.append(vallist)

    for i in [1, 2, 3, 5]:
        plt.plot(array2d[0], array2d[i])
    # plt.plot(array2d[0], array2d[7])

    plt.legend(("Omega", "Mean0", "Alpha", "Mean1"), loc="upper left")
    print(image2.getpixel((posx, posy)) / 255.0)
    plt.show()
def neighborchange(event):
    posx = event.x
    posy = event.y
    array2d = list()
    vallist = list()
    plt.figure(1)
    plt.clf()

    for i in (-1, 0):
        for j in (-1, 0):
            print("Run ray tracing")
            boxm_batch.init_process("boxmOclRunRenderProbeProcess")
            boxm_batch.set_input_from_db(0, scene_mgr)
            boxm_batch.set_input_from_db(1, cam)
            boxm_batch.set_input_unsigned(2, posx + i)
            boxm_batch.set_input_unsigned(3, posy + j)
            boxm_batch.set_input_float(4, image2.getpixel((posx, posy)) / 255.0)
            boxm_batch.run_process()
            (scene_id, scene_type) = boxm_batch.commit_output(10)
            x = boxm_batch.get_input_float(scene_id)
            vallist.append(x)

    print(vallist)
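# runprobe() and neighborchange() are matplotlib button-press callbacks; they
# expect a PIL image (image2), an OpenCL scene manager (scene_mgr) and a
# camera (cam) to already exist as globals.  A minimal, assumed wiring is
# sketched below; the image path and figure number are hypothetical.
from PIL import Image
import matplotlib.pyplot as plt

image2 = Image.open("expected_image.tiff")  # hypothetical path
fig = plt.figure(0)
plt.imshow(image2, cmap="gray")
fig.canvas.mpl_connect("button_press_event", runprobe)
# or: fig.canvas.mpl_connect("button_press_event", neighborchange)
plt.show()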
class dbvalue:
    def __init__(self, index, type):
        self.id = index    # unsigned integer
        self.type = type   # string


dir = "/Users/isa/Experiments/super3d/scene/expectedImgs_2"
test_frames = [8, 112, 96, 208]

for frame in test_frames:

    boxm_batch.init_process("vilLoadImageViewProcess")
    boxm_batch.set_input_string(0, dir + "/predicted_img_mask_%(#)05d.tiff" % {"#": frame})
    boxm_batch.run_process()
    (id, type) = boxm_batch.commit_output(0)
    vis_img = dbvalue(id, type)

    boxm_batch.init_process("vilThresholdImageProcess")
    boxm_batch.set_input_from_db(0, vis_img)
    boxm_batch.set_input_float(1, 0.99)
    boxm_batch.set_input_bool(2, True)
    boxm_batch.run_process()
    (id, type) = boxm_batch.commit_output(0)
    mask_img = dbvalue(id, type)

    boxm_batch.init_process("vilSaveImageViewProcess")
    boxm_batch.set_input_from_db(0, mask_img)
    boxm_batch.set_input_string(1, dir + "/binary_mask_%(#)05d.tiff" % {"#": frame})
    boxm_batch.run_process()
import boxm_batch

boxm_batch.register_processes()
boxm_batch.register_datatypes()


class dbvalue:
    def __init__(self, index, type):
        self.id = index    # unsigned integer
        self.type = type   # string


model_dir = "/Users/isa/Experiments/DowntownBOXM_12_12_4"

print("Creating a Scene")
boxm_batch.init_process("boxmCreateSceneProcess")
boxm_batch.set_input_string(0, model_dir + "/mean_color_scene.xml")
boxm_batch.run_process()
(scene_id, scene_type) = boxm_batch.commit_output(0)
scene = dbvalue(scene_id, scene_type)

print("Save Scene")
boxm_batch.init_process("boxmSaveSceneRawProcess")
boxm_batch.set_input_from_db(0, scene)
boxm_batch.set_input_string(1, model_dir + "/raw_mean_scene")
boxm_batch.set_input_unsigned(2, 0)
boxm_batch.set_input_unsigned(3, 1)
boxm_batch.run_process()
import boxm_batch

boxm_batch.register_processes()
boxm_batch.register_datatypes()


class dbvalue:
    def __init__(self, index, type):
        self.id = index    # unsigned integer
        self.type = type   # string


print("Loading Scene")
boxm_batch.init_process("boxmLoadSceneProcess")
boxm_batch.set_input_string(0, "D:\\vj\\data\\CapitolSiteHigh\\boxm\\scene.xml")
boxm_batch.set_input_string(1, "apm_mog_grey")
boxm_batch.run_process()
(scene_id, scene_type) = boxm_batch.commit_output(0)
scene = dbvalue(scene_id, scene_type)

print("Loading camera")
boxm_batch.init_process("vpglLoadPerspectiveCameraProcess")
boxm_batch.set_input_string(0, "camera_00116.txt")
boxm_batch.run_process()
(cam_id, cam_type) = boxm_batch.commit_output(0)
camera = dbvalue(cam_id, cam_type)

print("Rendering Image")
boxm_batch.init_process("boxmRenderExpectedProcess")
boxm_batch.set_input_from_db(0, scene)
boxm_batch.set_input_from_db(1, camera)
boxm_batch.set_input_unsigned(2, 1280)
boxm_batch.set_input_unsigned(3, 720)
boxm_batch.run_process()
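# The render process above leaves its result on the batch output stack; a
# typical follow-up (assumed here, not part of the original script) is to
# commit the expected image and write it to disk with vilSaveImageViewProcess,
# the same process used by the mask script earlier.  The output index and
# file name are assumptions.
(img_id, img_type) = boxm_batch.commit_output(0)
expected_img = dbvalue(img_id, img_type)

boxm_batch.init_process("vilSaveImageViewProcess")
boxm_batch.set_input_from_db(0, expected_img)
boxm_batch.set_input_string(1, "expected_00116.tiff")
boxm_batch.run_process()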
keys_available = 0  # if keys have already been extracted, just load them

# after finding F between a pair, all matches that are off by 0.6% of
# max(image_width, image_height) pixels are considered outliers
outlier_threshold_percentage = 0.6

# for an image pair to be connected in the image connectivity graph
min_number_of_matches = 16

imgs = []
sizes = []
for i in range(0, img_cnt, 1):
    print("Loading Image")
    boxm_batch.init_process("vilLoadImageViewProcess")
    boxm_batch.set_input_string(0, img_path + img_name % i)
    boxm_batch.run_process()
    (id, type) = boxm_batch.commit_output(0)
    image = dbvalue(id, type)
    imgs.append(image)

    boxm_batch.init_process("vilImageSizeProcess")
    boxm_batch.set_input_from_db(0, image)
    boxm_batch.run_process()
    (ni_id, type) = boxm_batch.commit_output(0)
    (nj_id, type) = boxm_batch.commit_output(1)
    ni = boxm_batch.get_input_unsigned(ni_id)
    nj = boxm_batch.get_input_unsigned(nj_id)

    if ni > nj:
        sizes.append(ni)
    else:
        sizes.append(nj)
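# A small illustrative helper (not in the original script) showing how the
# per-image maximum dimensions collected in `sizes` would turn the percentage
# above into a pixel threshold for a given image pair (i, j):
def outlier_threshold_pixels(i, j):
    return outlier_threshold_percentage / 100.0 * max(sizes[i], sizes[j])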
boxm_batch.register_datatypes()


class dbvalue:
    def __init__(self, index, type):
        self.id = index    # unsigned integer
        self.type = type   # string


print("Loading Scene")
boxm_batch.init_process("boxmLoadSceneProcess")
boxm_batch.set_input_string(0, "D:\\vj\\data\\CapitolSiteHigh\\boxm\\scene.xml")
boxm_batch.set_input_string(1, "apm_mog_grey")
boxm_batch.run_process()
(scene_id, scene_type) = boxm_batch.commit_output(0)
scene = dbvalue(scene_id, scene_type)

print("Loading camera")
boxm_batch.init_process("vpglLoadPerspectiveCameraProcess")
boxm_batch.set_input_string(0, "camera_00116.txt")
boxm_batch.run_process()
(cam_id, cam_type) = boxm_batch.commit_output(0)
camera = dbvalue(cam_id, cam_type)

print("Rendering Image")
boxm_batch.init_process("boxmRenderExpectedProcess")
boxm_batch.set_input_from_db(0, scene)
boxm_batch.set_input_from_db(1, camera)
boxm_batch.set_input_unsigned(2, 1280)
boxm_batch.set_input_unsigned(3, 720)
import boxm_batch

boxm_batch.register_processes()
boxm_batch.register_datatypes()


class dbvalue:
    def __init__(self, index, type):
        self.id = index    # unsigned integer
        self.type = type   # string


print("Creating a Scene")
boxm_batch.init_process("boxmCreateSceneProcess")
boxm_batch.set_params_process("createScene.xml")
boxm_batch.run_process()
(scene_id, scene_type) = boxm_batch.commit_output(0)
scene = dbvalue(scene_id, scene_type)

camera_fnames = "C:/test_images/octree/CapitolSiteHigh/cameras_KRT/camera_%05d.txt"
image_fnames = "C:/test_images/octree/CapitolSiteHigh/video/frame_%05d.png"

import random

min_range = 10
last_i = -min_range
nframes = 145
for x in range(125, nframes, 1):
    print("*************************************************************************************")
    print(x)
    i = random.randint(0, 254)
    # try, try again if this frame is too close to the last
    while (abs(i - last_i) < min_range):