def create_LZ4Dict_wait_for_3s_then_close():
    """Create and lock the test LZ4Dict, hold it for 3 s, then flush it.

    Opens the storage at ``test_p`` writable (acquiring its lock), writes a
    dummy 5x5 array under key 1, sleeps 3 s to keep the lock held, and
    finally pushes the contents back to disk (releasing the lock).
    """
    storage = CompressedStorage(test_p, read_only=False)
    storage[1] = np.ones((5, 5))
    time.sleep(3)
    storage.push()
def helper_func_sd(paths):
    """Return the number of sample locations stored at each path.

    For every chunk directory in *paths*, opens its ``locations.pkl``
    storage (read-only, lock disabled) and, for each stored entry,
    records the length of the concatenated location array.
    """
    counts = []
    for chunk_path in paths:
        storage = CompressedStorage(chunk_path + '/locations.pkl',
                                    read_only=True, disable_locking=True)
        counts.extend(len(np.concatenate(entry))
                      for entry in storage.values())
    return counts
def create_success_expected(a, b, q3):
    """Worker: open the test storage read-only; success is expected.

    Tries to open ``test_p`` with a 1 s lock timeout. Puts 0 on *q3*
    when the open succeeds (the expected outcome) and 1 when a
    RuntimeError (lock timeout) is raised instead.

    :param a: unused; present to match the worker-pool call signature.
    :param b: unused; present to match the worker-pool call signature.
    :param q3: queue receiving the 0/1 result code.
    """
    # Removed dead code from the original: a no-op time.sleep(0) and an
    # unused start = time.time() timestamp.
    try:
        CompressedStorage(test_p, read_only=True, timeout=1)
        logging.info('PASSED: create_success_expected')
        q3.put(0)
    except RuntimeError:
        logging.warning('FAILED: create_success_expected')
        q3.put(1)
def create_fail_expected_runtime_error_at_2s(a, b, q2):
    """Worker: open the test storage read-only; a lock timeout is expected.

    Tries to open ``test_p`` with a 2 s lock timeout while another process
    holds the lock. Puts 0 on *q2* when the expected RuntimeError is
    raised and 1 when the open unexpectedly succeeds.

    :param a: unused; present to match the worker-pool call signature.
    :param b: unused; present to match the worker-pool call signature.
    :param q2: queue receiving the 0/1 result code.
    """
    # Removed dead code from the original: a commented-out debug line,
    # a no-op time.sleep(0) and an unused start = time.time() timestamp.
    try:
        CompressedStorage(test_p, read_only=True, timeout=2)
        logging.warning('FAILED: create_fail_expected_runtime_error_at_2s')
        q2.put(1)
    except RuntimeError:
        logging.info('PASSED: create_fail_expected_runtime_error_at_2s')
        q2.put(0)
def create_fail_expected_runtime_error_at_1s(a, b, q1):
    """Worker: open the test storage read-only; a lock timeout is expected.

    Tries to open ``test_p`` with a 1 s lock timeout while another process
    holds the lock. Puts 0 on *q1* when the expected RuntimeError is
    raised and 1 when the open unexpectedly succeeds.

    :param a: unused; present to match the worker-pool call signature.
    :param b: unused; present to match the worker-pool call signature.
    :param q1: queue receiving the 0/1 result code.
    """
    # Removed dead code from the original: a commented-out debug line,
    # a no-op time.sleep(0) and an unused start = time.time() timestamp.
    try:
        # timeout sets the maximum time before failing, not max_delay
        CompressedStorage(test_p, read_only=True, timeout=1, max_delay=1)
        logging.warning('FAILED: create_fail_expected_runtime_error_at_1s')
        q1.put(1)
    except RuntimeError:
        logging.info('PASSED: create_fail_expected_runtime_error_at_1s')
        q1.put(0)
working_dir = so_kwargs['working_dir'] global_params.wd = working_dir # TODO: preprocess meshes in case they dont exist and then load mesh dict next to the attribute dict for p in so_chunk_paths: # get SV IDs stored in this storage attr_dc_p = p + "/attr_dict.pkl" mesh_dc_p = p + "/mesh.pkl" ad = AttributeDict(attr_dc_p, disable_locking=True) md = MeshStorage(mesh_dc_p, disable_locking=True) svixs = list(ad.keys()) # replace SO objects by using rep_coord and IDs stored in the attribute dict sd = sos_dict_fact(svixs, **so_kwargs) sos = init_sos(sd) # compute locations and use already processed meshes loc_dc_p = p + "/locations.pkl" loc_dc = CompressedStorage(loc_dc_p, disable_locking=True) for so in sos: try: ix = so.id if not ix in md.keys(): verts = so.mesh[1].reshape(-1, 3) else: verts = md[ix][1].reshape(-1, 3) if len(verts) == 0: coords = np.array([ so.rep_coord, ], dtype=np.float32) else: if global_params.config.use_new_renderings_locs: coords = generate_rendering_locs(verts, 2000).astype(np.float32)
# Body of a prediction worker (its `def` line is outside this view):
# for every chunk path, loads stored views, runs the model on them and
# writes per-object probabilities into the chunk's attribute dict.
working_dir = so_kwargs['working_dir']
global_params.wd = working_dir  # adapt working dir
# Pop control kwargs so the remainder of pred_kwargs can be forwarded
# verbatim to predict_views() below.
woglia = pred_kwargs["woglia"]
del pred_kwargs["woglia"]
pred_key = pred_kwargs["pred_key"]
if 'raw_only' in pred_kwargs:
    raw_only = pred_kwargs['raw_only']
    del pred_kwargs['raw_only']
else:
    raw_only = False
model = NeuralNetworkInterface(**model_kwargs)
for p in so_chunk_paths:
    # Pick the glia-removed view file when requested, else the plain one.
    view_dc_p = p + "/views_woglia.pkl" if woglia else p + "/views.pkl"
    view_dc = CompressedStorage(view_dc_p, disable_locking=True)
    svixs = list(view_dc.keys())
    if len(svixs) == 0:
        # Chunk holds no views; nothing to predict here.
        continue
    views = list(view_dc.values())
    if raw_only and views[0].shape[1] != 1:
        # NOTE(review): keeps only channel 1 of each view when raw-only
        # prediction is requested — presumably the raw-data channel;
        # confirm against the view-rendering code.
        for ii in range(len(views)):
            views[ii] = views[ii][:, 1]
    # Rebuild segmentation objects for the stored IDs and run inference.
    sd = sos_dict_fact(svixs, **so_kwargs)
    sos = init_sos(sd)
    probas = predict_views(model, views, sos, return_proba=True,
                           **pred_kwargs)
    # Persist one probability entry per object under pred_key.
    attr_dc_p = p + "/attr_dict.pkl"
    ad = AttributeDict(attr_dc_p, disable_locking=True)
    for ii in range(len(sos)):
        ad[sos[ii].id][pred_key] = probas[ii]
    ad.push()