def runTest(self):
    """Normalized crops keep the raw crop's shape and lie in [-1, 1].

    Pulls one crop from the FAFB volume, normalizes it, and checks the
    normalization contract: same shape, values bounded by [-1, 1].
    """
    fafb = Fafb()
    p = (217400, 164242, 438817)  # z, y, x convention
    crop_size = (16, 160, 160)
    p_crop = fafb.get_crops([p], crop_size)
    normalized_p_crop = fafb.normalize(p_crop)
    # Normalization must not change the array shape.
    self.assertEqual(np.shape(normalized_p_crop), np.shape(p_crop))
    # assertGreaterEqual/assertLessEqual report the offending value on failure,
    # unlike the original bare assertTrue(...) comparisons.
    self.assertGreaterEqual(np.min(normalized_p_crop), -1)
    self.assertLessEqual(np.max(normalized_p_crop), 1)
def runTest(self):
    """Raw crops are 8-bit-range, batched singly, and match the requested size.

    Requests one crop and checks: values within [0, 255] (raw EM data is
    uint8), shape (1, z, y, x) with the trailing axes equal to crop_size.
    """
    fafb = Fafb()
    p = (217400, 164242, 438817)  # z, y, x convention
    crop_size = (16, 160, 160)
    p_crop = fafb.get_crops([p], crop_size)
    # assertLessEqual/assertGreaterEqual/assertEqual give informative failure
    # messages, unlike the original assertTrue(...) comparisons.
    self.assertLessEqual(np.max(p_crop), 255)
    self.assertGreaterEqual(np.min(p_crop), 0)
    # One requested position -> one crop, 4D layout (batch, z, y, x).
    self.assertTrue(np.all(crop_size == np.shape(p_crop)[1:]))
    self.assertEqual(1, np.shape(p_crop)[0])
    self.assertEqual(4, len(np.shape(p_crop)))
def runTest(self):
    """Fafb exposes the expected name, N5 container, dataset path, voxel size."""
    fafb = Fafb()
    # assertEqual instead of assertTrue(x == y): on failure it prints both
    # operands, which the original form did not.
    self.assertEqual(fafb.name, "FAFB")
    self.assertEqual(
        fafb.container,
        "/nrs/saalfeld/FAFB00/v14_align_tps_20170818_dmg.n5")
    self.assertEqual(fafb.dataset, "volumes/raw/s0")
    # voxel_size is array-like; element-wise compare then reduce with np.all.
    self.assertTrue(np.all(fafb.voxel_size == np.array([40, 4, 4])))
def __init__(self):
    """Wire a FAFB dataset to the Flywire transform service.

    Builds the Fafb dataset, attaches it to the spine.janelia.org
    transform service under the "flywire_v1" dataset id, hands both to the
    parent constructor, and keeps a CATMAID helper on the instance.
    """
    fafb_dataset = Fafb()
    flywire_service = Flywire(
        dataset=fafb_dataset,
        api_url="https://spine.janelia.org/app/transform-service",
        api_dataset="flywire_v1",
    )
    super().__init__(dataset=fafb_dataset, service=flywire_service)
    self.catmaid = FafbCatmaid()
            # NOTE(review): this chunk starts mid-function — the enclosing
            # def (a job-submission routine, presumably submit_jobs) and the
            # matching `if` branch are outside this view. Indentation below is
            # reconstructed from the mangled source; confirm against the
            # original file.
            execute=False,
            expand=True)
    else:
        # No expansion requested/possible: fall back to the unmodified command.
        cmd = base_cmd
    # Split on single spaces and drop empty tokens (collapses repeated spaces).
    cmd = [c for c in cmd.split(" ") if c != '']
    cmd_string = ""
    for c in cmd:
        cmd_string += str(c) + " "
    print(cmd_string)
    # NOTE(review): shell=True with a string-built command — fine for this
    # trusted, hard-coded cluster submission, but do not feed it untrusted
    # input; prefer Popen(list, shell=False) if this ever takes user data.
    Popen(cmd_string, shell=True)


if __name__ == "__main__":
    # Entry point: configure logging, then launch the prediction jobs on the
    # cluster (2 GPUs, 5 CPUs each) against the FAFB dataset.
    log_config("brain.log")
    submit_jobs(
        db_credentials="/groups/funke/home/ecksteinn/Projects/synex/synisterbrain/db_credentials.ini",
        db_name="synful_synapses",
        collection_name="partners",
        predict_id=3,
        dataset=Fafb(),
        model=FafbModel(),
        n_gpus=2,
        n_cpus=5,
        batch_size=16,
        prefetch_factor=20,
        queue="gpu_any",
        singularity_container=None,
        mount_dirs=["/nrs", "/scratch", "/groups", "/misc"])
def __init__(self):
    """Wire a FAFB dataset to its CATMAID tracing service.

    Constructs the Fafb dataset and a Catmaid service pointed at the
    FAFB v14 tracing API, then delegates to the parent constructor.
    """
    fafb_dataset = Fafb()
    catmaid_service = Catmaid(
        dataset=fafb_dataset,
        api_url="https://neuropil.janelia.org/tracing/fafb/v14/")
    super().__init__(dataset=fafb_dataset, service=catmaid_service)
        # NOTE(review): this chunk starts mid-method — the enclosing def (an
        # item/iteration method of MongoIterator, by the look of the __main__
        # driver below) is outside this view. Indentation is reconstructed;
        # confirm against the original file.
        array_data = None
        # Only read voxel data when the requested ROI lies inside the volume;
        # out-of-bounds requests yield data=None rather than raising.
        if self.data.roi.contains(roi):
            array = self.data[roi]
            array.materialize()
            array_data = array.data.astype(np.float32)
            array_data = self.dataset.normalize(array_data)
        # Optional user transform is applied only to successfully read crops.
        if self.transform is not None and array_data is not None:
            array_data = self.transform(array_data)
        return {"id": synapse_id, "data": array_data}


if __name__ == "__main__":
    # Smoke test: iterate a few documents from the synapse DB over FAFB.
    # Positional args presumably follow MongoIterator's signature
    # (credentials, db, collection, predict_id, dataset, dx, dy, dz,
    # n_gpus, gpu_id, n_cpus, cpu_id) — TODO confirm against the class.
    mongo_em = MongoIterator("/groups/funke/home/ecksteinn/Projects/synex/synister/db_credentials.ini",
                             "synful_synapses",
                             "partners",
                             3,
                             Fafb(),
                             400, 400, 80,
                             1, 0, 1, 0)
    print(Fafb().voxel_size)
    i = 0
    for doc in mongo_em:
        print(doc)
        i += 1
        if i > 2:
            break
            # NOTE(review): this chunk starts mid-call — the constructor (and
            # the callee, presumably MongoIterator) being invoked with these
            # arguments is outside this view. Indentation is reconstructed;
            # confirm against the original file.
            self.db_name,
            self.collection_name,
            self.predict_id,
            self.dataset,
            self.dx,
            self.dy,
            self.dz,
            self.n_gpus,
            self.gpu_id,
            n_cpus=n_cpus,
            cpu_id=cpu_id,
            transform=self.transform_to_tensor)

    def transform_to_tensor(self, data_array):
        # Convert a numpy crop to a torch tensor shaped for model input.
        tensor_array = torch.tensor(data_array)
        # Add channel and batch dim:
        tensor_array = tensor_array.unsqueeze(0).unsqueeze(0)
        return tensor_array


if __name__ == "__main__":
    # Smoke test: pull a few documents from the synapse DB over FAFB.
    # Trailing positional args (400, 400, 80, 2, 1) presumably map to
    # dx, dy, dz, n_gpus, gpu_id — TODO confirm against MongoEM's signature.
    mongo_em = MongoEM(
        "/groups/funke/home/ecksteinn/Projects/synex/synister/db_credentials.ini",
        "synful_synapses",
        "partners",
        3,
        Fafb(),
        400, 400, 80,
        2, 1)
    i = 0
    for doc in mongo_em:
        print(doc)
        i += 1
        if i > 2:
            break