import pickle
import time
import urllib.error

import numpy as np
import simplejson
from allensdk.api.queries.mouse_connectivity_api import MouseConnectivityApi


def __init__(self, mcc, experiment_id, directory, brain_seg_data_dir, parent_struct_id,
             experiment_fields_to_save, details, logger, default_struct_id=997):
    self.experiment_fields_to_save = experiment_fields_to_save
    self.default_struct_id = default_struct_id
    self.parent_struct_id = parent_struct_id
    self.brain_seg_data_dir = brain_seg_data_dir
    self.directory = directory
    self.mcc = mcc
    self.id = experiment_id
    mapi = MouseConnectivityApi()
    # Retry until the experiment details can be fetched; transient JSON and
    # network errors are caught together as a tuple.
    while True:
        try:
            self.details = {**details, **(mapi.get_experiment_detail(self.id)[0])}
            break
        except (simplejson.errors.JSONDecodeError, urllib.error.URLError):
            time.sleep(1.0)
    self.logger = logger
    self.subimages = {i['section_number']: i for i in self.details['sub_images']}
    self.seg_data = np.load(f'{self.brain_seg_data_dir}/{self.id}/{self.id}-sections.npz')['arr_0']
    self.structure_tree = self.mcc.get_structure_tree()
    self.structure_ids = self.get_requested_structure_children()
    with open(f'{self.directory}/bboxes.pickle', "rb") as f:
        bboxes = pickle.load(f)
    self.bboxes = {k: v for k, v in bboxes.items() if v}
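
The retry loop in the constructor above can be factored into a small helper. The sketch below is illustrative only: the fetch_with_retry name and the max_attempts bound are assumptions, not part of the original code.

def fetch_with_retry(fetch, *args, delay=1.0, max_attempts=None):
    """Call fetch(*args) until it succeeds, retrying on transient errors."""
    attempts = 0
    while True:
        try:
            return fetch(*args)
        except (simplejson.errors.JSONDecodeError, urllib.error.URLError):
            attempts += 1  # hypothetical bound; the original retries forever
            if max_attempts is not None and attempts >= max_attempts:
                raise
            time.sleep(delay)


# e.g. details = fetch_with_retry(mapi.get_experiment_detail, experiment_id)[0]
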
Example #2
import os

import numpy as np
import scipy.ndimage as ndi
import SimpleITK as sitk
from allensdk.api.queries.mouse_connectivity_api import MouseConnectivityApi


class ExperimentSectionData(object):
    def __init__(self,
                 mcc,
                 experiment_id,
                 output_dir,
                 anno,
                 meta,
                 rsp,
                 logger,
                 zoom=8,
                 remove_transform_data=True):
        self.remove_transform_data = remove_transform_data
        self.output_dir = output_dir
        os.makedirs(output_dir, exist_ok=True)
        self.mcc = mcc
        self.mapi = MouseConnectivityApi()
        self.anno, self.meta = anno, meta
        self.rsp = rsp
        # zoom follows the convention where 8 means full resolution;
        # self.zoom is the number of halvings applied to the section images.
        self.zoom = 8 - zoom
        self.id = experiment_id
        assert zoom >= 0
        self.details = self.mapi.get_experiment_detail(self.id)
        image_resolution = self.details[0]['sub_images'][0]['resolution']
        # Pixels per micron in the section images, and the in-plane footprint
        # of one reference-space voxel at the requested zoom level.
        self.two_d = 1.0 / image_resolution
        self.size = self.mcc.resolution * self.two_d / (2**self.zoom)
        self.dims = (self.details[0]['sub_images'][0]['height'] //
                     (2**self.zoom),
                     self.details[0]['sub_images'][0]['width'] //
                     (2**self.zoom))
        self.root_points = np.array(np.where(self.anno != 0)).T
        self.logger = logger
        self.logger.info(
            f"Initializing displacement transform data for {self.id}...")
        self.__init_transform__()
        self.logger.info(
            f"Performing displacement transformation for {self.id}...")
        self.__init_transformed_points__()

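    # Compose the experiment's affine parameters and deformation field
    # (aff_param.txt / dfmfld.mhd in the output directory) into a single
    # SimpleITK transform that maps reference-space coordinates into the
    # experiment's section images.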
    def __init_transform__(self):
        temp = sitk.ReadImage(f'{self.output_dir}/dfmfld.mhd',
                              sitk.sitkVectorFloat64)
        dfmfld_transform = sitk.DisplacementFieldTransform(temp)

        temp = self.mcc.get_affine_parameters(
            self.id,
            direction='trv',
            file_name=f'{self.output_dir}/aff_param.txt')
        aff_trans = sitk.AffineTransform(3)
        aff_trans.SetParameters(temp.flatten())

        self.transform = sitk.Transform(3, sitk.sitkComposite)
        self.transform.AddTransform(aff_trans)
        self.transform.AddTransform(dfmfld_transform)

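    # Push the annotated reference-space voxels through the transform, then
    # convert x/y from microns to downsampled image pixels and z to a section
    # index (assuming 100 um section spacing).  next_points shifts each point
    # by one voxel footprint so neighbouring pixels are filled as well.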
    def __init_transformed_points__(self):
        self.transformed_points = self.__transform_points__(
            self.transform,
            self.root_points.astype(float) * self.mcc.resolution)
        self.transformed_points[..., :2] *= self.two_d / (2**self.zoom)
        self.transformed_points[..., 2] /= 100
        self.next_points = self.transformed_points.copy()
        self.next_points[..., :2] += self.size
        self.transformed_points = np.round(self.transformed_points).astype(int)
        self.next_points = np.round(self.next_points).astype(int)

    @staticmethod
    def __transform_points__(composite_transform, points):
        return np.array(list(map(composite_transform.TransformPoint, points)))

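    # Rasterize the transformed annotation into a (height, width, sections)
    # volume in image coordinates, fill holes structure by structure, and
    # save the result as {experiment_id}-sections.npz in the output directory.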
    def create_section_data(self):
        first_section = np.min(self.transformed_points[..., -1])
        last_section = np.max(self.transformed_points[..., -1])
        result = np.zeros((*self.dims, last_section + 1), dtype=np.int32)

        self.logger.info(f"Transferring segmentation data for {self.id}...")
        transformed_indices = tuple(
            self.transformed_points.squeeze().T.tolist())
        next_indices = tuple(self.next_points.squeeze().T.tolist())
        original_indices = tuple(self.root_points.squeeze().T.tolist())
        result[transformed_indices[1], transformed_indices[0],
               transformed_indices[2]] = self.anno[original_indices]
        result[next_indices[1], next_indices[0],
               next_indices[2]] = self.anno[original_indices]

        structures = np.unique(result).tolist()
        structures = list(set(structures).difference({0}))
        # Sort structure ids by ontology depth (fewest ancestors first) so
        # that child structures overwrite their parents in the loop below.
        sorted_indx = np.argsort(
            [len(a) for a in self.rsp.structure_tree.ancestor_ids(structures)])
        structures = np.array(structures)[sorted_indx].tolist()

        new_result = np.zeros_like(result)

        self.logger.info(f"Filling holes for {self.id}...")
        for i, struct in enumerate(structures):
            mask = result == struct
            mask = ndi.binary_closing(
                ndi.binary_fill_holes(mask).astype(np.int32)).astype(np.int32)
            new_result[mask != 0] = struct

        self.logger.info(f"Saving segmentation data for {self.id}...")
        np.savez_compressed(f"{self.output_dir}/{self.id}-sections",
                            new_result)

    def cleanup(self):
        if self.remove_transform_data:
            os.remove(f'{self.output_dir}/dfmfld.mhd')
            os.remove(f'{self.output_dir}/dfmfld.raw')
            os.remove(f'{self.output_dir}/aff_param.txt')
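
A minimal usage sketch for ExperimentSectionData, assuming the inputs come from allensdk's MouseConnectivityCache. The experiment id, output directory, and the use of the raw annotation volume as anno are illustrative assumptions, not taken from the original code, and the deformation field (dfmfld.mhd/.raw) is expected to already be present in the output directory.

if __name__ == "__main__":
    import logging

    from allensdk.core.mouse_connectivity_cache import MouseConnectivityCache

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger("sections")

    mcc = MouseConnectivityCache(resolution=25)
    anno, meta = mcc.get_annotation_volume()  # (annotation volume, metadata)
    rsp = mcc.get_reference_space()

    experiment_id = 123456789  # hypothetical id; substitute a real experiment
    exp = ExperimentSectionData(mcc, experiment_id, "./output", anno, meta,
                                rsp, logger, zoom=8)
    exp.create_section_data()
    exp.cleanup()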