Example #1
0
class LineSetUp(unittest.TestCase):
    def setUp(self):
        """
        Set up a problem instance where the expected solution is a
        straight line spanning multiple cores, plus a set of off-line
        "wrong" candidates that the solver should not select.

        Writes all candidates to a fresh DB collection and pre-connects
        candidates closer than the distance threshold.
        """
        # 100 collinear points along z (x=y=0), all oriented along z.
        self.line_points = [np.array([0., 0., 10. + j]) for j in range(100)]
        self.line_orientations = [np.array([0., 0., 1.])] * len(
            self.line_points)
        self.line_start = self.line_points[0]
        self.line_end = self.line_points[-1]

        # 30 distractor candidates: shifted to y=3 with an orthogonal
        # (x-axis) orientation, at random z positions on the line range.
        self.wrong_candidate_positions = [
            np.array([0., 3., 10. + j]) for j in np.random.randint(0, 100, 30)
        ]
        self.wrong_candidate_orientations = [np.array([1.0, 0.0, 0.0])] * len(
            self.wrong_candidate_positions)

        self.candidates = []
        for i in range(len(self.line_points)):
            candidate = MtCandidate(self.line_points[i],
                                    self.line_orientations[i],
                                    identifier=i + 1,
                                    partner_identifier=-1)
            self.candidates.append(candidate)

        # Identifiers of the wrong candidates continue after the line ones.
        i0 = len(self.line_points) + 1
        for j in range(len(self.wrong_candidate_positions)):
            candidate = MtCandidate(self.wrong_candidate_positions[j],
                                    self.wrong_candidate_orientations[j],
                                    identifier=i0 + j,
                                    partner_identifier=-1)
            # BUG FIX: these candidates were previously constructed but
            # never collected, so they never reached the database and the
            # test could not exercise rejection of off-line candidates.
            self.candidates.append(candidate)

        try:
            self.name_db = "unittest"
            self.collection = "Line"
            self.db = DB()
            self.client = self.db.get_client(self.name_db,
                                             self.collection,
                                             overwrite=True)
            # Recreate (i.e. wipe) the collection again on test teardown.
            self.addCleanup(self.db.get_client, self.name_db, self.collection,
                            True)
        except Exception as e:
            # FIX: print() function form (valid on py2 and py3) and
            # corrected the "befoer" typo in the message.
            print("Make sure that a DB instance is running before executing the test suite")
            raise e

        self.db.write_candidates(name_db=self.name_db,
                                 prob_map_stack_chunk=None,
                                 offset_chunk=None,
                                 gs=None,
                                 ps=None,
                                 voxel_size=[1., 1., 1.],
                                 id_offset=0,
                                 collection=self.collection,
                                 overwrite=True,
                                 candidates=self.candidates)

        # Region of interest covering the whole line (z in [0, 120]).
        self.roi_x = {"min": 0, "max": 20}
        self.roi_y = {"min": 0, "max": 20}
        self.roi_z = {"min": 0, "max": 120}

        # 3.5 > 3.0, so the y=3 distractors are within connection range
        # of the line and must be rejected by the solver, not by the graph.
        self.db.connect_candidates(name_db=self.name_db,
                                   collection=self.collection,
                                   x_lim=self.roi_x,
                                   y_lim=self.roi_y,
                                   z_lim=self.roi_z,
                                   distance_threshold=3.5)
Example #2
0
def write_candidate_graph(max_chunks,
                          max_dset,
                          pm_chunks,
                          pm_dset,
                          name_db,
                          collection,
                          db_credentials,
                          distance_threshold,
                          voxel_size,
                          cores,
                          cf_lists,
                          volume_offset,
                          overwrite=False,
                          mp=True):
    """
    Build the candidate graph in the database in three stages:

    1. Extract maxima candidates from each HDF5 chunk in ``max_chunks``
       (dataset ``max_dset``) and write them to ``collection`` in
       ``name_db``.
    2. Connect candidates closer than ``distance_threshold``, processed
       per conflict-free core list in ``cf_lists`` — in a process pool
       when ``mp`` is True, sequentially otherwise.
    3. Add edge costs from the probability-map chunks in ``pm_chunks``
       (dataset ``pm_dset``).

    :param cores: indexable collection of core objects providing
        ``x_lim_context``/``y_lim_context``/``z_lim_context``.
    :param volume_offset: offset of the whole volume, forwarded to
        ``add_edge_cost``.
    :param overwrite: recreate the target collection before writing.
    """
    logger.info("Extract candidates...")
    db = DB(db_credentials)
    id_offset = 1

    # Overwrite (recreate) the collection if requested:
    graph = db.get_collection(name_db, collection, overwrite=overwrite)

    for n_chunk, chunk in enumerate(max_chunks):
        logger.info("Extract chunk {}/{}...".format(n_chunk, len(max_chunks)))

        # FIX: context manager instead of bare open/close (no leak on
        # error), and list(...) so the result is indexable on Python 3,
        # where .attrs.items() is a non-indexable mapping view.
        with h5py.File(chunk, "r") as f:
            attrs = list(f[max_dset].attrs.items())

        # NOTE(review): relies on the chunk-limit attribute being the
        # second stored attribute — fragile; confirm against the writer.
        chunk_limits = attrs[1][1]
        offset_chunk = [
            chunk_limits[0][0], chunk_limits[1][0], chunk_limits[2][0]
        ]

        candidates = extract_maxima_candidates(chunk, max_dset, offset_chunk,
                                               id_offset)

        id_offset_tmp = db.write_candidates(name_db,
                                            collection,
                                            candidates,
                                            voxel_size,
                                            overwrite=False)

        # Sanity check: every candidate written so far carries a
        # "selected" field.
        assert (graph.find({
            "selected": {
                "$exists": True
            }
        }).count() == id_offset_tmp)

        # Next chunk's identifiers continue after this chunk's.
        id_offset = id_offset_tmp + 1

    # Don't forward SIGINT to child processes: temporarily ignore it
    # while the pool forks, then restore the original handler.
    sigint_handler = signal.signal(signal.SIGINT, signal.SIG_IGN)
    pool = multiprocessing.Pool()
    signal.signal(signal.SIGINT, sigint_handler)

    bound_connect_candidates_alias = functools.partial(
        connect_candidates_alias, db)

    logger.info("Connect candidates...")
    try:
        # Each cf_core_ids list holds cores that can be processed
        # concurrently without write conflicts.
        for cf_core_ids in cf_lists:
            logger.info("Connecting {}".format(cf_core_ids))

            results = []
            if mp:
                for core_id in cf_core_ids:
                    logger.info("Add context {} to pool (mp: {})".format(
                        core_id, mp))
                    core = cores[core_id]
                    results.append(
                        pool.apply_async(bound_connect_candidates_alias, (
                            name_db,
                            collection,
                            core.x_lim_context,
                            core.y_lim_context,
                            core.z_lim_context,
                            distance_threshold,
                        )))

                # Propagate worker exceptions and stay interruptible;
                # timeout of 3 days so SIGINT is not blocked forever.
                for result in results:
                    result.get(60 * 60 * 24 * 3)
            else:
                for core_id in cf_core_ids:
                    core = cores[core_id]
                    results.append(
                        db.connect_candidates(
                            name_db,
                            collection,
                            core.x_lim_context,
                            core.y_lim_context,
                            core.z_lim_context,
                            distance_threshold,
                        ))
    finally:
        pool.terminate()
        pool.join()

    logger.info("Add edge costs...")
    for n_chunk, chunk in enumerate(pm_chunks):
        logger.info("Work on chunk {}/{}...".format(n_chunk, len(pm_chunks)))

        # FIX: dropped a redundant per-chunk HDF5 read that materialized
        # the entire pm dataset only to compute unused attrs/shape
        # locals; add_edge_cost receives the chunk path and reads what
        # it needs itself.
        db.add_edge_cost(name_db, collection, voxel_size, volume_offset, chunk,
                         pm_dset)