Example #1
    def predict(self, src):
        stride = self.options["stride"]
        sharpen = self.options["sharpen"]
        shrink = self.options["shrink"]
        p_size = self.options["p_size"]
        g_size = self.options["g_size"]
        n_cell = self.options["n_cell"]
        n_tree_eval = self.options["n_tree_eval"]
        nms = self.options.get("nms", False)
        thrs = self.model["thrs"]
        fids = self.model["fids"]
        cids = self.model["cids"]
        edge_bnds = self.model["edge_bnds"]
        edge_pts = self.model["edge_pts"]
        n_seg = self.model["n_seg"]
        segs = self.model["segs"]
        p_rad = p_size / 2
        g_rad = g_size / 2

        pad = cv2.copyMakeBorder(src,
                                 p_rad,
                                 p_rad,
                                 p_rad,
                                 p_rad,
                                 borderType=cv2.BORDER_REFLECT)

        reg_ch, ss_ch = self.get_shrunk_channels(pad)

        if sharpen != 0:
            pad = conv_tri(pad, 1)

        dst = predict_core(pad, reg_ch, ss_ch, shrink, p_size, g_size, n_cell,
                           stride, sharpen, n_tree_eval, thrs, fids, cids,
                           n_seg, segs, edge_bnds, edge_pts)

        if sharpen == 0:
            alpha = 2.1 * stride**2 / g_size**2 / n_tree_eval
        elif sharpen == 1:
            alpha = 1.8 * stride**2 / g_size**2 / n_tree_eval
        else:
            alpha = 1.65 * stride**2 / g_size**2 / n_tree_eval
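        # alpha roughly averages the overlapping votes accumulated per pixel by
        # predict_core: each output pixel is covered by about
        # (g_size / stride)**2 * n_tree_eval patch predictions, and the leading
        # constant (2.1 / 1.8 / 1.65) compensates for the sharpening mode; with
        # illustrative values stride=2, g_size=16, n_tree_eval=4 and sharpen=2
        # this gives alpha = 1.65 * 4 / 256 / 4, roughly 0.0064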

        dst = N.minimum(dst * alpha, 1.0)
        dst = conv_tri(dst, 1)[g_rad:src.shape[0] + g_rad,
                               g_rad:src.shape[1] + g_rad]

        if nms:
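            # estimate a per-pixel orientation from second derivatives of a
            # smoothed copy of the edge map, fold it into [0, pi), and use it
            # to non-maximum-suppress (thin) the edge responses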
            dy, dx = N.gradient(conv_tri(dst, 4))
            _, dxx = N.gradient(dx)
            dyy, dxy = N.gradient(dy)
            orientation = N.arctan2(dyy * N.sign(-dxy) + 1e-5, dxx)
            orientation[orientation < 0] += N.pi

            dst = non_maximum_supr(dst, orientation, 1, 5, 1.02)

        return dst
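
conv_tri, used throughout, appears to be a triangle-filter smoothing helper from this project. A minimal sketch of a standard separable triangle filter, assuming the usual [1, 2, ..., r+1, ..., 2, 1] / (r+1)^2 kernel for integer radii (the project's own implementation may differ, e.g. for fractional radii):

import cv2
import numpy as N

def conv_tri_sketch(src, r):
    # illustrative only: separable triangle filter of integer radius r
    if r <= 0:
        return src
    k = N.concatenate((N.arange(1, r + 2), N.arange(r, 0, -1))).astype(N.float64)
    k /= (r + 1) ** 2
    # reflected borders, matching the padding used in predict above
    return cv2.sepFilter2D(src, -1, k, k, borderType=cv2.BORDER_REFLECT)

smoothed = conv_tri_sketch(N.random.rand(32, 32), 1)
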
    def get_shrunk_channels(self, src):
        shrink = self.options["shrink"]
        n_orient = self.options["n_orient"]
        grd_smooth_rad = self.options["grd_smooth_rad"]
        grd_norm_rad = self.options["grd_norm_rad"]

        luv = rgb2luv(src)
        size = (luv.shape[0] / shrink, luv.shape[1] / shrink)
        channels = [resize(luv, size)]

        for scale in [1.0, 0.5]:
            img = resize(luv, (luv.shape[0] * scale, luv.shape[1] * scale))
            img = conv_tri(img, grd_smooth_rad)

            magnitude, orientation = gradient(img, grd_norm_rad)

            downscale = max(1, int(shrink * scale))
            hist = histogram(magnitude, orientation, downscale, n_orient)

            channels.append(resize(magnitude, size)[:, :, None])
            channels.append(resize(hist, size))

        channels = N.concatenate(channels, axis=2)

        reg_smooth_rad = self.options["reg_smooth_rad"] / float(shrink)
        ss_smooth_rad = self.options["ss_smooth_rad"] / float(shrink)

        if reg_smooth_rad > 1.0:
            reg_ch = conv_tri(channels, int(round(reg_smooth_rad)))
        else:
            reg_ch = conv_tri(channels, reg_smooth_rad)

        if ss_smooth_rad > 1.0:
            ss_ch = conv_tri(channels, int(round(ss_smooth_rad)))
        else:
            ss_ch = conv_tri(channels, ss_smooth_rad)

        return reg_ch, ss_ch
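
For reference, the stack built above holds 3 LUV channels plus, for each of the two scales, one gradient-magnitude channel and n_orient orientation-histogram channels; a small sanity check (n_orient = 4 is only an illustrative value):

def n_shrunk_channels(n_orient):
    # 3 LUV channels + per scale (1.0 and 0.5): 1 magnitude + n_orient histogram bins
    return 3 + 2 * (1 + n_orient)

print(n_shrunk_channels(4))  # -> 13
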
    def merge_trees(self):
        """
        Accumulate trees and merge into final model
        """

        n_tree = self.options["n_tree"]
        g_size = self.options["g_size"]

        if not os.path.exists(self.forest_dir):
            os.makedirs(self.forest_dir)

        forest_path = os.path.join(self.forest_dir, self.forest_name)
        if os.path.exists(forest_path):
            print("Found model, reusing...")
            return

        trees = []
        for i in xrange(n_tree):
            tree_file = self.tree_prefix + str(i + 1) + ".h5"
            tree_path = os.path.join(self.tree_dir, tree_file)

            with tables.open_file(tree_path, filters=self.comp_filt) as mfile:
                tree = {"fids": mfile.get_node("/fids")[:],
                        "thrs": mfile.get_node("/thrs")[:],
                        "cids": mfile.get_node("/cids")[:],
                        "segs": mfile.get_node("/segs")[:]}
            trees.append(tree)

        max_n_node = 0
        for i in xrange(n_tree):
            max_n_node = max(max_n_node, trees[i]["fids"].shape[0])

        # merge all fields of all trees
        thrs = N.zeros((n_tree, max_n_node), dtype=N.float64)
        fids = N.zeros((n_tree, max_n_node), dtype=N.int32)
        cids = N.zeros((n_tree, max_n_node), dtype=N.int32)
        segs = N.zeros((n_tree, max_n_node, g_size, g_size), dtype=N.int32)
        for i in xrange(n_tree):
            tree = trees[i]
            n_node = tree["fids"].shape[0]
            thrs[i, :n_node] = tree["thrs"].flatten()
            fids[i, :n_node] = tree["fids"].flatten()
            cids[i, :n_node] = tree["cids"].flatten()
            segs[i, :n_node] = tree["segs"]

        # remove very small segments (<=5 pixels)
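        # by reassigning their pixels to the median label of the surrounding
        # (radius-1 smoothed) neighbourhood and compacting the remaining labels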
        n_seg = N.max(segs.reshape((n_tree, max_n_node, g_size ** 2)), axis=2) + 1
        for i in xrange(n_tree):
            for j in xrange(max_n_node):
                m = n_seg[i, j]
                if m <= 1:
                    continue

                S = segs[i, j]
                remove = False

                for k in xrange(m):
                    Sk = (S == k)
                    if N.count_nonzero(Sk) > 5:
                        continue

                    S[Sk] = N.median(S[conv_tri(Sk.astype(N.float64), 1) > 0])
                    remove = True

                if remove:
                    S = N.unique(S, return_inverse=True)[1]
                    segs[i, j] = S.reshape((g_size, g_size))
                    n_seg[i, j] = N.max(S) + 1

        # store compact representations of sparse binary edge patches
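        # band 0 of a node holds its segment-boundary pixels and each further
        # band holds the pixels added by one more dilation (sharpen + 1 bands);
        # points are flattened as row * g_size + col into edge_pts, and the
        # per-band counts are later turned into cumulative edge_bnds offsets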
        n_bnd = self.options["sharpen"] + 1
        edge_pts = []
        edge_bnds = N.zeros((n_tree, max_n_node, n_bnd), dtype=N.int32)
        for i in xrange(n_tree):
            for j in xrange(max_n_node):
                if cids[i, j] != 0 or n_seg[i, j] <= 1:
                    continue

                E = gradient(segs[i, j].astype(N.float64))[0] > 0.01
                E0 = 0

                for k in xrange(n_bnd):
                    r, c = N.nonzero(E & (~ E0))
                    edge_pts += [r[m] * g_size + c[m] for m in xrange(len(r))]
                    edge_bnds[i, j, k] = len(r)

                    E0 = E
                    E = conv_tri(E.astype(N.float64), 1) > 0.01

        segs = segs.reshape((-1, segs.shape[-2], segs.shape[-1]))
        edge_pts = N.asarray(edge_pts, dtype=N.int32)
        edge_bnds = N.hstack(([0], N.cumsum(edge_bnds.flatten()))).astype(N.int32)

        with tables.open_file(forest_path, "w", filters=self.comp_filt) as mfile:
            mfile.create_carray("/", "thrs", obj=thrs)
            mfile.create_carray("/", "fids", obj=fids)
            mfile.create_carray("/", "cids", obj=cids)
            mfile.create_carray("/", "edge_bnds", obj=edge_bnds)
            mfile.create_carray("/", "edge_pts", obj=edge_pts)
            mfile.create_carray("/", "n_seg", obj=n_seg)
            mfile.create_carray("/", "segs", obj=segs)
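
The compact edge representation written above can be read back by slicing edge_pts with consecutive entries of the cumulative edge_bnds array; a minimal, self-contained sketch with made-up numbers (not code from the project):

import numpy as N

g_size = 4                                        # illustrative patch size
counts = N.array([3, 1], dtype=N.int32)           # hypothetical per-band point counts
edge_bnds = N.hstack(([0], N.cumsum(counts))).astype(N.int32)
edge_pts = N.array([1, 5, 6, 10], dtype=N.int32)  # row * g_size + col, as stored above

def edge_points(flat_idx):
    # flat_idx follows the row-major flatten above: (i * max_n_node + j) * n_bnd + k
    lo, hi = edge_bnds[flat_idx], edge_bnds[flat_idx + 1]
    pts = edge_pts[lo:hi]
    return pts // g_size, pts % g_size

print(edge_points(0))  # rows [0, 1, 1], cols [1, 1, 2]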