# Example #1 (score: 0)
    def construct(self, engine):
        """Build the table from scratch.

        Gathers the ways of every known relation and splits them into
        segments via a ``_WayCollector`` worker pool.
        """
        members = self.osmtables.member.data
        # precompiled lookup: all way members of a given relation
        way_query = select([members.c.member_id])\
                      .where(members.c.member_type == 'W')\
                      .where(members.c.relation_id == bindparam('id'))\
                      .compile(engine)
        with engine.begin() as conn:
            self._compute_first(conn)

            # GIN indexes are maintained manually so they can be dropped
            # for the bulk load and recreated afterwards
            relidx = Index("%s_rels_idx" % (self.data.name),
                           self.data.c.rels,
                           postgresql_using='gin')
            wayidx = Index("%s_ways_idx" % (self.data.name),
                           self.data.c.ways,
                           postgresql_using='gin')
            ndsidx = Index("%s_nodes_idx" % (self.data.name),
                           self.data.c.nodes,
                           postgresql_using='gin')
            # drop any leftover indexes before reloading
            conn.execute(DropIndexIfExists(relidx))
            conn.execute(DropIndexIfExists(wayidx))
            conn.execute(DropIndexIfExists(ndsidx))

            self.truncate(conn)

            wayproc = _WayCollector(self,
                                    engine,
                                    creation_mode=True,
                                    numthreads=self.numthreads)

        with engine.begin() as conn:
            sortedrels = sorted(wayproc.relations)
            todo = len(sortedrels)
            for done, rel in enumerate(sortedrels):
                # log every 100th relation at INFO, the rest at DEBUG
                log.log(logging.INFO if done % 100 == 0 else logging.DEBUG,
                        "Processing relation %d (%d %%)", rel,
                        done * 100 / todo)
                for way in conn.execute(way_query, {'id': rel}):
                    wayproc.add_way(conn, way[0])

                # Put the ways collected so far into segments
                wayproc.process_segments()

            wayproc.finish()

            # finally prepare indices to speed up update
            relidx.create(conn)
            wayidx.create(conn)
            ndsidx.create(conn)
# Example #2 (score: 0)
    def construct(self, engine):
        """Rebuild the table, inserting relations along their hierarchy.

        Deepest relations are inserted first so that the data of member
        relations is already present when their parents are processed.
        """
        h = self.rtree.data
        # functional index on the upper-cased name; presumably used for
        # case-insensitive lookups — note it is built over ``self.c.name``
        # (other variants of this method use ``self.data.c.name``); assumed
        # to be an equivalent shortcut — TODO confirm
        idx = sa.Index(self.data.name + '_iname_idx',
                       sa.func.upper(self.c.name))

        with engine.begin() as conn:
            # drop the index for the bulk load; it is recreated at the end
            conn.execute(DropIndexIfExists(idx))
            self.truncate(conn)

            # deepest level present in the hierarchy table (None when empty)
            max_depth = conn.scalar(sa.select([saf.max(h.c.depth)]))

        # for each child relation, the deepest level at which it appears
        subtab = sa.select([h.c.child, saf.max(h.c.depth).label("lvl")])\
                   .group_by(h.c.child).alias()

        # Process relations by hierarchy, starting with the highest depth.
        # This guarantees that the geometry of member relations is already
        # available for processing the relation geometry.
        if max_depth is not None:
            # level 1 relations are covered by the "nobody's child" pass below
            for level in range(max_depth, 1, -1):
                subset = self.rels.data.select()\
                          .where(subtab.c.lvl == level)\
                          .where(self.rels.c.id == subtab.c.child)
                self.insert_objects(engine, subset)

        # Lastly, process all routes that are nobody's child.
        subset = self.rels.data.select()\
                 .where(self.rels.c.id.notin_(
                     sa.select([h.c.child], distinct=True).as_scalar()))
        self.insert_objects(engine, subset)

        with engine.begin() as conn:
            idx.create(conn)
    def construct(self, engine):
        """Refill the table from scratch and rebuild the name index."""
        # functional index over upper-cased names; dropped while reloading
        name_index = sa.Index(self.data.name + '_iname_idx',
                              sa.func.upper(self.data.c.name))

        with engine.begin() as conn:
            conn.execute(DropIndexIfExists(name_index))
            self.truncate(conn)

        self._insert_objects(engine)

        with engine.begin() as conn:
            name_index.create(conn)
# Example #4 (score: 0)
    def construct(self, engine):
        """Rebuild the table from the way source via the worker queue."""
        self.truncate(engine)

        # GIN reverse-lookup indexes are dropped for the bulk load and
        # recreated once all rows have been processed
        rel_index = sa.Index(self.data.name + "_rels_idx",
                             self.data.c.rels,
                             postgresql_using='gin')
        node_index = sa.Index(self.data.name + "_nodes_idx",
                              self.data.c.nodes,
                              postgresql_using='gin')
        engine.execute(DropIndexIfExists(rel_index))
        engine.execute(DropIndexIfExists(node_index))

        ways = self.way_src.data
        relway = self.relway_view

        # aggregate, per way, the ids of all relations it belongs to
        aggway = sa.select([relway.c.way_id,
                            array_agg(relway.c.relation_id).label('rels')])\
                   .group_by(relway.c.way_id).alias('aggway')

        fields = [aggway.c.way_id, aggway.c.rels, ways.c.nodes]
        if hasattr(self, 'transform_tags'):
            # tags are only needed when a subclass transforms them
            fields.append(ways.c.tags)

        sql = sa.select(fields).where(ways.c.id == aggway.c.way_id)

        rows = engine.execution_options(stream_results=True).execute(sql)
        workers = self.create_worker_queue(engine,
                                           self._process_construct_next)
        for row in rows:
            workers.add_task(row)

        workers.finish()

        # need reverse lookup indexes for rels and nodes
        rel_index.create(engine)
        node_index.create(engine)
# Example #5 (score: 0)
    def construct(self, engine):
        """Materialise the configured subset of the source table."""
        # nothing to build when the table exists only as a view
        if self.view_only:
            return

        with engine.begin() as conn:
            id_index = sa.Index("idx_%s_id" % self.data.name,
                                self.c.id, unique=True)

            # drop the unique id index while rows are reloaded
            conn.execute(DropIndexIfExists(id_index))

            self.truncate(conn)

            # copy every source row that satisfies the subset condition
            selection = self.src.data.select().where(self.subset)
            conn.execute(
                self.data.insert().from_select(self.src.data.c, selection))

            id_index.create(conn)
# Example #6 (score: 0)
    def construct(self, engine):
        """Rebuild the table contents from the full source table."""
        # GIN node index; dropped while the data is being reloaded
        node_index = sa.Index(self.data.name + "_nodes_idx",
                              self.c.nodes,
                              postgresql_using='gin')

        with engine.begin() as conn:
            conn.execute(DropIndexIfExists(node_index))
            self.truncate(conn)

        # stream all source rows through the worker queue
        rows = engine.execution_options(stream_results=True)\
                     .execute(self.src.data.select())
        workers = self.create_worker_queue(engine,
                                           self._process_construct_next)
        for row in rows:
            workers.add_task(row)

        workers.finish()

        with engine.begin() as conn:
            node_index.create(conn)