Example #1
    def test_search_layers_by_name(self):
        test_layer_1 = DBSession.query(Layer).filter_by(name="TestLayer1").one()
        self.assertEqual(test_layer_1.name, "TestLayer1")
        self.assertEqual(len(test_layer_1.mappable_points), 2)

        test_layer_2 = DBSession.query(Layer).filter_by(name="TestLayer2").one()
        self.assertEqual(test_layer_2.name, "TestLayer2")
        self.assertEqual(len(test_layer_2.mappable_points), 3)

    def tearDown(self):
        DBSession.remove()
        testing.tearDown()

        engine = create_engine('postgresql+psycopg2://thesis_db_user:[email protected]:5432/thesis_test_db')
        DBSession.configure(bind=engine)

        # Drop all tables defined on the models
        Base.metadata.drop_all(engine)
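
These snippets all rely on a shared DBSession and Base from the application's models module, which is not shown here. A minimal sketch of that module, assuming the conventional Pyramid SQLAlchemy ("alchemy" scaffold) layout:

# Minimal sketch (assumed, not part of the original examples): the shared
# session and declarative base imported by the snippets on this page.
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import scoped_session, sessionmaker
from zope.sqlalchemy import ZopeTransactionExtension

DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
Base = declarative_base()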
Example #3
def main(global_config, **settings):
    """ This function returns a Pyramid WSGI application.
    """
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    Base.metadata.bind = engine
    config = Configurator(settings=settings)
    config.add_static_view('static', 'static', cache_max_age=3600)
    config.add_route('home', '/')
    config.scan()
    return config.make_wsgi_app()
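
For context, engine_from_config builds an engine from any settings keys carrying the given prefix. A hedged sketch of the minimal settings this main() expects (the URL is illustrative; in a Pyramid app these values normally come from the ini file):

# Sketch: a minimal settings dict for engine_from_config (URL illustrative).
settings = {
    'sqlalchemy.url': 'postgresql+psycopg2://user:password@localhost:5432/thesis_db',
}
engine = engine_from_config(settings, 'sqlalchemy.')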
    def generate_cache_for_all_grid_size(class_, layer):
        """ Generate the cache for all species, at all grid levels, that have out-of-date caches
            and have a total of equal to or more than cache_occurrence_clusters_threshold records.
        """
        log = logging.getLogger(__name__)
        log.debug("Generating cache for all grid sizes")

        for grid_size in class_.GRID_SIZES:
            class_.generate_cache_clusters(layer, grid_size)
            DBSession.flush()

        log.debug("Finished generating cache for all grid sizes")
Example #5
    def test_get_layer_points_as_geo_json(self):
        test_layer_1 = DBSession.query(Layer).filter_by(name="TestLayer1").one()
        test_layer_2 = DBSession.query(Layer).filter_by(name="TestLayer2").one()

        q = MappablePoint.get_points_as_geojson(test_layer_1)
        result = q.all()
        self.assertEqual(len(result), 2)
        # self.assertEqual(result[0].locations, '{"type":"MultiPoint","coordinates":[[20,10]]}')
        # self.assertEqual(result[1].locations, '{"type":"MultiPoint","coordinates":[[30,10]]}')

        q2 = MappablePoint.get_points_as_geojson(test_layer_2)
        result2 = q2.all()
        self.assertEqual(len(result2), 2)
        self.assertEqual(result2[0].centroid, '{"type":"Point","coordinates":[10,15]}')
        self.assertEqual(result2[1].centroid, '{"type":"Point","coordinates":[30,15]}')

    def test_pre_process(self):
        # Confirm that once preprocessing is complete, we have a cache
        # record for every grid size
        test_emu_layer = DBSession.query(Layer).filter_by(name='Emu').one()
        # self.assertEqual(len(test_emu_layer.cache_records), 0)
        self.assertEqual(len(test_emu_layer.cache_records), len(CachedGriddedAndBoundMappablePoint.GRID_SIZES))

        # Once preprocessing is complete, we should have cached clusters with
        # cluster sizes summing to the total number of mappable points for the
        # layer.
        cache_records = test_emu_layer.cache_records
        for cache_record in cache_records:
            clusters = cache_record.cached_mappable_point_clusters
            total_clusters_size = 0
            for cluster in clusters:
                total_clusters_size += cluster.cluster_size

            self.assertEqual(len(test_emu_layer.mappable_points), total_clusters_size)

        # We would expect the largest grid size to have the fewest
        # cached_mappable_point_clusters, and the smallest grid size
        # to have the most.
        smallest_grid_size = sorted(CachedGriddedAndBoundMappablePoint.GRID_SIZES)[0]
        largest_grid_size = sorted(CachedGriddedAndBoundMappablePoint.GRID_SIZES)[-1]
        smallest_grid_size_cache_record = next(cache_record for cache_record in cache_records if cache_record.grid_size == smallest_grid_size)
        largest_grid_size_cache_record = next(cache_record for cache_record in cache_records if cache_record.grid_size == largest_grid_size)

        self.assertGreater(
                len(smallest_grid_size_cache_record.cached_mappable_point_clusters),
                len(largest_grid_size_cache_record.cached_mappable_point_clusters)
        )

    def get_points_as_wkt(class_, layer, bbox=[-180,-90,180,90], grid_size=None, **kwargs):
        # If no grid size was provided, calculate it from the bbox
        if grid_size is None:
            grid_size = class_.get_cluster_grid_size(bbox)

        # Normalise our grid size to one of the standard grid sizes
        normalised_grid_size = class_.normalise_grid_size(grid_size)

        cache_record = next(cache_record for cache_record in layer.cache_records if cache_record.grid_size == normalised_grid_size)

        q = DBSession.query(
            # geo_func.ST_AsText(
            #     class_.CachedMappablePointCluster.locations
            # ).label("locations"),
            geo_func.ST_AsText(
                class_.CachedMappablePointCluster.centroid
            ).label("centroid"),
            class_.CachedMappablePointCluster.cluster_size.label("cluster_size")
        ).filter(
            class_.CachedMappablePointCluster.centroid.intersects(ST_MakeEnvelope(*bbox))
        ).filter(
            class_.CachedMappablePointCluster.cache_record_id == cache_record.id
        )

        return q
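
get_cluster_grid_size and normalise_grid_size are called above but not included in these examples. A hypothetical sketch of what they might do, assuming grid size scales with viewport width and snaps up to a standard size:

# Hypothetical sketches only; the project's real helpers are not shown.
def get_cluster_grid_size(class_, bbox):
    # Wider viewports get coarser clustering (the divisor is illustrative).
    west, south, east, north = bbox
    return (east - west) / 100.0

def normalise_grid_size(class_, grid_size):
    # Snap up to the smallest standard grid size covering the request.
    for standard in sorted(class_.GRID_SIZES):
        if standard >= grid_size:
            return standard
    return max(class_.GRID_SIZES)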
Example #8
    def by_name(class_):
        """ Sort the Layers by name.
        """
        Layer = class_
        q = DBSession.query(Layer)
        q = q.order_by(Layer.name)
        return q
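
A usage sketch, assuming by_name is exposed as a classmethod on Layer:

# List layer names alphabetically.
for layer in Layer.by_name():
    print(layer.name)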
Example #9
    def test_get_cluster_centroids_as_geo_json(self):
        test_layer_1 = DBSession.query(Layer).filter_by(name='TestLayer1').one()
        test_layer_2 = DBSession.query(Layer).filter_by(name='TestLayer2').one()

        q = GriddedMappablePoint.get_points_as_geojson(test_layer_1, grid_size=1)
        result = q.all()
        self.assertEqual(result[0].centroid, '{"type":"Point","coordinates":[20,10]}')
        self.assertEqual(result[1].centroid, '{"type":"Point","coordinates":[30,10]}')

        q2 = GriddedMappablePoint.get_points_as_geojson(test_layer_1, grid_size=100)
        result2 = q2.one()
        self.assertEqual(result2.centroid, '{"type":"Point","coordinates":[25,10]}')

        q3 = GriddedMappablePoint.get_points_as_geojson(test_layer_2, grid_size=100)
        result3 = q3.one()
        self.assertEqual(result3.centroid, '{"type":"Point","coordinates":[16.6666666666667,15]}')
Example #10
    def test_get_layer_points_as_wkt(self):
        test_layer_1 = DBSession.query(Layer).filter_by(name="TestLayer1").one()

        q = MappablePoint.get_points_as_wkt(test_layer_1)
        result = q.all()
        self.assertEqual(len(result), 2)

        self.assertEqual(result[0].centroid, "POINT(20 10)")
        # self.assertEqual(result[0].locations, 'MULTIPOINT(20 10)')
        self.assertEqual(result[1].centroid, "POINT(30 10)")

    def test_get_layer_points_as_geo_json(self):
        test_layer_1 = DBSession.query(Layer).filter_by(name='TestLayer1').one()
        test_layer_2 = DBSession.query(Layer).filter_by(name='TestLayer2').one()

        q = GriddedAndBoundMappablePoint.get_points_as_geojson(test_layer_1, grid_size=1)
        result = q.all()
        # self.assertEqual(result[0].locations, '{"type":"MultiPoint","coordinates":[[20,10]]}')
        # self.assertEqual(result[1].locations, '{"type":"MultiPoint","coordinates":[[30,10]]}')

        q2 = GriddedAndBoundMappablePoint.get_points_as_geojson(test_layer_1, grid_size=100)
        result2 = q2.all()
        # self.assertEqual(result2[0].locations, '{"type":"MultiPoint","coordinates":[[30,10],[20,10]]}')
        self.assertEqual(result2[0].cluster_size, 2)

        q3 = GriddedAndBoundMappablePoint.get_points_as_geojson(test_layer_2, grid_size=1)
        result3 = q3.all()
        # self.assertEqual(result3[0].locations, '{"type":"MultiPoint","coordinates":[[10,15],[10,15]]}')
        # self.assertEqual(result3[1].locations, '{"type":"MultiPoint","coordinates":[[30,15]]}')
        self.assertEqual(result3[0].cluster_size, 2)
        self.assertEqual(result3[1].cluster_size, 1)

    def generate_cache_clusters(class_, layer, grid_size):
        log = logging.getLogger(__name__)
        log.debug("Generating cache for grid size: %s", grid_size)

        cache_record = class_.CacheRecord(grid_size)
        layer.cache_records.append(cache_record)
        DBSession.flush()

        clusters = GriddedAndBoundMappablePoint.get_points_as_wkt(layer, grid_size=grid_size)\
                .filter(
                    MappablePoint.layer_id == layer.id
                )

        i = 0
        for cluster in clusters:
            i += 1
            centroid = cluster.centroid
            cluster_size = cluster.cluster_size
            # locations = cluster.locations
            cached_mappable_cluster = class_.CachedMappablePointCluster(cluster_size, centroid) #, locations)
            cache_record.cached_mappable_point_clusters.append(cached_mappable_cluster)
            if i % 10000 == 0:
                log.debug("Up to cluster: %i", i)
                DBSession.flush()
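
CacheRecord and CachedMappablePointCluster are constructed above but not defined in these examples. A loudly hypothetical sketch of their assumed shape, inferred only from how the snippets use them:

# Hypothetical models; column names and types are guesses.
from sqlalchemy import Column, Integer, Float, ForeignKey
from sqlalchemy.orm import relationship
from geoalchemy2 import Geometry

class CacheRecord(Base):
    __tablename__ = 'cache_records'
    id = Column(Integer, primary_key=True)
    layer_id = Column(Integer, ForeignKey('layers.id'))
    grid_size = Column(Float)
    cached_mappable_point_clusters = relationship('CachedMappablePointCluster')

    def __init__(self, grid_size):
        self.grid_size = grid_size

class CachedMappablePointCluster(Base):
    __tablename__ = 'cached_mappable_point_clusters'
    id = Column(Integer, primary_key=True)
    cache_record_id = Column(Integer, ForeignKey('cache_records.id'))
    cluster_size = Column(Integer)
    centroid = Column(Geometry('POINT'))

    def __init__(self, cluster_size, centroid):
        self.cluster_size = cluster_size
        self.centroid = centroid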
Example #13
    def get_points_as_wkt(class_, layer, **kwargs):
        MappablePoint = class_

        q = DBSession.query(
            # geo_func.ST_AsText(
            #     ST_Multi(ST_Collect(MappablePoint.location))
            # ).label("locations"),
            geo_func.ST_AsText(
                MappablePoint.location
            ).label('centroid'),
            func.count(MappablePoint.location).label('cluster_size')
        ).filter(
            MappablePoint.layer_id == layer.id
        ).group_by(
            MappablePoint.location
        )

        return q
Example #14
    def get_points_as_wkt(class_, layer, bbox=[-180,-90,180,90], **kwargs):
        MappablePoint = class_

        q = DBSession.query(
            # geo_func.ST_AsText(
            #     ST_Collect(MappablePoint.location)
            # ).label("locations"),
            geo_func.ST_AsText(
                MappablePoint.location
            ).label('centroid'),
            func.count(MappablePoint.location).label('cluster_size')
        ).group_by(
            MappablePoint.location
        ).filter(
            MappablePoint.location.intersects(ST_MakeEnvelope(*bbox))
        ).filter(
            MappablePoint.layer_id == layer.id
        )

        return q
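
To inspect the SQL a query like the one above emits, it can be compiled against the PostgreSQL dialect. A sketch, reusing test_layer_1 from the surrounding tests:

# Sketch: print the generated SQL for the bbox-filtered query.
from sqlalchemy.dialects import postgresql
q = MappablePoint.get_points_as_wkt(test_layer_1, bbox=[-180, -90, 180, 90])
print(q.statement.compile(dialect=postgresql.dialect()))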
Example #15
    def get_points_as_geojson(class_, layer, bbox=[-180,-90,180,90], grid_size=None, **kwargs):
        if grid_size is None:
            grid_size = class_.get_cluster_grid_size(bbox)

        MappablePoint = class_

        q = DBSession.query(
            # geo_func.ST_AsGeoJSON(
            #     ST_Collect(MappablePoint.location)
            # ).label("locations"),
            geo_func.ST_AsGeoJSON(
                geo_func.ST_Centroid(ST_Collect(MappablePoint.location))
            ).label('centroid'),
            func.count(MappablePoint.location).label('cluster_size')
        ).group_by(
            ST_SnapToGrid(MappablePoint.location, grid_size)
        ).filter(
            MappablePoint.layer_id == layer.id
        )

        return q

    def get_points_as_wkt(class_, layer, bbox=[-180,-90,180,90], grid_size=None, **kwargs):
        MappablePoint = class_

        if grid_size is None:
            grid_size = class_.get_cluster_grid_size(bbox)

        q = DBSession.query(
            # geo_func.ST_AsText(
            #     ST_Collect(MappablePoint.location)
            # ).label("locations"),
            geo_func.ST_AsText(
                geo_func.ST_Centroid(ST_Collect(MappablePoint.location))
            ).label('centroid'),
            func.count(MappablePoint.location).label('cluster_size')
        ).group_by(
            ST_SnapToGrid(MappablePoint.location, grid_size)
        ).filter(
            MappablePoint.layer_id == layer.id
        ).filter(
            MappablePoint.location.intersects(ST_MakeEnvelope(*bbox))
        )

        return q
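
A usage sketch for the gridded, bbox-bound variant above (the layer name and bounds are illustrative):

# Fetch clustered centroids for a layer at grid size 2 within an
# illustrative bounding box (west, south, east, north).
layer = DBSession.query(Layer).filter_by(name='Emu').one()
q = GriddedAndBoundMappablePoint.get_points_as_wkt(layer, bbox=[112, -44, 154, -10], grid_size=2)
for cluster in q:
    print(cluster.centroid, cluster.cluster_size)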

    def test_emu_fixture_loaded(self):
        test_emu_layer = DBSession.query(Layer).\
            filter_by(name='Emu').one()
        self.assertGreater(len(test_emu_layer.mappable_points), 5)
Example #18
    def setUp(self):
        self.config = testing.setUp()
        engine = create_engine(
            "postgresql+psycopg2://thesis_db_user:[email protected]:5432/thesis_test_db"
        )
        DBSession.configure(bind=engine)
        Base.metadata.create_all(engine)

        with transaction.manager:
            # Add TestLayer1
            test_layer_1 = Layer(name="TestLayer1")

            test_layer_1.mappable_points = [MappablePoint("Point(30 10)"), MappablePoint("Point(20 10)")]

            DBSession.add(test_layer_1)

            # Add TestLayer2
            test_layer_2 = Layer(name="TestLayer2")

            test_layer_2.mappable_points = [
                MappablePoint("Point(10 15)"),
                MappablePoint("Point(10 15)"),
                MappablePoint("Point(30 15)"),
            ]

            DBSession.add(test_layer_2)

            # Add Emu Layer

            tests_path = os.path.dirname(os.path.abspath(__file__))
            test_fixtures_path = os.path.join(tests_path, "fixtures")
            emu_csv_path = os.path.join(test_fixtures_path, "emu.csv")

            emu_layer = Layer(name="Emu")

            with open(emu_csv_path, "rb") as csvfile:
                emu_reader = csv.reader(csvfile)
                rownum = 0
                header = None
                for row in emu_reader:
                    # Save header row.
                    if rownum == 0:
                        header = row
                    else:
                        colnum = 0

                        latitude = 0
                        longitude = 0

                        for col in row:
                            column_label = header[colnum]

                            if column_label == "LNGDEC":
                                longitude = col
                            elif column_label == "LATDEC":
                                latitude = col

                            # print '%-8s: %s' % (column_label, col)
                            colnum += 1

                        if longitude and latitude:
                            mappable_point = MappablePoint("Point(%s %s)" % (longitude, latitude))
                            emu_layer.mappable_points.append(mappable_point)

                    rownum += 1

            DBSession.add(emu_layer)
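
The manual header/colnum bookkeeping above can be written more directly with csv.DictReader. An equivalent sketch, keeping the original's Python 2 file mode:

# Equivalent sketch using csv.DictReader instead of manual header tracking.
with open(emu_csv_path, 'rb') as csvfile:
    for row in csv.DictReader(csvfile):
        longitude, latitude = row.get('LNGDEC'), row.get('LATDEC')
        if longitude and latitude:
            emu_layer.mappable_points.append(
                MappablePoint('Point(%s %s)' % (longitude, latitude)))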

    def setUp(self):
        self.config = testing.setUp()
        engine = create_engine('postgresql+psycopg2://thesis_db_user:[email protected]:5432/thesis_test_db')
        DBSession.configure(bind=engine)
        Base.metadata.create_all(engine)

        with transaction.manager:
            # Add TestLayer1
            test_layer_1 = Layer(name='TestLayer1')

            test_layer_1.mappable_points = [
                CachedGriddedAndBoundMappablePoint('Point(30 10)'),
                CachedGriddedAndBoundMappablePoint('Point(20 10)'),
            ]

            DBSession.add(test_layer_1)

            # Add TestLayer2
            test_layer_2 = Layer(name='TestLayer2')

            test_layer_2.mappable_points = [
                CachedGriddedAndBoundMappablePoint('Point(10 15)'),
                CachedGriddedAndBoundMappablePoint('Point(10 15)'),
                CachedGriddedAndBoundMappablePoint('Point(30 15)'),
            ]

            DBSession.add(test_layer_2)

            # Add Emu Layer

            tests_path = os.path.dirname(os.path.abspath(__file__))
            test_fixtures_path = os.path.join(tests_path, 'fixtures')
            emu_csv_path = os.path.join(test_fixtures_path, 'emu.csv')

            emu_layer = Layer(name='Emu')

            with open(emu_csv_path, 'rb') as csvfile:
                emu_reader = csv.reader(csvfile)
                rownum = 0
                header = None
                for row in emu_reader:
                    # Save header row.
                    if rownum == 0:
                        header = row
                    else:
                        colnum = 0

                        latitude = 0
                        longitude = 0

                        for col in row:
                            column_label = header[colnum]

                            if column_label == "LNGDEC":
                                longitude = col
                            elif column_label == "LATDEC":
                                latitude = col

                            # print '%-8s: %s' % (column_label, col)
                            colnum += 1

                        if longitude and latitude:
                            mappable_point = CachedGriddedAndBoundMappablePoint('Point(%s %s)' % (longitude, latitude))
                            emu_layer.mappable_points.append(mappable_point)

                    rownum += 1

            DBSession.add(emu_layer)

        layers = DBSession.query(Layer).all()
        for layer in layers:
            CachedGriddedAndBoundMappablePoint.pre_process(layer)

    def test_get_layer_points_as_wkt(self):
        # Smoke test: confirm the cached WKT query runs without error.
        test_layer_1 = DBSession.query(Layer).filter_by(name='TestLayer1').one()
        q = CachedGriddedAndBoundMappablePoint.get_points_as_wkt(test_layer_1, grid_size=1)
        result = q.all()

    def test_bounds_not_intersecting_points(self):
        # A bbox that intersects no points should return an empty result.
        test_layer_1 = DBSession.query(Layer).filter_by(name='TestLayer1').one()
        q = CachedGriddedAndBoundMappablePoint.get_points_as_geojson(test_layer_1, grid_size=1, bbox=[-180,-89,-170,-80])
        result = q.all()
        self.assertEqual(len(result), 0)