Example #1
def create_geometry_dict(item):
    """
    Creates a geometry dict that can be used to add
    geometry information to the result set

    Returns a dict with geometry information if one
    can be created. If not, an empty dict is returned
    """
    res = {}
    try:
        geom_wgs = GEOSGeometry(
            f"POINT ({item['centroid'][0]} {item['centroid'][1]}) ", srid=4326)
    except (AttributeError, KeyError):
        geom_wgs = None

    if geom_wgs:
        # Reproject to RD New (EPSG:28992); clone=True keeps geom_wgs in WGS 84
        geom = geom_wgs.transform(28992, clone=True).coords
        geom_wgs = geom_wgs.coords
        res = {
            'geometrie_rd_x': int(geom[0]),
            'geometrie_rd_y': int(geom[1]),
            'geometrie_wgs_lat': (
                '{:.7f}'.format(geom_wgs[1])).replace('.', ','),

            'geometrie_wgs_lon': (
                '{:.7f}'.format(geom_wgs[0])).replace('.', ',')

        }
        item.update(res)

    return res
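For reference, the core of the helper above is GEOSGeometry's transform() with clone=True, which returns a reprojected copy without touching the source geometry. A minimal sketch, assuming a GeoDjango install with GDAL available and an illustrative centroid:

from django.contrib.gis.geos import GEOSGeometry

centroid = [4.9, 52.37]  # hypothetical lon/lat pair
geom_wgs = GEOSGeometry('POINT (%s %s)' % (centroid[0], centroid[1]), srid=4326)
geom_rd = geom_wgs.transform(28992, clone=True)  # RD New copy; geom_wgs stays WGS 84
print(int(geom_rd.x), int(geom_rd.y), '{:.7f}'.format(geom_wgs.y))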
Example #2
 def add_polygon(geom_dict):
     try:
         geom = GEOSGeometry(json.dumps(geom_dict), 4326)
     except GEOSException:
         raise ValidationError('GeoJSON is not valid')
     geom.transform(web_mercator)
     geoms.append(geom)
Example #3
def loadAreaContent(request, zoom, gridSize):

    clusterer = MapClusterer(zoom, gridSize)

    params = clusterer.loadJson(request)

    filterstring = clusterer.constructFilterstring(params["filters"])

    geojson = params["geojson"]

    markers = []

    if geojson["type"] == "Feature":
        features = [params["geojson"]]

    elif geojson["type"] == "FeatureCollection":
        features = geojson["features"]
        
        
    for feature in features:
        geometry = GEOSGeometry(json.dumps(feature["geometry"]), srid=clusterer.input_srid)
        geometry.transform(clusterer.srid_db)
        markers_qry = Gis.objects.raw(
            '''SELECT * FROM "%s" WHERE ST_Intersects(%s, ST_GeomFromText('%s',%s) ) %s;''' % (geo_table, geo_column_str, geometry, clusterer.srid_db, filterstring)
        )

        markers += list(markers_qry)
    

    return markers
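Note that the raw query above splices the geometry, the SRID and the filter string straight into the SQL. A hedged sketch of the same call with the value parts passed as query parameters (the table/column names and filterstring still have to come from trusted code):

markers_qry = Gis.objects.raw(
    'SELECT * FROM "%s" WHERE ST_Intersects(%s, ST_GeomFromText(%%s, %%s)) %s'
    % (geo_table, geo_column_str, filterstring),
    [geometry.wkt, clusterer.srid_db],
)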
Example #4
def bbox_to_projection(native_bbox, target_srid=4326):
    """
        native_bbox must be in the form
            ('-81.3962935', '-81.3490249', '13.3202891', '13.3859614', 'EPSG:4326')
    """
    box = native_bbox[:4]
    proj = native_bbox[-1]
    minx, maxx, miny, maxy = [float(a) for a in box]
    try:
        source_srid = int(proj.split(":")[1]) if proj and ':' in proj else int(proj)
    except BaseException:
        source_srid = target_srid

    if source_srid != target_srid:
        try:
            wkt = bbox_to_wkt(_v(minx, x=True, source_srid=source_srid, target_srid=target_srid),
                              _v(maxx, x=True, source_srid=source_srid, target_srid=target_srid),
                              _v(miny, x=False, source_srid=source_srid, target_srid=target_srid),
                              _v(maxy, x=False, source_srid=source_srid, target_srid=target_srid),
                              srid=source_srid)
            poly = GEOSGeometry(wkt, srid=source_srid)
            poly.transform(target_srid)
            projected_bbox = [str(x) for x in poly.extent]
            # Must be in the form: (x0, x1, y0, y1, 'EPSG:<target_srid>')
            return tuple([projected_bbox[0], projected_bbox[2], projected_bbox[1], projected_bbox[3]]) + \
                ("EPSG:%s" % poly.srid,)
        except BaseException:
            tb = traceback.format_exc()
            logger.debug(tb)

    return native_bbox
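When only a bounding box needs reprojecting, Polygon.from_bbox plus transform() covers most of the work above. A small sketch with illustrative coordinates, assuming GDAL is available:

from django.contrib.gis.geos import Polygon

minx, miny, maxx, maxy = -81.3962935, 13.3202891, -81.3490249, 13.3859614
poly = Polygon.from_bbox((minx, miny, maxx, maxy))
poly.srid = 4326
poly.transform(3857)
print(poly.extent)  # (xmin, ymin, xmax, ymax) in the target SRS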
Example #5
def gk_to_wgs84(request):

    coords = [request.GET.get(c) for c in ("x", "y")]

    if not all(coords):
        return {"status": "fail", "error": "Missing coordinates x and y as GET parameters."}

    try:
        coords = [float(c) for c in coords]
    except ValueError:
        return {"status": "fail", "error": "Coordinates should be floats."}

    xl, xh, yl, yh = 372543, 631496, 34152, 197602
    if not (xl <= coords[0] <= xh and yl <= coords[1] <= yh):
        return {
            "status": "fail",
            "error": "Coordinates (%s, %s) out of bounds: %d <= x <= %d and %d <= y <= %d."
            % (coords[0], coords[1], xl, xh, yl, yh),
        }

    geotransform = get_coordtransform()
    point = GEOSGeometry("SRID=3787;POINT (%s %s)" % tuple(coords))
    point.transform(geotransform)
    transformed = (point.x, point.y)

    return {"status": "ok", "gk": coords, "wgs84": transformed, "kml": point.kml}
Example #6
    def post(self, request, *args, **kwargs):
        if request.is_ajax():
            cur_shp_id = kwargs['pk']

            # calculate area as GEOS object using convex hull operation,
            # save it, calculate area value, save it in db as geometry attribute
            cur_shp = Shapefile.objects.get(id=cur_shp_id)
            cur_shp_geom = get_geos_geometry(cur_shp)
            area_geom = GEOSGeometry(cur_shp_geom.convex_hull)

            # calculate the area and update the db entry
            area_geom.set_srid(4326)  # source coordinates are WGS 84
            area_geom.transform(3857)  # project to Web Mercator for the stored polygon

            # get area value in the Shapefile's projection (clone, so the Web
            # Mercator geometry saved below is left untouched)
            proj_area_geom = area_geom.transform(cur_shp.proj, clone=True)

            # save the new GEOS area to geometry in db
            new_area = HelperSettlementArea(shapefile_id=cur_shp_id,
                                            poly=area_geom,
                                            storedarea=proj_area_geom.area)
            new_area.save()

            cur_shp.stat_sett_area = True
            cur_shp.save()

            context = context_results(cur_shp_id)
            return render_to_response(
                'appgeostat/shp_detail_table.html', {'context': context})
Example #7
 def augment_cities(self):
     # Add in county subdivisions, deleting from their shapes any area
     # already covered by a "proper" city.
     fkey = 'cousub'
     starter_cities = Location.objects.filter(location_type=self.city_type)
     within_cities = GEOSGeometry('MULTIPOLYGON EMPTY')
     for city in starter_cities:
         within_cities = within_cities.union(city.location)
     city_pks = [l.pk for l in starter_cities]
     layer = DataSource('%s/%s.shp' % (self.zip_dir, self.datafiles[fkey]['file_name']))[0]
     loc_importer = LocationImporter(layer,
         self.city_type,
         source = self.datafiles[fkey].get('source', 'Unknown'),
         filter_bounds=False,
         verbose=True)
     loc_created_count = loc_importer.save(self.datafiles[fkey]['name_field'])
     townships = Location.objects.filter(location_type=self.city_type).exclude(pk__in=city_pks)
     city_names = Location.objects.filter(location_type=self.city_type,
         pk__in=city_pks).values_list('name', flat=True)
     city_names = [name.lower() for name in city_names]
     for township in townships:
         # If a same-named city already exists, then rename the township to "Cityname area."
         if township.name.lower() in city_names:
             township.name = '%s area' % capwords(township.name)
         else:
             township.name = capwords(township.name)
         township.slug = slugify(township.name)
         township.location = township.location.difference(within_cities)
         township.save()
     return loc_created_count
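Unioning the city shapes one at a time in Python can get slow; recent Django versions also expose a Union aggregate that pushes the same combination into the database. A sketch of the equivalent inside augment_cities, assuming the backend supports the aggregate:

from django.contrib.gis.db.models import Union

agg = Location.objects.filter(location_type=self.city_type).aggregate(joined=Union('location'))
within_cities = agg['joined'] or GEOSGeometry('MULTIPOLYGON EMPTY')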
Example #8
def parse_sos112(timestamp, data):
    data = data.decode(ENCODING).strip()

    records = []
    lines = [i.strip() for i in data.split('\r\n')]
    keys = lines[0].lower().split('\t')

    geotransform = get_coordtransform()

    for line in lines[1:]:
        rec = dict(zip(keys, line.split('\t')))
        y, x = rec['point'].split(',')

        point = GEOSGeometry('SRID=3787;POINT (%s %s)' % (x, y))
        point.transform(geotransform)
        rec['x_wgs'] = point.x
        rec['y_wgs'] = point.y

        records.append(rec)

    json_data = {
        'updated': timestamp,
        'records': records,
        'copyright': u'Uprava RS za zaščito in reševanje',
    }

    return json_data
Example #9
def search_catalog(request, *args, **kwargs):
    """A spatial search for the DataResource catalog. In the future, this will be more thorough, but right now it looks
    for a filter parameter in the request, and inside that a JSON payload including a bbox four-tuple of minx, maxx
     miny, maxy OR a geometry wkt and an optional srid.  It then performs a broad overlap search and returns the results
     as a JSON or JSONP list of::

        [{ "title" : "title",
           "path" : ["breadcrumps", "to", "resource"],
           "url" : "http://mydomain/ga_resources/path/to/resource/title"
        }]
    """
    flt = json.loads(request.REQUEST['filter'])
    if 'bbox' in flt:
        minx, miny, maxx, maxy = flt['bbox']
        geometry = Polygon.from_bbox((minx, miny, maxx, maxy))
    else:
        geometry = GEOSGeometry(flt['boundary'])

    if 'srid' in flt:
        geometry.set_srid(flt['srid'])

    results = DataResource.objects.filter(bounding_box__overlaps=geometry)
    ret = [{'title': r.title, 'path': r.slug.split('/')[:-1], 'url': r.get_absolute_url()} for r in results]

    callback = None
    if 'jsonCallback' in request.REQUEST:
        callback = request.REQUEST['jsonCallback']
    elif 'callback' in request.REQUEST:
        callback = request.REQUEST['callback']

    if callback:
        return HttpResponse(callback + '(' + json.dumps(ret) + ")", mimetype='text/plain')
    else:
        return HttpResponse(json.dumps(ret), mimetype='application/json')
Example #10
class UserProfile(UserenaBaseProfile, gis_models.Model):
    user = gis_models.OneToOneField(User, unique=True,verbose_name=_('user'),related_name='profile')
    interests = gis_models.ManyToManyField(Interest, related_name='+')
    selectedInterest = gis_models.ForeignKey(Interest, related_name='+', null=True, blank=True)
    settings = gis_models.OneToOneField('UserSettings')

    city = gis_models.CharField(max_length=100, null=True) # TODO: change to something like 'locationString'
    location = gis_models.PointField(null=True)
    objects = gis_models.GeoManager()

    def isNewUser(self):
        return self.location == None
        #return (self.interests.count() == 0 or self.selectedInterest == None or self.location == None)

    def save(self, force_insert=False, force_update=False, using=None):
        if self.pk is None:
            self.settings = UserSettings.objects.create()
        self.settings.save()
        super(UserProfile, self).save()

    def setLocation(self, lat, lon, city=None):
        if city:
            self.city = city
        if not self.location:
            # WKT points are "POINT(x y)", i.e. longitude first
            self.location = GEOSGeometry('POINT(%s %s)' % (lon, lat))
        else:
            self.location.set_x(float(lon))
            self.location.set_y(float(lat))
        self.save()
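WKT points are written as POINT(x y), i.e. longitude first, which is easy to get backwards when a method takes (lat, lon). A small sketch using the Point class, which makes the ordering explicit (values are illustrative):

from django.contrib.gis.geos import Point

lat, lon = 52.37, 4.9
location = Point(lon, lat, srid=4326)  # x = longitude, y = latitude
assert location.x == lon and location.y == lat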
Example #11
File: views.py Project: gipi/toma
def place(request):
    """
    Return the 900913 coordinates for the given address.
    Principally used to obtain the coords for a given address
    to centre the map with.
    """
    import geopy
    from django.contrib.gis.geos import GEOSGeometry

    places = geopy.geocoders.Google().geocode(request.GET.get('address'), exactly_one=False)

    dplaces = {}
    for p in places:
        P = 'POINT(' + str(p[1][1]) + ' ' + str(p[1][0]) + ')'
        # SRID 4326 is the common latitude/longitude coords
        pnt = GEOSGeometry(P, srid=4326)
        pnt.transform(900913)
        dplaces[p[0]] = pnt

    return HttpResponse(simplejson.dumps(dict(
        (key, {
            'lat': value.y,
            'lng': value.x,
        }) for key, value in dplaces.iteritems())
    ), mimetype='application/json')
Example #12
    def render(self, name, value, *args, **kwargs):
        # value is either None, a string/unicode value, or a GEOSGeometry
        if value is None:  # no value
            lat, lng = DEFAULT_LAT, DEFAULT_LNG
        else:
            try:
                if isinstance(value, basestring):  # value is unicode/string
                    value = GEOSGeometry(value)
                    lat, lng = value.y, value.x
                elif isinstance(value, dict):
                    lat, lng = value.get('lat'), value.get('lng')
                    #lat, lng = value.get('%s_lat' % name), value.get('%s_lng' % name)
                else:  # value is GEOSGeometry
                    lat, lng = value.y, value.x
            except:
                lat, lng = value, value

        # renders text form elements (for debugging):
        html = self.inner_widget.render(
            'lat',
            lat,
            dict(
                id='id_lat',
                style='width:100px;'))
        html += self.inner_widget.render('lng',
                                         lng,
                                         dict(id='id_lng',
                                              style='width:100px;'))
        return mark_safe(html)
Example #13
    def to_python(self, value):
        """Transform the value to a Geometry object."""
        if value in self.empty_values:
            return None

        if not isinstance(value, GEOSGeometry):
            if hasattr(self.widget, 'deserialize'):
                try:
                    value = self.widget.deserialize(value)
                except GDALException:
                    value = None
            else:
                try:
                    value = GEOSGeometry(value)
                except (GEOSException, ValueError, TypeError):
                    value = None
            if value is None:
                raise forms.ValidationError(self.error_messages['invalid_geom'], code='invalid_geom')

        # Try to set the srid
        if not value.srid:
            try:
                value.srid = self.widget.map_srid
            except AttributeError:
                if self.srid:
                    value.srid = self.srid
        return value
Example #14
def update_city_coords():
    """ Update geolocation_uscity records with valid coordinates. Creates coords
    if they do not exist.
    """
    cursor = connection.cursor()
    cursor_insert = connection.cursor()
    cursor.execute("""
        SELECT id, ST_AsText(ST_Centroid(geom)) "point"
        FROM prep_city_fix
        WHERE coordinate_id IS NULL and geom IS NOT NULL
    """)
    for city in cursor:
        point = GEOSGeometry(city[1])
        point.y = Decimal(str(point.y)).quantize(Decimal('.0000000001'))
        point.x = Decimal(str(point.x)).quantize(Decimal('.0000000001'))
        try:
            coord = Coordinate.objects.get(
                latitude=point.y, longitude=point.x)
        except Coordinate.DoesNotExist:
            coordinate = Coordinate()
            coordinate.latitude = point.y
            coordinate.longitude = point.x
            coordinate.rad_lat = radians(coordinate.latitude)
            coordinate.rad_lon = radians(coordinate.longitude)
            coordinate.sin_rad_lat = sin(coordinate.rad_lat)
            coordinate.cos_rad_lat = cos(coordinate.rad_lat)
            coordinate.save()
            coord = Coordinate.objects.get(
                latitude=point.y, longitude=point.x)
        cursor_insert.execute("""
        UPDATE prep_city_fix
        SET coordinate_id = %s
        WHERE id = %s and coordinate_id is null;
        """ % (coord.id, city[0]))
        transaction.commit_unless_managed()
Example #15
    def get_prep_value(self, value):
        obj = super().get_prep_value(value)
        # When the input is not a geometry or raster, attempt to construct one
        # from the given string input.
        if isinstance(obj, GEOSGeometry):
            pass
        else:
            # Check if input is a candidate for conversion to raster or geometry.
            is_candidate = isinstance(obj, (bytes, str)) or hasattr(obj, '__geo_interface__')
            # Try to convert the input to raster.
            raster = self.get_raster_prep_value(obj, is_candidate)

            if raster:
                obj = raster
            elif is_candidate:
                try:
                    obj = GEOSGeometry(obj)
                except (GEOSException, GDALException):
                    raise ValueError("Couldn't create spatial object from lookup value '%s'." % obj)
            else:
                raise ValueError('Cannot use object with type %s for a spatial lookup parameter.' % type(obj).__name__)

        # Assigning the SRID value.
        obj.srid = self.get_srid(obj)
        return obj
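get_prep_value() is what lets spatial lookups accept plain WKT/EWKT, hex or GeoJSON strings as well as geometry objects. A hedged usage sketch against a hypothetical model with a geometry field named geom:

from myapp.models import Parcel  # hypothetical model with a GeometryField

qs = Parcel.objects.filter(geom__intersects='SRID=4326;POINT (-95.36 29.76)')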
Example #16
File: api.py Project: ciheul/ciheul
    def dehydrate(self, bundle):
        """Serializing. GET method."""
        lonlat = GEOSGeometry(bundle.data['coordinates'], srid=32140)
        bundle.data['coordinates'] = list(lonlat.get_coords())

        # get profile picture, if one is set
        profile_pict = None
        if bundle.obj.profile_image_id is not None:
            profile_pict = Image.objects.get(pk=bundle.obj.profile_image_id)

        if profile_pict is not None:
            bundle.data['profile_image'] = profile_pict.thumbnail.url

        # get images
        images = Image.objects.filter(shelter_id__exact=bundle.obj.id)
        if not images:
            return bundle

        image_urls = []
        thumbnail_urls = []

        for image in images:
            image_urls.append(image.image.url)
            thumbnail_urls.append(image.thumbnail.url)
        bundle.data['images'] = image_urls
        bundle.data['thumbnails'] = thumbnail_urls

        return bundle
Example #17
 def from_json(self, data):
     try:
         try:
             geom = GEOSGeometry(str(data.geometry))
             if(hasattr(data.geometry.crs, 'properties')):
                 crs = data.geometry.crs.properties['name']
                 srs = SpatialReference(crs)
                 geom.set_srid(srs.srid)
                 geom.transform(4326)
             ls = LineString(geom[0].coords)
             if(ls.simple == False):
                 return None, 'Error Creating Geometry: Polygon is not Valid'
             self.geom = geom
         except:
             logger.debug(sys.exc_info())
             return None, 'Error Creating Geometry'
         if('name' in data.__dict__['properties']):
             self.name = data.__dict__['properties']['name']
         else:
             return None, 'Name is required'
         if('max_area' in data.__dict__['properties']):
             try:
                 self.max_area = int(data.__dict__['properties']['max_area'])
             except ValueError:
                 return None, 'Invalid Max Area'
         else:
             return None, 'Max Area is Required'
         self.save()
         return self, None
     except:
         # ToDo catch errors specifically and return message/code
         return None, 'Unknown'
Example #18
    def test_transform_noop(self):
        """ Testing `transform` method (SRID match) """
        # transform() should no-op if source & dest SRIDs match,
        # regardless of whether GDAL is available.
        g = GEOSGeometry('POINT (-104.609 38.255)', 4326)
        gt = g.tuple
        g.transform(4326)
        self.assertEqual(g.tuple, gt)
        self.assertEqual(g.srid, 4326)

        g = GEOSGeometry('POINT (-104.609 38.255)', 4326)
        g1 = g.transform(4326, clone=True)
        self.assertEqual(g1.tuple, g.tuple)
        self.assertEqual(g1.srid, 4326)
        self.assertIsNot(g1, g, "Clone didn't happen")

        with mock.patch('django.contrib.gis.gdal.HAS_GDAL', False):
            g = GEOSGeometry('POINT (-104.609 38.255)', 4326)
            gt = g.tuple
            g.transform(4326)
            self.assertEqual(g.tuple, gt)
            self.assertEqual(g.srid, 4326)

            g = GEOSGeometry('POINT (-104.609 38.255)', 4326)
            g1 = g.transform(4326, clone=True)
            self.assertEqual(g1.tuple, g.tuple)
            self.assertEqual(g1.srid, 4326)
            self.assertIsNot(g1, g, "Clone didn't happen")
Example #19
    def to_geojson(self, data, options=None):
        """
        Given some Python data, produces GeoJSON output.
        """
        options = options or {}
        data = self.to_simple(data, options)

        if 'objects' in data:
            data['type'] = "FeatureCollection"
            data['features'] = data['objects']
            del data['objects']

            for index, obj in enumerate(data['features']):
                for key, value in list(obj.items()):

                    if hasattr(value, 'lower') and (value.lower().split('(')[0].strip() in ('point', 'multipoint', 'linestring','multilinestring','polygon','multipolygon')):
                        if options.get('srid'):
                            srid = options['srid']
                            try:
                                srid=int(srid)
                            except ValueError:
                                pass

                            geometry = GEOSGeometry(value)
                            geometry.transform(srid)
                            geometry = simplejson.loads(geometry.geojson)
                        else:
                            geometry = simplejson.loads(GEOSGeometry(value).geojson)
                        
                        del obj[key]
                        geojson = { 'geometry' : geometry, 'properties' : obj, 'type' : "Feature" }
                        data['features'][index] = geojson

        return simplejson.dumps(data, cls=json.DjangoJSONEncoder, sort_keys=True)
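The serializer leans on GEOSGeometry's geojson property to turn WKT column values into GeoJSON geometries. The round trip in isolation looks roughly like this:

import json
from django.contrib.gis.geos import GEOSGeometry

geom = GEOSGeometry('POINT (-95.36 29.76)', srid=4326)
feature_geometry = json.loads(geom.geojson)  # {'type': 'Point', 'coordinates': [...]}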
Example #20
 def test_transform_3d(self):
     p3d = GEOSGeometry('POINT (5 23 100)', 4326)
     p3d.transform(2774)
     if GEOS_PREPARE:
         self.assertEqual(p3d.z, 100)
     else:
         self.assertIsNone(p3d.z)
Example #21
    def test_prepared(self):
        "Testing PreparedGeometry support."
        # Creating a simple multipolygon and getting a prepared version.
        mpoly = GEOSGeometry('MULTIPOLYGON(((0 0,0 5,5 5,5 0,0 0)),((5 5,5 10,10 10,10 5,5 5)))')
        prep = mpoly.prepared

        # A set of test points.
        pnts = [Point(5, 5), Point(7.5, 7.5), Point(2.5, 7.5)]
        for pnt in pnts:
            # Results should be the same (but faster)
            self.assertEqual(mpoly.contains(pnt), prep.contains(pnt))
            self.assertEqual(mpoly.intersects(pnt), prep.intersects(pnt))
            self.assertEqual(mpoly.covers(pnt), prep.covers(pnt))

        self.assertTrue(prep.crosses(fromstr('LINESTRING(1 1, 15 15)')))
        self.assertTrue(prep.disjoint(Point(-5, -5)))
        poly = Polygon(((-1, -1), (1, 1), (1, 0), (-1, -1)))
        self.assertTrue(prep.overlaps(poly))
        poly = Polygon(((-5, 0), (-5, 5), (0, 5), (-5, 0)))
        self.assertTrue(prep.touches(poly))
        poly = Polygon(((-1, -1), (-1, 11), (11, 11), (11, -1), (-1, -1)))
        self.assertTrue(prep.within(poly))

        # Original geometry deletion should not crash the prepared one (#21662)
        del mpoly
        self.assertTrue(prep.covers(Point(5, 5)))
Example #22
def add_geometry_database(table_name, geometry_data, cursor, conn):
    """
    Add the geometry into the database
    """
    # Create table if not exist for the layer
    cursor.execute("""CREATE TABLE IF NOT EXISTS {0} (id serial PRIMARY KEY,
                      geometry geometry(Geometry,3857) NOT NULL,
                      geometry_type varchar(40) NOT NULL)""".format(table_name))

    # Add geometry and geometry type of the geojson into the database
    for feature in geometry_data['features']:
        geometry = feature['geometry']
        geometry_type = geometry['type']

        # Convert the GeoJSON geometry dict into a GEOS geometry
        geojson = GEOSGeometry(json.dumps(geometry), srid=4326)
        geojson.transform(3857)
        geom = geojson.hex.decode()

        # Add the geometry into the table if the geometry doesn't already exist
        cursor.execute("""INSERT INTO {0}(geometry, geometry_type)
                          SELECT ST_SetSRID(\'{1}\'::geometry, 3857) AS geometry,
                                 \'{2}\' AS geometry_type
                          WHERE NOT EXISTS
                              (SELECT geometry
                               FROM {0}
                               WHERE geometry = ST_SetSRID(\'{1}\'::geometry, 3857))
                       """.format(table_name, geom, geometry_type))

    # Save changes
    conn.commit()
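The INSERT above interpolates the hex-encoded geometry and the type directly into the SQL string. A sketch of the same statement with those values passed as parameters (the table name is an identifier and still has to come from trusted code):

cursor.execute(
    """INSERT INTO {0}(geometry, geometry_type)
       SELECT ST_SetSRID(%s::geometry, 3857), %s
       WHERE NOT EXISTS
           (SELECT 1 FROM {0}
            WHERE geometry = ST_SetSRID(%s::geometry, 3857))
    """.format(table_name),
    [geom, geometry_type, geom],
)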
Example #23
    def test_make_line(self):
        """
        Testing the `MakeLine` aggregate.
        """
        if not connection.features.supports_make_line_aggr:
            with self.assertRaises(NotImplementedError):
                City.objects.all().aggregate(MakeLine('point'))
            return

        # MakeLine on an inappropriate field returns simply None
        self.assertIsNone(State.objects.aggregate(MakeLine('poly'))['poly__makeline'])
        # Reference query:
        # SELECT AsText(ST_MakeLine(geoapp_city.point)) FROM geoapp_city;
        ref_line = GEOSGeometry(
            'LINESTRING(-95.363151 29.763374,-96.801611 32.782057,'
            '-97.521157 34.464642,174.783117 -41.315268,-104.609252 38.255001,'
            '-95.23506 38.971823,-87.650175 41.850385,-123.305196 48.462611)',
            srid=4326
        )
        # We check for equality with a tolerance of 10e-5 which is a lower bound
        # of the precisions of ref_line coordinates
        line = City.objects.aggregate(MakeLine('point'))['point__makeline']
        self.assertTrue(
            ref_line.equals_exact(line, tolerance=10e-5),
            "%s != %s" % (ref_line, line)
        )
Example #24
    def test_make_line(self):
        """
        Testing the (deprecated) `make_line` GeoQuerySet method and the MakeLine
        aggregate.
        """
        if not connection.features.supports_make_line_aggr:
            # Only PostGIS has support for the MakeLine aggregate. For other
            # backends, test that NotImplementedError is raised
            self.assertRaises(
                NotImplementedError,
                City.objects.all().aggregate, MakeLine('point')
            )
            return

        # Ensuring that a `TypeError` is raised on models without PointFields.
        self.assertRaises(TypeError, State.objects.make_line)
        self.assertRaises(TypeError, Country.objects.make_line)
        # MakeLine on an inappropriate field returns simply None
        self.assertIsNone(State.objects.aggregate(MakeLine('poly'))['poly__makeline'])
        # Reference query:
        # SELECT AsText(ST_MakeLine(geoapp_city.point)) FROM geoapp_city;
        ref_line = GEOSGeometry(
            'LINESTRING(-95.363151 29.763374,-96.801611 32.782057,'
            '-97.521157 34.464642,174.783117 -41.315268,-104.609252 38.255001,'
            '-95.23506 38.971823,-87.650175 41.850385,-123.305196 48.462611)',
            srid=4326
        )
        # We check for equality with a tolerance of 10e-5 which is a lower bound
        # of the precisions of ref_line coordinates
        line1 = City.objects.make_line()
        line2 = City.objects.aggregate(MakeLine('point'))['point__makeline']
        for line in (line1, line2):
            self.assertTrue(ref_line.equals_exact(line, tolerance=10e-5),
                "%s != %s" % (ref_line, line))
Example #25
    def handle(self, *args, **options):
        from sigeo.obcine.models import Obcina
        import csv
        from django.contrib.gis.geos import GEOSGeometry
        from sigeo.preprocessing import get_coordtransform
        import StringIO

        s = StringIO.StringIO()
        w = csv.writer(s)
        trans = get_coordtransform()
        w.writerow(['id', 'ime', 'uime', 'tip', 'povrsina', 'center', 'geometrija'])

        for ob in Obcina.objects.all():
            center_pt = 'SRID=3787;POINT(%d %d)' % (ob.y_c, ob.x_c)
            pt = GEOSGeometry(center_pt)
            pt.transform(trans)

            row = [
                ob.ob_id,
                ob.ob_ime,
                ob.ob_uime,
                ob.ob_tip,
                ob.ob_pov,
                pt.kml,
                ob.the_geom.kml,
            ]
            w.writerow([unicode(i).encode('utf-8') for i in row])

        print s.getvalue()
Example #26
 def from_json(self, data):
     try:
         try:
             geom = GEOSGeometry(str(data.geometry))
             if(hasattr(data.geometry.crs, 'properties')):
                 crs = data.geometry.crs.properties['name']
                 srs = SpatialReference(crs)
                 geom.set_srid(srs.srid)
                 geom.transform(4326)
             self.geom = geom
         except:
             return None, "Invalid Geometry"
         if('project_id' in data.__dict__['properties']):
             try:
                 self.project = Project.objects.get(id=int(data.__dict__['properties']['project_id']))
             except (ValueError, ObjectDoesNotExist):
                 return None, "Invalid Project"
         else:
             return None, "Project is required"
         if('name' in data.__dict__['properties']):
             self.name = data.__dict__['properties']['name']
         else:
             return None, "Name is Required"
         self.save()
         return self, None
     except:
         return None, "Unexpected Error"
Example #27
    def test_transform_noop(self):
        """ Testing `transform` method (SRID match) """
        # transform() should no-op if source & dest SRIDs match,
        # regardless of whether GDAL is available.
        if gdal.HAS_GDAL:
            g = GEOSGeometry("POINT (-104.609 38.255)", 4326)
            gt = g.tuple
            g.transform(4326)
            self.assertEqual(g.tuple, gt)
            self.assertEqual(g.srid, 4326)

            g = GEOSGeometry("POINT (-104.609 38.255)", 4326)
            g1 = g.transform(4326, clone=True)
            self.assertEqual(g1.tuple, g.tuple)
            self.assertEqual(g1.srid, 4326)
            self.assertIsNot(g1, g, "Clone didn't happen")

        old_has_gdal = gdal.HAS_GDAL
        try:
            gdal.HAS_GDAL = False

            g = GEOSGeometry("POINT (-104.609 38.255)", 4326)
            gt = g.tuple
            g.transform(4326)
            self.assertEqual(g.tuple, gt)
            self.assertEqual(g.srid, 4326)

            g = GEOSGeometry("POINT (-104.609 38.255)", 4326)
            g1 = g.transform(4326, clone=True)
            self.assertEqual(g1.tuple, g.tuple)
            self.assertEqual(g1.srid, 4326)
            self.assertIsNot(g1, g, "Clone didn't happen")
        finally:
            gdal.HAS_GDAL = old_has_gdal
Example #28
    def test_prepared(self):
        "Testing PreparedGeometry support."
        # Creating a simple multipolygon and getting a prepared version.
        mpoly = GEOSGeometry("MULTIPOLYGON(((0 0,0 5,5 5,5 0,0 0)),((5 5,5 10,10 10,10 5,5 5)))")
        prep = mpoly.prepared

        # A set of test points.
        pnts = [Point(5, 5), Point(7.5, 7.5), Point(2.5, 7.5)]
        covers = [True, True, False]  # No `covers` op for regular GEOS geoms.
        for pnt, c in zip(pnts, covers):
            # Results should be the same (but faster)
            self.assertEqual(mpoly.contains(pnt), prep.contains(pnt))
            self.assertEqual(mpoly.intersects(pnt), prep.intersects(pnt))
            self.assertEqual(c, prep.covers(pnt))

        if geos_version_info()["version"] > "3.3.0":
            self.assertTrue(prep.crosses(fromstr("LINESTRING(1 1, 15 15)")))
            self.assertTrue(prep.disjoint(Point(-5, -5)))
            poly = Polygon(((-1, -1), (1, 1), (1, 0), (-1, -1)))
            self.assertTrue(prep.overlaps(poly))
            poly = Polygon(((-5, 0), (-5, 5), (0, 5), (-5, 0)))
            self.assertTrue(prep.touches(poly))
            poly = Polygon(((-1, -1), (-1, 11), (11, 11), (11, -1), (-1, -1)))
            self.assertTrue(prep.within(poly))

        # Original geometry deletion should not crash the prepared one (#21662)
        del mpoly
        self.assertTrue(prep.covers(Point(5, 5)))
Example #29
 def feature_query(self, query, parameters = {}):
     """
     Transforms a SQL query into a GeoJSON-formatted FeatureColection.
     It is possible to define a source SRS and target SRS if necessary
     by passing the keys 'source_srs' and 'target_srs' to the parameters
     dictionary. The SRS can be specified using a ESPG code integer value,
     or a PROJ.4 projection string.
     """
     def printDecimal(d):
         n = 3 # Number of decimal places
         s = str(d)
         s = s[:s.find('.') + 1 + n]
         return '|' + s + '|'
     # Define source and target SRS. If the source SRS is None (default), 
     # it is defined by the data source itself.
     s_srs = None
     if parameters.has_key('source_srs'):
         s_srs = parameters['source_srs']
     # Default target SRID is Google Mercator (EPSG 900913)
     t_srs = '+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0 +k=1.0 +units=m +nadgrids=@null +no_defs'
     if parameters.has_key('target_srs'):
         t_srs = parameters['target_srs']
     cur = self.connection.cursor()
     cur.execute(query)
     cols = cur.description
     id_col = -1
     geometry_col = -1
     for i in range(0, len(cols)):
         if cols[i][0] == 'id':
             id_col = i
         if cols[i][0] == 'geometry':
             geometry_col = i
     result = {}
     result['type'] = 'FeatureCollection'
     features = []
     for m in cur:
         feature = {}
         feature['type'] = 'Feature'
         if geometry_col > -1:
             if s_srs == None:
                 geometry = GEOSGeometry(m[geometry_col])
             else:
                 geometry = GEOSGeometry(m[geometry_col], s_srs)
             # Project if necessary
             if s_srs != t_srs:
                 geometry.transform(t_srs)
             feature['geometry'] = json.loads(geometry.json, parse_float=decimal.Decimal)
         if id_col > -1: feature['id'] = m[id_col]
         properties = {}
         for i in range(0, len(cols)):
             if i != geometry_col: properties[cols[i][0]] = str(m[i]) # this throws a UnicodeEncodeError with unicode strings
         feature['properties'] = properties
         features.append(feature)
     result['features'] = features
     properties = {}
     properties['count'] = len(features)
     result['properties'] = properties
     cur.close()
     return result
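GEOSGeometry.transform() accepts an integer SRID, a WKT or PROJ string, a SpatialReference, or a CoordTransform, which is why t_srs above can be a PROJ.4 string. A minimal illustration:

from django.contrib.gis.geos import GEOSGeometry

g = GEOSGeometry('POINT (14.5 46.05)', srid=4326)
g.transform('+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 '
            '+x_0=0.0 +y_0=0 +k=1.0 +units=m +nadgrids=@null +no_defs')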
Example #30
    def test06_f_expressions(self):
        "Testing F() expressions on GeometryFields."
        # Constructing a dummy parcel border and getting the City instance for
        # assigning the FK.
        b1 = GEOSGeometry(
            'POLYGON((-97.501205 33.052520,-97.501205 33.052576,'
            '-97.501150 33.052576,-97.501150 33.052520,-97.501205 33.052520))',
            srid=4326
        )
        pcity = City.objects.get(name='Aurora')

        # First parcel has incorrect center point that is equal to the City;
        # it also has a second border that is different from the first as a
        # 100ft buffer around the City.
        c1 = pcity.location.point
        c2 = c1.transform(2276, clone=True)
        b2 = c2.buffer(100)
        Parcel.objects.create(name='P1', city=pcity, center1=c1, center2=c2, border1=b1, border2=b2)

        # Now creating a second Parcel where the borders are the same, just
        # in different coordinate systems.  The center points are also the
        # same (but in different coordinate systems), and this time they
        # actually correspond to the centroid of the border.
        c1 = b1.centroid
        c2 = c1.transform(2276, clone=True)
        b2 = b1 if connection.features.supports_transform else b1.transform(2276, clone=True)
        Parcel.objects.create(name='P2', city=pcity, center1=c1, center2=c2, border1=b1, border2=b2)

        # Should return the second Parcel, which has the center within the
        # border.
        qs = Parcel.objects.filter(center1__within=F('border1'))
        self.assertEqual(1, len(qs))
        self.assertEqual('P2', qs[0].name)

        # This time center2 is in a different coordinate system and needs to be
        # wrapped in transformation SQL.
        qs = Parcel.objects.filter(center2__within=F('border1'))
        if connection.features.supports_transform:
            self.assertEqual('P2', qs.get().name)
        else:
            msg = "This backend doesn't support the Transform function."
            with self.assertRaisesMessage(NotImplementedError, msg):
                list(qs)

        # Should return the first Parcel, which has the center point equal
        # to the point in the City ForeignKey.
        qs = Parcel.objects.filter(center1=F('city__location__point'))
        self.assertEqual(1, len(qs))
        self.assertEqual('P1', qs[0].name)

        # This time the city column should be wrapped in transformation SQL.
        qs = Parcel.objects.filter(border2__contains=F('city__location__point'))
        if connection.features.supports_transform:
            self.assertEqual('P1', qs.get().name)
        else:
            msg = "This backend doesn't support the Transform function."
            with self.assertRaisesMessage(NotImplementedError, msg):
                list(qs)
Example #31
    def _geos_ptr(self):
        from django.contrib.gis.geos import GEOSGeometry

        return GEOSGeometry._from_wkb(self.wkb)
Example #32
 def test_pois_are_attached_to_paths(self):
     geom = GEOSGeometry('POINT(1 1)')
     poi = self.cmd.create_poi(geom, 'bridge', 'infra')
     self.assertEquals([self.path], list(poi.paths.all()))
Example #33
 def test_should_include_null_geometry_in_search_results(self):
     shape = GEOSGeometry('POLYGON((20 20, 40 20, 40 40, 20 40, 20 20))')
     result = self.filter.filter(self.model.objects.all(), shape)
     self.assertEqual(1, len(result))
Example #34
 def test_should_filter_queryset_intersecting_shape(self):
     shape = GEOSGeometry('POLYGON((0 -1, 4 -1, 4 1, 0 1, 0 -1))',
                          srid=settings.SRID)
     shape.transform(API_SRID)
     result = self.filter.filter(self.model.objects.all(), shape)
     self.assertEqual(2, len(result))
Example #35
    def elevation_area(cls, geom):
        xmin, ymin, xmax, ymax = cls._nice_extent(geom)
        width = xmax - xmin
        height = ymax - ymin
        precision = settings.ALTIMETRIC_PROFILE_PRECISION
        max_resolution = settings.ALTIMETRIC_AREA_MAX_RESOLUTION
        if width / precision > max_resolution:
            precision = int(width / max_resolution)
        if height / precision > 10000:
            precision = int(width / max_resolution)
        cursor = connection.cursor()
        cursor.execute(
            "SELECT 1 FROM information_schema.tables WHERE table_name='mnt'")
        if cursor.rowcount == 0:
            logger.warn("No DEM present")
            return {}

        sql = """
            -- Author: Celian Garcia
            WITH columns AS (
                    SELECT generate_series({xmin}::int, {xmax}::int, {precision}) AS x
                ),
                lines AS (
                    SELECT generate_series({ymin}::int, {ymax}::int, {precision}) AS y
                ),
                resolution AS (
                    SELECT x, y
                    FROM (SELECT COUNT(x) AS x FROM columns) AS col,
                         (SELECT COUNT(y) AS y FROM lines)   AS lin
                ),
                points2d AS (
                    SELECT row_number() OVER () AS id,
                           ST_SetSRID(ST_MakePoint(x, y), {srid}) AS geom,
                           ST_Transform(ST_SetSRID(ST_MakePoint(x, y), {srid}), 4326) AS geomll
                    FROM lines, columns
                ),
                draped AS (
                    SELECT id, ST_Value(mnt.rast, p.geom)::int AS altitude
                    FROM mnt, points2d AS p
                    WHERE ST_Intersects(mnt.rast, p.geom)
                ),
                all_draped AS (
                    SELECT geomll, geom, altitude
                    FROM points2d LEFT JOIN draped ON (points2d.id = draped.id)
                    ORDER BY points2d.id
                ),
                extent_latlng AS (
                    SELECT ST_Envelope(ST_Union(geom)) AS extent,
                           MIN(altitude) AS min_z,
                           MAX(altitude) AS max_z,
                           AVG(altitude) AS center_z
                    FROM all_draped
                )
            SELECT extent,
                   ST_transform(extent, 4326),
                   center_z,
                   min_z,
                   max_z,
                   resolution.x AS resolution_w,
                   resolution.y AS resolution_h,
                   altitude
            FROM extent_latlng, resolution, all_draped;
        """.format(xmin=xmin,
                   ymin=ymin,
                   xmax=xmax,
                   ymax=ymax,
                   srid=settings.SRID,
                   precision=precision)
        cursor.execute(sql)
        result = cursor.fetchall()
        first = result[0]
        envelop_native, envelop, center_z, min_z, max_z, resolution_w, resolution_h, a = first
        envelop = GEOSGeometry(envelop, srid=4326)
        envelop_native = GEOSGeometry(envelop_native, srid=settings.SRID)

        altitudes = []
        row = []
        for i, record in enumerate(result):
            if i > 0 and i % resolution_w == 0:
                altitudes.append(row)
                row = []
            elevation = (record[7] or 0.0) - min_z
            row.append(elevation)
        altitudes.append(row)

        area = {
            'center': {
                'x': envelop_native.centroid.x,
                'y': envelop_native.centroid.y,
                'lat': envelop.centroid.y,
                'lng': envelop.centroid.x,
                'z': int(center_z)
            },
            'resolution': {
                'x': resolution_w,
                'y': resolution_h,
                'step': precision
            },
            'size': {
                'x':
                envelop_native.coords[0][2][0] -
                envelop_native.coords[0][0][0],
                'y':
                envelop_native.coords[0][2][1] -
                envelop_native.coords[0][0][1],
                'lat':
                envelop.coords[0][2][0] - envelop.coords[0][0][0],
                'lng':
                envelop.coords[0][2][1] - envelop.coords[0][0][1]
            },
            'extent': {
                'altitudes': {
                    'min': min_z,
                    'max': max_z
                },
                'southwest': {
                    'lat': envelop.coords[0][0][1],
                    'lng': envelop.coords[0][0][0],
                    'x': envelop_native.coords[0][0][0],
                    'y': envelop_native.coords[0][0][1]
                },
                'northwest': {
                    'lat': envelop.coords[0][1][1],
                    'lng': envelop.coords[0][1][0],
                    'x': envelop_native.coords[0][1][0],
                    'y': envelop_native.coords[0][1][1]
                },
                'northeast': {
                    'lat': envelop.coords[0][2][1],
                    'lng': envelop.coords[0][2][0],
                    'x': envelop_native.coords[0][2][0],
                    'y': envelop_native.coords[0][2][1]
                },
                'southeast': {
                    'lat': envelop.coords[0][3][1],
                    'lng': envelop.coords[0][3][0],
                    'x': envelop_native.coords[0][3][0],
                    'y': envelop_native.coords[0][3][1]
                }
            },
            'altitudes': altitudes
        }
        return area
Example #36
                4: 'Less than 200 mg/L',
                'NA': 'No Data'
            },
            'big_cz': False,
        },
    ],
}

# List of valid stream tables
STREAM_TABLES = {
    'nhd': 'nhdflowline',
    'nhdhr': 'nhdflowlinehr',
    'drb': 'drb_streams_50',
}

DRB_PERIMETER = GEOSGeometry(json.dumps(drb_perimeter['geometry']), srid=4326)
DRB_SIMPLE_PERIMETER = \
    GEOSGeometry(json.dumps(drb_simple_perimeter['geometry']), srid=4326)

# Vizer observation meta data URL.  Happens to be proxied through a local app
# server to avoid Cross Domain request errors
VIZER_ROOT = '/observation/services/get_asset_info.php?'
# For requests that should use a daily cached backed proxy
VIZER_CACHED_ROOT = '/cache' + VIZER_ROOT
VIZER_TYPE_PARAM = '&asset_type=siso'

VIZER_URLS = {
    'layers':
    VIZER_CACHED_ROOT + 'opt=meta' + VIZER_TYPE_PARAM,
    'variable':
    VIZER_ROOT + 'opt=data&asset_id={{asset_id}}&var_id={{var_id}}' +
Example #37
 def test_ensure_point(self):
     self.assertRaises(SpatialError, ensure_point, [38.97127105172941, -95.23592948913574])
     self.assertRaises(SpatialError, ensure_point, GEOSGeometry('POLYGON((-95 38, -96 40, -97 42, -95 38))'))
     ensure_point(Point(-95.23592948913574, 38.97127105172941))
Example #38
    def test_page_rename(self):
        p = Page()
        p.content = "<p>The page content.</p>"
        p.name = "Original page"
        p.save()

        p.rename_to("New page")

        # Renamed-to page should exist.
        new_p = Page.objects.get(name="New page")
        # new_p should have the same content.
        self.assertEqual(new_p.content, p.content)

        # "Original page" should no longer exist.
        pgs = Page.objects.filter(name="Original page")
        self.assertEqual(len(pgs), 0)
        # and a redirect from "original page" to "New page" should exist.
        Redirect.objects.filter(source="original page", destination=new_p)

        ###########################################################
        # Renaming to a page that already exists should raise an
        # exception and not affect the original page.
        ###########################################################
        p = Page()
        p.content = "<p>Hello, world.</p>"
        p.name = "Page A"
        p.save()

        self.assertRaises(exceptions.PageExistsError, p.rename_to, "New page")
        # p should be unaffected.  No redirect should be created.
        p = Page.objects.get(name="Page A")
        self.assertEqual(p.content, "<p>Hello, world.</p>")
        self.assertEqual(len(Redirect.objects.filter(source="page a")), 0)

        ###########################################################
        # Renaming should carry along files and FK'ed items that
        # point to it.
        ###########################################################
        p = Page()
        p.content = "<p>A page with files and a map.</p>"
        p.name = "Page With FKs"
        p.save()
        # Create a file that points at the page.
        pf = PageFile(file=ContentFile("foo"), name="file.txt", slug=p.slug)
        pf.save()
        # Create a redirect that points at the page.
        redirect = Redirect(source="foobar", destination=p)
        redirect.save()
        # Create a map that points at the page.
        points = GEOSGeometry(
            """MULTIPOINT (-122.4378964233400069 37.7971758820830033, -122.3929211425700032 37.7688207875790027, -122.3908612060599950 37.7883584775320003, -122.4056240844700056 37.8013807351830025, -122.4148937988299934 37.8002956347170027, -122.4183270263600036 37.8051784612779969)"""
        )
        map = MapData(points=points, page=p)
        map.save()
        # Add tags to page
        tagset = PageTagSet(page=p)
        tagset.save()
        tag = Tag(name="tag1")
        tag.save()
        tagset.tags.add(tag)

        p.rename_to("New Page With FKs")

        new_p = Page.objects.get(name="New Page With FKs")
        self.assertEqual(len(MapData.objects.filter(page=new_p)), 1)
        self.assertEqual(len(new_p.pagetagset.tags.all()), 1)
        # Two redirects: one we created explicitly and one that was
        # created during rename_to()
        self.assertEqual(len(Redirect.objects.filter(destination=new_p)), 2)
        self.assertEqual(len(PageFile.objects.filter(slug=new_p.slug)), 1)

        # Renaming should keep slugs pointed at old page /and/ copy
        # them to the new page.
        self.assertEqual(len(PageFile.objects.filter(slug=p.slug)), 1)

        ###########################################################
        # Renaming with multiple files.
        ###########################################################
        p = Page()
        p.content = "<p>A new page with multiple files.</p>"
        p.name = "Page with multiple files"
        p.save()
        # Create a file that points at the page.
        pf = PageFile(file=ContentFile("foo"), name="file.txt", slug=p.slug)
        pf.save()
        pf = PageFile(file=ContentFile("foo2"), name="file2.txt", slug=p.slug)
        pf.save()
        pf = PageFile(file=ContentFile("foo3"), name="file3.txt", slug=p.slug)
        pf.save()
        p.rename_to("A page with multiple files 2")

        p = Page.objects.get(name="A page with multiple files 2")
        self.assertEqual(len(PageFile.objects.filter(slug=p.slug)), 3)

        ###########################################################
        # Reverting a renamed page should be possible and should
        # restore files and FK'ed items that were pointed at the
        # original page.  The renamed-to page should still exist
        # after the revert and should still have its own files and
        # FK'ed items pointed at it.
        ###########################################################
        p = Page(name="Page With FKs", slug="page with fks")
        # get the version right before it was deleted
        v_before_deleted = len(p.versions.all()) - 1
        p_h = p.versions.as_of(version=v_before_deleted)
        p_h.revert_to()
        p = Page.objects.get(name="Page With FKs")
        self.assertEqual(len(MapData.objects.filter(page=p)), 1)
        self.assertEqual(len(PageFile.objects.filter(slug=p.slug)), 1)

        p2 = Page.objects.get(name="New Page With FKs")
        self.assertEqual(len(MapData.objects.filter(page=p2)), 1)
        self.assertEqual(len(PageFile.objects.filter(slug=p2.slug)), 1)

        self.assertEqual(len(Redirect.objects.filter(destination=p2)), 1)

        ###########################################################
        # Renaming a page and then renaming it back.
        ###########################################################
        # 1. Simple case
        p = Page(name="Page X", content="<p>Foobar</p>")
        p.save()
        p.rename_to("Page Y")
        self.assertEqual(len(Page.objects.filter(name="Page X")), 0)
        self.assertEqual(len(Page.objects.filter(name="Page Y")), 1)

        p_new = Page.objects.get(name="Page Y")
        p_new.rename_to("Page X")
        self.assertEqual(len(Page.objects.filter(name="Page X")), 1)
        self.assertEqual(len(Page.objects.filter(name="Page Y")), 0)

        # 2. If we have FKs pointed at the page this shouldn't be
        # totally f****d.
        p = Page(name="Page X2", content="<p>Foo X</p>")
        p.save()
        points = GEOSGeometry(
            """MULTIPOINT (-122.4378964233400069 37.7971758820830033, -122.3929211425700032 37.7688207875790027, -122.3908612060599950 37.7883584775320003, -122.4056240844700056 37.8013807351830025, -122.4148937988299934 37.8002956347170027, -122.4183270263600036 37.8051784612779969)"""
        )
        map = MapData(points=points, page=p)
        map.save()
        # Create a file that points at the page.
        pf = PageFile(file=ContentFile("foooo"),
                      name="file_foo.txt",
                      slug=p.slug)
        pf.save()

        p.rename_to("Page Y2")
        p_new = Page.objects.get(name="Page Y2")
        # FK points at the page we renamed to.
        self.assertEqual(len(MapData.objects.filter(page=p_new)), 1)
        self.assertEqual(len(PageFile.objects.filter(slug=p_new.slug)), 1)

        # Now rename it back.
        p_new.rename_to("Page X2")
        p = Page.objects.get(name="Page X2")
        # After rename-back-to, FK points to the renamed-back-to page.
        self.assertEqual(len(MapData.objects.filter(page=p)), 1)
        self.assertEqual(len(PageFile.objects.filter(slug=p.slug)), 1)

        ###########################################################
        # Renaming a page but keeping the same slug
        ###########################################################
        p = Page(name="Foo A", content="<p>Foo A</p>")
        p.save()
        p.rename_to("FOO A")

        # Name has changed.
        self.assertEqual(len(Page.objects.filter(name="FOO A")), 1)
        # Has the same history, with a new entry for the name change.
        p = Page.objects.get(name="FOO A")
        p1, p0 = p.versions.all()
        self.assertEqual(p1.name, 'FOO A')
        self.assertEqual(p0.name, 'Foo A')
        self.assertEqual(p0.content, p1.content)

        ###########################################################
        # Renaming a page twice (A -> B -> C) and then revert A to
        # an existing state.
        ###########################################################
        p = Page(name="Bar A", content="<p>Bar A</p>")
        p.save()
        p.rename_to("Bar B")
        p = Page.objects.get(name="Bar B")
        p.rename_to("Bar C")

        p = Page(name="Bar A", slug="bar a")
        p_h = p.versions.as_of(version=1)
        p_h.revert_to()

        ###########################################################
        # Renaming a page back and forth and reverting.
        ###########################################################
        p = Page(name="Zoo A", content="<p>Zoo A</p>")
        p.save()
        p.rename_to("Zoo B")
        p = Page.objects.get(name="Zoo B")
        p.rename_to("Zoo A")
        p = Page.objects.get(name="Zoo A")
        p.rename_to("Zoo B")

        p = Page(name="Zoo A", slug="zoo a")
        p_h = p.versions.as_of(version=1)
        p_h.revert_to()

        ###########################################################
        # page A, rename to B, then create new A, rename B to C,
        # rename C to B, then revert C to first version
        ###########################################################
        p = Page(name="Mike A", content="<p>A</p>")
        p.save()
        p.rename_to("Mike B")
        new_a = Page(name="Mike A", content="<p>A new</p>")
        new_a.save()
        p = Page.objects.get(name="Mike B")
        p.rename_to("Mike C")
        p = Page.objects.get(name="Mike C")
        p.rename_to("Mike B")

        p_c = Page(name="Mike C", slug="mike c")
        p_h = p_c.versions.as_of(version=1)
        p_h.revert_to()
Example #39
    def buscar(self, query, ciudad_actual_slug=None):
        # all of this could be replaced by a lucene/solr/elasticsearch index
        # with a text field and an associated point

        # Given examples like these:
        #  - 12 y 64, casco urbano, la plata, buenos aires, argentina
        #  - plaza italia, casco urbano, la plata, buenos aires, argentina
        # the idea is to split what sits between the commas into a list of tokens
        # 1. take token[0] and check whether it is
        #   an intersection (contains ' y ')
        #   a street address (contains ' n ')
        #   a point of interest, a zone, a city, or a "raw geocoder" query
        # 2. take the list of returned results, each carrying a "precision"
        #   For each of the remaining tokens:
        #   raise the precision by 20% if the token matches the slug of the city (or zone) the point falls in
        #   otherwise, lower that point's precision by 20%

        ciudad_model = apps.get_model("catastro", "Ciudad")
        zona_model = apps.get_model("catastro", "Zona")
        if query:

            res = self.poi_exact(query)
            if res:
                return res

            query = remove_multiple_strings(
                query.upper(),
                ['AV.', 'AVENIDA', 'CALLE', 'DIAGONAL', 'BOULEVARD'])

            tokens = filter(None, map(unicode.strip, query.split(',')))

            if len(tokens) > 0:
                calles = tokens[0].upper()[:]
            else:
                return []

            separators = [
                'Y', 'ESQ', 'ESQ.', 'ESQUINA', 'ESQUINA.', 'INTERSECCION',
                'CON', 'CRUCE'
            ]
            for sep in separators:
                calles = calles.replace(' ' + sep + ' ', '@')
            calles = calles.split('@')

            if len(calles) == 2:
                res = self.interseccion(calles[0].strip(), calles[1].strip())
            else:

                direccion = tokens[0].upper()[:]
                separators = [
                    'N', 'NUM', 'NUM.', 'NRO', 'NUMERO', 'NUMERO.', 'NO', 'NO.'
                ]
                for sep in separators:
                    direccion = direccion.replace(' ' + sep + ' ', '@')
                direccion = direccion.split('@')

                if len(direccion) == 2:
                    res = self.direccionPostal(direccion[0].strip(),
                                               direccion[1].strip(),
                                               ciudad_actual_slug)
                else:
                    # worst case (sort by precision?)
                    res = []
                    for tok in tokens:
                        # PROBLEM: these return dicts accessed like punto['item']
                        # PROBLEM: but the others return objects accessed like punto.attr
                        res += self.poi(tok) + self.zona(tok)

            # the current city (the one the map is showing) should be passed in as a parameter
            ciudad_actual = ciudad_model.objects.get(slug=ciudad_actual_slug)

            if res:
                res = [
                    r for r in res if ciudad_actual.poligono.intersects(
                        GEOSGeometry(r['geom']))
                ]
            if not res:
                res = []
                for tok in tokens:
                    res += self.poi(tok) + self.zona(tok)
                if res:
                    res = [
                        r for r in res if ciudad_actual.poligono.intersects(
                            GEOSGeometry(r['geom']))
                    ]
            if not res:
                res = []
                for tok in tokens:
                    res += self.rawGeocoder(tok)
                if res:
                    res = [
                        r for r in res if ciudad_actual.poligono.intersects(
                            GEOSGeometry(r['geom']))
                    ]
            if not res:
                res = []
                for tok in tokens:
                    res += self.rawGeocoder(tok + "," + ciudad_actual.nombre)
                if res:
                    res = [
                        r for r in res if ciudad_actual.poligono.intersects(
                            GEOSGeometry(r['geom']))
                    ]

            #
            # check here whether the results intersect the polygon of ciudad_actual and of the city or zone the user typed
            # the typed-in city would be tokens[1]. Add points when it is a city, or a zone, that a given point belongs to.
            areas = []

            # zone or city the user typed in (tokens[1:])
            for token in tokens[1:]:
                try:
                    areas += [
                        GEOSGeometry(i.poligono)
                        for i in zona_model.objects.fuzzy_like_query(token)
                    ]
                except Exception:
                    pass
                try:
                    areas += [
                        GEOSGeometry(i.poligono)
                        for i in ciudad_model.objects.fuzzy_like_query(token)
                    ]
                except Exception:
                    pass

            if areas:
                # for each result, raise or lower its score depending on the areas it falls in
                for r in res:
                    # for each area this result intersects, boost; otherwise penalize
                    for area in areas:
                        if area.intersects(GEOSGeometry(r['geom'])):
                            # boost the precision when it intersects, otherwise penalize it
                            r['precision'] *= 1.8
                        else:
                            r['precision'] *= 0.4
                    if ciudad_actual.poligono.intersects(
                            GEOSGeometry(r['geom'])):
                        r['precision'] *= 1.8
            else:
                # The point must fall inside the current city, no exceptions
                res = [
                    r for r in res if ciudad_actual.poligono.intersects(
                        GEOSGeometry(r['geom']))
                ]

            # sort by precision
            res.sort(key=itemgetter("precision"), reverse=True)
            # if the best result is .5 or more ahead of the second and above .5 precision, it wins outright
            # add some other similar heuristic rule for filtering
            return res
        else:
            return []
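A minimal usage sketch for the search above, assuming (purely for illustration) that buscar() hangs off a manager reachable as Poi.objects and that each result dict also carries a 'nombre' key:

resultados = Poi.objects.buscar(
    "12 y 64, casco urbano, la plata",
    ciudad_actual_slug="la-plata",
)
for r in resultados:
    # each result is a dict with at least 'geom' and 'precision' (see the filters above)
    print(r.get("nombre"), r["precision"], r["geom"])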
Ejemplo n.º 40
0
def process_shapefiles(request,
                       collection=BiologicalCollectionRecord,
                       additional_fields=None):
    token = request.GET.get('token', None)

    if not token:
        return JsonResponse({'message': 'empty token'})

    shapefiles = Shapefile.objects.filter(
            token=token
    )

    if not additional_fields:
        additional_fields = {
            'present': 'bool',
        }

    for shp in shapefiles:
        shp.token = ''
        shp.save()

    upload_session, created = ShapefileUploadSession.objects.get_or_create(
            uploader=request.user,
            token=token,
            processed=False,
    )

    if created:
        upload_session.shapefiles = shapefiles
        upload_session.save()

    all_shapefiles = upload_session.shapefiles.all()

    needed_ext = ['.shx', '.shp', '.dbf']
    needed_files = {}

    # Check all needed files
    for shp in all_shapefiles:
        name, extension = os.path.splitext(shp.filename)
        if extension in needed_ext:
            needed_files[extension[1:]] = shp
            needed_ext.remove(extension)

    if len(needed_ext) > 0:
        data = {
            'message': 'missing %s' % ','.join(needed_ext)
        }
        upload_session.error = data['message']
        upload_session.save()
        return JsonResponse(data)

    # Extract shapefile into dictionary
    outputs = extract_shape_file(
            shp_file=needed_files['shp'].shapefile,
            shx_file=needed_files['shx'].shapefile,
            dbf_file=needed_files['dbf'].shapefile,
    )

    # disconnect post save handler of location sites
    # it is done from record signal
    signals.post_save.disconnect(
            location_site_post_save_handler,
    )
    signals.post_save.disconnect(
            collection_post_save_update_cluster,
    )

    collection_added = 0

    for geojson in outputs:
        try:
            # Optional fields and value
            location_site_name = geojson['properties']['location']
            properties = geojson['properties']
            geojson_json = json.dumps(geojson['geometry'])
            geometry = GEOSGeometry(geojson_json)
            optional_records = {}

            if (sys.version_info > (3, 0)):
                # Python 3 code in this block
                optional_fields_iter = additional_fields.items()
            else:
                # Python 2 code in this block
                optional_fields_iter = additional_fields. \
                    iteritems()

            for (opt_field, field_type) in optional_fields_iter:
                if opt_field in properties:
                    if field_type == 'bool':
                        properties[opt_field] = properties[opt_field] == 1
                    elif field_type == 'str':
                        properties[opt_field] = properties[opt_field].lower()
                    optional_records[opt_field] = properties[opt_field]

            # Add custodian
            if 'custodian' in properties:
                optional_records['institution_id'] = properties['custodian']

            if geojson['geometry']['type'] == 'Polygon':
                location_type, status = LocationType.objects.get_or_create(
                        name='PolygonObservation',
                        allowed_geometry='POLYGON'
                )
                location_site, created = LocationSite.objects.get_or_create(
                        name=location_site_name,
                        geometry_polygon=geometry,
                        location_type=location_type,
                )
            elif geojson['geometry']['type'] == 'MultiPolygon':
                location_type, status = LocationType.objects.get_or_create(
                        name='MutiPolygonObservation',
                        allowed_geometry='MULTIPOLYGON'
                )
                location_site, created = LocationSite.objects.get_or_create(
                        name=location_site_name,
                        geometry_multipolygon=geometry,
                        location_type=location_type,
                )
            elif geojson['geometry']['type'] == 'LineString':
                location_type, status = LocationType.objects.get_or_create(
                        name='LineObservation',
                        allowed_geometry='LINE'
                )
                location_site, created = LocationSite.objects.get_or_create(
                        name=location_site_name,
                        geometry_line=geometry,
                        location_type=location_type,
                )
            else:
                location_type, status = LocationType.objects.get_or_create(
                        name='PointObservation',
                        allowed_geometry='POINT'
                )
                location_site, created = LocationSite.objects.get_or_create(
                        name=location_site_name,
                        geometry_point=geometry,
                        location_type=location_type,
                )

            collections = collection.objects.filter(
                    original_species_name=properties['species']
            )

            taxonomy = None
            if collections:
                taxonomy = collections[0].taxonomy

            collection_records, created = collection. \
                objects. \
                get_or_create(
                    site=location_site,
                    original_species_name=properties['species'],
                    category=properties['category'].lower(),
                    collection_date=properties['date'],
                    collector=properties['collector'],
                    notes=properties['notes'],
                    taxonomy=taxonomy,
                    owner=request.user,
                    **optional_records)

            if created:
                collection_added += 1

            upload_session.processed = True
            upload_session.save()

        except (ValueError, KeyError) as e:
            upload_session.error = str(e)
            upload_session.save()

    # reconnect signals
    signals.post_save.connect(
            location_site_post_save_handler,
    )
    signals.post_save.connect(
            collection_post_save_update_cluster,
    )

    response_message = 'Added %s records <br/>' % collection_added
    if collection_added > 0:
        response_message += 'Verify your records ' \
                            '<a href="/nonvalidated-user-list/">' \
                            'here</a> <br/>'
    data = {
        'message': response_message
    }
    return JsonResponse(data)
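The loop above turns each extracted GeoJSON feature into a GEOSGeometry before deciding which LocationSite geometry field to fill. A self-contained sketch of just that conversion step, with a made-up feature:

import json
from django.contrib.gis.geos import GEOSGeometry

feature = {
    "type": "Feature",
    "properties": {"location": "Example site"},
    "geometry": {"type": "Point", "coordinates": [30.0, -25.0]},
}
# GEOSGeometry accepts a GeoJSON geometry string (coordinates assumed to be WGS84 here)
geometry = GEOSGeometry(json.dumps(feature["geometry"]))
print(geometry.geom_type, geometry.coords)  # Point (30.0, -25.0)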
Ejemplo n.º 41
0
 def geos(self):
     "Returns a GEOSGeometry object from this OGRGeometry."
     from django.contrib.gis.geos import GEOSGeometry
     return GEOSGeometry(self.wkb, self.srid)
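A small round-trip sketch of the conversion implemented above; the WKT input is arbitrary:

from django.contrib.gis.gdal import OGRGeometry
from django.contrib.gis.geos import GEOSGeometry

ogr_geom = OGRGeometry("POINT (1 2)", srs=4326)
# the same conversion as the geos() method above: WKB plus SRID
geos_geom = GEOSGeometry(ogr_geom.wkb, ogr_geom.srid)
assert geos_geom.coords == (1.0, 2.0) and geos_geom.srid == 4326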
Ejemplo n.º 42
0
    def process(self):
        """
        This method contains the logic for processing tasks asynchronously
        from a background thread or from a worker. Here tasks that are
        ready to be processed execute some logic. This could be communication
        with a processing node or executing a pending action.
        """

        try:
            if self.pending_action == pending_actions.RESIZE:
                resized_images = self.resize_images()
                self.refresh_from_db()
                self.resize_gcp(resized_images)
                self.pending_action = None
                self.save()

            if self.auto_processing_node and self.status not in [status_codes.FAILED, status_codes.CANCELED]:
                # No processing node assigned and need to auto assign
                if self.processing_node is None:
                    # Assign first online node with lowest queue count
                    self.processing_node = ProcessingNode.find_best_available_node()
                    if self.processing_node:
                        self.processing_node.queue_count += 1 # Doesn't have to be accurate, it will get overridden later
                        self.processing_node.save()

                        logger.info("Automatically assigned processing node {} to {}".format(self.processing_node, self))
                        self.save()

                # Processing node assigned, but is offline and no errors
                if self.processing_node and not self.processing_node.is_online():
                    # If we are queued up
                    # detach processing node, and reassignment
                    # will be processed at the next tick
                    if self.status == status_codes.QUEUED:
                        logger.info("Processing node {} went offline, reassigning {}...".format(self.processing_node, self))
                        self.uuid = ''
                        self.processing_node = None
                        self.status = None
                        self.save()

                    elif self.status == status_codes.RUNNING:
                        # Task was running and processing node went offline
                        # It could have crashed due to low memory
                        # or perhaps it went offline due to network errors.
                        # We can't easily differentiate between the two, so we need
                        # to notify the user because if it crashed due to low memory
                        # the user might need to take action (or be stuck in an infinite loop)
                        raise ProcessingError("Processing node went offline. This could be due to insufficient memory or a network error.")

            if self.processing_node:
                # Need to process some images (UUID not yet set and task doesn't have pending actions)?
                if not self.uuid and self.pending_action is None and self.status is None:
                    logger.info("Processing... {}".format(self))

                    images = [image.path() for image in self.imageupload_set.all()]

                    # Track upload progress, but limit the number of DB updates
                    # to every 2 seconds (and always record the 100% progress)
                    last_update = 0
                    def callback(progress):
                        nonlocal last_update

                        time_has_elapsed = time.time() - last_update >= 2

                        if time_has_elapsed:
                            self.check_if_canceled()

                        if time_has_elapsed or (progress >= 1.0 - 1e-6 and progress <= 1.0 + 1e-6):
                            Task.objects.filter(pk=self.id).update(upload_progress=progress)
                            last_update = time.time()

                    # This takes a while
                    uuid = self.processing_node.process_new_task(images, self.name, self.options, callback)

                    # Refresh task object before committing change
                    self.refresh_from_db()
                    self.uuid = uuid
                    self.save()

                    # TODO: log process has started processing

            if self.pending_action is not None:
                if self.pending_action == pending_actions.CANCEL:
                    # Do we need to cancel the task on the processing node?
                    logger.info("Canceling {}".format(self))
                    if self.processing_node and self.uuid:
                        # Attempt to cancel the task on the processing node
                        # We don't care if this fails (we tried)
                        try:
                            self.processing_node.cancel_task(self.uuid)
                        except ProcessingException:
                            logger.warning("Could not cancel {} on processing node. We'll proceed anyway...".format(self))

                        self.status = status_codes.CANCELED
                        self.pending_action = None
                        self.save()
                    else:
                        raise ProcessingError("Cannot cancel a task that has no processing node or UUID")

                elif self.pending_action == pending_actions.RESTART:
                    logger.info("Restarting {}".format(self))
                    if self.processing_node:

                        # Check if the UUID is still valid, as processing nodes purge
                        # results after a set amount of time, the UUID might have been eliminated.
                        uuid_still_exists = False

                        if self.uuid:
                            try:
                                info = self.processing_node.get_task_info(self.uuid)
                                uuid_still_exists = info['uuid'] == self.uuid
                            except ProcessingException:
                                pass

                        need_to_reprocess = False

                        if uuid_still_exists:
                            # Good to go
                            try:
                                self.processing_node.restart_task(self.uuid, self.options)
                            except ProcessingError as e:
                                # Something went wrong
                                logger.warning("Could not restart {}, will start a new one".format(self))
                                need_to_reprocess = True
                        else:
                            need_to_reprocess = True

                        if need_to_reprocess:
                            logger.info("{} needs to be reprocessed".format(self))

                            # Task has been purged (or processing node is offline)
                            # Process this as a new task
                            # Removing its UUID will cause the scheduler
                            # to process this the next tick
                            self.uuid = ''

                            # We also remove the "rerun-from" parameter if it's set
                            self.options = list(filter(lambda d: d['name'] != 'rerun-from', self.options))
                            self.upload_progress = 0

                        self.console_output = ""
                        self.processing_time = -1
                        self.status = None
                        self.last_error = None
                        self.pending_action = None
                        self.running_progress = 0
                        self.save()
                    else:
                        raise ProcessingError("Cannot restart a task that has no processing node")

                elif self.pending_action == pending_actions.REMOVE:
                    logger.info("Removing {}".format(self))
                    if self.processing_node and self.uuid:
                        # Attempt to delete the resources on the processing node
                        # We don't care if this fails, as resources on processing nodes
                        # Are expected to be purged on their own after a set amount of time anyway
                        try:
                            self.processing_node.remove_task(self.uuid)
                        except ProcessingException:
                            pass

                    # What's more important is that we delete our task properly here
                    self.delete()

                    # Stop right here!
                    return

            if self.processing_node:
                # Need to update status (first time, queued or running?)
                if self.uuid and self.status in [None, status_codes.QUEUED, status_codes.RUNNING]:
                    # Update task info from processing node
                    info = self.processing_node.get_task_info(self.uuid)

                    self.processing_time = info["processingTime"]
                    self.status = info["status"]["code"]

                    current_lines_count = len(self.console_output.split("\n"))
                    console_output = self.processing_node.get_task_console_output(self.uuid, current_lines_count)
                    if len(console_output) > 0:
                        self.console_output += "\n".join(console_output) + '\n'

                        # Update running progress
                        for line in console_output:
                            for line_match, value in self.TASK_OUTPUT_MILESTONES.items():
                                if line_match in line:
                                    self.running_progress = value
                                    break

                    if "errorMessage" in info["status"]:
                        self.last_error = info["status"]["errorMessage"]

                    # Has the task just been canceled, failed, or completed?
                    if self.status in [status_codes.FAILED, status_codes.COMPLETED, status_codes.CANCELED]:
                        logger.info("Processing status: {} for {}".format(self.status, self))

                        if self.status == status_codes.COMPLETED:
                            assets_dir = self.assets_path("")

                            # Remove previous assets directory
                            if os.path.exists(assets_dir):
                                logger.info("Removing old assets directory: {} for {}".format(assets_dir, self))
                                shutil.rmtree(assets_dir)

                            os.makedirs(assets_dir)

                            logger.info("Downloading all.zip for {}".format(self))

                            # Download all assets
                            try:
                                zip_stream = self.processing_node.download_task_asset(self.uuid, "all.zip")
                                zip_path = os.path.join(assets_dir, "all.zip")

                                # Keep track of download progress (if possible)
                                content_length = zip_stream.headers.get('content-length')
                                total_length = int(content_length) if content_length is not None else None
                                downloaded = 0
                                last_update = 0

                                with open(zip_path, 'wb') as fd:
                                    for chunk in zip_stream.iter_content(4096):
                                        downloaded += len(chunk)

                                        # Track progress if we know the content header length
                                        # every 2 seconds
                                        if total_length and time.time() - last_update >= 2:
                                            Task.objects.filter(pk=self.id).update(running_progress=(self.TASK_OUTPUT_MILESTONES_LAST_VALUE + (float(downloaded) / total_length) * 0.1))
                                            last_update = time.time()

                                        fd.write(chunk)
                            except (requests.exceptions.Timeout, requests.exceptions.ConnectionError, ReadTimeoutError) as e:
                                raise ProcessingTimeout(e)

                            logger.info("Done downloading all.zip for {}".format(self))

                            self.refresh_from_db()
                            self.console_output += "Extracting results. This could take a few minutes...\n"
                            self.save()

                            # Extract from zip
                            with zipfile.ZipFile(zip_path, "r") as zip_h:
                                zip_h.extractall(assets_dir)

                            logger.info("Extracted all.zip for {}".format(self))

                            # Populate *_extent fields
                            extent_fields = [
                                (os.path.realpath(self.assets_path("odm_orthophoto", "odm_orthophoto.tif")),
                                 'orthophoto_extent'),
                                (os.path.realpath(self.assets_path("odm_dem", "dsm.tif")),
                                 'dsm_extent'),
                                (os.path.realpath(self.assets_path("odm_dem", "dtm.tif")),
                                 'dtm_extent'),
                            ]

                            for raster_path, field in extent_fields:
                                if os.path.exists(raster_path):
                                    # Read extent and SRID
                                    raster = GDALRaster(raster_path)
                                    extent = OGRGeometry.from_bbox(raster.extent)

                                    # It will be implicitly transformed into the SRID of the model’s field
                                    # self.field = GEOSGeometry(...)
                                    setattr(self, field, GEOSGeometry(extent.wkt, srid=raster.srid))

                                    logger.info("Populated extent field with {} for {}".format(raster_path, self))

                            self.update_available_assets_field()
                            self.running_progress = 1.0
                            self.console_output += "Done!\n"
                            self.status = status_codes.COMPLETED
                            self.save()

                            from app.plugins import signals as plugin_signals
                            plugin_signals.task_completed.send_robust(sender=self.__class__, task_id=self.id)
                        else:
                            # FAILED, CANCELED
                            self.save()
                    else:
                        # Still waiting...
                        self.save()

        except ProcessingError as e:
            self.set_failure(str(e))
        except (ConnectionRefusedError, ConnectionError) as e:
            logger.warning("{} cannot communicate with processing node: {}".format(self, str(e)))
        except ProcessingTimeout as e:
            logger.warning("{} timed out with error: {}. We'll try reprocessing at the next tick.".format(self, str(e)))
        except TaskInterruptedException as e:
            # Task was interrupted during image resize / upload
            logger.warning("{} interrupted: {}".format(self, str(e)))
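The extent_fields block above reads each raster's bounding box and stores it as a geometry on the model. A condensed sketch of that pattern, with an illustrative raster path:

from django.contrib.gis.gdal import GDALRaster, OGRGeometry
from django.contrib.gis.geos import GEOSGeometry

raster = GDALRaster("/tmp/odm_orthophoto.tif")  # hypothetical path
extent = OGRGeometry.from_bbox(raster.extent)   # polygon covering the raster's bounding box
extent_geom = GEOSGeometry(extent.wkt, srid=raster.srid)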
Ejemplo n.º 43
0
    def test_all_gis_lookups_with_rasters(self):
        """
        Evaluate all possible lookups for all input combinations (i.e.
        raster-raster, raster-geom, geom-raster) and for projected and
        unprojected coordinate systems. This test just checks that the lookup
        can be called, but doesn't check if the result makes logical sense.
        """
        from django.contrib.gis.db.backends.postgis.operations import PostGISOperations

        # Create test raster and geom.
        rast = GDALRaster(json.loads(JSON_RASTER))
        stx_pnt = GEOSGeometry(
            'POINT (-95.370401017314293 29.704867409475465)', 4326)
        stx_pnt.transform(3086)

        lookups = [(name, lookup)
                   for name, lookup in BaseSpatialField.get_lookups().items()
                   if issubclass(lookup, GISLookup)]
        self.assertNotEqual(lookups, [], 'No lookups found')
        # Loop through all the GIS lookups.
        for name, lookup in lookups:
            # Construct lookup filter strings.
            combo_keys = [
                field + name for field in [
                    'rast__',
                    'rast__',
                    'rastprojected__0__',
                    'rast__',
                    'rastprojected__',
                    'geom__',
                    'rast__',
                ]
            ]
            if issubclass(lookup, DistanceLookupBase):
                # Set lookup values for distance lookups.
                combo_values = [
                    (rast, 50, 'spheroid'),
                    (rast, 0, 50, 'spheroid'),
                    (rast, 0, D(km=1)),
                    (stx_pnt, 0, 500),
                    (stx_pnt, D(km=1000)),
                    (rast, 500),
                    (json.loads(JSON_RASTER), 500),
                ]
            elif name == 'relate':
                # Set lookup values for the relate lookup.
                combo_values = [
                    (rast, 'T*T***FF*'),
                    (rast, 0, 'T*T***FF*'),
                    (rast, 0, 'T*T***FF*'),
                    (stx_pnt, 0, 'T*T***FF*'),
                    (stx_pnt, 'T*T***FF*'),
                    (rast, 'T*T***FF*'),
                    (json.loads(JSON_RASTER), 'T*T***FF*'),
                ]
            elif name == 'isvalid':
                # The isvalid lookup doesn't make sense for rasters.
                continue
            elif PostGISOperations.gis_operators[name].func:
                # Set lookup values for all function based operators.
                combo_values = [
                    rast, (rast, 0), (rast, 0), (stx_pnt, 0), stx_pnt, rast,
                    json.loads(JSON_RASTER)
                ]
            else:
                # Override band lookup for these, as it's not supported.
                combo_keys[2] = 'rastprojected__' + name
                # Set lookup values for all other operators.
                combo_values = [
                    rast, None, rast, stx_pnt, stx_pnt, rast,
                    json.loads(JSON_RASTER)
                ]

            # Create query filter combinations.
            self.assertEqual(
                len(combo_keys),
                len(combo_values),
                'Number of lookup names and values should be the same',
            )
            combos = [x for x in zip(combo_keys, combo_values) if x[1]]
            self.assertEqual(
                [(n, x) for n, x in enumerate(combos) if x in combos[:n]],
                [],
                'There are repeated test lookups',
            )
            combos = [{k: v} for k, v in combos]

            for combo in combos:
                # Apply this query filter.
                qs = RasterModel.objects.filter(**combo)

                # Evaluate normal filter qs.
                self.assertIn(qs.count(), [0, 1])

            # Evaluate on conditional Q expressions.
            qs = RasterModel.objects.filter(Q(**combos[0]) & Q(**combos[1]))
            self.assertIn(qs.count(), [0, 1])
Ejemplo n.º 44
0
    def test06_f_expressions(self):
        "Testing F() expressions on GeometryFields."
        # Constructing a dummy parcel border and getting the City instance for
        # assigning the FK.
        b1 = GEOSGeometry(
            'POLYGON((-97.501205 33.052520,-97.501205 33.052576,'
            '-97.501150 33.052576,-97.501150 33.052520,-97.501205 33.052520))',
            srid=4326)
        pcity = City.objects.get(name='Aurora')

        # First parcel has incorrect center point that is equal to the City;
        # it also has a second border that is different from the first as a
        # 100ft buffer around the City.
        c1 = pcity.location.point
        c2 = c1.transform(2276, clone=True)
        b2 = c2.buffer(100)
        Parcel.objects.create(name='P1',
                              city=pcity,
                              center1=c1,
                              center2=c2,
                              border1=b1,
                              border2=b2)

        # Now creating a second Parcel where the borders are the same, just
        # in different coordinate systems.  The center points are also the
        # same (but in different coordinate systems), and this time they
        # actually correspond to the centroid of the border.
        c1 = b1.centroid
        c2 = c1.transform(2276, clone=True)
        Parcel.objects.create(name='P2',
                              city=pcity,
                              center1=c1,
                              center2=c2,
                              border1=b1,
                              border2=b1)

        # Should return the second Parcel, which has the center within the
        # border.
        qs = Parcel.objects.filter(center1__within=F('border1'))
        self.assertEqual(1, len(qs))
        self.assertEqual('P2', qs[0].name)

        if connection.features.supports_transform:
            # This time center2 is in a different coordinate system and needs
            # to be wrapped in transformation SQL.
            qs = Parcel.objects.filter(center2__within=F('border1'))
            self.assertEqual(1, len(qs))
            self.assertEqual('P2', qs[0].name)

        # Should return the first Parcel, which has the center point equal
        # to the point in the City ForeignKey.
        qs = Parcel.objects.filter(center1=F('city__location__point'))
        self.assertEqual(1, len(qs))
        self.assertEqual('P1', qs[0].name)

        if connection.features.supports_transform:
            # This time the city column should be wrapped in transformation SQL.
            qs = Parcel.objects.filter(
                border2__contains=F('city__location__point'))
            self.assertEqual(1, len(qs))
            self.assertEqual('P1', qs[0].name)
Ejemplo n.º 45
0
 def deserialize(self, value):
     try:
         return GEOSGeometry(value, self.map_srid)
     except (GEOSException, ValueError) as err:
         logger.error("Error creating geometry from value '%s' (%s)", value, err)
     return None
Ejemplo n.º 46
0
 def test_render(self):
     location = GEOSGeometry('POINT(-120 45)')
     expected = '<input name="location" type="text" value="45.0, -120.0" />'
     actual = self.pw.render('location', location)
     self.assertEqual(expected, actual)
Ejemplo n.º 47
0
def resourcebase_post_save(instance, *args, **kwargs):
    """
    Used to fill any additional fields after the save.
    Has to be called by the children
    """
    if not instance.id:
        return

    ResourceBase.objects.filter(id=instance.id).update(
        thumbnail_url=instance.get_thumbnail_url(),
        detail_url=instance.get_absolute_url(),
        csw_insert_date=datetime.datetime.now())
    instance.set_missing_info()

    # we need to remove stale links
    for link in instance.link_set.all():
        if link.name == "External Document":
            if link.resource.doc_url != link.url:
                link.delete()
        else:
            if urlsplit(settings.SITEURL).hostname not in link.url:
                link.delete()

    try:
        if instance.regions and instance.regions.all():
            """
            try:
                queryset = instance.regions.all().order_by('name')
                for region in queryset:
                    print ("%s : %s" % (region.name, region.geographic_bounding_box))
            except:
                tb = traceback.format_exc()
            else:
                tb = None
            finally:
                if tb:
                    logger.debug(tb)
            """
            pass
        else:
            srid1, wkt1 = instance.geographic_bounding_box.split(";")
            srid1 = re.findall(r'\d+', srid1)

            poly1 = GEOSGeometry(wkt1, srid=int(srid1[0]))
            poly1.transform(4326)

            queryset = Region.objects.all().order_by('name')
            global_regions = []
            regions_to_add = []
            for region in queryset:
                try:
                    srid2, wkt2 = region.geographic_bounding_box.split(";")
                    srid2 = re.findall(r'\d+', srid2)

                    poly2 = GEOSGeometry(wkt2, srid=int(srid2[0]))
                    poly2.transform(4326)

                    if poly2.intersects(poly1):
                        regions_to_add.append(region)
                    if region.level == 0 and region.parent is None:
                        global_regions.append(region)
                except Exception:
                    tb = traceback.format_exc()
                    if tb:
                        logger.debug(tb)
            if regions_to_add or global_regions:
                if regions_to_add and len(regions_to_add) > 0 and len(
                        regions_to_add) <= 30:
                    instance.regions.add(*regions_to_add)
                else:
                    instance.regions.add(*global_regions)
    except Exception:
        tb = traceback.format_exc()
        if tb:
            logger.debug(tb)

    # set default License if none specified
    if instance.license is None:
        no_license = License.objects.filter(name="Not Specified")

        if no_license and len(no_license) > 0:
            instance.license = no_license[0]
            instance.save()
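Both this handler and layer_detail further below split GeoNode's geographic_bounding_box string into an SRID and a WKT polygon before transforming it. A standalone sketch of that parsing, using a made-up bounding box:

import re
from django.contrib.gis.geos import GEOSGeometry

# illustrative value in the "SRID=...;WKT" form used above
geographic_bounding_box = "SRID=3857;POLYGON((-10 -10,-10 10,10 10,10 -10,-10 -10))"
srid_part, wkt = geographic_bounding_box.split(";")
srid = int(re.findall(r"\d+", srid_part)[0])
poly = GEOSGeometry(wkt, srid=srid)
poly.transform(4326)  # normalize to WGS84, as the snippets above do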
Ejemplo n.º 48
0
    def test_geodetic_distance_lookups(self):
        """
        Test distance lookups on geodetic coordinate systems.
        """
        # Line is from Canberra to Sydney.  Query is for all other cities within
        # a 100km of that line (which should exclude only Hobart & Adelaide).
        line = GEOSGeometry('LINESTRING(144.9630 -37.8143,151.2607 -33.8870)',
                            4326)
        dist_qs = AustraliaCity.objects.filter(point__distance_lte=(line,
                                                                    D(km=100)))

        self.assertEqual(9, dist_qs.count())
        self.assertEqual([
            'Batemans Bay', 'Canberra', 'Hillsdale', 'Melbourne', 'Mittagong',
            'Shellharbour', 'Sydney', 'Thirroul', 'Wollongong'
        ], self.get_names(dist_qs))

        # Too many params (4 in this case) should raise a ValueError.
        queryset = AustraliaCity.objects.filter(
            point__distance_lte=('POINT(5 23)', D(km=100), 'spheroid', '4'))
        with self.assertRaises(ValueError):
            len(queryset)

        # Not enough params should raise a ValueError.
        with self.assertRaises(ValueError):
            len(
                AustraliaCity.objects.filter(
                    point__distance_lte=('POINT(5 23)', )))

        # Getting all cities w/in 550 miles of Hobart.
        hobart = AustraliaCity.objects.get(name='Hobart')
        qs = AustraliaCity.objects.exclude(name='Hobart').filter(
            point__distance_lte=(hobart.point, D(mi=550)))
        cities = self.get_names(qs)
        self.assertEqual(cities, ['Batemans Bay', 'Canberra', 'Melbourne'])

        # Cities that are either really close or really far from Wollongong --
        # and using different units of distance.
        wollongong = AustraliaCity.objects.get(name='Wollongong')
        d1, d2 = D(yd=19500), D(nm=400)  # Yards (~17km) & Nautical miles.

        # Normal geodetic distance lookup (uses `distance_sphere` on PostGIS).
        gq1 = Q(point__distance_lte=(wollongong.point, d1))
        gq2 = Q(point__distance_gte=(wollongong.point, d2))
        qs1 = AustraliaCity.objects.exclude(name='Wollongong').filter(gq1
                                                                      | gq2)

        # Geodetic distance lookup but telling GeoDjango to use `distance_spheroid`
        # instead (we should get the same results b/c accuracy variance won't matter
        # in this test case).
        querysets = [qs1]
        if connection.features.has_distance_spheroid_method:
            gq3 = Q(point__distance_lte=(wollongong.point, d1, 'spheroid'))
            gq4 = Q(point__distance_gte=(wollongong.point, d2, 'spheroid'))
            qs2 = AustraliaCity.objects.exclude(
                name='Wollongong').filter(gq3 | gq4)
            querysets.append(qs2)

        for qs in querysets:
            cities = self.get_names(qs)
            self.assertEqual(
                cities, ['Adelaide', 'Hobart', 'Shellharbour', 'Thirroul'])
Ejemplo n.º 49
0
 def extract_poly_from_geometrycollection(self, geo_collection):
     for feature in geo_collection:
         if isinstance(feature, Polygon):
             return self.clean_poly(
                 GEOSGeometry(feature.geojson,
                              srid=self.get_srid('districts')))
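A standalone sketch of the same idea, pulling the first Polygon out of a GeometryCollection (the geometries are invented for illustration):

from django.contrib.gis.geos import GEOSGeometry, Polygon

collection = GEOSGeometry(
    "GEOMETRYCOLLECTION(POINT(0 0), POLYGON((0 0, 0 1, 1 1, 1 0, 0 0)))", srid=4326)
first_polygon = next((g for g in collection if isinstance(g, Polygon)), None)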
Ejemplo n.º 50
0
 def test_wktwriter_constructor_arguments(self):
     wkt_w = WKTWriter(dim=3, trim=True, precision=3)
     ref = GEOSGeometry('POINT (5.34562 23 1.5)')
     ref_wkt = 'POINT Z (5.35 23 1.5)'
     self.assertEqual(ref_wkt, wkt_w.write(ref).decode())
Ejemplo n.º 51
0
    def get_context(self, name, value, attrs):
        # Update the template parameters with any attributes passed in.
        if attrs:
            self.params.update(attrs)
            self.params['editable'] = self.params['modifiable']
        else:
            self.params['editable'] = True

        # Defaulting the WKT value to a blank string -- this
        # will be tested in the JavaScript and the appropriate
        # interface will be constructed.
        self.params['wkt'] = ''

        # If a string reaches here (via a validation error on another
        # field) then just reconstruct the Geometry.
        if value and isinstance(value, str):
            try:
                value = GEOSGeometry(value)
            except (GEOSException, ValueError) as err:
                logger.error("Error creating geometry from value '%s' (%s)",
                             value, err)
                value = None

        if (value and value.geom_type.upper() != self.geom_type
                and self.geom_type != 'GEOMETRY'):
            value = None

        # Constructing the dictionary of the map options.
        self.params['map_options'] = self.map_options()

        # Constructing the JavaScript module name using the name of
        # the GeometryField (passed in via the `attrs` keyword).
        # Use the 'name' attr for the field name (rather than 'field')
        self.params['name'] = name
        # note: we must switch out dashes for underscores since js
        # functions are created using the module variable
        js_safe_name = self.params['name'].replace('-', '_')
        self.params['module'] = 'geodjango_%s' % js_safe_name

        if value:
            # Transforming the geometry to the projection used on the
            # OpenLayers map.
            srid = self.params['srid']
            if value.srid != srid:
                try:
                    ogr = value.ogr
                    ogr.transform(srid)
                    wkt = ogr.wkt
                except GDALException as err:
                    logger.error(
                        "Error transforming geometry from srid '%s' to srid '%s' (%s)",
                        value.srid, srid, err)
                    wkt = ''
            else:
                wkt = value.wkt

            # Setting the parameter WKT with that of the transformed
            # geometry.
            self.params['wkt'] = wkt

        self.params.update(geo_context)
        return self.params
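A condensed sketch of the reprojection step above, outside the widget (3857 stands in for the map projection; the original reads it from self.params['srid']):

from django.contrib.gis.geos import GEOSGeometry

value = GEOSGeometry("POINT (-120 45)", srid=4326)
map_srid = 3857
if value.srid != map_srid:
    ogr = value.ogr          # convert to an OGRGeometry, as get_context() does
    ogr.transform(map_srid)  # reproject in place
    wkt = ogr.wkt
else:
    wkt = value.wkt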
Ejemplo n.º 52
0
# coding: utf-8
from __future__ import unicode_literals, print_function
from datetime import datetime
import os
import pytz
from django.core.management.base import BaseCommand, CommandError
from django.contrib.gis.geos import Point, Polygon, fromstr, GEOSGeometry
import tweepy
from unidecode import unidecode
from twitter.models import Tweet
from ._badwords import BADWORDS, BADWORDS_NOASCIIFY

turkey_geojson = open(os.path.join(os.path.dirname(__file__),
                                   'turkey.geojson')).read()
turkey = GEOSGeometry(turkey_geojson)


def check_for_badwords(text, badwords):
    words = text.split()
    for badword in badwords:
        if any(word.startswith(badword) for word in words):
            return True
        if len(badword.split()) > 1:
            if badword in text:
                return True
    return False


def contains_bad_word(text):
    text = text.lower()
    ascifiedtext = unidecode(text)
Ejemplo n.º 53
0
 def test_should_consider_filter_shape_as_api_srid(self):
     shape = GEOSGeometry('POLYGON((-1 2, -1 4, 1 4, 1 2, -1 2))')
     result = self.filter.filter(self.model.objects.all(), shape)
     self.assertEqual(1, len(result))  # one of them is None
Ejemplo n.º 54
0
 def _load_polygon_data(self):
     bbox_wkt, bbox_z = bbox_data
     bbox_2d = GEOSGeometry(bbox_wkt, srid=32140)
     bbox_3d = Polygon(tuple((x, y, z) for (x, y), z in zip(bbox_2d[0].coords, bbox_z)), srid=32140)
     Polygon2D.objects.create(name='2D BBox', poly=bbox_2d)
     Polygon3D.objects.create(name='3D BBox', poly=bbox_3d)
Ejemplo n.º 55
0
def closest_hospitals_by_location(latitude, longitude, distance=5):
    point_wkt = 'POINT({} {})'.format(longitude, latitude)
    point = GEOSGeometry(point_wkt, srid=4326)
    hospitals = Hospital.objects.filter(
        location__distance_lte=(point, D(km=distance)))
    return hospitals
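A quick usage sketch for the helper above; the coordinates are arbitrary, and the name/location attributes on Hospital are assumed for the sake of the example:

# hospitals within 10 km of an arbitrary point (note the longitude/latitude order in the WKT above)
nearby = closest_hospitals_by_location(latitude=40.4168, longitude=-3.7038, distance=10)
for hospital in nearby:
    print(hospital.name, hospital.location)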
Ejemplo n.º 56
0
    def test_dwithin_gis_lookup_output_with_rasters(self):
        """
        Check the logical functionality of the dwithin lookup for different
        input parameters.
        """
        # Create test raster and geom.
        rast = GDALRaster(json.loads(JSON_RASTER))
        stx_pnt = GEOSGeometry(
            'POINT (-95.370401017314293 29.704867409475465)', 4326)
        stx_pnt.transform(3086)

        # Filter raster with different lookup raster formats.
        qs = RasterModel.objects.filter(rastprojected__dwithin=(rast, D(km=1)))
        self.assertEqual(qs.count(), 1)

        qs = RasterModel.objects.filter(
            rastprojected__dwithin=(json.loads(JSON_RASTER), D(km=1)))
        self.assertEqual(qs.count(), 1)

        qs = RasterModel.objects.filter(rastprojected__dwithin=(JSON_RASTER,
                                                                D(km=1)))
        self.assertEqual(qs.count(), 1)

        # Filter in an unprojected coordinate system.
        qs = RasterModel.objects.filter(rast__dwithin=(rast, 40))
        self.assertEqual(qs.count(), 1)

        # Filter with band index transform.
        qs = RasterModel.objects.filter(rast__1__dwithin=(rast, 1, 40))
        self.assertEqual(qs.count(), 1)
        qs = RasterModel.objects.filter(rast__1__dwithin=(rast, 40))
        self.assertEqual(qs.count(), 1)
        qs = RasterModel.objects.filter(rast__dwithin=(rast, 1, 40))
        self.assertEqual(qs.count(), 1)

        # Filter raster by geom.
        qs = RasterModel.objects.filter(rast__dwithin=(stx_pnt, 500))
        self.assertEqual(qs.count(), 1)

        qs = RasterModel.objects.filter(rastprojected__dwithin=(stx_pnt,
                                                                D(km=10000)))
        self.assertEqual(qs.count(), 1)

        qs = RasterModel.objects.filter(rast__dwithin=(stx_pnt, 5))
        self.assertEqual(qs.count(), 0)

        qs = RasterModel.objects.filter(rastprojected__dwithin=(stx_pnt,
                                                                D(km=100)))
        self.assertEqual(qs.count(), 0)

        # Filter geom by raster.
        qs = RasterModel.objects.filter(geom__dwithin=(rast, 500))
        self.assertEqual(qs.count(), 1)

        # Filter through related model.
        qs = RasterRelatedModel.objects.filter(
            rastermodel__rast__dwithin=(rast, 40))
        self.assertEqual(qs.count(), 1)

        # Filter through related model with band index transform
        qs = RasterRelatedModel.objects.filter(
            rastermodel__rast__1__dwithin=(rast, 40))
        self.assertEqual(qs.count(), 1)

        # Filter through conditional statements.
        qs = RasterModel.objects.filter(
            Q(rast__dwithin=(rast, 40))
            & Q(rastprojected__dwithin=(stx_pnt, D(km=10000))))
        self.assertEqual(qs.count(), 1)

        # Filter through different lookup.
        qs = RasterModel.objects.filter(rastprojected__bbcontains=rast)
        self.assertEqual(qs.count(), 1)
Ejemplo n.º 57
0
def process_user_boundary_shapefiles(request):
    from bims.models import UserBoundary
    from django.contrib.gis.geos import Polygon, MultiPolygon
    token = request.GET.get('token', None)
    boundary_name = request.GET.get('name', None)

    if not token:
        return JsonResponse({
            'message': 'empty token'
        })

    shapefiles = Shapefile.objects.filter(
            token=token
    )

    for shp in shapefiles:
        shp.token = ''
        shp.save()

    upload_session, created = ShapefileUploadSession.objects.get_or_create(
            uploader=request.user,
            token=token,
            processed=False,
    )

    if created:
        upload_session.shapefiles = shapefiles
        upload_session.save()

    all_shapefiles = upload_session.shapefiles.all()

    needed_ext = ['.shx', '.shp', '.dbf']
    needed_files = {}

    # Check all needed files
    for shp in all_shapefiles:
        name, extension = os.path.splitext(shp.filename)
        if extension in needed_ext:
            needed_files[extension[1:]] = shp
            needed_ext.remove(extension)

    if len(needed_ext) > 0:
        data = {
            'message': 'missing %s' % ','.join(needed_ext)
        }
        upload_session.error = data['message']
        upload_session.save()
        return JsonResponse(data)

    # Extract shapefile into dictionary
    outputs = extract_shape_file(
            shp_file=needed_files['shp'].shapefile,
            shx_file=needed_files['shx'].shapefile,
            dbf_file=needed_files['dbf'].shapefile,
    )

    geometry = None
    geometries = []

    for geojson in outputs:
        try:
            properties = geojson['properties']

            if not boundary_name:
                if 'name' in properties:
                    boundary_name = properties['name']
                else:
                    boundary_name, extension = os.path.splitext(
                            all_shapefiles[0].filename
                    )

            geojson_json = json.dumps(geojson['geometry'])
            geometry = GEOSGeometry(geojson_json)

            if isinstance(geometry, Polygon):
                geometries.append(geometry)
            elif not isinstance(geometry, MultiPolygon):
                response_message = 'Only polygon and multipolygon allowed'
                upload_session.error = response_message
                upload_session.save()
                return JsonResponse({'message': response_message})

        except (ValueError, KeyError, TypeError) as e:
            upload_session.error = str(e)
            upload_session.save()
            response_message = 'Failed : %s' % str(e)
            return JsonResponse({'message': response_message})

    if len(geometries) > 0:
        geometry = MultiPolygon(geometries)

    user_boundary, created = UserBoundary.objects.get_or_create(
            user=request.user,
            name=boundary_name,
            geometry=geometry
    )
    upload_session.processed = True
    upload_session.save()

    if created:
        response_message = 'User boundary added'
    else:
        response_message = 'User boundary already exists'

    data = {
        'message': response_message
    }
    return JsonResponse(data)
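The MultiPolygon(geometries) call above relies on GeoDjango accepting a single list of Polygons as well as separate arguments; a minimal sketch with made-up rings:

from django.contrib.gis.geos import MultiPolygon, Polygon

p1 = Polygon(((0, 0), (0, 1), (1, 1), (0, 0)))
p2 = Polygon(((2, 2), (2, 3), (3, 3), (2, 2)))
boundary = MultiPolygon([p1, p2])  # equivalent to MultiPolygon(p1, p2)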
Ejemplo n.º 58
0
def send_zone_json(request):
    if request.method == 'POST':

        print("000")

        sh = json.loads(request.POST.get('shapes', ''))

        print(sh)

        try:
            shape = sh[0]
        except KeyError:
            shape = sh

        ghost = json.loads(request.POST.get('ghost', ''))

        print("Shapes", shape)
        print("Ghost Zone", ghost)

        try:
            existing_zone = Zone.objects.filter(id=shape['id']).first()
        except KeyError:
            z = Zone()
        else:
            if existing_zone is not None:
                z = existing_zone
            else:
                z = Zone()

        print(1)

        try:
            colour = shape['properties']['color']
        except KeyError:
            colour = 'none'

        try:
            print(shape['name'])
        except KeyError:
            pass
        else:
            z.name = shape['name']

        print(2)

        try:
            tags_in = shape['tags_in']
            tags_out = shape['tags_out']
        except KeyError:
            pass
        else:
            for t_i in tags_in:
                tag = Tag.objects.filter(id=t_i).first()
                if ZoneTag.objects.filter(zone=z, tag=tag).first() is not None:
                    pass
                else:
                    new_tag_in = ZoneTag(zone=z, tag=tag)
                    new_tag_in.save()

            for t_o in tags_out:
                tag = Tag.objects.filter(id=t_o).first()

                if ZoneTag.objects.filter(zone=z, tag=tag).first() is None:
                    pass
                else:
                    new_tag_out = ZoneTag.objects.filter(zone=z,
                                                         tag=tag).first()
                    new_tag_out.delete()

        print(3)
        co_ords = GEOSGeometry(json.dumps(shape['geometry']))

        print(4)

        if "features" in ghost:
            ghost_co_ords = GEOSGeometry(
                json.dumps(ghost["features"][0]["geometry"]))
            z.geom = GeometryCollection(co_ords, ghost_co_ords)
            print(z.geom.json)

        else:
            z.geom = GeometryCollection(co_ords)

        z.colour = colour
        z.author = request.user
        z.save()

    return HttpResponse(json.dumps(shape))
Ejemplo n.º 59
0
def layer_detail(request, layername, template='layers/layer_detail.html'):
    layer = _resolve_layer(request, layername, 'base.view_resourcebase',
                           _PERMISSION_MSG_VIEW)

    # assert False, str(layer_bbox)
    config = layer.attribute_config()

    # Add required parameters for GXP lazy-loading
    layer_bbox = layer.bbox
    bbox = [float(coord) for coord in list(layer_bbox[0:4])]
    if hasattr(layer, 'srid'):
        config['crs'] = {'type': 'name', 'properties': layer.srid}
    config["srs"] = getattr(settings, 'DEFAULT_MAP_CRS', 'EPSG:900913')
    config["bbox"] = bbox if config["srs"] != 'EPSG:900913' \
        else llbbox_to_mercator([float(coord) for coord in bbox])
    config["title"] = layer.title
    config["queryable"] = True

    if layer.storeType == "remoteStore":
        service = layer.service
        source_params = {
            "ptype": service.ptype,
            "remote": True,
            "url": service.base_url,
            "name": service.name
        }
        maplayer = GXPLayer(name=layer.alternate,
                            ows_url=layer.ows_url,
                            layer_params=json.dumps(config),
                            source_params=json.dumps(source_params))
    else:
        maplayer = GXPLayer(name=layer.alternate,
                            ows_url=layer.ows_url,
                            layer_params=json.dumps(config))

    # Update count for popularity ranking,
    # but do not include admins or resource owners
    layer.view_count_up(request.user)

    # center/zoom don't matter; the viewer will center on the layer bounds
    map_obj = GXPMap(
        projection=getattr(settings, 'DEFAULT_MAP_CRS', 'EPSG:900913'))

    NON_WMS_BASE_LAYERS = [
        la for la in default_map_config(request)[1] if la.ows_url is None
    ]

    metadata = layer.link_set.metadata().filter(
        name__in=settings.DOWNLOAD_FORMATS_METADATA)

    granules = None
    all_granules = None
    all_times = None
    filter = None
    if layer.is_mosaic:
        try:
            cat = gs_catalog
            cat._cache.clear()
            store = cat.get_store(layer.name)
            coverages = cat.mosaic_coverages(store)

            filter = None
            try:
                if request.GET["filter"]:
                    filter = request.GET["filter"]
            except BaseException:
                pass

            offset = 10 * (request.page - 1)
            granules = cat.mosaic_granules(
                coverages['coverages']['coverage'][0]['name'],
                store,
                limit=10,
                offset=offset,
                filter=filter)
            all_granules = cat.mosaic_granules(
                coverages['coverages']['coverage'][0]['name'],
                store,
                filter=filter)
        except BaseException:
            granules = {"features": []}
            all_granules = {"features": []}

    if 'geonode.geoserver' in settings.INSTALLED_APPS:
        from geonode.geoserver.views import get_capabilities
        if layer.has_time:
            workspace, layername = layer.alternate.split(":")
            # WARNING: make sure the Django cache is enabled as per
            # https://docs.djangoproject.com/en/2.0/topics/cache/#filesystem-caching
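            # A minimal sketch of the settings.py entry this depends on
            # (filesystem backend assumed; the cache path is illustrative only):
            #
            #     CACHES = {
            #         'default': {
            #             'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache',
            #             'LOCATION': '/var/tmp/django_cache',
            #         }
            #     }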
            wms_capabilities_resp = get_capabilities(request,
                                                     layer.id,
                                                     tolerant=True)
            if wms_capabilities_resp.status_code >= 200 and wms_capabilities_resp.status_code < 400:
                wms_capabilities = wms_capabilities_resp.getvalue()
                if wms_capabilities:
                    import xml.etree.ElementTree as ET
                    e = ET.fromstring(wms_capabilities)
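                    # A sketch of the fragment this loop looks for (WMS 1.1.1
                    # style; layer name and timestamps are illustrative only):
                    #   <Layer>
                    #     <Name>roads</Name>
                    #     <Extent name="time">2020-01-01T00:00:00Z,2020-02-01T00:00:00Z</Extent>
                    #   </Layer>
                    # The comma-separated values become the all_times list below.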
                    for atype in e.findall(
                            "Capability/Layer/Layer[Name='%s']/Extent" %
                        (layername)):
                        dim_name = atype.get('name')
                        if dim_name:
                            dim_name = str(dim_name).lower()
                            if dim_name == 'time':
                                dim_values = atype.text
                                if dim_values:
                                    all_times = dim_values.split(",")
                                    break

    group = None
    if layer.group:
        try:
            group = GroupProfile.objects.get(slug=layer.group.name)
        except GroupProfile.DoesNotExist:
            group = None
    # a flag to be used for qgis server
    show_popup = False
    if 'show_popup' in request.GET and request.GET["show_popup"]:
        show_popup = True

    context_dict = {
        'resource': layer,
        'group': group,
        'perms_list': get_perms(request.user, layer.get_self_resource()),
        "permissions_json": _perms_info_json(layer),
        "documents": get_related_documents(layer),
        "metadata": metadata,
        "is_layer": True,
        "wps_enabled": settings.OGC_SERVER['default']['WPS_ENABLED'],
        "granules": granules,
        "all_granules": all_granules,
        "all_times": all_times,
        "show_popup": show_popup,
        "filter": filter,
    }

    if 'access_token' in request.session:
        access_token = request.session['access_token']
    else:
        u = uuid.uuid1()
        access_token = u.hex

    context_dict["viewer"] = json.dumps(
        map_obj.viewer_json(request.user, access_token,
                            *(NON_WMS_BASE_LAYERS + [maplayer])))
    context_dict["preview"] = getattr(settings,
                                      'GEONODE_CLIENT_LAYER_PREVIEW_LIBRARY',
                                      'geoext')
    context_dict["crs"] = getattr(settings, 'DEFAULT_MAP_CRS', 'EPSG:900913')

    # provide bbox in EPSG:4326 for leaflet
    if context_dict["preview"] == 'leaflet':
        srid, wkt = layer.geographic_bounding_box.split(';')
        srid = re.findall(r'\d+', srid)
        geom = GEOSGeometry(wkt, srid=int(srid[0]))
        geom.transform(4326)
        context_dict["layer_bbox"] = ','.join([str(c) for c in geom.extent])

    if layer.storeType == 'dataStore':
        links = layer.link_set.download().filter(
            name__in=settings.DOWNLOAD_FORMATS_VECTOR)
    else:
        links = layer.link_set.download().filter(
            name__in=settings.DOWNLOAD_FORMATS_RASTER)
    links_view = [
        item for item in links
        if item.url and ('wms' in item.url or 'gwc' in item.url)
    ]
    links_download = [
        item for item in links
        if item.url and 'wms' not in item.url and 'gwc' not in item.url
    ]
    for item in links_view:
        if item.url and access_token and 'access_token' not in item.url:
            params = {'access_token': access_token}
            item.url = Request('GET', item.url, params=params).prepare().url
    for item in links_download:
        if item.url and access_token and 'access_token' not in item.url:
            params = {'access_token': access_token}
            item.url = Request('GET', item.url, params=params).prepare().url

    if request.user.has_perm('view_resourcebase', layer.get_self_resource()):
        context_dict["links"] = links_view
    if request.user.has_perm('download_resourcebase',
                             layer.get_self_resource()):
        if layer.storeType == 'dataStore':
            links = layer.link_set.download().filter(
                name__in=settings.DOWNLOAD_FORMATS_VECTOR)
        else:
            links = layer.link_set.download().filter(
                name__in=settings.DOWNLOAD_FORMATS_RASTER)
        context_dict["links_download"] = links_download

    if settings.SOCIAL_ORIGINS:
        context_dict["social_links"] = build_social_links(request, layer)
    layers_names = layer.alternate
    try:
        if settings.DEFAULT_WORKSPACE and settings.DEFAULT_WORKSPACE in layers_names:
            workspace, name = layers_names.split(':', 1)
        else:
            name = layers_names
    except BaseException:
        print("Cannot identify workspace type and layer name")

    context_dict["layer_name"] = json.dumps(layers_names)

    try:
        # get type of layer (raster or vector)
        if layer.storeType == 'coverageStore':
            context_dict["layer_type"] = "raster"
        elif layer.storeType == 'dataStore':
            if layer.has_time:
                context_dict["layer_type"] = "vector_time"
            else:
                context_dict["layer_type"] = "vector"

            location = "{location}{service}".format(
                **{
                    'location': settings.OGC_SERVER['default']['LOCATION'],
                    'service': 'wms',
                })
            # get schema for specific layer
            username = settings.OGC_SERVER['default']['USER']
            password = settings.OGC_SERVER['default']['PASSWORD']
            schema = get_schema(location,
                                name,
                                username=username,
                                password=password)

            # drop the geometry column from the schema properties
            if 'the_geom' in schema['properties']:
                schema['properties'].pop('the_geom', None)
            elif 'geom' in schema['properties']:
                schema['properties'].pop("geom", None)

            # filter the schema dict based on the values of layers_attributes
            layer_attributes_schema = list(schema['properties'].keys())

            filtered_attributes = layer_attributes_schema
            context_dict["schema"] = schema
            context_dict["filtered_attributes"] = filtered_attributes

    except BaseException:
        print("Possible error with OWSLib. Turning all available properties to string")

    # maps owned by the user, needed to fill the "add to existing map" section in the template
    if request.user.is_authenticated():
        context_dict["maps"] = Map.objects.filter(owner=request.user)
    return TemplateResponse(request, template,
                            RequestContext(request, context_dict))
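
The access_token handling above leans on requests to rebuild each link URL with the token as a query parameter. A minimal standalone sketch of that step, with a made-up URL and token:

from requests import Request

access_token = 'abc123'                    # illustrative value only
url = 'http://example.com/geoserver/wms'   # stands in for item.url

if access_token and 'access_token' not in url:
    url = Request('GET', url, params={'access_token': access_token}).prepare().url

print(url)  # http://example.com/geoserver/wms?access_token=abc123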
Example No. 60
0
    def test_art_types_geo_api(self):
        """
        Ensure we can get the data to display on the Arts Tab Lists
        """
        # Don't show Place Names without a geom or with coordinates = [0, 0]
        test_placename18 = PlaceName.objects.create(name="test place18",
                                                    kind="resource",
                                                    geom=GEOSGeometry("""{
                "type": "Point",
                "coordinates": [1, 1]
            }"""))
        test_placename19 = PlaceName.objects.create(name="test place19",
                                                    kind="resource",
                                                    geom=GEOSGeometry("""{
                "type": "Point",
                "coordinates": [0, 0]
            }"""))

        test_placename20 = PlaceName.objects.create(name="test place20",
                                                    kind="public_art",
                                                    geom=GEOSGeometry("""{
                "type": "Point",
                "coordinates": [1, 1]
            }"""))
        test_placename21 = PlaceName.objects.create(name="test place21",
                                                    kind="public_art",
                                                    geom=GEOSGeometry("""{
                "type": "Point",
                "coordinates": [0, 0]
            }"""))

        test_placename22 = PlaceName.objects.create(name="test place22",
                                                    kind="artist",
                                                    geom=GEOSGeometry("""{
                "type": "Point",
                "coordinates": [1, 1]
            }"""))
        test_placename23 = PlaceName.objects.create(name="test place23",
                                                    kind="artist",
                                                    geom=GEOSGeometry("""{
                "type": "Point",
                "coordinates": [0, 0]
            }"""))

        test_placename24 = PlaceName.objects.create(name="test place24",
                                                    kind="organization",
                                                    geom=GEOSGeometry("""{
                "type": "Point",
                "coordinates": [1, 1]
            }"""))
        test_placename25 = PlaceName.objects.create(name="test place25",
                                                    kind="organization",
                                                    geom=GEOSGeometry("""{
                "type": "Point",
                "coordinates": [0, 0]
            }"""))

        test_placename26 = PlaceName.objects.create(name="test place26",
                                                    kind="event",
                                                    geom=GEOSGeometry("""{
                "type": "Point",
                "coordinates": [1, 1]
            }"""))
        test_placename27 = PlaceName.objects.create(name="test place27",
                                                    kind="event",
                                                    geom=GEOSGeometry("""{
                "type": "Point",
                "coordinates": [0, 0]
            }"""))

        test_placename28 = PlaceName.objects.create(name="test place28",
                                                    kind="grant",
                                                    geom=GEOSGeometry("""{
                "type": "Point",
                "coordinates": [1, 1]
            }"""))
        test_placename29 = PlaceName.objects.create(name="test place29",
                                                    kind="grant",
                                                    geom=GEOSGeometry("""{
                "type": "Point",
                "coordinates": [0, 0]
            }"""))

        # Test Public Arts
        response = self.client.get("/api/arts/public-art/", format="json")
        # By fetching "features" specifically, we're confirming
        # that this API is a GeoJSON Feature API
        data = response.json().get("features")

        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(
            len(data), 1)  # Out of all the data created, only 1 should appear

        # Test Artists
        response = self.client.get("/api/arts/artist/", format="json")
        # By fetching "features" specifically, we're confirming
        # that this API is a GeoJSON Feature API
        data = response.json().get("features")

        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(
            len(data), 1)  # Out of all the data created, only 1 should appear

        # Test Organizations
        response = self.client.get("/api/arts/organization/", format="json")
        # By fetching "features" specifically, we're confirming
        # that this API is a GeoJSON Feature API
        data = response.json().get("features")

        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(
            len(data), 1)  # Out of all the data created, only 1 should appear

        # Test Events
        response = self.client.get("/api/arts/event/", format="json")
        # By fetching "features" specifically, we're confirming
        # that this API is a GeoJSON Feature API
        data = response.json().get("features")

        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(
            len(data), 1)  # Out of all the data created, only 1 should appear

        # Test Grants
        response = self.client.get("/api/arts/grant/", format="json")
        # By fetching "features" specifically, we're confirming
        # that this API is a GeoJSON Feature API
        data = response.json().get("features")

        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(
            len(data), 1)  # Out of all the data created, only 1 should appear

        # Test Resource
        response = self.client.get("/api/arts/resource/", format="json")
        # By fetching "features" specifically, we're confirming
        # that this API is a GeoJSON Feature API
        data = response.json().get("features")

        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(
            len(data), 1)  # Out of all the data created, only 1 should appear
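
The six blocks above differ only in the endpoint they hit. A sketch of the same checks expressed as a loop, under the same assumptions as the test (rest_framework's status module and one visible feature per kind):

        # Equivalent checks, looping over the art kinds exposed by the API.
        for kind in ("public-art", "artist", "organization",
                     "event", "grant", "resource"):
            response = self.client.get("/api/arts/%s/" % kind, format="json")
            data = response.json().get("features")

            self.assertEqual(response.status_code, status.HTTP_200_OK)
            # Out of all the data created, only 1 per kind should appear.
            self.assertEqual(len(data), 1)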