Code Example #1
File: base.py  Project: simongareste/coach
 def simplify(self, coords, tolerance=0.0001):
   '''
   Simplify the raw polyline
   '''
   raw = LineString(coords) # don't store raw, too heavy
   self.simple = raw.simplify(tolerance)
   return self.simple
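
A minimal standalone sketch of the same call, assuming a Shapely LineString (the original project's imports are not shown; GeoDjango's GEOS LineString exposes the same simplify() method):

from shapely.geometry import LineString

coords = [(0.0, 0.0), (0.00005, 0.5), (0.0, 1.0)]   # hypothetical raw polyline
raw = LineString(coords)
simple = raw.simplify(0.0001)                        # Douglas-Peucker within the tolerance
print(len(raw.coords), len(simple.coords))           # 3 2 -- the near-collinear point is dropped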
Code Example #2
File: link_loader.py  Project: ion599/phi
    def _read_geos(self):
        '''
        Reads the geometries from the shapefile and dumps them into a string
        :returns: the ids and geometries of the links as a single string
        '''

        irecord = self.shapefile_reader.iterRecords()
        ishapes = self.shapefile_reader.iterShapes()

        # collect one tab-separated line per link (index, id and geometries)
        id_to_geometry = list()

        for i in range(self.shapefile_reader.numRecords):
            record = irecord.next()
            shape = ishapes.next()

            points = [tuple(x) for x in shape.points]

            id = int(record[0])
            orig_id = int(record[7])
            line = LineString(points)
            line.set_srid(config.EPSG32611)

            defaultprojection = line.clone()
            defaultprojection.transform(config.canonical_projection)

            googleprojection = line.clone()
            googleprojection.transform(config.google_projection)

            id_to_geometry.append('\t'.join([str(i), str(id), defaultprojection.ewkt, googleprojection.ewkt, line.ewkt, str(orig_id)]))

        return '\n'.join(id_to_geometry)
Code Example #3
File: models.py  Project: Web5design/Geotrek
 def geom(self):
     geom = None
     for p in self.paths.all():
         if geom is None:
             geom = LineString(p.geom.coords, srid=settings.SRID)
         else:
             geom = geom.union(p.geom)
     return geom
Code Example #4
File: forms.py  Project: jurginius/jmbo-competition
 def clean_location(self):
     p = fromstr(self.cleaned_data['location'])
     line = LineString(p, self.competition.location.coordinates, srid=4326) # lon/lat (srid=4326)
     line.transform(53031) # transform to two-point equidistant projection (srid=53031)
     if line.length > self.competition.check_in_distance:
         raise forms.ValidationError(_("""You are not close enough to 
              enter the competition, or your GPS might not be turned on. In the latter
             case, turn on your device's GPS and reload the page."""))
     return self.cleaned_data['location']
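
The pattern above -- build a two-point LineString in lon/lat, re-project to a metric CRS, then read its length in metres -- can be sketched on its own. This sketch uses a UTM zone (EPSG:32611) instead of the two-point equidistant projection, assumes GeoDjango with GDAL available, and the coordinates are made up:

from django.contrib.gis.geos import LineString, Point

here = Point(-118.24, 34.05, srid=4326)     # hypothetical user location (lon/lat)
venue = Point(-118.25, 34.06, srid=4326)    # hypothetical competition location
line = LineString(here, venue, srid=4326)
line.transform(32611)                       # UTM zone 11N, so length is in metres
print(line.length)                          # roughly 1.4 km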
Code Example #5
File: admin.py  Project: miltontony/django-atlas
 def check_consistency(self, cleaned_data):
     if cleaned_data['city'].country != cleaned_data['country']:
         return (False, "The location's city and country do not match.")
     if cleaned_data['coordinates']:
         line = LineString(cleaned_data['coordinates'], cleaned_data['city'].coordinates, srid=4326)
          line.transform(53031)  # transform to the two-point equidistant projection
         # if coordinate is more than 500km away from city
         if line.length > 500000:
             return (False, "The location's coordinates are more than 500km away from its city.")
     return (True, "")
Code Example #6
File: routing.py  Project: syadlowsky/phi-estimation
def sensors(point_list):
    route = LineString([[point['lng'], point['lat']] for point in point_list], srid=canonical_projection)
    route.transform(google_projection)
    sensor_map = []
    for sensor_ind, sensor in enumerate(sensors_transformed):
      if sensor['compare'].distance(route) < FUZZY_DIST:
          sensor_map.append({
              'loc':sensor['map'],
              'true':true_sensor_value[sensor_ind],
              'predicted':lsqr[sensor_ind]
          })
    return sensor_map
Code Example #7
 def _splitLineString(self, lineString, *args):
     """
      args may be a single Point or a sequence (list/tuple/ControlPointList) of split points
     """
     minDist = sys.maxint
     tolerance = 0.1
     
     #split line string with a single point    
     if type(args[0]) == Point:
         splitPoint = args[0]
         startPt = lineString.coords[0]
         for i in range(1,len(lineString.coords)):
             endPt = lineString.coords[i]
             lineSeg = LineString(startPt, endPt)
             dist = lineSeg.distance(splitPoint) 
             if dist < minDist:
                 minDist = dist
                 splitIndex = i
         coords = list(lineString.coords[0:splitIndex])
         coords.append((splitPoint.coords))
         firstLineString = LineString(coords)
         coords = []        
         coords.append((splitPoint.coords))
         coords.extend(lineString.coords[splitIndex:])        
         secondLineString = LineString(coords)
         return [firstLineString, secondLineString]
     elif type(args[0]) == tuple  or type(args[0]) == list or type(args[0]) == ControlPointList:
         splitPointList = args[0]
         assert(len(splitPointList) != 0)
                     
         splitLines = self._splitLineString(lineString, splitPointList[-1])
         splitPointList.pop()
         
         spPtList1 = []
         spPtList2 = []
         for point in splitPointList:
             if splitLines[0].distance(point) < tolerance:
                 spPtList1.append(point)
             else:
                 spPtList2.append(point)
         resList = []
         if len(spPtList1) != 0:
             resList.extend(self._splitLineString(splitLines[0], spPtList1))
         else:
             resList.append(splitLines[0])
         if len(spPtList2) != 0:
             resList.extend(self._splitLineString(splitLines[1], spPtList2))
         else:
             resList.append(splitLines[1])
         return resList  
     else:
          raise TypeError('Error, unsupported type')
Code Example #8
File: test_geos.py  Project: ricardoffv/django-pizza
    def test_distance(self):
        "Testing the distance() function."
        # Distance to self should be 0.
        pnt = Point(0, 0)
        self.assertEqual(0.0, pnt.distance(Point(0, 0)))

        # Distance should be 1
        self.assertEqual(1.0, pnt.distance(Point(0, 1)))

        # Distance should be ~ sqrt(2)
        self.assertAlmostEqual(1.41421356237, pnt.distance(Point(1, 1)), 11)

        # Distances are from the closest vertex in each geometry --
        #  should be 3 (distance from (2, 2) to (5, 2)).
        ls1 = LineString((0, 0), (1, 1), (2, 2))
        ls2 = LineString((5, 2), (6, 1), (7, 0))
        self.assertEqual(3, ls1.distance(ls2))
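
Note that GEOS distance() is planar: it returns a value in the units of the coordinates and ignores the SRID. A small illustration (not part of the original test suite):

from django.contrib.gis.geos import Point

p1 = Point(0, 0, srid=4326)
p2 = Point(1, 1, srid=4326)
print(p1.distance(p2))   # ~1.4142 -- degrees here, not metres, despite srid=4326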
Code Example #9
File: models.py  Project: makinacorpus/Geotrek
 def reverse(self):
     """
     Reverse the geometry.
     We keep track of this, since we will have to work on topologies at save()
     """
     reversed_coord = self.geom.coords[-1::-1]
     self.geom = LineString(reversed_coord)
     self.is_reversed = True
     return self
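
A standalone sketch of the same coordinate reversal with GeoDjango's GEOS LineString; the SRID is passed explicitly here, since rebuilding the geometry from raw coordinates does not carry it over by itself:

from django.contrib.gis.geos import LineString

geom = LineString((0, 0), (5, 0), (5, 5), srid=4326)
reversed_geom = LineString(geom.coords[::-1], srid=geom.srid)
print(reversed_geom.coords)   # ((5.0, 5.0), (5.0, 0.0), (0.0, 0.0))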
Code Example #10
File: dice_coastline.py  Project: imclab/linz2osm
 def handle(self, **options):
     c = connections[DB_NAME].cursor()
     c.execute("DELETE FROM coastline WHERE ogc_fid >= 1000000;")
     transaction.commit(using=DB_NAME)
     c.execute("SELECT ogc_fid, wkb_geometry, elevation FROM coastline ORDER BY ogc_fid;")
     for row in c.fetchall():
         src_fid = row[0]
         src_geom = GEOSGeometry(row[1])
         src_elevation = row[2]
         print "OGC_FID: % 5d - VERTICES: % 8d" % (row[0], src_geom.num_points,)
         
         if src_geom.num_points > SPLIT_THRESHOLD:
             for start in range(0, src_geom.num_points, SPLIT_AMOUNT):
                 new_fid = src_fid * 1000000 + start
                 new_geom = LineString(src_geom[start:start + SPLIT_AMOUNT + 1])
                 new_geom.srid = src_geom.srid
                 c.execute("INSERT INTO coastline (ogc_fid, wkb_geometry, elevation) VALUES (%d, '%s', %f)" % (new_fid, new_geom.hexewkb, src_elevation))
                 c.execute("DELETE FROM coastline WHERE ogc_fid = %d;" % (src_fid))
                 transaction.commit(using=DB_NAME)
                 print "  * SPLIT INTO OGC_FID: % 10d - VERTICES: % 8d" % (new_fid, new_geom.num_points)
Code Example #11
class Link:
    def __init__(self, geom, speed):
        self.line = LineString(geom['coordinates'], srid=4326)
        self.line.transform(32611)
        self.speed = speed
        self.cur_dist = 0
        self.total_dist = self.line.length

    def move(self, dt):
        distance = self.speed * dt
        overshot = distance + self.cur_dist - self.total_dist
        if overshot > 0:
            self.cur_dist = self.total_dist
            return float(overshot) / self.speed
        else:
            self.cur_dist += distance
            return 0

    def get_position(self):
        pt = self.line.interpolate(self.cur_dist)
        pt.transform(3857)
        return list(pt)
Code Example #12
 def setUp(self):
     self.view = TiledGeoJSONLayerView(model=Route)
     self.view.args = []
     self.r1 = Route.objects.create(geom=LineString((0, 1), (10, 1)))
     self.r2 = Route.objects.create(geom=LineString((0, -1), (-10, -1)))
Code Example #13
File: subset.py  Project: DREAM-ODA-OS/eoxserver
    def matches(self, eo_object, containment="overlaps"):
        if not len(self):
            return True

        bbox = [None, None, None, None]
        srid = self.srid
        if srid is None:
            srid = 4326
        max_extent = crss.crs_bounds(srid)
        tolerance = crss.crs_tolerance(srid)

        footprint = eo_object.footprint
        begin_time = eo_object.begin_time
        end_time = eo_object.end_time

        for subset in self:
            if isinstance(subset, Slice):
                is_slice = True
                value = subset.value
            elif isinstance(subset, Trim):
                is_slice = False
                low = subset.low
                high = subset.high

            if subset.is_temporal:
                if is_slice:
                    if begin_time > value or end_time < value:
                        return False
                elif low is None and high is not None:
                    if begin_time > high:
                        return False
                elif low is not None and high is None:
                    if end_time < low:
                        return False
                else:
                    if begin_time > high or end_time < low:
                        return False

            else:
                if is_slice:
                    if subset.is_x:
                        line = LineString(
                            (value, max_extent[1]),
                            (value, max_extent[3])
                        )
                    else:
                        line = LineString(
                            (max_extent[0], value),
                            (max_extent[2], value)
                        )
                    line.srid = srid
                    if srid != 4326:
                        line.transform(4326)

                    if not line.intersects(footprint):
                        return False

                else:
                    if subset.is_x:
                        bbox[0] = subset.low
                        bbox[2] = subset.high
                    else:
                        bbox[1] = subset.low
                        bbox[3] = subset.high

        if bbox != [None, None, None, None]:
            bbox = map(
                lambda v: v[0] if v[0] is not None else v[1],
                zip(bbox, max_extent)
            )

            bbox[0] -= tolerance
            bbox[1] -= tolerance
            bbox[2] += tolerance
            bbox[3] += tolerance

            logger.debug(
                "Applying BBox %s with containment '%s'." % (bbox, containment)
            )

            poly = Polygon.from_bbox(bbox)
            poly.srid = srid

            if srid != 4326:
                poly.transform(4326)
            if containment == "overlaps":
                if not footprint.intersects(poly):
                    return False
            elif containment == "contains":
                if not footprint.within(poly):
                    return False
        return True
Code Example #14
 def test_1m(self):
     path = Path.objects.create(geom=LineString((0, 0), (0, 1), (1, 1)))
     self.assertEqual(len(path.geom_3d.coords), 3)
Code Example #15
File: tests.py  Project: tedjt/personalSite
    def test03b_distance_method(self):
        "Testing the `distance` GeoQuerySet method on geodetic coordnate systems."
        if oracle: tol = 2
        else: tol = 5

        # Testing geodetic distance calculation with a non-point geometry
        # (a LineString of Wollongong and Shellharbour coords).
        ls = LineString(((150.902, -34.4245), (150.87, -34.5789)))
        if oracle or connection.ops.geography:
            # Reference query:
            #  SELECT ST_distance_sphere(point, ST_GeomFromText('LINESTRING(150.9020 -34.4245,150.8700 -34.5789)', 4326)) FROM distapp_australiacity ORDER BY name;
            distances = [
                1120954.92533513, 140575.720018241, 640396.662906304,
                60580.9693849269, 972807.955955075, 568451.8357838,
                40435.4335201384, 0, 68272.3896586844, 12375.0643697706, 0
            ]
            qs = AustraliaCity.objects.distance(ls).order_by('name')
            for city, distance in zip(qs, distances):
                # Testing equivalence to within a meter.
                self.assertAlmostEqual(distance, city.distance.m, 0)
        else:
            # PostGIS 1.4 and below is limited to distance queries only
            # to/from point geometries, check for raising of ValueError.
            self.assertRaises(ValueError, AustraliaCity.objects.distance, ls)
            self.assertRaises(ValueError, AustraliaCity.objects.distance,
                              ls.wkt)

        # Got the reference distances using the raw SQL statements:
        #  SELECT ST_distance_spheroid(point, ST_GeomFromText('POINT(151.231341 -33.952685)', 4326), 'SPHEROID["WGS 84",6378137.0,298.257223563]') FROM distapp_australiacity WHERE (NOT (id = 11));
        #  SELECT ST_distance_sphere(point, ST_GeomFromText('POINT(151.231341 -33.952685)', 4326)) FROM distapp_australiacity WHERE (NOT (id = 11));  st_distance_sphere
        if connection.ops.postgis and connection.ops.proj_version_tuple() >= (
                4, 7, 0):
            # PROJ.4 versions 4.7+ have updated datums, and thus different
            # distance values.
            spheroid_distances = [
                60504.0628957201, 77023.9489850262, 49154.8867574404,
                90847.4358768573, 217402.811919332, 709599.234564757,
                640011.483550888, 7772.00667991925, 1047861.78619339,
                1165126.55236034
            ]
            sphere_distances = [
                60580.9693849267, 77144.0435286473, 49199.4415344719,
                90804.7533823494, 217713.384600405, 709134.127242793,
                639828.157159169, 7786.82949717788, 1049204.06569028,
                1162623.7238134
            ]

        else:
            spheroid_distances = [
                60504.0628825298, 77023.948962654, 49154.8867507115,
                90847.435881812, 217402.811862568, 709599.234619957,
                640011.483583758, 7772.00667666425, 1047861.7859506,
                1165126.55237647
            ]
            sphere_distances = [
                60580.7612632291, 77143.7785056615, 49199.2725132184,
                90804.4414289463, 217712.63666124, 709131.691061906,
                639825.959074112, 7786.80274606706, 1049200.46122281,
                1162619.7297006
            ]

        # Testing with spheroid distances first.
        hillsdale = AustraliaCity.objects.get(name='Hillsdale')
        qs = AustraliaCity.objects.exclude(id=hillsdale.id).distance(
            hillsdale.point, spheroid=True)
        for i, c in enumerate(qs):
            self.assertAlmostEqual(spheroid_distances[i], c.distance.m, tol)
        if postgis:
            # PostGIS uses sphere-only distances by default, testing these as well.
            qs = AustraliaCity.objects.exclude(id=hillsdale.id).distance(
                hillsdale.point)
            for i, c in enumerate(qs):
                self.assertAlmostEqual(sphere_distances[i], c.distance.m, tol)
Code Example #16
    def test_distance_geodetic(self):
        """
        Test the `distance` GeoQuerySet method on geodetic coordinate systems.
        """
        tol = 2 if oracle else 4

        # Testing geodetic distance calculation with a non-point geometry
        # (a LineString of Wollongong and Shellharbour coords).
        ls = LineString(((150.902, -34.4245), (150.87, -34.5789)))

        # Reference query:
        #  SELECT ST_distance_sphere(point, ST_GeomFromText('LINESTRING(150.9020 -34.4245,150.8700 -34.5789)', 4326))
        #  FROM distapp_australiacity ORDER BY name;
        distances = [
            1120954.92533513, 140575.720018241, 640396.662906304,
            60580.9693849269, 972807.955955075, 568451.8357838,
            40435.4335201384, 0, 68272.3896586844, 12375.0643697706, 0
        ]
        qs = AustraliaCity.objects.distance(ls).order_by('name')
        for city, distance in zip(qs, distances):
            # Testing equivalence to within a meter.
            self.assertAlmostEqual(distance, city.distance.m, 0)

        # Got the reference distances using the raw SQL statements:
        #  SELECT ST_distance_spheroid(point, ST_GeomFromText('POINT(151.231341 -33.952685)', 4326),
        #    'SPHEROID["WGS 84",6378137.0,298.257223563]') FROM distapp_australiacity WHERE (NOT (id = 11));
        #  SELECT ST_distance_sphere(point, ST_GeomFromText('POINT(151.231341 -33.952685)', 4326))
        #  FROM distapp_australiacity WHERE (NOT (id = 11));  st_distance_sphere
        spheroid_distances = [
            60504.0628957201,
            77023.9489850262,
            49154.8867574404,
            90847.4358768573,
            217402.811919332,
            709599.234564757,
            640011.483550888,
            7772.00667991925,
            1047861.78619339,
            1165126.55236034,
        ]
        sphere_distances = [
            60580.9693849267,
            77144.0435286473,
            49199.4415344719,
            90804.7533823494,
            217713.384600405,
            709134.127242793,
            639828.157159169,
            7786.82949717788,
            1049204.06569028,
            1162623.7238134,
        ]
        # Testing with spheroid distances first.
        hillsdale = AustraliaCity.objects.get(name='Hillsdale')
        qs = AustraliaCity.objects.exclude(id=hillsdale.id).distance(
            hillsdale.point, spheroid=True).order_by('id')
        for i, c in enumerate(qs):
            self.assertAlmostEqual(spheroid_distances[i], c.distance.m, tol)
        if postgis:
            # PostGIS uses sphere-only distances by default, testing these as well.
            qs = AustraliaCity.objects.exclude(id=hillsdale.id).distance(
                hillsdale.point).order_by('id')
            for i, c in enumerate(qs):
                self.assertAlmostEqual(sphere_distances[i], c.distance.m, tol)
Code Example #17
 def test_resolution_step_depends_on_geometry_size(self):
     self.assertEqual(self.area['resolution']['step'], 25)
     geom = LineString((100, 370), (100100, 370), srid=settings.SRID)
     area = AltimetryHelper.elevation_area(geom)
     self.assertEqual(area['resolution']['step'], 866)
Code Example #18
 def test_line_locate_point(self):
     pos_expr = functions.LineLocatePoint(
         LineString((0, 0), (0, 3), srid=4326), Point(0, 1, srid=4326))
     self.assertAlmostEqual(
         State.objects.annotate(pos=pos_expr).first().pos, 0.3333333)
Code Example #19
File: models.py  Project: SeaborneClink/faadata
 def locator_point(self):
     if self.base_end_point and self.reciprocal_end_point:
         ls = LineString(self.base_end_point, self.reciprocal_end_point)
         return ls.centroid
     return None
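
For a two-point LineString the centroid is simply the midpoint of its ends, which is all locator_point() relies on. A quick standalone check (the coordinates are illustrative):

from django.contrib.gis.geos import LineString, Point

base_end = Point(0.0, 0.0)
reciprocal_end = Point(2.0, 4.0)
ls = LineString(base_end, reciprocal_end)
print(ls.centroid.coords)   # (1.0, 2.0) -- the midpoint of the two ends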
Code Example #20
File: models.py  Project: pierreloicq/Geotrek-admin
class Path(AddPropertyMixin, MapEntityMixin, AltimetryMixin,
           TimeStampedModelMixin, StructureRelated):
    geom = models.LineStringField(srid=settings.SRID, spatial_index=False)
    geom_cadastre = models.LineStringField(null=True,
                                           srid=settings.SRID,
                                           spatial_index=False,
                                           editable=False)
    valid = models.BooleanField(default=True,
                                verbose_name=_("Validity"),
                                help_text=_("Approved by manager"))
    visible = models.BooleanField(default=True,
                                  verbose_name=_("Visible"),
                                  help_text=_("Shown in lists and maps"))
    name = models.CharField(null=True,
                            blank=True,
                            max_length=250,
                            verbose_name=_("Name"),
                            help_text=_("Official name"))
    comments = models.TextField(null=True,
                                blank=True,
                                verbose_name=_("Comments"),
                                help_text=_("Remarks"))

    departure = models.CharField(null=True,
                                 blank=True,
                                 default="",
                                 max_length=250,
                                 verbose_name=_("Departure"),
                                 help_text=_("Departure place"))
    arrival = models.CharField(null=True,
                               blank=True,
                               default="",
                               max_length=250,
                               verbose_name=_("Arrival"),
                               help_text=_("Arrival place"))

    comfort = models.ForeignKey('Comfort',
                                on_delete=models.CASCADE,
                                null=True,
                                blank=True,
                                related_name='paths',
                                verbose_name=_("Comfort"))
    source = models.ForeignKey('PathSource',
                               on_delete=models.CASCADE,
                               null=True,
                               blank=True,
                               related_name='paths',
                               verbose_name=_("Source"))
    stake = models.ForeignKey('Stake',
                              on_delete=models.CASCADE,
                              null=True,
                              blank=True,
                              related_name='paths',
                              verbose_name=_("Maintenance stake"))
    usages = models.ManyToManyField('Usage',
                                    blank=True,
                                    related_name="paths",
                                    verbose_name=_("Usages"))
    networks = models.ManyToManyField('Network',
                                      blank=True,
                                      related_name="paths",
                                      verbose_name=_("Networks"))
    eid = models.CharField(verbose_name=_("External id"),
                           max_length=1024,
                           blank=True,
                           null=True)
    draft = models.BooleanField(default=False,
                                verbose_name=_("Draft"),
                                db_index=True)

    objects = PathManager()
    include_invisible = PathInvisibleManager()

    is_reversed = False

    @property
    def length_2d(self):
        if self.geom:
            return self.geom.length
        else:
            return None

    @classproperty
    def length_2d_verbose_name(cls):
        return _("2D Length")

    @property
    def length_2d_display(self):
        return round(self.length_2d, 1)

    def kml(self):
        """ Exports path into KML format, add geometry as linestring """
        kml = simplekml.Kml()
        geom3d = self.geom_3d.transform(4326, clone=True)  # KML uses WGS84

        line = kml.newlinestring(name=self.name,
                                 description=plain_text(self.comments),
                                 coords=simplify_coords(geom3d.coords))
        line.style.linestyle.color = simplekml.Color.red  # Red
        line.style.linestyle.width = 4  # pixels
        return kml.kml()

    def __str__(self):
        return self.name or _('path %d') % self.pk

    class Meta:
        verbose_name = _("Path")
        verbose_name_plural = _("Paths")
        permissions = MapEntityMixin._meta.permissions + [
            ("add_draft_path", "Can add draft Path"),
            ("change_draft_path", "Can change draft Path"),
            ("delete_draft_path", "Can delete draft Path"),
        ]

    @classmethod
    def closest(cls, point, exclude=None):
        """
        Returns the path closest to the given point.
        Fails if there is no path in the database.
        """
        # TODO: move to custom manager
        if point.srid != settings.SRID:
            point = point.transform(settings.SRID, clone=True)
        qs = cls.objects.exclude(draft=True)
        if exclude:
            qs = qs.exclude(pk=exclude.pk)
        return qs.exclude(visible=False).annotate(
            distance=Distance('geom', point)).order_by('distance')[0]

    def is_overlap(self):
        return not PathHelper.disjoint(self.geom, self.pk)

    def reverse(self):
        """
        Reverse the geometry.
        We keep track of this, since we will have to work on topologies at save()
        """
        reversed_coord = self.geom.coords[-1::-1]
        self.geom = LineString(reversed_coord)
        self.is_reversed = True
        return self

    def interpolate(self, point):
        """
        Returns position ([0.0-1.0]) and offset (distance) of the point
        along this path.
        """
        return PathHelper.interpolate(self, point)

    def snap(self, point):
        """
        Returns the point snapped (i.e. closest) to the path's line geometry.
        """
        return PathHelper.snap(self, point)

    def reload(self):
        # Update object's computed values (reload from database)
        if self.pk and self.visible:
            fromdb = self.__class__.objects.get(pk=self.pk)
            self.geom = fromdb.geom
            AltimetryMixin.reload(self, fromdb)
            TimeStampedModelMixin.reload(self, fromdb)
        return self

    @debug_pg_notices
    def save(self, *args, **kwargs):
        # If the path was reversed, we have to invert related topologies
        if self.is_reversed:
            for aggr in self.aggregations.all():
                aggr.start_position = 1 - aggr.start_position
                aggr.end_position = 1 - aggr.end_position
                aggr.save()
            self._is_reversed = False
        super(Path, self).save(*args, **kwargs)
        self.reload()

    def delete(self, *args, **kwargs):
        if not settings.TREKKING_TOPOLOGY_ENABLED:
            return super(Path, self).delete(*args, **kwargs)
        topologies = list(self.topology_set.filter())
        r = super(Path, self).delete(*args, **kwargs)
        if not Path.objects.exists():
            return r
        for topology in topologies:
            if isinstance(topology.geom, Point):
                closest = self.closest(topology.geom, self)
                position, offset = closest.interpolate(topology.geom)
                new_topology = Topology.objects.create()
                aggrobj = PathAggregation(topo_object=new_topology,
                                          start_position=position,
                                          end_position=position,
                                          path=closest)
                aggrobj.save()
                point = Point(topology.geom.x,
                              topology.geom.y,
                              srid=settings.SRID)
                new_topology.geom = point
                new_topology.offset = offset
                new_topology.position = position
                new_topology.save()
                topology.mutate(new_topology)
        return r

    @property
    def name_display(self):
        return '<a data-pk="%s" href="%s" title="%s" >%s</a>' % (
            self.pk, self.get_detail_url(), self, self)

    @property
    def name_csv_display(self):
        return str(self)

    @classproperty
    def trails_verbose_name(cls):
        return _("Trails")

    @property
    def trails_display(self):
        trails = getattr(self, '_trails', self.trails)
        if trails:
            return ", ".join([t.name_display for t in trails])
        return _("None")

    @property
    def trails_csv_display(self):
        trails = getattr(self, '_trails', self.trails)
        if trails:
            return ", ".join([str(t) for t in trails])
        return _("None")

    @property
    def usages_display(self):
        return ", ".join([str(u) for u in self.usages.all()])

    @property
    def networks_display(self):
        return ", ".join([str(n) for n in self.networks.all()])

    @classmethod
    def get_create_label(cls):
        return _("Add a new path")

    @property
    def checkbox(self):
        return '<input type="checkbox" name="{}[]" value="{}" />'.format(
            'path', self.pk)

    @classproperty
    def checkbox_verbose_name(cls):
        return _("Action")

    @property
    def checkbox_display(self):
        return self.checkbox

    def topologies_by_path(self, default_dict):
        if 'geotrek.core' in settings.INSTALLED_APPS:
            for trail in self.trails:
                default_dict[_('Trails')].append({
                    'name': trail.name,
                    'url': trail.get_detail_url()
                })
        if 'geotrek.trekking' in settings.INSTALLED_APPS:
            for trek in self.treks:
                default_dict[_('Treks')].append({
                    'name': trek.name,
                    'url': trek.get_detail_url()
                })
            for service in self.services:
                default_dict[_('Services')].append({
                    'name':
                    service.type.name,
                    'url':
                    service.get_detail_url()
                })
            for poi in self.pois:
                default_dict[_('Pois')].append({
                    'name': poi.name,
                    'url': poi.get_detail_url()
                })
        if 'geotrek.signage' in settings.INSTALLED_APPS:
            for signage in self.signages:
                default_dict[_('Signages')].append({
                    'name':
                    signage.name,
                    'url':
                    signage.get_detail_url()
                })
        if 'geotrek.infrastructure' in settings.INSTALLED_APPS:
            for infrastructure in self.infrastructures:
                default_dict[_('Infrastructures')].append({
                    'name':
                    infrastructure.name,
                    'url':
                    infrastructure.get_detail_url()
                })
        if 'geotrek.maintenance' in settings.INSTALLED_APPS:
            for intervention in self.interventions:
                default_dict[_('Interventions')].append({
                    'name':
                    intervention.name,
                    'url':
                    intervention.get_detail_url()
                })

    def merge_path(self, path_to_merge):
        """
        Path unification
        :param path_to_merge: Path instance to merge
        :return: Boolean
        """
        if (self.pk and path_to_merge) and (self.pk != path_to_merge.pk):
            conn = connections[DEFAULT_DB_ALIAS]
            cursor = conn.cursor()
            sql = "SELECT ft_merge_path({}, {});".format(
                self.pk, path_to_merge.pk)
            cursor.execute(sql)

            result = cursor.fetchall()[0][0]

            if result:
                # reload object after unification
                self.reload()

            return result

    @property
    def extent(self):
        return self.geom.transform(settings.API_SRID,
                                   clone=True).extent if self.geom else None
Code Example #21
    def test_couches_sig_link(self):
        # Fake restricted areas
        ra1 = RestrictedAreaFactory.create(
            geom=MultiPolygon(Polygon(((0, 0), (2, 0), (2, 1), (0, 1), (0, 0)))))
        ra2 = RestrictedAreaFactory.create(
            geom=MultiPolygon(Polygon(((0, 1), (2, 1), (2, 2), (0, 2), (0, 1)))))

        # Fake city
        c = City(code='005178',
                 name='Trifouillis-les-marmottes',
                 geom=MultiPolygon(
                     Polygon(((0, 0), (2, 0), (2, 2), (0, 2), (0, 0)),
                             srid=settings.SRID)))
        c.save()

        # Fake paths in these areas
        p = PathFactory(
            geom=LineString((0.5, 0.5), (0.5, 1.5), (1.5, 1.5), (1.5, 0.5)))
        p.save()

        # This should result in 4 PathAggregations (2 for RA1, 1 for RA2, 1 for the City)
        self.assertEquals(p.aggregations.count(), 4)
        self.assertEquals(p.topology_set.count(), 4)

        # PathAgg is plain for City
        t_c = c.cityedge_set.get().topo_object
        pa = c.cityedge_set.get().aggregations.get()
        self.assertEquals(pa.start_position, 0.0)
        self.assertEquals(pa.end_position, 1.0)

        # PathAgg is split for RA
        self.assertEquals(ra1.restrictedareaedge_set.count(), 2)
        self.assertEquals(ra2.restrictedareaedge_set.count(), 1)
        rae1a = ra1.restrictedareaedge_set.filter(
            aggregations__start_position=0).get()
        rae1b = ra1.restrictedareaedge_set.filter(
            aggregations__end_position=1).get()
        pa1a = rae1a.aggregations.get()
        pa1b = rae1b.aggregations.get()
        t_ra1a = rae1a.topo_object
        t_ra1b = rae1b.topo_object
        pa2 = ra2.restrictedareaedge_set.get().aggregations.get()
        t_ra2 = ra2.restrictedareaedge_set.get().topo_object
        self.assertAlmostEqual(pa1a.start_position, 0.0)
        self.assertAlmostEqual(pa1a.end_position, 0.5 / 3)
        self.assertAlmostEqual(pa1b.start_position, 2.5 / 3)
        self.assertAlmostEqual(pa1b.end_position, 1.0)
        self.assertAlmostEqual(pa2.start_position, 0.5 / 3)
        self.assertAlmostEqual(pa2.end_position, 2.5 / 3)

        # Ensure everything is in order after update
        p.geom = LineString((0.5, 0.5), (1.5, 0.5))
        p.save()
        self.assertEquals(p.aggregations.count(), 2)
        self.assertEquals(p.topology_set.count(), 2)
        # Topologies are re-created at DB level after any update
        self.assertRaises(Topology.DoesNotExist,
                          Topology.objects.get,
                          pk=t_c.pk)
        self.assertRaises(Topology.DoesNotExist,
                          Topology.objects.get,
                          pk=t_ra1a.pk)
        self.assertRaises(Topology.DoesNotExist,
                          Topology.objects.get,
                          pk=t_ra1b.pk)
        self.assertRaises(Topology.DoesNotExist,
                          Topology.objects.get,
                          pk=t_ra2.pk)
        self.assertEquals(ra1.restrictedareaedge_set.count(), 1)
        # a new association exists for C
        t_c = c.cityedge_set.get().topo_object
        self.assertEquals(Topology.objects.filter(pk=t_c.pk).count(), 1)
        # a new association exists for RA1
        t_ra1 = ra1.restrictedareaedge_set.get().topo_object
        self.assertEquals(Topology.objects.filter(pk=t_ra1.pk).count(), 1)
        pa1 = ra1.restrictedareaedge_set.get().aggregations.get()
        self.assertEquals(pa1.start_position, 0.0)
        self.assertEquals(pa1.end_position, 1.0)
        # RA2 is not connected anymore
        self.assertEquals(ra2.restrictedareaedge_set.count(), 0)
        self.assertEquals(Topology.objects.filter(pk=t_ra2.pk).count(), 0)

        # All intermediary objects should be cleaned on delete
        p.delete()
        self.assertEquals(c.cityedge_set.count(), 0)
        self.assertEquals(Topology.objects.filter(pk=t_c.pk).count(), 0)
        self.assertEquals(ra1.restrictedareaedge_set.count(), 0)
        self.assertEquals(Topology.objects.filter(pk=t_ra1.pk).count(), 0)
        self.assertEquals(ra2.restrictedareaedge_set.count(), 0)
        self.assertEquals(Topology.objects.filter(pk=t_ra2.pk).count(), 0)
Code Example #22
File: test_tracks.py  Project: xamogast/dtrack
def track(device_l):
    points = [location.point for location in device_l.locations.all()]
    track = Track.objects.create(track=LineString(points), device=device_l)
    Location.objects.filter(pk__in=device_l.locations.values_list("pk", flat=True)) \
        .update(track=track)
    return track
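
A stripped-down sketch of building the track geometry from a list of GEOS Points (the Track/Location models are project-specific; note that a LineString needs at least two points):

from django.contrib.gis.geos import LineString, Point

points = [Point(0, 0), Point(0, 1), Point(1, 1)]   # stand-ins for location.point values
line = LineString(points)
print(line.num_points, line.length)                # 3 2.0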
Code Example #23
 def test_elevation_altimetry_limits(self):
     geom = LineString((1.5, 2.5, 8), (2.5, 2.5, 10), srid=settings.SRID)
     profile = AltimetryHelper.elevation_profile(geom)
     limits = AltimetryHelper.altimetry_limits(profile)
     self.assertEqual(limits[0], 1108)
     self.assertEqual(limits[1], -92)
Code Example #24
def handle_zipfile(path, collection, url, last_modified):
    source = DataSource.objects.update_or_create(
        {
            'url': url,
            'datetime': last_modified
        }, name=f'{collection} GTFS')[0]

    shapes = {}
    service_shapes = {}
    operators = {}
    routes = {}
    services = set()
    headsigns = {}

    with zipfile.ZipFile(path) as archive:

        for line in read_file(archive, 'shapes.txt'):
            shape_id = line['shape_id']
            if shape_id not in shapes:
                shapes[shape_id] = []
            shapes[shape_id].append(
                Point(float(line['shape_pt_lon']),
                      float(line['shape_pt_lat'])))

        for line in read_file(archive, 'agency.txt'):
            operator, created = Operator.objects.get_or_create(
                {
                    'name': line['agency_name'],
                    'region_id': 'LE'
                },
                id=line['agency_id'],
                region__in=['CO', 'UL', 'MU', 'LE', 'NI'])
            if not created and operator.name != line['agency_name']:
                print(operator, line)
            operators[line['agency_id']] = operator

        for line in read_file(archive, 'routes.txt'):
            defaults = {
                'line_name': line['route_short_name'],
                'description': line['route_long_name'],
                'date': time.strftime('%Y-%m-%d'),
                'mode': MODES.get(int(line['route_type']), ''),
                'current': True,
                'show_timetable': True,
                'source': source,
                'service_code': ''
            }
            service, created = Service.objects.filter(
                source=source,
                route__code=line['route_id']).update_or_create(defaults)

            try:
                operator = operators[line['agency_id']]
                if service in services:
                    service.operator.add(operator)
                else:
                    service.operator.set([operator])
            except KeyError:
                pass
            services.add(service)

            route, created = Route.objects.update_or_create(
                {
                    'line_name': line['route_short_name'],
                    'description': line['route_long_name'],
                    'service': service,
                },
                source=source,
                code=line['route_id'],
            )
            if not created:
                route.trip_set.all().delete()
            routes[line['route_id']] = route

        stops, stops_not_created = do_stops(archive)

        calendars = {}
        for line in read_file(archive, 'calendar.txt'):
            calendar = Calendar(
                mon='1' == line['monday'],
                tue='1' == line['tuesday'],
                wed='1' == line['wednesday'],
                thu='1' == line['thursday'],
                fri='1' == line['friday'],
                sat='1' == line['saturday'],
                sun='1' == line['sunday'],
                start_date=parse_date(line['start_date']),
                end_date=parse_date(line['end_date']),
            )
            calendar.save()
            calendars[line['service_id']] = calendar

        for line in read_file(archive, 'calendar_dates.txt'):
            CalendarDate.objects.create(
                calendar=calendars[line['service_id']],
                start_date=parse_date(line['date']),
                end_date=parse_date(line['date']),
                operation=line['exception_type'] == '1')

        trips = {}
        for line in read_file(archive, 'trips.txt'):
            route = routes[line['route_id']]
            trips[line['trip_id']] = Trip(
                route=route,
                calendar=calendars[line['service_id']],
                inbound=line['direction_id'] == '1')
            if route.service_id not in service_shapes:
                service_shapes[route.service_id] = set()
            service_shapes[route.service_id].add(line['shape_id'])
            if line['trip_headsign']:
                if line['route_id'] not in headsigns:
                    headsigns[line['route_id']] = {
                        '0': set(),
                        '1': set(),
                    }
                headsigns[line['route_id']][line['direction_id']].add(
                    line['trip_headsign'])
        for route_id in headsigns:
            route = routes[route_id]
            if not route.service.description:
                origins = headsigns[route_id]['1']
                destinations = headsigns[route_id]['0']
                origin = None
                destination = None
                if len(origins) <= 1 and len(destinations) <= 1:
                    if len(origins) == 1:
                        origin = list(origins)[0]
                    if len(destinations) == 1:
                        destination = list(destinations)[0]

                    if origin and ' - ' in origin:
                        route.service.inbound_description = origin
                        route.service.description = origin
                    if destination and ' - ' in destination:
                        route.service.outbound_description = destination
                        route.service.description = destination

                    if origin and destination and ' - ' not in origin:
                        route.service.description = route.service.outbound_description = f'{origin} - {destination}'
                        route.service.inbound_description = f'{destination} - {origin}'

                    route.service.save(update_fields=[
                        'description', 'inbound_description',
                        'outbound_description'
                    ])

        stop_times = []
        trip_id = None
        trip = None
        stop_time = None
        for line in read_file(archive, 'stop_times.txt'):
            if trip_id != line['trip_id']:
                # previous trip
                if trip:
                    if not stop_time.arrival:
                        stop_time.arrival = stop_time.departure
                        stop_time.departure = None
                    trip.start = stop_times[0].departure
                    trip.end = stop_times[-1].arrival
                    trip.save()
                    for stop_time in stop_times:
                        stop_time.trip = trip
                    StopTime.objects.bulk_create(stop_times)
                    stop_times = []
                trip = Trip()
            trip_id = line['trip_id']
            trip = trips[trip_id]
            stop = stops.get(line['stop_id'])
            arrival_time = line['arrival_time']
            departure_time = line['departure_time']
            if arrival_time == departure_time:
                arrival_time = None
            stop_time = StopTime(
                stop=stop,
                arrival=arrival_time,
                departure=departure_time,
                sequence=line['stop_sequence'],
            )
            if stop:
                trip.destination = stop
            elif line['stop_id'] in stops_not_created:
                stop_time.stop_code = stops_not_created[line['stop_id']]
            else:
                stop_time.stop_code = line['stop_id']
                print(line)
            stop_times.append(stop_time)

    # last trip
    if not stop_time.arrival:
        stop_time.arrival = stop_time.departure
        stop_time.departure = None
    trip.start = stop_times[0].departure
    trip.end = stop_times[-1].arrival
    trip.save()
    for stop_time in stop_times:
        stop_time.trip = trip
    StopTime.objects.bulk_create(stop_times)

    for service in services:
        if service.id in service_shapes:
            linestrings = [
                LineString(*shapes[shape])
                for shape in service_shapes[service.id] if shape in shapes
            ]
            service.geometry = MultiLineString(*linestrings)
            service.save(update_fields=['geometry'])

        groupings = get_stop_usages(
            Trip.objects.filter(route__service=service))

        service.stops.clear()
        stop_usages = [
            StopUsage(service=service,
                      stop_id=stop_time.stop_id,
                      timing_status=stop_time.timing_status,
                      direction='outbound',
                      order=i) for i, stop_time in enumerate(groupings[0])
        ] + [
            StopUsage(service=service,
                      stop_id=stop_time.stop_id,
                      timing_status=stop_time.timing_status,
                      direction='inbound',
                      order=i) for i, stop_time in enumerate(groupings[1])
        ]
        StopUsage.objects.bulk_create(stop_usages)

        service.region = Region.objects.filter(
            adminarea__stoppoint__service=service).annotate(
                Count('adminarea__stoppoint__service')).order_by(
                    '-adminarea__stoppoint__service__count').first()
        if service.region:
            service.save(update_fields=['region'])
        service.update_search_vector()

    for operator in operators.values():
        operator.region = Region.objects.filter(
            adminarea__stoppoint__service__operator=operator).annotate(
                Count('adminarea__stoppoint__service__operator')).order_by(
                    '-adminarea__stoppoint__service__operator__count').first()
        if operator.region_id:
            operator.save(update_fields=['region'])

    print(
        source.service_set.filter(current=True).exclude(
            route__in=routes.values()).update(current=False))
    print(
        source.service_set.filter(current=True).exclude(
            route__trip__isnull=False).update(current=False))
    print(
        source.route_set.exclude(
            id__in=(route.id for route in routes.values())).delete())
    StopPoint.objects.filter(active=False,
                             service__current=True).update(active=True)
    StopPoint.objects.filter(active=True,
                             service__isnull=True).update(active=False)
Code Example #25
 def test_51m(self):
      path = Path.objects.create(
          geom=LineString((0, 0), (0, self.step * 2 + 1), (0, self.step * 4 + 2)))
     self.assertEqual(len(path.geom_3d.coords), 7)
Code Example #26
File: models.py  Project: makinacorpus/Geotrek
class Path(AddPropertyMixin, MapEntityMixin, AltimetryMixin,
           TimeStampedModelMixin, StructureRelated):
    geom = models.LineStringField(srid=settings.SRID, spatial_index=False)
    geom_cadastre = models.LineStringField(null=True, srid=settings.SRID, spatial_index=False,
                                           editable=False)
    valid = models.BooleanField(db_column='valide', default=True, verbose_name=_(u"Validity"),
                                help_text=_(u"Approved by manager"))
    visible = models.BooleanField(db_column='visible', default=True, verbose_name=_(u"Visible"),
                                  help_text=_(u"Shown in lists and maps"))
    name = models.CharField(null=True, blank=True, max_length=20, db_column='nom', verbose_name=_(u"Name"),
                            help_text=_(u"Official name"))
    comments = models.TextField(null=True, blank=True, db_column='remarques', verbose_name=_(u"Comments"),
                                help_text=_(u"Remarks"))

    departure = models.CharField(null=True, blank=True, default="", max_length=250, db_column='depart', verbose_name=_(u"Departure"),
                                 help_text=_(u"Departure place"))
    arrival = models.CharField(null=True, blank=True, default="", max_length=250, db_column='arrivee', verbose_name=_(u"Arrival"),
                               help_text=_(u"Arrival place"))

    comfort = models.ForeignKey('Comfort',
                                null=True, blank=True, related_name='paths',
                                verbose_name=_("Comfort"), db_column='confort')
    source = models.ForeignKey('PathSource',
                               null=True, blank=True, related_name='paths',
                               verbose_name=_("Source"), db_column='source')
    stake = models.ForeignKey('Stake',
                              null=True, blank=True, related_name='paths',
                              verbose_name=_("Maintenance stake"), db_column='enjeu')
    usages = models.ManyToManyField('Usage',
                                    blank=True, related_name="paths",
                                    verbose_name=_(u"Usages"), db_table="l_r_troncon_usage")
    networks = models.ManyToManyField('Network',
                                      blank=True, related_name="paths",
                                      verbose_name=_(u"Networks"), db_table="l_r_troncon_reseau")
    eid = models.CharField(verbose_name=_(u"External id"), max_length=1024, blank=True, null=True, db_column='id_externe')
    draft = models.BooleanField(db_column='brouillon', default=False, verbose_name=_(u"Draft"), db_index=True)

    objects = PathManager()
    include_invisible = PathInvisibleManager()

    is_reversed = False

    @property
    def length_2d(self):
        if self.geom:
            return self.geom.length
        else:
            return None

    @classproperty
    def length_2d_verbose_name(cls):
        return _(u"2D Length")

    @property
    def length_2d_display(self):
        return round(self.length_2d, 1)

    def kml(self):
        """ Exports path into KML format, add geometry as linestring """
        kml = simplekml.Kml()
        geom3d = self.geom_3d.transform(4326, clone=True)  # KML uses WGS84
        line = kml.newlinestring(name=self.name,
                                 description=plain_text(self.comments),
                                 coords=geom3d.coords)
        line.style.linestyle.color = simplekml.Color.red  # Red
        line.style.linestyle.width = 4  # pixels
        return kml.kml()

    def __unicode__(self):
        return self.name or _('path %d') % self.pk

    class Meta:
        db_table = 'l_t_troncon'
        verbose_name = _(u"Path")
        verbose_name_plural = _(u"Paths")
        permissions = MapEntityMixin._meta.permissions + [("add_draft_path", "Can add draft Path"),
                                                          ("change_draft_path", "Can change draft Path"),
                                                          ("delete_draft_path", "Can delete draft Path"),
                                                          ]

    @classmethod
    def closest(cls, point, exclude=None):
        """
        Returns the path closest to the given point.
        Fails if there is no path in the database.
        """
        # TODO: move to custom manager
        if point.srid != settings.SRID:
            point = point.transform(settings.SRID, clone=True)
        qs = cls.objects.exclude(draft=True)
        if exclude:
            qs = qs.exclude(pk=exclude.pk)
        return qs.exclude(visible=False).distance(point).order_by('distance')[0]

    def is_overlap(self):
        return not PathHelper.disjoint(self.geom, self.pk)

    def reverse(self):
        """
        Reverse the geometry.
        We keep track of this, since we will have to work on topologies at save()
        """
        reversed_coord = self.geom.coords[-1::-1]
        self.geom = LineString(reversed_coord)
        self.is_reversed = True
        return self

    def interpolate(self, point):
        """
        Returns position ([0.0-1.0]) and offset (distance) of the point
        along this path.
        """
        return PathHelper.interpolate(self, point)

    def snap(self, point):
        """
        Returns the point snapped (i.e. closest) to the path's line geometry.
        """
        return PathHelper.snap(self, point)

    def reload(self):
        # Update object's computed values (reload from database)
        if self.pk and self.visible:
            fromdb = self.__class__.objects.get(pk=self.pk)
            self.geom = fromdb.geom
            AltimetryMixin.reload(self, fromdb)
            TimeStampedModelMixin.reload(self, fromdb)
        return self

    @debug_pg_notices
    def save(self, *args, **kwargs):
        # If the path was reversed, we have to invert related topologies
        if self.is_reversed:
            for aggr in self.aggregations.all():
                aggr.start_position = 1 - aggr.start_position
                aggr.end_position = 1 - aggr.end_position
                aggr.save()
            self._is_reversed = False
        super(Path, self).save(*args, **kwargs)
        self.reload()

    def delete(self, *args, **kwargs):
        topologies = list(self.topology_set.filter())
        r = super(Path, self).delete(*args, **kwargs)
        if not Path.objects.exists():
            return r
        for topology in topologies:
            if isinstance(topology.geom, Point):
                closest = self.closest(topology.geom, self)
                position, offset = closest.interpolate(topology.geom)
                new_topology = Topology.objects.create()
                aggrobj = PathAggregation(topo_object=new_topology,
                                          start_position=position,
                                          end_position=position,
                                          path=closest)
                aggrobj.save()
                point = Point(topology.geom.x, topology.geom.y, srid=settings.SRID)
                new_topology.geom = point
                new_topology.offset = offset
                new_topology.position = position
                new_topology.save()
                topology.mutate(new_topology)
        return r

    @property
    def name_display(self):
        return u'<a data-pk="%s" href="%s" title="%s" >%s</a>' % (self.pk,
                                                                  self.get_detail_url(),
                                                                  self,
                                                                  self)

    @property
    def name_csv_display(self):
        return unicode(self)

    @classproperty
    def trails_verbose_name(cls):
        return _("Trails")

    @property
    def trails_display(self):
        trails = getattr(self, '_trails', self.trails)
        if trails:
            return ", ".join([t.name_display for t in trails])
        return _("None")

    @property
    def trails_csv_display(self):
        trails = getattr(self, '_trails', self.trails)
        if trails:
            return ", ".join([unicode(t) for t in trails])
        return _("None")

    @property
    def usages_display(self):
        return u", ".join([unicode(u) for u in self.usages.all()])

    @property
    def networks_display(self):
        return u", ".join([unicode(n) for n in self.networks.all()])

    @classmethod
    def get_create_label(cls):
        return _(u"Add a new path")

    @property
    def checkbox(self):
        return u'<input type="checkbox" name="{}[]" value="{}" />'.format('path',
                                                                          self.pk)

    @classproperty
    def checkbox_verbose_name(cls):
        return _("Action")

    @property
    def checkbox_display(self):
        return self.checkbox

    def topologies_by_path(self, default_dict):
        if 'geotrek.core' in settings.INSTALLED_APPS:
            for trail in self.trails:
                default_dict[_('Trails')].append({'name': trail.name, 'url': trail.get_detail_url()})
        if 'geotrek.trekking' in settings.INSTALLED_APPS:
            for trek in self.treks:
                default_dict[_('Treks')].append({'name': trek.name, 'url': trek.get_detail_url()})
            for service in self.services:
                default_dict[_('Services')].append(
                    {'name': service.type.name, 'url': service.get_detail_url()})
            for poi in self.pois:
                default_dict[_('Pois')].append({'name': poi.name, 'url': poi.get_detail_url()})
        if 'geotrek.signage' in settings.INSTALLED_APPS:
            for signage in self.signages:
                default_dict[_('Signages')].append({'name': signage.name, 'url': signage.get_detail_url()})
        if 'geotrek.infrastructure' in settings.INSTALLED_APPS:
            for infrastructure in self.infrastructures:
                default_dict[_('Infrastructures')].append(
                    {'name': infrastructure.name, 'url': infrastructure.get_detail_url()})
        if 'geotrek.maintenance' in settings.INSTALLED_APPS:
            for intervention in self.interventions:
                default_dict[_('Interventions')].append(
                    {'name': intervention.name, 'url': intervention.get_detail_url()})

    def merge_path(self, path_to_merge):
        """
        Path unification
        :param path_to_merge: Path instance to merge
        :return: Boolean
        """
        if (self.pk and path_to_merge) and (self.pk != path_to_merge.pk):
            conn = connections[DEFAULT_DB_ALIAS]
            cursor = conn.cursor()
            sql = "SELECT ft_merge_path({}, {});".format(self.pk, path_to_merge.pk)
            cursor.execute(sql)

            result = cursor.fetchall()[0][0]

            if result:
                # reload object after unification
                self.reload()

            return result

    @property
    def extent(self):
        return self.geom.transform(settings.API_SRID, clone=True).extent if self.geom else None
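A minimal usage sketch (not part of the snippet above): assuming a configured Django project with the geotrek.core app and at least two Path rows, merge_path() hands the geometry union to the ft_merge_path() SQL function and reload() then refreshes the surviving path from the database.

from geotrek.core.models import Path

def merge_first_two_paths():
    # Take two existing paths and merge the second into the first
    p1, p2 = Path.objects.order_by('pk')[:2]
    if p1.merge_path(p2):       # runs ft_merge_path(p1.pk, p2.pk) and reloads p1
        return p1.geom          # geometry refreshed by reload()
    return None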
コード例 #27
0
ファイル: Contour.py プロジェクト: danellecline/stoqs
    def createPlot(self, start_datetime, end_datetime):

        if len(self.data) == 0:
            logger.debug('no data found to plot')
            raise Exception('no data found to plot')

        # GridSpecs for plots
        outer_gs = gridspec.GridSpec(nrows=2, ncols=1, height_ratios=[1, 3])

        # tighten up space between plots
        outer_gs.update(left=0.10, right=0.90, hspace=0.05)
        map_gs = gridspec.GridSpecFromSubplotSpec(nrows=1,
                                                  ncols=1,
                                                  subplot_spec=outer_gs[0])
        lower_gs = gridspec.GridSpecFromSubplotSpec(nrows=len(
            self.plotGroupValid),
                                                    ncols=1,
                                                    subplot_spec=outer_gs[1])

        clt = self.readCLT(
            os.path.join(settings.STATICFILES_DIRS[0], 'colormaps',
                         'jetplus.txt'))
        self.cm_jetplus = mpl.colors.ListedColormap(np.array(clt))

        # start a new figure - size is in inches
        fig = plt.figure(figsize=(8, 10))
        fig.suptitle(self.title + '\n' + self.subtitle1 + '\n' +
                     self.subtitle2,
                     fontsize=8)

        pn = self.platformName[0]

        # bound the depth to cover the max of all parameter group depths
        # and flag removal of the scatter plot if any parameter has more than 2000 points
        maxy = 0
        for group in self.plotGroupValid:
            parm = [x.strip() for x in group.split(',')]
            for name in parm:
                y = max(self.data[pn + name]['depth'])
                sz = len(self.data[pn + name]['datavalue'])
                if y > maxy:
                    maxy = y

        # pad the depth by 20 meters to make room for parameter name to be displayed at bottom
        rangey = [0.0, int(maxy) + 20]

        i = 0
        # add contour plots for each parameter group
        for group in self.plotGroupValid:
            parm = [x.strip() for x in group.split(',')]
            plot_step = sum([
                self.data[pn + p]['units'].count('bool') for p in parm
            ])  # count the number of boolean plots in the groups
            plot_scatter_contour = len(
                parm
            ) - plot_step  # otherwise all other plots are scatter plots
            plot_scatter = 0

            # this parameter only makes sense to plot as a scatter plot
            if 'vertical_temperature_homogeneity_index' in self.plotGroupValid:
                plot_scatter = 1
                plot_scatter_contour -= 1
            #plot_dense = sum([val for val  in len(self.data[pn+name]['datavalue']) > 2000]) #  if more than 2000 points, skip the scatter plot

            # choose the right type of gridspec to display the data
            if plot_scatter_contour:
                # one row for scatter and one for contour
                plot_gs = gridspec.GridSpecFromSubplotSpec(
                    nrows=len(parm) * 2,
                    ncols=2,
                    subplot_spec=lower_gs[i],
                    width_ratios=[30, 1],
                    wspace=0.05)
            else:
                # one row for single step/scatter/contour plots
                plot_gs = gridspec.GridSpecFromSubplotSpec(
                    nrows=len(parm),
                    ncols=2,
                    subplot_spec=lower_gs[i],
                    width_ratios=[30, 1],
                    wspace=0.05)

            j = 0
            i += 1
            for name in parm:
                title = name
                x = [
                    time.mktime(xe.timetuple())
                    for xe in self.data[pn + name]['datetime']
                ]
                y = self.data[pn + name]['depth']
                z = self.data[pn + name]['datavalue']
                sdt_count = self.data[pn + name]['sdt_count']

                units = '(' + self.data[pn + name]['units'] + ')'

                if len(z):
                    if self.autoscale:
                        rangez = [
                            self.data[pn + name]['p010'],
                            self.data[pn + name]['p990']
                        ]
                    else:
                        rangez = [min(z), max(z)]
                else:
                    rangez = [0, 0]

                if name.find('chlorophyll') != -1:
                    if not self.autoscale:
                        rangez = [0.0, 10.0]
                if name.find('salinity') != -1:
                    if not self.autoscale:
                        rangez = [33.3, 34.9]
                    units = ''
                if name.find('temperature') != -1:
                    if not self.autoscale:
                        rangez = [10.0, 14.0]
                    units = ' ($^\circ$C)'

                logger.debug('getting subplot ax0')
                gs = gridspec.GridSpecFromSubplotSpec(1,
                                                      1,
                                                      subplot_spec=plot_gs[j])
                ax0_plot = plt.Subplot(fig, gs[:])
                fig.add_subplot(ax0_plot)

                gs = gridspec.GridSpecFromSubplotSpec(1,
                                                      1,
                                                      subplot_spec=plot_gs[j +
                                                                           1])
                ax0_colorbar = plt.Subplot(fig, gs[:])
                fig.add_subplot(ax0_colorbar)

                if plot_scatter_contour:
                    logger.debug('getting subplot ax1')
                    gs = gridspec.GridSpecFromSubplotSpec(
                        1, 1, subplot_spec=plot_gs[j + 2])
                    ax1_plot = plt.Subplot(fig, gs[:])
                    fig.add_subplot(ax1_plot)

                    gs = gridspec.GridSpecFromSubplotSpec(
                        1, 1, subplot_spec=plot_gs[j + 3])
                    ax1_colorbar = plt.Subplot(fig, gs[:])
                    fig.add_subplot(ax1_colorbar)

                # if no data found add in some fake data and plot a placeholder time/depth plot
                if not x:
                    tmin = time.mktime(start_datetime.timetuple())
                    tmax = time.mktime(end_datetime.timetuple())
                    x.append(tmin)
                    x.append(tmax)
                    y.append(np.NaN)
                    y.append(np.NaN)
                    z.append(np.NaN)
                    z.append(np.NaN)

                if plot_scatter_contour:
                    cs0, _, scale_factor = self.createContourPlot(
                        title + pn, ax0_plot, x, y, z, rangey, rangez,
                        start_datetime, end_datetime, sdt_count)
                    cs1 = self.createScatterPlot(title + pn, ax1_plot, x, y, z,
                                                 rangey, rangez,
                                                 start_datetime, end_datetime)
                elif plot_step:
                    cs0 = self.createStepPlot(title + pn, title, ax0_plot, x,
                                              z, rangez, start_datetime,
                                              end_datetime)
                elif plot_scatter:
                    cs0 = self.createScatterPlot(title + pn, ax0_plot, x, y, z,
                                                 rangey, rangez,
                                                 start_datetime, end_datetime)
                else:
                    cs0, _, scale_factor = self.createContourPlot(
                        title + pn, ax0_plot, x, y, z, rangey, rangez,
                        start_datetime, end_datetime, sdt_count)

                if plot_scatter_contour:
                    ax1_plot.text(0.95,
                                  0.02,
                                  name,
                                  verticalalignment='bottom',
                                  horizontalalignment='right',
                                  transform=ax1_plot.transAxes,
                                  color='black',
                                  fontsize=8)
                    # Don't show on the upper contour plot
                    ax0_plot.xaxis.set_ticks([])
                    # Rotate date labels and format bottom
                    x_fmt = self.DateFormatter(1)
                    ax1_plot.xaxis.set_major_formatter(x_fmt)
                    for label in ax1_plot.xaxis.get_ticklabels():
                        label.set_rotation(10)
                else:
                    ax0_plot.text(0.95,
                                  0.02,
                                  name,
                                  verticalalignment='bottom',
                                  horizontalalignment='right',
                                  transform=ax0_plot.transAxes,
                                  color='black',
                                  fontsize=8)
                    # Rotate date labels and format bottom
                    x_fmt = self.DateFormatter(1)
                    ax0_plot.xaxis.set_major_formatter(x_fmt)
                    for label in ax0_plot.xaxis.get_ticklabels():
                        label.set_rotation(10)

                self.shadeNight(ax0_plot, sorted(x), rangey[0], rangey[1])
                if plot_scatter_contour:
                    self.shadeNight(ax1_plot, sorted(x), rangey[0], rangey[1])

                logger.debug('plotting colorbars')
                if plot_scatter or plot_scatter_contour:
                    cbFormatter = FormatStrFormatter('%.2f')
                    cb = plt.colorbar(cs0,
                                      cax=ax0_colorbar,
                                      ticks=[min(rangez),
                                             max(rangez)],
                                      format=cbFormatter,
                                      orientation='vertical')
                    cb.set_label(units, fontsize=8)  #,labelpad=5)
                    cb.ax.xaxis.set_ticks_position('top')
                    for t in cb.ax.yaxis.get_ticklabels():
                        t.set_fontsize(8)

                    cb = plt.colorbar(cs1,
                                      cax=ax1_colorbar,
                                      ticks=[min(rangez),
                                             max(rangez)],
                                      format=cbFormatter,
                                      orientation='vertical')
                    cb.set_label(units, fontsize=8)  #,labelpad=5)
                    cb.ax.xaxis.set_ticks_position('top')
                    for t in cb.ax.yaxis.get_ticklabels():
                        t.set_fontsize(8)
                else:
                    if plot_step:
                        ax0_colorbar.xaxis.set_major_locator(plt.NullLocator())
                        ax0_colorbar.yaxis.set_ticks_position('right')
                        for t in ax0_colorbar.yaxis.get_ticklabels():
                            t.set_fontsize(8)
                    else:
                        cbFormatter = FormatStrFormatter('%.2f')
                        cb = plt.colorbar(cs0,
                                          cax=ax0_colorbar,
                                          ticks=[min(rangez),
                                                 max(rangez)],
                                          format=cbFormatter,
                                          orientation='vertical')
                        cb.set_label(units, fontsize=8)  #,labelpad=5)
                        cb.ax.xaxis.set_ticks_position('top')
                        for t in cb.ax.yaxis.get_ticklabels():
                            t.set_fontsize(8)

                if plot_scatter_contour:
                    j += 4
                else:
                    j += 2

        # plot tracks
        ax = plt.Subplot(fig, map_gs[:])
        fig.add_subplot(ax, aspect='equal')

        z = []
        logger.debug('getting measured data')
        z, points, maptracks = self.getMeasuredPPData(start_datetime,
                                                      end_datetime, pn,
                                                      self.plotDotParmName)

        # get the percentile ranges for this to autoscale
        pointsnp = np.array(points)
        lon = pointsnp[:, 0]
        lat = pointsnp[:, 1]

        ltmin = self.extent['maptrack__extent'][1]
        ltmax = self.extent['maptrack__extent'][3]
        lnmin = self.extent['maptrack__extent'][0]
        lnmax = self.extent['maptrack__extent'][2]
        lndiff = abs(lnmax - lnmin)
        ltdiff = abs(ltmax - ltmin)
        logger.debug("lon diff {} lat diff {}".format(lndiff, ltdiff))
        mindeg = .02
        paddeg = .01
        if lndiff < mindeg:
            lnmin -= mindeg
            lnmax += mindeg
        if ltdiff < mindeg:
            ltmin -= mindeg
            ltmax += mindeg

        e = (lnmin - paddeg, ltmin - paddeg, lnmax + paddeg, ltmax + paddeg)
        logger.debug('Extent ({},{},{},{})'.format(e[0], e[1], e[2], e[3]))
        # retry up to 5 times to get the basemap
        for i in range(0, 5):
            logger.debug('Getting basemap')
            mp = Basemap(llcrnrlon=e[0],
                         llcrnrlat=e[1],
                         urcrnrlon=e[2],
                         urcrnrlat=e[3],
                         projection='cyl',
                         resolution='l',
                         ax=ax)
            try:
                # Works, but coarse resolution
                ##mp.wmsimage('http://www.gebco.net/data_and_products/gebco_web_services/web_map_service/mapserv?', layers=['GEBCO_08_Grid'])
                mp.arcgisimage(
                    server='http://services.arcgisonline.com/ArcGIS',
                    service='Ocean_Basemap')
                mp.drawparallels(np.linspace(e[1], e[3], num=3),
                                 labels=[True, False, False, False],
                                 fontsize=8,
                                 linewidth=0)
                mp.drawmeridians(np.linspace(e[0], e[2], num=3),
                                 labels=[False, False, False, True],
                                 fontsize=8,
                                 linewidth=0)
            except Exception as ex:
                logger.error('Could not download ocean basemap: {}'.format(ex))
                mp = None

            if mp is not None:
                break

        if mp is None:
            logger.debug('Error - cannot fetch basemap')
            return

        try:
            logger.debug('plotting tracks')
            if self.animate:
                try:
                    track = LineString(points).simplify(tolerance=.001)
                    if track is not None:
                        ln, lt = list(zip(*track))
                        mp.plot(ln,
                                lt,
                                '-',
                                c='k',
                                alpha=0.5,
                                linewidth=2,
                                zorder=1)
                except TypeError as e:
                    logger.warning(
                        "{}\nCannot plot map track path to None".format(e))
            else:
                for track in maptracks:
                    if track is not None:
                        ln, lt = list(zip(*track))
                        mp.plot(ln,
                                lt,
                                '-',
                                c='k',
                                alpha=0.5,
                                linewidth=2,
                                zorder=1)

            # if have a valid series, then plot the dots
            if self.plotDotParmName and len(z) > 0:
                if len(z) > 2000:
                    sz = len(z)
                    stride = int(sz / 200)
                    z_stride = z[0:sz:stride]
                    lon_stride = lon[0:sz:stride]
                    lat_stride = lat[0:sz:stride]
                    mp.scatter(lon_stride,
                               lat_stride,
                               c=z_stride,
                               marker='.',
                               lw=0,
                               alpha=1.0,
                               cmap=self.cm_jetplus,
                               label=self.plotDotParmName,
                               zorder=2)
                    if stride > 1:
                        ax.text(0.70,
                                0.1, ('{} (every {} points)'.format(
                                    self.plotDotParmName, stride)),
                                verticalalignment='bottom',
                                horizontalalignment='center',
                                transform=ax.transAxes,
                                color='black',
                                fontsize=8)
                    else:
                        ax.text(
                            0.70,
                            0.1,
                            ('{} (every point)'.format(self.plotDotParmName)),
                            verticalalignment='bottom',
                            horizontalalignment='center',
                            transform=ax.transAxes,
                            color='black',
                            fontsize=8)

                else:
                    mp.scatter(lon,
                               lat,
                               c=z,
                               marker='.',
                               lw=0,
                               alpha=1.0,
                               cmap=self.cm_jetplus,
                               label=self.plotDotParmName,
                               zorder=2)
                    ax.text(0.70,
                            0.1,
                            ('{} (every point)'.format(self.plotDotParmName)),
                            verticalalignment='bottom',
                            horizontalalignment='center',
                            transform=ax.transAxes,
                            color='black',
                            fontsize=8)

            if self.booleanPlotGroup:
                # plot the binary markers
                markers = ['o', 'x', 'd', 'D', '8', '1', '2', '3', '4']
                i = 1
                for g in self.booleanPlotGroup:
                    parm = [z2.strip() for z2 in g.split(',')]
                    for name in parm:
                        if name in self.plotGroupValid:
                            logger.debug(
                                'Plotting boolean plot group parameter {}'.format(
                                    name))
                            z, points, maptracks = self.getMeasuredPPData(
                                start_datetime, end_datetime,
                                self.platformName[0], name)
                            pointsnp = np.array(points)
                            lon = pointsnp[:, 0]
                            lat = pointsnp[:, 1]
                            # scale up the size of point
                            s = [20 * val for val in z]
                            if len(z) > 0:
                                mp.scatter(lon,
                                           lat,
                                           s=s,
                                           marker=markers[i],
                                           c='black',
                                           label=name,
                                           zorder=3)
                            i = i + 1

            # plot the legend outside the plot in the upper left corner
            l = ax.legend(loc='upper left',
                          bbox_to_anchor=(1, 1),
                          prop={'size': 8},
                          scatterpoints=1)  # only plot legend symbol once
            l.set_zorder(4)  # put the legend on top

        except Exception as e:
            logger.warning(e)

        if self.animate:
            # append frames output as pngs with an indexed frame number before the gif extension
            fname = '{}/frame_{:02}.png'.format(self.dirpath, self.frame)
        else:
            fname = self.outFilename

        logger.debug('Saving figure {}'.format(fname))
        fig.savefig(fname, dpi=120)  #,transparent=True)
        plt.close()
        self.frame += 1

        logger.debug('Done with contourPlot')
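A hedged, standalone illustration of the track-simplification step used in the animation branch above (the original may use a different LineString class); Douglas-Peucker simplification thins the vertex list before the track is drawn.

from django.contrib.gis.geos import LineString

points = [(-122.00, 36.80), (-122.001, 36.801), (-122.01, 36.81), (-122.10, 36.90)]
track = LineString(points)
simplified = track.simplify(tolerance=0.001)   # fewer vertices, same overall shape
lon, lat = zip(*simplified.coords)             # unzip into plottable sequences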
コード例 #28
0
 def geom(self):
     return GeometryCollection(LineString((3, 4, 5), (6, 7, 8)),
                               Point(1, 2, 3),
                               srid=4326)
コード例 #29
0
ファイル: models.py プロジェクト: imclab/yachter
 def path(self):
     points = map(lambda cm: cm.mark.location, self.coursemark_set.all())
     if len(points):
         path = LineString(*points)
         path.srid = SRID
         return path
コード例 #30
0
 def test_area_has_nice_ratio_if_square_enough(self):
     geom = LineString((0, 0), (1000, 1000), srid=settings.SRID)
     area = AltimetryHelper.elevation_area(geom)
     self.assertEqual(area['size']['x'], 1300.0)
     self.assertEqual(area['size']['y'], 1300.0)
コード例 #31
0
    def _import_cz_regions(self, temp_dir):
        """Import Czechia regions (Kraje) geodata layer from RUIAN DB

        :param temp_dir str: temp dir path
        """
        filename = "cz_regions.db"
        layer = "Vusc"  # kraje

        CzechiaRegions.objects.all().delete()
        with connection.cursor() as cursor:
            q = f"TRUNCATE {CzechiaRegions._meta.db_table} RESTART IDENTITY"
            cursor.execute(q)

        regions_geodata_path = self._import_geodata_grom_ruian_db(
            layer=layer,
            output_filename=filename,
            temp_dir=temp_dir,
        )

        self._import_geodata(
            model=CzechiaRegions,
            model_mapping=czechiaregions_mapping,
            geodata_path=regions_geodata_path,
        )
        self.stdout.write(
            _("'%(model)s' model(s) created.") %
            {"model": CzechiaRegions.__name__})

        # Create Poi regions (line geom type representation)
        name = "Kraj"
        desc = "CZ Kraje"
        order = max(OverlayLayer.objects.all().values_list("order",
                                                           flat=True)) + 1
        overlay, created = OverlayLayer.objects.get_or_create(
            name=name,
            desc=desc,
            slug="kraje",
            enabled=False,
        )
        if created:
            overlay.order = order
            overlay.minzoom = 14
            overlay.maxzoom = 10
            overlay.save()
        marker, created = Marker.objects.get_or_create(
            name=name,
            desc=desc,
            line_width=5.0,
            line_color="#ff0000",
            layer=overlay,
            slug="kraj",
        )

        objs = []
        count = 0
        for r in CzechiaRegions.objects.all():
            p, created = Poi.objects.get_or_create(
                name=r.nazev,
                marker=marker,
                geom=GeometryCollection(LineString(
                    r.geom.boundary.coords[0]), ),
            )
            count += 1
            if not created:
                p.geom = GeometryCollection(
                    LineString(r.geom.boundary.coords[0]), )
                objs.append(p)

        message = _("%(feat)s 'Poi' model(s) (representing Czechia regions - "
                    "Kraje) with line geometry type") % {
                        "feat": len(objs) if objs else count
                    }

        if objs:
            Poi.objects.bulk_update(objs, ["geom"])
            self.stdout.write(
                _("%(message)s has been %(action)s.") % {
                    "message": message,
                    "action": "updated"
                })
        else:
            self.stdout.write(
                _("%(message)s has been %(action)s.") % {
                    "message": message,
                    "action": "created"
                })
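A hedged, self-contained illustration of the geometry step above: each region is reduced to its outer boundary ring, stored as a LineString inside a GeometryCollection (the real geometries come from the RUIAN layer; a toy MultiPolygon stands in here).

from django.contrib.gis.geos import GeometryCollection, LineString, MultiPolygon, Polygon

region = MultiPolygon(Polygon(((0, 0), (0, 2), (2, 2), (2, 0), (0, 0))), srid=4326)
outer_ring = region.boundary.coords[0]                 # first ring of the MultiLineString boundary
line_geom = GeometryCollection(LineString(outer_ring), srid=4326)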
コード例 #32
0
    def handle(self, osmfile=None, **options):
        print "Opening %s ..." % osmfile
        osmobject = OSMXMLFile(osmfile)
        
        maap_multilines = {}
        points_count = areas_count = zones_count = 0                

        print "Importing Maap Nodes ..."
        for node in osmobject.nodes.values():
            if node.tags.get('type', None) != 'point':
                continue
            geom = Point(float(node.lon),float(node.lat), srid=OSM_SRID)
            
            if OSM_SRID != 'EPSG:%d' % DEFAULT_SRID:
                geom = osm_change_srid(geom, 'EPSG:%d' % DEFAULT_SRID)
                
            category = get_or_create_category(node.tags['category'])
            if node.tags.has_key('icon'):
                icon = Icon.objects.get(name = node.tags['icon'])
            else:
                icon = Icon.objects.latest('id')
            maap_point, created = MaapPoint.objects.get_or_create(
                name = node.tags['name'],
                defaults = dict(
                    geom = geom,
                    creator_id = 1,
                    editor_id = 1,
                    icon = icon,
                    description = node.tags.get('description', None)
                )
            )
            if not(created):
                maap_point.geom = geom
            maap_point.save()
            maap_point.category.add(category)
            points_count += 1
            
        print "Importing Maap Ways ..."
        for way in osmobject.ways.values():
            pgeom = map(lambda p: (float(p.lon),float(p.lat)), way.nodes)
            if way.tags.get('type', None) == 'multiline':
                geom = LineString(pgeom)
                
                if OSM_SRID != 'EPSG:%d' % DEFAULT_SRID:
                    geom = osm_change_srid(geom, 'EPSG:%d' % DEFAULT_SRID)
                
                if maap_multilines.has_key(way.tags['name']):
                    maap_multilines[way.tags['name']][0].geom.append(geom)
                else:
                    category = get_or_create_category(way.tags['category'])           
                    maap_multiline, created = MaapMultiLine.objects.get_or_create(
                        name = way.tags['name'],
                        defaults = dict(
                            geom = MultiLineString([geom]),
                            creator_id = 1,
                            editor_id = 1,
                            description = way.tags.get('description', None)
                        )                        
                    )
                    if not(created):
                        maap_multiline.geom = MultiLineString([geom])
                    maap_multilines[way.tags['name']] = (maap_multiline, category)
                    
            elif way.tags.get('type', None) in ['area','zone']:
                geom = Polygon(pgeom)
                if OSM_SRID != 'EPSG:%d' % DEFAULT_SRID:
                    geom = osm_change_srid(geom, 'EPSG:%d' % DEFAULT_SRID)

                category = get_or_create_category(way.tags['category'])           
                
                if way.tags['type'] == 'area':
                    maap_area, created = MaapArea.objects.get_or_create(
                        name = way.tags['name'],
                        defaults = dict(
                            geom = geom,
                            creator_id = 1,
                            editor_id = 1,
                            description = way.tags.get('description', None)
                        )                        
                    ) 
                    if not(created):
                        maap_area.geom = geom
                    maap_area.save()
                    maap_area.category.add(category)
                    areas_count += 1
                else:
                    maap_zone, created = MaapArea.objects.get_or_create(
                        name = way.tags['name'],                   
                        defaults = dict(
                            geom = geom,
                            creator_id = 1,
                            editor_id = 1,
                        )                        
                    )
                    if not(created):
                        maap_zone.geom = geom
                    maap_zone.save()
                    maap_zone.category.add(category)
                    zones_count += 1
        
        for maap_multiline, category in maap_multilines.values():
            maap_multiline.save()
            maap_multiline.category.add(category)
            
        print "Points imported %s" % points_count         
        print "Multilines imported %s" % len(maap_multilines)
        print "Areas imported %s" % areas_count        
        print "Zones imported %s" % zones_count
コード例 #33
0
 def __init__(self, geom, speed):
     self.line = LineString(geom['coordinates'], srid=4326)
     self.line.transform(32611)
     self.speed = speed
     self.cur_dist = 0
     self.total_dist = self.line.length
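A hedged sketch of how such an object might advance along its line: after the transform to EPSG:32611 the length is in metres, so a travelled distance can be interpolated back to a Point (the enclosing class is not shown in the snippet, so this is written standalone).

from django.contrib.gis.geos import LineString

line = LineString([(-118.28, 34.02), (-118.27, 34.03)], srid=4326)
line.transform(32611)                        # UTM zone 11N, units are metres
speed, seconds = 10.0, 30                    # m/s and elapsed time (assumed values)
travelled = min(speed * seconds, line.length)
position = line.interpolate(travelled)       # Point that far along the line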
コード例 #34
0
ファイル: preprocessor.py プロジェクト: v-manip/ngeo-b
 def line(self):
     geom = LineString([(x, y) for x, y, _, _ in self.gcps])
     geom.srid = self._srid
     if self._srid != 4326:
         geom.transform(4326)
     return geom
コード例 #35
0
ファイル: subset.py プロジェクト: EOxServer/eoxserver
    def filter(self, queryset, containment="overlaps"):
        """ Filter a :class:`Django QuerySet <django.db.models.query.QuerySet>`
        of objects inheriting from :class:`EOObject
        <eoxserver.resources.coverages.models.EOObject>`.

        :param queryset: the ``QuerySet`` to filter
        :param containment: either "overlaps" or "contains"
        :returns: a ``QuerySet`` with additional filters applied
        """
        if not len(self):
            return queryset

        qs = queryset

        bbox = [None, None, None, None]
        srid = self.srid

        if srid is None:
            srid = 4326
        max_extent = crss.crs_bounds(srid)
        tolerance = crss.crs_tolerance(srid)

        # check if time intervals are configured as "open" or "closed"
        config = get_eoxserver_config()
        reader = SubsetConfigReader(config)
        if reader.time_interval_interpretation == "closed":
            gt_op = "__gte"
            lt_op = "__lte"
        else:
            gt_op = "__gt"
            lt_op = "__lt"

        for subset in self:
            if isinstance(subset, Slice):
                is_slice = True
                value = subset.value
            elif isinstance(subset, Trim):
                is_slice = False
                low = subset.low
                high = subset.high
                # we need the value in case low == high
                value = low

            if subset.is_temporal:
                if is_slice or (high == low and containment == "overlaps"):
                    qs = qs.filter(
                        begin_time__lte=value,
                        end_time__gte=value
                    )

                elif high == low:
                    qs = qs.filter(
                        begin_time__gte=value,
                        end_time__lte=value
                    )

                else:
                    # check if the temporal bounds must be strictly contained
                    if containment == "contains":
                        if high is not None:
                            qs = qs.filter(**{
                                "end_time" + lt_op: high
                            })
                        if low is not None:
                            qs = qs.filter(**{
                                "begin_time" + gt_op: low
                            })
                    # or just overlapping
                    else:
                        if high is not None:
                            qs = qs.filter(**{
                                "begin_time" + lt_op: high
                            })
                        if low is not None:
                            qs = qs.filter(**{
                                "end_time" + gt_op: low
                            })

            else:
                if is_slice:
                    if subset.is_x:
                        line = LineString(
                            (value, max_extent[1]),
                            (value, max_extent[3])
                        )
                    else:
                        line = LineString(
                            (max_extent[0], value),
                            (max_extent[2], value)
                        )
                    line.srid = srid
                    if srid != 4326:
                        line.transform(4326)
                    qs = qs.filter(footprint__intersects=line)

                else:
                    if subset.is_x:
                        bbox[0] = subset.low
                        bbox[2] = subset.high
                    else:
                        bbox[1] = subset.low
                        bbox[3] = subset.high

        if bbox != [None, None, None, None]:
            bbox = map(
                lambda v: v[0] if v[0] is not None else v[1],
                zip(bbox, max_extent)
            )

            bbox[0] -= tolerance
            bbox[1] -= tolerance
            bbox[2] += tolerance
            bbox[3] += tolerance

            logger.debug(
                "Applying BBox %s with containment '%s'." % (bbox, containment)
            )

            poly = Polygon.from_bbox(bbox)
            poly.srid = srid

            if srid != 4326:
                poly.transform(4326)
            if containment == "overlaps":
                qs = qs.filter(footprint__intersects=poly)
            elif containment == "contains":
                qs = qs.filter(footprint__within=poly)

        return qs
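A hedged, simplified restatement of the spatial branch above: pad the requested bounding box by the CRS tolerance, build a Polygon, reproject it to WGS84 if needed, then apply either an __intersects or __within lookup on a footprint field (the model and field names are assumptions).

from django.contrib.gis.geos import Polygon

def bbox_filter(queryset, bbox, srid=4326, tolerance=1e-6, containment="overlaps"):
    minx, miny, maxx, maxy = bbox
    poly = Polygon.from_bbox((minx - tolerance, miny - tolerance,
                              maxx + tolerance, maxy + tolerance))
    poly.srid = srid
    if srid != 4326:
        poly.transform(4326)
    lookup = "footprint__intersects" if containment == "overlaps" else "footprint__within"
    return queryset.filter(**{lookup: poly})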
コード例 #36
0
    def test02_proxy(self):
        "Testing Lazy-Geometry support (using the GeometryProxy)."
        ## Testing on a Point
        pnt = Point(0, 0)
        nullcity = City(name='NullCity', point=pnt)
        nullcity.save()

        # Making sure TypeError is thrown when trying to set with an
        #  incompatible type.
        for bad in [5, 2.0, LineString((0, 0), (1, 1))]:
            try:
                nullcity.point = bad
            except TypeError:
                pass
            else:
                self.fail('Should throw a TypeError')

        # Now setting with a compatible GEOS Geometry, saving, and ensuring
        #  the save took, notice no SRID is explicitly set.
        new = Point(5, 23)
        nullcity.point = new

        # Ensuring that the SRID is automatically set to that of the
        #  field after assignment, but before saving.
        self.assertEqual(4326, nullcity.point.srid)
        nullcity.save()

        # Ensuring the point was saved correctly after saving
        self.assertEqual(new, City.objects.get(name='NullCity').point)

        # Setting the X and Y of the Point
        nullcity.point.x = 23
        nullcity.point.y = 5
        # Checking assignments pre & post-save.
        self.assertNotEqual(Point(23, 5),
                            City.objects.get(name='NullCity').point)
        nullcity.save()
        self.assertEqual(Point(23, 5), City.objects.get(name='NullCity').point)
        nullcity.delete()

        ## Testing on a Polygon
        shell = LinearRing((0, 0), (0, 100), (100, 100), (100, 0), (0, 0))
        inner = LinearRing((40, 40), (40, 60), (60, 60), (60, 40), (40, 40))

        # Creating a State object using a built Polygon
        ply = Polygon(shell, inner)
        nullstate = State(name='NullState', poly=ply)
        self.assertEqual(4326, nullstate.poly.srid)  # SRID auto-set from None
        nullstate.save()

        ns = State.objects.get(name='NullState')
        self.assertEqual(ply, ns.poly)

        # Testing the `ogr` and `srs` lazy-geometry properties.
        if gdal.HAS_GDAL:
            self.assertEqual(True, isinstance(ns.poly.ogr, gdal.OGRGeometry))
            self.assertEqual(ns.poly.wkb, ns.poly.ogr.wkb)
            self.assertEqual(True,
                             isinstance(ns.poly.srs, gdal.SpatialReference))
            self.assertEqual('WGS 84', ns.poly.srs.name)

        # Changing the interior ring on the poly attribute.
        new_inner = LinearRing((30, 30), (30, 70), (70, 70), (70, 30),
                               (30, 30))
        ns.poly[1] = new_inner
        ply[1] = new_inner
        self.assertEqual(4326, ns.poly.srid)
        ns.save()
        self.assertEqual(ply, State.objects.get(name='NullState').poly)
        ns.delete()
コード例 #37
0
def get_line(geom):
    return LineString(geom.coords)
コード例 #38
0
 def test_create_poly_file_string_when_geometry_is_invalid_raises_raises_type_error(
         self):
     invalid_geometry = MultiLineString(LineString((0, 0), (0, 1), (1, 1)),
                                        LineString((1, 1), (1, 2), (2, 2)))
     self.assertRaises(TypeError, create_poly_file_string, invalid_geometry)
コード例 #39
0
 def test_49(self):
     path = Path.objects.create(
         geom=LineString((0, 0), (0, self.step * 2 - 1)))
     self.assertEqual(len(path.geom_3d.coords), 3)
コード例 #40
0
ファイル: utility.py プロジェクト: treystaff/OPS
def crossovers(models,path_id,loc_id):
    
    # SET THE SEARCH OFFSET
    search_offset = 50
    
    # FIND CROSSOVERS (NON SELF-INTERSECTING)
    try:
        
        # GET LINE PATH
        line_pathz = models.segments.objects.filter(segment_id=path_id).values_list('line_path', flat=True)
        
        #line path w/out z values
        line_path = line_pathz.extra(select={'line_path':'ST_Force_2D(line_path)'}).values_list('line_path',flat=True)
        
        #Get all other line paths with z values.
        other_linesz = models.segments.objects.exclude(segment_id=path_id).filter(line_path__intersects=line_path[0])
        
        #Get all other line paths w/out z values
        other_lines = other_linesz.extra(select={'line_path':'ST_Force_2D(line_path)'}).values_list('line_path','segment_id')
      
        #Create a GEOS Well Known Binary reader and set up variables                            
        wkb_r = WKBReader()
        points1 = []
        line_ids = []
      
        #Find points of intersection w/ interpolated z values for given line
        # path
        for line in other_lines:
            l = wkb_r.read(line[0])
            crosses = l.intersection(line_pathz[0])
            if crosses.geom_type == 'MultiPoint':
                for pt in crosses:
                    points1.append(pt)
                    line_ids.append(line[1])
            else:
                points1.append(crosses)
                line_ids.append(line[1])
   
        #Find points of intersection w/ interpolated z values for other line
        #  paths
        points2 = other_linesz.intersection(line_path[0]).values_list('intersection',flat=True)
      
        #EXTRACT x/y coordinates of intersections and gps_time1/2
        x=[]
        y=[]
        gps_time1 = []
        gps_time2 = []
        #Check all point objects for multiple crossovers and extract x/y coords
        # and gps times
        for po in points1:
            if po.geom_type == 'MultiPoint':
                for p in po:
                    x.append(p[0])
                    y.append(p[1])
                    gps_time1.append(p[2])
            else:
                x.append(po[0])
                y.append(po[1])
                gps_time1.append(po[2])
        for po in points2:
            if po.geom_type == 'MultiPoint':
                for p in po:
                    gps_time2.append(p[2])
            else:
                gps_time2.append(po[2])
      
        #Create point objects for all crossover points
        cross_pts = [Point(point) for point in zip(x,y)]
      
    except:
        return error_check(sys)
   
    #Find all non-self-intersecting Crossover angles:
    try:
        #Get the lines associated w/ crossover points
        lines = models.segments.objects.filter(segment_id__in=list(set(line_ids))).values_list('line_path','segment_id')
        line_dict = {}
        for line in lines:
            line_dict[line[1]] = line[0]
   
        #Enumerate over line_ids in order to query for the nearest point paths to each cross point.
        angles = []
        for idx,pt_id in enumerate(line_ids):
            try:
                #Get the nearest point paths for the other line to the crossover
                # point and sort them by point_path_id
                line_pts = models.point_paths.objects.filter(segment_id=pt_id, gps_time__range=(repr(gps_time2[idx]-search_offset),repr(gps_time2[idx]+search_offset))).distance(cross_pts[idx]).order_by('distance','gps_time').values_list('point_path','point_path_id','gps_time', 'distance')
                line_pts = sorted(line_pts,key=lambda l_id: l_id[1])
                #Find the two points adjacent to the cross point. Use time_diff to ensure they are
                # the closest two points.

                time_diff = None
                for pt_idx in range(len(line_pts) - 1):
                    if float(line_pts[pt_idx][2]) <  gps_time2[idx] and float(line_pts[pt_idx+1][2]) > gps_time2[idx]:
                        if time_diff is None:
                            time_diff = float(line_pts[pt_idx+1][2])-float(line_pts[pt_idx][2])
                            change_x1 = line_pts[pt_idx][0].coords[0]-line_pts[pt_idx+1][0].coords[0]
                            change_y1 = line_pts[pt_idx][0].coords[1]-line_pts[pt_idx+1][0].coords[1]
                        elif float(line_pts[pt_idx+1][2])-float(line_pts[pt_idx][2]) < time_diff:
                            time_diff = float(line_pts[pt_idx+1][2])-float(line_pts[pt_idx][2])
                            change_x1 = line_pts[pt_idx][0].coords[0]-line_pts[pt_idx+1][0].coords[0]
                            change_y1 = line_pts[pt_idx][0].coords[1]-line_pts[pt_idx+1][0].coords[1]

                #Get the nearest point paths for the given line to the crossover
                # point and sort them by point_path_id.
                line_pts = models.point_paths.objects.filter(segment_id=path_id, gps_time__range=(repr(gps_time1[idx]-search_offset),repr(gps_time1[idx]+search_offset))).distance(cross_pts[idx]).order_by('distance','gps_time').values_list('point_path','point_path_id','gps_time','distance')
                line_pts = sorted(line_pts,key=lambda l_id: l_id[1])
                time_diff = None
                for pt_idx in range(len(line_pts) - 1):
                    if float(line_pts[pt_idx][2]) <  gps_time1[idx] and float(line_pts[pt_idx+1][2]) > gps_time1[idx]:
                        if time_diff is None:
                            time_diff = float(line_pts[pt_idx+1][2])-float(line_pts[pt_idx][2])
                            change_x2 = line_pts[pt_idx][0].coords[0]-line_pts[pt_idx+1][0].coords[0]
                            change_y2 = line_pts[pt_idx][0].coords[1]-line_pts[pt_idx+1][0].coords[1]
                        elif float(line_pts[pt_idx+1][2])-float(line_pts[pt_idx][2]) < time_diff:
                            time_diff = float(line_pts[pt_idx+1][2])-float(line_pts[pt_idx][2])
                            change_x2 = line_pts[pt_idx][0].coords[0]-line_pts[pt_idx+1][0].coords[0]
                            change_y2 = line_pts[pt_idx][0].coords[1]-line_pts[pt_idx+1][0].coords[1]  
             
            except:
                return error_check(sys)  
          
            #Check if either of the lines is parallel to the y-axis and find
            # the angle of intersection.
            if change_x1 == 0 and change_x2 == 0:
                angle = 180
            elif change_x1 == 0:
                angle = degrees(atan(fabs(1/(change_y2/change_x2))))
            elif change_x2 == 0:
                angle = degrees(atan(fabs(1/(change_y1/change_x1))))
            else:
                slope1 = change_y1/change_x1
                slope2 = change_y2/change_x2
                if slope1*slope2 == -1:
                    angle = 90
                else:
                    angle = degrees(atan(fabs((slope1 - slope2)/(1 + slope1 * slope2))))
          
            #Save all the angles for later
            angles.append(angle)
    except:
        return error_check(sys)
   
    #FIND all self-intersecting crossovers
    try:
        #Fetch the given line path from the db as a multilinestring.
        # 'ST_UnaryUnion' results in a multilinestring with the last point of
        # every linestring except the last linestring is a crossover and the
        # first point of every linestring except the first linestring is a
        # crossover.
        line = models.segments.objects.filter(segment_id=path_id).extra(select={'multi_line_path':'ST_UnaryUnion(line_path)'}
                                                ).values_list('multi_line_path', flat=True)
      
        #Create a GEOS MultiLineString from the multilinestring fetched above.
        lines = wkb_r.read(line[0])
        #Check if resulting object is a multilinestring, indicating crossovers.
        if lines.geom_type.encode('ascii', 'ignore') == 'MultiLineString':
            self_cross = True
            #Loop through all linestrings created by ST_UnaryUnion to get crossover
            # points and interpolate GPS times.
            crossover_gps = {}
            crossover_slopes = {}
            for idx in range(len(lines) - 1):  
                #Check if current linestring is composed only of crossovers.
                if len(lines[idx].coords) ==2 and idx != 0:
                    #Fall back on the next linestring not made exclusively of
                    # crossovers in order to get a good GPS time.
                    idx1 = idx-1
                    while idx1 >= 0:
                        if idx1 == 0 or len(lines[idx1].coords) > 2:
                        
                            #Check if next linestring is made only of crossovers
                            # in order to obtain the next linestring w/ good GPS
                            # time.
                            if len(lines[idx+1].coords) == 2 and idx+1 != len(lines)-1:
                                idx2 = idx+2
                                while idx2 < len(lines):
                                    if idx2 == len(lines)-1 or len(lines[idx2].coords) > 2:
                                        point1 = Point(lines[idx1].coords[-2])
                                        point2 = Point(lines[idx2].coords[1])
                                        #Break while loop
                                        break
                                    else:
                                        idx2 += 1
                            else:
                                point1 = Point(lines[idx1].coords[-2])
                                point2 = Point(lines[idx+1].coords[1])
                            #Break while loop
                            break        
                        else:
                            idx1 = idx1 - 1
                #If current linestring is not made exclusively of crossovers, check
                # if next one is.
                elif len(lines[idx+1].coords) == 2 and idx+1 != len(lines)-1:
                    idx2 = idx+2
                    while idx2 < len(lines):
                        if idx2 == len(lines)-1 or len(lines[idx2].coords) > 2:
                            point1 = Point(lines[idx].coords[-2])
                            point2 = Point(lines[idx2].coords[1])
                            #break loop
                            break
                        else:
                            idx2 += 1
                else:
                    point1 = Point(lines[idx].coords[-2])
                    point2 = Point(lines[idx+1].coords[1])
               
                #Find the change in x/y in order to determine slope
                change_x = point1[0] - point2[0]
                change_y = point1[1] - point2[1]
              
                #Check if the change in x is zero
                if change_x == 0:
                    slope = None
                else:
                    slope = change_y/change_x
              
                #Create a new line object from the two points adjacent to the
                # crossover.
                newline = LineString(point1,point2)
                #Find the crossover point/interpolate the gps time.
                crossover = Point(lines[idx].coords[-1])
                cross_pt = newline.interpolate(newline.project(crossover))
              
                #Use crossover coordinates as keys for a dictionary storing gps
                # times
                if (crossover[0],crossover[1]) not in crossover_gps.keys():
                    crossover_gps[crossover[0],crossover[1]] = [cross_pt[2]]
                else:
                    crossover_gps[crossover[0],crossover[1]].append(cross_pt[2])
              
                #Use crossover coordinates as keys for a dictionary storing slopes
                if (crossover[0],crossover[1]) not in crossover_slopes.keys():
                    crossover_slopes[crossover[0], crossover[1]] = [slope]
                else:
                    crossover_slopes[crossover[0], crossover[1]].append(slope)
              
            #Create a dictionary holding both gps times and slopes.
            crossovers = crossover_gps
            for key in crossover_slopes:
                crossovers[key].append(crossover_slopes[key][0])
                crossovers[key].append(crossover_slopes[key][1])
            del crossover_gps, crossover_slopes
          
            #Extract self-intersecting crossovers information from above.
            self_cross_pts = []
            self_gps1 = []
            self_gps2 = []
            self_angles = []
            for x,y in crossovers:
                self_cross_pts.append(Point(x,y))
                self_gps1.append(crossovers[x,y][0])
                self_gps2.append(crossovers[x,y][1])
                #Determine angle of intersection
                slope1 = crossovers[x,y][2]
                slope2 = crossovers[x,y][3]
                if slope1 == None and slope2 == None:
                    angle = 180
                elif slope1 == None:
                    angle = degrees(atan(fabs(1/slope2)))
                elif slope2 == None:
                    angle = degrees(atan(fabs(1/slope1)))
                else:
                    if slope1*slope2 == -1:
                        angle = 90
                    else:
                        angle = degrees(atan(fabs((slope1 - slope2)/
                                                      (1 + slope1 * slope2))))
                self_angles.append(angle)
        else:
            self_cross = False
    except:
        return error_check(sys)
   
    
    #Format and return crossovers for writing to CSV file:
    try:
        rows = []
        #Non-self-intersecting crossovers
        for idx, pt in enumerate(cross_pts):
            rows.append([path_id,line_ids[idx],repr(gps_time1[idx]), repr(gps_time2[idx]), repr(angles[idx]), repr(pt[0]), repr(pt[1]), loc_id])
        
        if self_cross:
            #Self-intersecting crossovers
            for idx, pt in enumerate(self_cross_pts):
                rows.append([path_id,path_id,repr(self_gps1[idx]),repr(self_gps2[idx]),repr(self_angles[idx]),repr(pt[0]),repr(pt[1]),loc_id])
       
        return rows

    except:
        return error_check(sys)
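The crossover routine above computes the same slope-to-angle expression in two places; a hedged, standalone version of that computation (with None standing in for a vertical segment, as in the self-intersection branch) looks like this.

from math import atan, degrees, fabs

def intersection_angle(slope1, slope2):
    # Acute angle in degrees between two segments given their slopes;
    # None means the segment is parallel to the y-axis.
    if slope1 is None and slope2 is None:
        return 180
    if slope1 is None:
        return degrees(atan(fabs(1.0 / slope2)))
    if slope2 is None:
        return degrees(atan(fabs(1.0 / slope1)))
    if slope1 * slope2 == -1:
        return 90
    return degrees(atan(fabs((slope1 - slope2) / (1 + slope1 * slope2))))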
コード例 #41
0
    def test_shape_mixed(self):
        """
        Test that a project made of intervention of different geom create multiple files.
        Check that those files are each of a different type (Point/LineString) and that
        the project and the intervention are correctly referenced in it.
        """

        # Create topology line
        topo_line = TopologyFactory.create(no_path=True)
        line = PathFactory.create(geom=LineString(Point(10, 10), Point(11, 10)))
        PathAggregationFactory.create(topo_object=topo_line, path=line)

        # Create a topology point
        lng, lat = tuple(Point(1, 1, srid=settings.SRID).transform(settings.API_SRID, clone=True))

        closest_path = PathFactory(geom=LineString(Point(0, 0), Point(1, 0), srid=settings.SRID))
        topo_point = TopologyHelper._topologypoint(lng, lat, None).reload()

        self.assertEquals(topo_point.paths.get(), closest_path)

        # Create one intervention by geometry (point/linestring)
        it_point = InterventionFactory.create(topology=topo_point)
        it_line = InterventionFactory.create(topology=topo_line)
        # reload
        it_point = type(it_point).objects.get(pk=it_point.pk)
        it_line = type(it_line).objects.get(pk=it_line.pk)

        proj = ProjectFactory.create()
        proj.interventions.add(it_point)
        proj.interventions.add(it_line)

        # instantiate the class-based view 'abnormally' to use create_shape directly,
        # avoiding an HTTP request, authentication and reading from a zip
        pfl = ZipShapeSerializer()
        devnull = open(os.devnull, "wb")
        pfl.serialize(Project.objects.all(), stream=devnull, delete=False,
                      fields=ProjectFormatList.columns)
        self.assertEquals(len(pfl.layers), 2)

        ds_point = gdal.DataSource(pfl.layers.values()[0])
        layer_point = ds_point[0]
        ds_line = gdal.DataSource(pfl.layers.values()[1])
        layer_line = ds_line[0]

        self.assertEquals(layer_point.geom_type.name, 'MultiPoint')
        self.assertEquals(layer_line.geom_type.name, 'LineString')

        for layer in [layer_point, layer_line]:
            self.assertEquals(layer.srs.name, 'RGF93_Lambert_93')
            self.assertItemsEqual(layer.fields, [
                u'id', u'name', u'period', u'type', u'domain', u'constraint',
                u'global_cos', u'interventi', u'interven_1', u'comments',
                u'contractor', u'project_ow', u'project_ma', u'founders',
                u'related_st', u'insertion_', u'update_dat',
                u'cities', u'districts', u'restricted'
            ])

        self.assertEquals(len(layer_point), 1)
        self.assertEquals(len(layer_line), 1)

        for feature in layer_point:
            self.assertEquals(str(feature['id']), str(proj.pk))
            self.assertEquals(len(feature.geom.geos), 1)
            self.assertAlmostEqual(feature.geom.geos[0].x, it_point.geom.x)
            self.assertAlmostEqual(feature.geom.geos[0].y, it_point.geom.y)

        for feature in layer_line:
            self.assertEquals(str(feature['id']), str(proj.pk))
            self.assertTrue(feature.geom.geos.equals(it_line.geom))

        # Clean-up temporary shapefiles
        for layer_file in pfl.layers.values():
            for subfile in shapefile_files(layer_file):
                os.remove(subfile)
コード例 #42
0
ファイル: subset.py プロジェクト: EOxServer/eoxserver
    def matches(self, eo_object, containment="overlaps"):
        """ Check if the given :class:`EOObject
        <eoxserver.resources.coverages.models.EOObject>` matches the given
        subsets.

        :param eo_object: the ``EOObject`` to match
        :param containment: either "overlaps" or "contains"
        :returns: a boolean value indicating if the object is contained in the
                  given subsets
        """
        if not len(self):
            return True

        bbox = [None, None, None, None]
        srid = self.srid
        if srid is None:
            srid = 4326
        max_extent = crss.crs_bounds(srid)
        tolerance = crss.crs_tolerance(srid)

        # check if time intervals are configured as "open" or "closed"
        config = get_eoxserver_config()
        reader = SubsetConfigReader(config)
        # note that the operator is inverted from filter() above as the
        # filters use an inclusive search whereas here it's exclusive
        if reader.time_interval_interpretation == "closed":
            gt_op = operator.gt
            lt_op = operator.lt
        else:
            gt_op = operator.ge
            lt_op = operator.le

        footprint = eo_object.footprint
        begin_time = eo_object.begin_time
        end_time = eo_object.end_time

        for subset in self:
            if isinstance(subset, Slice):
                is_slice = True
                value = subset.value
            elif isinstance(subset, Trim):
                is_slice = False
                low = subset.low
                high = subset.high
                # we need the value in case low == high
                value = low

            if subset.is_temporal:
                if is_slice or (low == high and containment == "overlaps"):
                    if begin_time > value or end_time < value:
                        return False
                elif low == high:
                    if begin_time < value or end_time > value:
                        return False
                else:
                    # check if the temporal bounds must be strictly contained
                    if containment == "contains":
                        if high is not None:
                            if gt_op(end_time, high):
                                return False
                        if low is not None:
                            if lt_op(begin_time, low):
                                return False
                    # or just overlapping
                    else:
                        if high is not None:
                            if gt_op(begin_time, high):
                                return False
                        if low is not None:
                            if lt_op(end_time, low):
                                return False

            else:
                if is_slice:
                    if subset.is_x:
                        line = LineString(
                            (value, max_extent[1]),
                            (value, max_extent[3])
                        )
                    else:
                        line = LineString(
                            (max_extent[0], value),
                            (max_extent[2], value)
                        )
                    line.srid = srid
                    if srid != 4326:
                        line.transform(4326)

                    if not line.intersects(footprint):
                        return False

                else:
                    if subset.is_x:
                        bbox[0] = subset.low
                        bbox[2] = subset.high
                    else:
                        bbox[1] = subset.low
                        bbox[3] = subset.high

        if bbox != [None, None, None, None]:
            bbox = map(
                lambda v: v[0] if v[0] is not None else v[1],
                zip(bbox, max_extent)
            )

            bbox[0] -= tolerance
            bbox[1] -= tolerance
            bbox[2] += tolerance
            bbox[3] += tolerance

            logger.debug(
                "Applying BBox %s with containment '%s'." % (bbox, containment)
            )

            poly = Polygon.from_bbox(bbox)
            poly.srid = srid

            if srid != 4326:
                poly.transform(4326)
            if containment == "overlaps":
                if not footprint.intersects(poly):
                    return False
            elif containment == "contains":
                if not footprint.within(poly):
                    return False
        return True
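
Stripped of the eoxserver plumbing, the spatial half of matches() reduces to two GeoDjango checks: a Slice becomes a line spanning the CRS extent and is tested with intersects(), while a Trim pair becomes a bbox Polygon tested with intersects() ("overlaps") or within() ("contains"). A standalone sketch of that logic with made-up extent and footprint values, not the eoxserver API itself:

from django.contrib.gis.geos import LineString, Polygon

# Stand-ins for crss.crs_bounds(4326) and an EO object's footprint.
max_extent = (-180, -90, 180, 90)
footprint = Polygon.from_bbox((10, 40, 20, 50))
footprint.srid = 4326

# Slice at x=15: a vertical line across the whole extent must cross the footprint.
line = LineString((15, max_extent[1]), (15, max_extent[3]), srid=4326)
print(line.intersects(footprint))  # True

# Trim to x in [12, 30], y in [45, 60]: the bbox test depends on the containment mode.
poly = Polygon.from_bbox((12, 45, 30, 60))
poly.srid = 4326
print(footprint.intersects(poly))  # "overlaps" -> True
print(footprint.within(poly))      # "contains" -> False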
コード例 #43
0
 def test_multilinestring_with_hole(self):
     geom = MultiLineString(LineString((0, 0), (0, 1)),
                            LineString((100, 100), (100, 101)))
     self.assertEqual(self.parser.filter_geom('geom', geom),
                      LineString((0, 0), (0, 1), (100, 100), (100, 101)))
     self.assertTrue(self.parser.warnings)
コード例 #44
0
def find_shared_segments(R, S):
  """Return a list of LineStrings where two routes overlap

  Arguments:
  R -- LineString route
  S -- LineString route
  This method should be commutative.
  
  Algorithmic idea:
  For a given set of routes that are not identical:
    - Go through each route (R and S), and make a list, P(*) for each that contains points not on the other
      route.
    - Starting with P(R), iterate through elements p_i = P(R)_i:
      - Find p_i in original polyline, R.
      - Find the vertex of the item before p_i in R, v_s. v_s should be on the other route S.
      - Starting from this element in R, move forward through route until you find the first vertex
        that is on S, call this v_f. Remove elements not on S from P(R), including p_i
      - Starting from v_s, find closest vertex on S, call it g. This could be before or after where v_s
        intersects the route.
      - Move along S in both directions from g, taking ordered pairs where order is index in S, and identify
        the first pair where the first element is on R, but the second element is not.
        - If the first element of this tuple is after g, then that element is end of a shared leg.
        - Otherwise, v_s is the end of that shared leg.
      - Starting from v_f, find closest vertex on S, call it g, this could be before or after where v_f
        intersects the route.
      - Move along S in both directions from g, taking ordered pairs where order is index in S, and identify
        the first pair where the first element is not on R, but the second element is.
        - If the second element of this tuple is before g, then that element is the start of a shared leg.
        - Otherwise, v_f is the start of that shared leg.
    - At this point, we have a list of starting points and ending points of shared legs; combining them
      into a polyline from the two routes seems tractable. TODO: figure this out when brain is less fried.
  """
  R_points_not_on_S = []
  S_points_not_on_R = []
  S_trans = S.clone()
  S_trans.set_srid(4326)
  S_trans.transform(900913)
  for pt in R:
      place = Point(pt)
      place.set_srid(4326)
      place.transform(900913)
      if place.distance(S_trans) > FUZZY_DIST:
          R_points_not_on_S.append(pt)
  #TODO: refactor these into single function call
  S_trans = S.clone()
  S_trans.set_srid(4326)
  S_trans.transform(900913)
  R_trans = R.clone()
  R_trans.set_srid(4326)
  R_trans.transform(900913)
  for pt in S:
      place = Point(pt, srid=4326)
      place.transform(900913)
      if place.distance(R_trans) > FUZZY_DIST:
          S_points_not_on_R.append(pt)
  # we know they start at the same point
  shared_leg_list = []
  shared_leg = []
  shared_leg_start_index = 0
  n = len(R_points_not_on_S)
  while n > 0:
      p_i = R_points_not_on_S[0]
      j = R.index(p_i)
      v_s = R[j - 1]
      f = j
      v_f = p_i
      while v_f in R_points_not_on_S:
          idx = R_points_not_on_S.index(v_f)
          del R_points_not_on_S[idx]
          n = n - 1
          f = f + 1
          v_f = R[f]
      # We know v_f is fuzzy-on S,  so we can iterate through pairs of S and find the segment it is fuzzy-on
      before_index = 0
      for i, start_vertex in enumerate(S):
          if i == len(S) - 1:
              # last vertex of S: S[i + 1] below would be out of range
              break
          end_vertex = S[i + 1]
          line = LineString([start_vertex, end_vertex])
          line.set_srid(4326)
          line.transform(900913)
          pt = Point(v_s)
          pt.set_srid(4326)
          pt.transform(900913)
          if pt.distance(line) < FUZZY_DIST:
              before_index = i
              break
      # At this point, we know shared_leg_start_index..before_index is certainly on the path.
      shared_leg = S[shared_leg_start_index:(before_index+1)]
      shared_leg.append(v_s)
      after_index = before_index + 1
      # Either v_s is the end of the previous shared segment, or after_index or something following that,
      # so go until you find a point on S not on R, starting at after_index
      # advance along S while its vertices stay fuzzy-on R, refreshing pt each step
      while after_index < len(S):
          pt = Point(S[after_index], srid=4326)
          pt.transform(900913)
          if pt.distance(R_trans) >= FUZZY_DIST:
              break
          shared_leg.append(S[after_index])
          after_index = after_index + 1
      # should check that shared_leg is not just the first element. In fact, TODO: go back and check what happens
      # if the first element is the only shared element.
      shared_leg_list.append(LineString(shared_leg))
      # NOTE: returning inside the while loop keeps only the first shared leg;
      # collecting the rest would need shared_leg_start_index bookkeeping (see the TODO above).
      return shared_leg_list
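
The "on the other route" wording in the docstring is the fuzzy-distance test used throughout: project both geometries to a metre-based SRID and compare the point-to-line distance against FUZZY_DIST. A small self-contained sketch of that predicate; the threshold value is an assumption, since FUZZY_DIST is defined elsewhere in the module:

from django.contrib.gis.geos import LineString, Point

FUZZY_DIST = 50  # metres -- assumed here; the real module defines its own value

def fuzzy_on(coord, route, srid=4326):
    # True if `coord` lies within FUZZY_DIST metres of `route`.
    # 900913 mirrors the snippet above; modern PROJ installs may only know 3857.
    route_m = route.clone()
    route_m.srid = srid
    route_m.transform(900913)
    pt = Point(coord, srid=srid)
    pt.transform(900913)
    return pt.distance(route_m) < FUZZY_DIST

S = LineString((0, 0), (1, 0), (2, 0))
print(fuzzy_on((1.0, 0.0), S))  # True: the vertex sits on the route
print(fuzzy_on((1.0, 0.5), S))  # False: roughly 55 km off the route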
コード例 #45
0
ファイル: subset.py プロジェクト: ESA-VirES/eoxserver
    def filter(self, queryset, containment="overlaps"):
        if not len(self):
            return queryset

        qs = queryset

        bbox = [None, None, None, None]
        srid = self.srid

        if srid is None:
            srid = 4326
        max_extent = crss.crs_bounds(srid)
        tolerance = crss.crs_tolerance(srid)

        for subset in self:
            if isinstance(subset, Slice):
                is_slice = True
                value = subset.value
            elif isinstance(subset, Trim):
                is_slice = False
                low = subset.low
                high = subset.high

            if subset.is_temporal:
                if is_slice:
                    qs = qs.filter(
                        begin_time__lte=value,
                        end_time__gte=value
                    )

                else:
                    if high is not None:
                        qs = qs.filter(
                            begin_time__lte=high
                        )
                    if low is not None:
                        qs = qs.filter(
                            end_time__gte=low
                        )
                
                    # check if the temporal bounds must be strictly contained
                    if containment == "contains":
                        if high is not None:
                            qs = qs.filter(
                                end_time__lte=high
                            )
                        if low is not None:
                            qs = qs.filter(
                                begin_time__gte=low
                            )

            else:
                if is_slice:
                    if subset.is_x:
                        line = LineString(
                            (value, max_extent[1]),
                            (value, max_extent[3])
                        )
                    else:
                        line = LineString(
                            (max_extent[0], value),
                            (max_extent[2], value)
                        )
                    line.srid = srid
                    if srid != 4326:
                        line.transform(4326)
                    qs = qs.filter(footprint__intersects=line)

                else:
                    if subset.is_x:
                        bbox[0] = subset.low
                        bbox[2] = subset.high
                    else:
                        bbox[1] = subset.low
                        bbox[3] = subset.high


        if bbox != [None, None, None, None]:
            bbox = map(
                lambda v: v[0] if v[0] is not None else v[1], 
                zip(bbox, max_extent)
            )

            bbox[0] -= tolerance; bbox[1] -= tolerance
            bbox[2] += tolerance; bbox[3] += tolerance

            logger.debug(
                "Applying BBox %s with containment '%s'." % (bbox, containment)
            )

            poly = Polygon.from_bbox(bbox)
            poly.srid = srid

            if srid != 4326:
                poly.transform(4326)
            if containment == "overlaps":
                qs = qs.filter(footprint__intersects=poly)
            elif containment == "contains":
                qs = qs.filter(footprint__within=poly)

        return qs
コード例 #46
0
def save_into_database(rib_path, rmb_path, putdict, sewerdict, rmberrors):
    # Get sewerage name, try to create sewerage
    # If it exists, return with an error
    sewerage_name = os.path.basename(rmb_path)[:-4]  # Minus ".RMB"

    if models.Sewerage.objects.filter(name=sewerage_name).exists():
        # Dutch: "A sewerage named {name} already exists. Remove it on the
        # archive page, or use a different name."
        rmberrors.append(Error(
                line_number=0,
                message=("Er bestaat al een stelsel met de naam {name}. "
                 "Verwijder het op de archiefpagina, of gebruik "
                 "een andere naam.").format(name=sewerage_name)))
        return

    # Files are copied only at the end
    sewerage = models.Sewerage.objects.create(
        name=sewerage_name,
        rib=None,  # Filled in later
        rmb=None,
        active=True)

    # Save the puts, keep a dictionary
    saved_puts = dict()
    for put_id, putinfo in putdict.items():
        saved_puts[put_id] = models.Manhole.objects.create(
            sewerage=sewerage,
            code=put_id,
            sink=int(putinfo['is_sink']),
            ground_level=putinfo['surface_level'],
            the_geom=Point(*putinfo['coordinate']))

    # Save the sewers, use the dictionary
    saved_sewers = dict()
    for sewer_id, sewerinfo in sewerdict.items():
        manhole1 = saved_puts[sewerinfo['manhole_code_1']]
        manhole2 = saved_puts[sewerinfo['manhole_code_2']]
        sewer_line_rd = LineString(manhole1.the_geom, manhole2.the_geom)
        sewer_line_rd.set_srid(4326)
        sewer_line_rd.transform(RD)

        saved_sewers[sewer_id] = models.Sewer.objects.create(
            sewerage=sewerage,
            code=sewer_id,
            quality=models.Sewer.QUALITY_UNKNOWN,
            diameter=sewerinfo['diameter'],
            manhole1=manhole1,
            manhole2=manhole2,
            bob1=sewerinfo['bob_1'],
            bob2=sewerinfo['bob_2'],
            the_geom=LineString(manhole1.the_geom, manhole2.the_geom),
            the_geom_length=sewer_line_rd.length)

    # Save the measurements
    sewer_measurements_dict = dict()
    for sewer_id, sewerinfo in sewerdict.items():
        measurements = sewerinfo['measurements']
        sewer = saved_sewers[sewer_id]

        if measurements:
            sewer_measurements = [
                # Create the SewerMeasurement objects, but don't save
                # them yet!
                models.SewerMeasurement(
                    sewer=sewer,
                    dist=m['dist'],
                    virtual=False,
                    water_level=None,
                    flooded_pct=None,
                    bob=m['bob'],
                    obb=m['bob'] + sewerinfo['diameter'],
                    the_geom=Point(*m['coordinate']))
                for m in measurements]

            # Quality
            sewer.judge_quality(sewer_measurements)
            sewer.save()

            # BOB correction ("sawtooth" phenomenon)
            correct_bob_values(sewer, sewer_measurements)

            # Create two virtual sewer measurements for the start and
            # end of the sewer
            virtual_start = models.SewerMeasurement(
                sewer=sewer, dist=0, virtual=True, water_level=None,
                flooded_pct=None, bob=sewer.bob1,
                obb=sewer.bob1 + sewerinfo['diameter'],
                the_geom=sewer.manhole1.the_geom)
            virtual_end = models.SewerMeasurement(
                sewer=sewer, dist=sewer.the_geom_length,
                virtual=True, water_level=None,
                flooded_pct=None, bob=sewer.bob2,
                obb=sewer.bob2 + sewerinfo['diameter'],
                the_geom=sewer.manhole2.the_geom)

            # Note: we MUST add those two virtual points only after
            # doing the sawtooth correction, otherwise the sawtooth
            # correction will think that everything is fine already
            # since the first and end points would be equal to the
            # bobs of the sewer...
            sewer_measurements = (
                [virtual_start] + sewer_measurements + [virtual_end])
            sewer_measurements_dict[sewer_id] = sewer_measurements
        else:
            # Create "virtual measurements"
            sewer_measurements_dict[sewer_id] = list(
                virtual_measurements(sewer))
            sewer.quality = models.Sewer.QUALITY_UNKNOWN
            sewer.save()

    # Actually compute the lost capacity, the point of this app
    lost_capacity.compute_lost_capacity(
        saved_puts, saved_sewers, sewer_measurements_dict)

    # Save all the SewerMeasurement objects to the database. Since
    # there are thousands of them, it is essential to use bulk_create.
    models.SewerMeasurement.objects.bulk_create(list(chain(
                *sewer_measurements_dict.values())))

    # Success -- copy files
    sewerage.move_files(rib_path, rmb_path)

    # The clap on the fireworks
    sewerage.generate_rib()
コード例 #47
0
class Path(AddPropertyMixin, MapEntityMixin, AltimetryMixin,
           TimeStampedModelMixin, StructureRelated):
    geom = models.LineStringField(srid=settings.SRID, spatial_index=False)
    geom_cadastre = models.LineStringField(null=True,
                                           srid=settings.SRID,
                                           spatial_index=False,
                                           editable=False)
    valid = models.BooleanField(db_column='valide',
                                default=True,
                                verbose_name=_(u"Validity"),
                                help_text=_(u"Approved by manager"))
    visible = models.BooleanField(db_column='visible',
                                  default=True,
                                  verbose_name=_(u"Visible"),
                                  help_text=_(u"Shown in lists and maps"))
    name = models.CharField(null=True,
                            blank=True,
                            max_length=20,
                            db_column='nom',
                            verbose_name=_(u"Name"),
                            help_text=_(u"Official name"))
    comments = models.TextField(null=True,
                                blank=True,
                                db_column='remarques',
                                verbose_name=_(u"Comments"),
                                help_text=_(u"Remarks"))

    departure = models.CharField(null=True,
                                 blank=True,
                                 default="",
                                 max_length=250,
                                 db_column='depart',
                                 verbose_name=_(u"Departure"),
                                 help_text=_(u"Departure place"))
    arrival = models.CharField(null=True,
                               blank=True,
                               default="",
                               max_length=250,
                               db_column='arrivee',
                               verbose_name=_(u"Arrival"),
                               help_text=_(u"Arrival place"))

    comfort = models.ForeignKey('Comfort',
                                null=True,
                                blank=True,
                                related_name='paths',
                                verbose_name=_("Comfort"),
                                db_column='confort')
    source = models.ForeignKey('PathSource',
                               null=True,
                               blank=True,
                               related_name='paths',
                               verbose_name=_("Source"),
                               db_column='source')
    stake = models.ForeignKey('Stake',
                              null=True,
                              blank=True,
                              related_name='paths',
                              verbose_name=_("Maintenance stake"),
                              db_column='enjeu')
    usages = models.ManyToManyField('Usage',
                                    blank=True,
                                    related_name="paths",
                                    verbose_name=_(u"Usages"),
                                    db_table="l_r_troncon_usage")
    networks = models.ManyToManyField('Network',
                                      blank=True,
                                      related_name="paths",
                                      verbose_name=_(u"Networks"),
                                      db_table="l_r_troncon_reseau")
    eid = models.CharField(verbose_name=_(u"External id"),
                           max_length=128,
                           blank=True,
                           null=True,
                           db_column='id_externe')

    objects = PathManager()
    include_invisible = PathInvisibleManager()

    is_reversed = False

    @property
    def length_2d(self):
        if self.geom:
            return round(self.geom.length, 1)
        else:
            return None

    @classproperty
    def length_2d_verbose_name(cls):
        return _(u"2D Length")

    @property
    def length_2d_display(self):
        return self.length_2d

    def __unicode__(self):
        return self.name or _('path %d') % self.pk

    class Meta:
        db_table = 'l_t_troncon'
        verbose_name = _(u"Path")
        verbose_name_plural = _(u"Paths")

    @classmethod
    def closest(cls, point):
        """
        Returns the closest path to the point.
        Will fail if there is no path in the database.
        """
        # TODO: move to custom manager
        if point.srid != settings.SRID:
            point = point.transform(settings.SRID, clone=True)
        return cls.objects.all().exclude(
            visible=False).distance(point).order_by('distance')[0]

    def is_overlap(self):
        return not PathHelper.disjoint(self.geom, self.pk)

    def reverse(self):
        """
        Reverse the geometry.
        We keep track of this, since we will have to work on topologies at save()
        """
        reversed_coord = self.geom.coords[-1::-1]
        self.geom = LineString(reversed_coord)
        self.is_reversed = True
        return self

    def interpolate(self, point):
        """
        Returns position ([0.0-1.0]) and offset (distance) of the point
        along this path.
        """
        return PathHelper.interpolate(self, point)

    def snap(self, point):
        """
        Returns the point snapped (i.e. closest) to the path line geometry.
        """
        return PathHelper.snap(self, point)

    def reload(self, fromdb=None):
        # Update object's computed values (reload from database)
        if self.pk and self.visible:
            fromdb = self.__class__.objects.get(pk=self.pk)
            self.geom = fromdb.geom
            AltimetryMixin.reload(self, fromdb)
            TimeStampedModelMixin.reload(self, fromdb)
        return self

    @debug_pg_notices
    def save(self, *args, **kwargs):
        # If the path was reversed, we have to invert related topologies
        if self.is_reversed:
            for aggr in self.aggregations.all():
                aggr.start_position = 1 - aggr.start_position
                aggr.end_position = 1 - aggr.end_position
                aggr.save()
            self.is_reversed = False
        super(Path, self).save(*args, **kwargs)
        self.reload()

    @property
    def name_display(self):
        return u'<a data-pk="%s" href="%s" title="%s" >%s</a>' % (
            self.pk, self.get_detail_url(), self, self)

    @property
    def name_csv_display(self):
        return unicode(self)

    @classproperty
    def trails_verbose_name(cls):
        return _("Trails")

    @property
    def trails_display(self):
        trails = getattr(self, '_trails', self.trails)
        if trails:
            return ", ".join([t.name_display for t in trails])
        return _("None")

    @property
    def trails_csv_display(self):
        trails = getattr(self, '_trails', self.trails)
        if trails:
            return ", ".join([unicode(t) for t in trails])
        return _("None")

    @property
    def usages_display(self):
        return u", ".join([unicode(u) for u in self.usages.all()])

    @property
    def networks_display(self):
        return u", ".join([unicode(n) for n in self.networks.all()])

    @classmethod
    def get_create_label(cls):
        return _(u"Add a new path")

    @property
    def checkbox(self):
        return u'<input type="checkbox" name="{}[]" value="{}" />'.format(
            'path', self.pk)

    @classproperty
    def checkbox_verbose_name(cls):
        return _("Action")

    @property
    def checkbox_display(self):
        return self.checkbox

    def merge_path(self, path_to_merge):
        """
        Path unification
        :param path_to_merge: Path instance to merge
        :return: Boolean
        """
        if (self.pk and path_to_merge) and (self.pk != path_to_merge.pk):
            conn = connections[DEFAULT_DB_ALIAS]
            cursor = conn.cursor()
            sql = "SELECT ft_merge_path({}, {});".format(
                self.pk, path_to_merge.pk)
            cursor.execute(sql)

            result = cursor.fetchall()[0][0]

            if result:
                # reload object after unification
                self.reload()

            return result

    @property
    def extent(self):
        return self.geom.transform(settings.API_SRID,
                                   clone=True).extent if self.geom else None
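
The spatial helpers on Path above are meant to be chained: find the closest path to a point, project the point onto it, and optionally reverse the geometry before saving (which also flips related topology positions). A rough usage sketch, assuming `from django.conf import settings`, an existing Path table, and a point expressed in the project SRID; the tuple returned by interpolate() follows its docstring:

from django.contrib.gis.geos import Point

pt = Point(650000, 6600000, srid=settings.SRID)  # any point in the layer's SRID

path = Path.closest(pt)                  # nearest visible path; fails on an empty table
position, offset = path.interpolate(pt)  # position in [0.0-1.0] along the path, plus distance
snapped = path.snap(pt)                  # the point moved onto the path geometry

path.reverse().save()                    # reversing also inverts aggregation positions at save()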
コード例 #48
0
ファイル: views.py プロジェクト: trailbehind/StaticMapService
def render_static(request, height=None, width=None, format='png',
                  background='satellite', bounds=None, center=None, render_srid=3857):

# width and height
    width = int(width)
    height = int(height)
    if width > settings.MAX_IMAGE_DIMENSION or \
        height > settings.MAX_IMAGE_DIMENSION or \
        width <= 1 or height <= 1:
        logging.debug("Invalid size")
        return HttpResponseBadRequest("Invalid image size, both dimensions must be in range %i-%i" % (1, settings.MAX_IMAGE_DIMENSION))

# image format
    if format not in IMAGE_FORMATS:
        logging.error("unknown image format %s" % format)
        return HttpResponseBadRequest("Unknown image format, available formats: " + ", ".join(IMAGE_FORMATS))

    if format.startswith('png'):
        mimetype = 'image/png'
    elif format.startswith('jpeg'):
        mimetype = 'image/jpeg'

# bounds
    bounds_box = None
    if bounds:
        bounds_components = bounds.split(',')
        if len(bounds_components) != 4:
            return HttpResponseBadRequest("Invalid bounds, must be 4 , separated numbers")
        bounds_components = [float(f) for f in bounds_components]

        if not (-180 < bounds_components[0] < 180) or not (-180 < bounds_components[2] < 180):
            logging.error("x out of range %f or %f" % (bounds_components[0], bounds_components[2]))
            return HttpResponseBadRequest("x out of range %f or %f" % (bounds_components[0], bounds_components[2]))
        if not (-90 < bounds_components[1] < 90) or not (-90 < bounds_components[3] < 90):
            logging.error("y out of range %f or %f" % (bounds_components[1], bounds_components[3]))
            return HttpResponseBadRequest("y out of range %f or %f" % (bounds_components[1], bounds_components[3]))

        ll = Point(bounds_components[0], bounds_components[1], srid=4326)
        ll.transform(render_srid)

        ur = Point(bounds_components[2], bounds_components[3], srid=4326)
        ur.transform(render_srid)
        bounds_box = mapnik.Box2d(ll.x, ll.y, ur.x, ur.y)
    elif center:
        center_components = center.split(',')
        if len(center_components) != 3:
            return HttpResponseBadRequest()
        lon = float(center_components[0])
        lat = float(center_components[1])
        zoom = int(center_components[2])
        # todo calc bounds from center and zoom
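        # Hedged sketch, not part of the original view: one way to derive
        # bounds_box from center+zoom, assuming 256px spherical-mercator tiles,
        # a metre-based render_srid such as 3857, and that `math` is imported.
        center_pt = Point(lon, lat, srid=4326)
        center_pt.transform(render_srid)
        metres_per_pixel = 2 * math.pi * 6378137 / (256 * 2 ** zoom)
        half_w = metres_per_pixel * width / 2.0
        half_h = metres_per_pixel * height / 2.0
        bounds_box = mapnik.Box2d(center_pt.x - half_w, center_pt.y - half_h,
                                  center_pt.x + half_w, center_pt.y + half_h)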

# baselayer
    if background not in settings.BASE_LAYERS and background != 'none':
        return HttpResponseNotFound("Background not found")

# GeoJSON post data
    if request.method == "POST" and len(request.body):
        input_data = json.loads(request.body)
    else:
        input_data = None

    if not bounds and not center and not input_data:
        return HttpResponseBadRequest("Bounds, center, or post data is required.")

# initialize map
    m = mapnik.Map(width, height)
    m.srs = '+init=epsg:' + str(render_srid)

# add a tile source as a background
    if background != "none":
        background_file = settings.BASE_LAYERS[background]
        background_style = mapnik.Style()
        background_rule = mapnik.Rule()
        background_rule.symbols.append(mapnik.RasterSymbolizer())
        background_style.rules.append(background_rule)
        m.append_style('background style', background_style)
        tile_layer = mapnik.Layer('background')
        tile_layer.srs = '+init=epsg:' + str(render_srid)
        tile_layer.datasource = mapnik.Gdal(base=settings.BASE_LAYER_DIR, file=background_file)
        tile_layer.styles.append('background style')
        m.layers.append(tile_layer)

# add features from geojson
    if input_data and input_data['type'] == "Feature":
        features = [input_data]
    elif input_data and input_data['type'] == "FeatureCollection":
        if 'features' not in input_data:
            return HttpResponseBadRequest()
        features = input_data['features']
    else:
        features = []

    logging.debug("Adding %d features to map" % len(features))

    geometries = []
    point_features = []
    fid = 0
    for feature in features:
        if 'geometry' not in feature:
            logging.debug("feature does not have geometry")
            return HttpResponseBadRequest("Feature does not have a geometry")
        if 'type' not in feature['geometry']:
            logging.debug("geometry does not have type")
            return HttpResponseBadRequest("Geometry does not have a type")

        fid += 1
        style_name = str(fid)

        if feature['geometry']['type'] == 'Point':
            point_features.append(feature)
        elif feature['geometry']['type'] in ('LineString', 'MultiLineString'):
            if feature['geometry']['type'] == 'LineString':
                geos_feature = LineString(feature['geometry']['coordinates'])
            elif feature['geometry']['type'] == 'MultiLineString':
                rings = feature['geometry']['coordinates']
                rings = [[(c[0], c[1]) for c in r] for r in rings]
                if len(rings) == 1:
                    geos_feature = LineString(rings[0])
                else:
                    linestrings = []
                    for ring in rings:
                        try:
                            linestrings.append(LineString(ring))
                        except Exception, e:
                            logging.error("Error adding ring: %s", e)

                    geos_feature = MultiLineString(linestrings)

            geos_feature.srid = 4326
            geos_feature.transform(render_srid)
            geometries.append(geos_feature)

            style = mapnik.Style()
            line_rule = mapnik.Rule()
            style_dict = None
            if 'style' in feature:
                style_dict = feature['style']
            elif 'properties' in feature:
                style_dict = feature['properties']
            line_rule.symbols.append(line_symbolizer(style_dict))
            style.rules.append(line_rule)
            m.append_style(style_name, style)

            wkt = geos_feature.wkt
            line_layer = mapnik.Layer(style_name + ' layer')
            line_layer.datasource = mapnik.CSV(inline='wkt\n' + '"' + wkt + '"')
            line_layer.styles.append(style_name)
            line_layer.srs = '+init=epsg:' + str(render_srid)
            m.layers.append(line_layer)
        elif feature['geometry']['type'] == 'Polygon' or feature['geometry']['type'] == 'MultiPolygon':
            geos_feature = GEOSGeometry(json.dumps(feature['geometry']))
            geos_feature.srid = 4326
            geos_feature.transform(render_srid)
            geometries.append(geos_feature)

            style = mapnik.Style()
            rule = mapnik.Rule()
            style_dict = None
            if 'style' in feature:
                style_dict = feature['style']
            elif 'properties' in feature:
                style_dict = feature['properties']
            rule.symbols.append(polygon_symbolizer(style_dict))
            rule.symbols.append(line_symbolizer(style_dict))
            style.rules.append(rule)
            m.append_style(style_name, style)

            wkt = geos_feature.wkt
            layer = mapnik.Layer(style_name + ' layer')
            layer.datasource = mapnik.CSV(inline='wkt\n' + '"' + wkt + '"')
            layer.styles.append(style_name)
            layer.srs = '+init=epsg:' + str(render_srid)
            m.layers.append(layer)
コード例 #49
0
ファイル: views.py プロジェクト: fradeve/ark-addons
    def post(self, request, *args, **kwargs):
        if request.is_ajax():
            cur_shp_id = kwargs['pk']
            cur_shp = Shapefile.objects.get(id=cur_shp_id)

            # check if the statistic has already been calculated
            if cur_shp.stat_ditch_area is True:
                pass
            else:
                cur_shp_geom = cur_shp.helperditchesnumber_set.all()

                for feature in cur_shp_geom:
                    # the list that will contain the compound's area perimeter pts
                    area_points_list = []

                    # [A] get convex hull and its centroid
                    feat_convex_hull = feature.poly.convex_hull
                    feat_centroid = feat_convex_hull.centroid

                    # [B] feature's hull farthest point from centroid
                    max_point = Point(
                        feat_convex_hull.extent[2],
                        feat_convex_hull.extent[3],
                        srid=3857)

                    radius = max_point.distance(feat_centroid)

                    # get vertexes in a circle (center=centroid), every n angle
                    vertexes_list = get_round_vertex(1, radius,
                                                     feat_centroid.x,
                                                     feat_centroid.y,
                                                     3857)

                    # for each point in vertex list
                    for point in vertexes_list:
                        # create new line between point and centroid
                        line = LineString(feat_centroid, point, srid=3857)
                        # line intersects geometry: get point nearest to centroid
                        try:
                            intersection_line = line.intersection(feature.poly)
                        except GEOSException:
                            # skip this ray entirely; otherwise intersection_line
                            # below would be undefined or stale
                            continue
                        if intersection_line.num_coords == 0:  # no intersection
                            pass
                        # intersection in 1 point
                        elif intersection_line.num_coords == 1:
                            area_points_list.append(Point(
                                intersection_line.coords[0]))
                        # intersection in 2 or more points
                        elif intersection_line.num_coords >= 2:
                            nearest_point = [None, 10000000]
                            # intersection generates a MultiLineString (> 2 pts)
                            if intersection_line.geom_type == 'MultiLineString':
                                for multiline_tuple in intersection_line.tuple:
                                    for coords_tuple in multiline_tuple:
                                        nearest_point = check_nearest_point(
                                            coords_tuple, 3857,
                                            feat_centroid,
                                            nearest_point)
                                area_points_list.append(nearest_point[0].tuple)
                            # intersection generates a LineString (2 pts)
                            else:
                                for coords_tuple in intersection_line.tuple:
                                    nearest_point = check_nearest_point(
                                        coords_tuple, 3857,
                                        feat_centroid,
                                        nearest_point)
                                area_points_list.append(nearest_point[0].tuple)

                    # close polygon, get projected area and save
                    area_points_list.append(area_points_list[0])
                    internal_area_polygon = Polygon(area_points_list, srid=3857)

                    proj_area_polygon = internal_area_polygon
                    proj_area_polygon.transform(cur_shp.proj)

                    if feature.type == 'compound':
                        # recognize open/closed compound
                        tr = settings.GEOSTAT_SETTINGS['open_compound_treshold']
                        closed_limit = 360 - ((tr * 360) / 100)
                        if len(area_points_list) > closed_limit:
                            structure_open = False
                        else:
                            structure_open = True
                    else:
                        structure_open = None

                    internal_area_feature = HelperCompoundsArea(
                        shapefile_id=cur_shp_id,
                        feature=feature,
                        poly=internal_area_polygon,
                        perimeter=internal_area_polygon.length,
                        storedarea=proj_area_polygon.area,
                        type=feature.type,
                        open=structure_open
                    )
                    internal_area_feature.save()

                cur_shp.stat_ditch_area = True
                cur_shp.save()

            results = context_results(cur_shp_id)
            return render_to_response(self.template_name, {'context': results})
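
get_round_vertex() is not shown in this snippet; from the call above it takes an angle step in degrees, a radius, a centre x/y and an SRID, and returns points spaced around a circle. A hypothetical stand-in with that behaviour (the real helper in the project may differ):

import math
from django.contrib.gis.geos import Point

def get_round_vertex(angle_step, radius, center_x, center_y, srid):
    # Hypothetical sketch: one Point every `angle_step` degrees on a circle
    # of the given radius around (center_x, center_y).
    points = []
    for angle in range(0, 360, angle_step):
        rad = math.radians(angle)
        points.append(Point(center_x + radius * math.cos(rad),
                            center_y + radius * math.sin(rad),
                            srid=srid))
    return points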
コード例 #50
0
 def test_snapping_choose_closest_point(self):
     # Line with several points in less than PATH_SNAPPING_DISTANCE
     PathFactory.create(geom=LineString((0, 0), (9.8, 0), (9.9, 0), (10, 0)))
     path_snapped = PathFactory.create(geom=LineString((10, 0.1), (10, 10)))
     self.assertEqual(path_snapped.geom.coords, ((10, 0), (10, 10)))
コード例 #51
0
ファイル: test_functions.py プロジェクト: yephper/django
 def test_reverse_geom(self):
     coords = [(-95.363151, 29.763374), (-95.448601, 29.713803)]
     Track.objects.create(name='Foo', line=LineString(coords))
     track = Track.objects.annotate(reverse_geom=functions.Reverse('line')).get(name='Foo')
     coords.reverse()
     self.assertEqual(tuple(coords), track.reverse_geom.coords)
コード例 #52
0
 def test_multilinestring(self):
     geom = MultiLineString(LineString((0, 0), (0, 1), (1, 1), (1, 0)))
     self.assertEqual(self.parser.filter_geom('geom', geom),
                      LineString((0, 0), (0, 1), (1, 1), (1, 0)))
     self.assertFalse(self.parser.warnings)
コード例 #53
0
 def setUp(self):
     self._fill_raster()
     self.geom = LineString((100, 370), (1100, 370), srid=settings.SRID)
     self.area = AltimetryHelper.elevation_area(self.geom)
コード例 #54
0
    def export(self, map_obj):
        print('Export data to DB')
        fake = Factory.create()

        # Export regions
        print('Save regions')
        self.cleanup_region(map_obj)
        # new_objects = []

        for region in map_obj.regions:
            obj = self.region_model()
            obj.name = fake.city()

            polygons = [center.shapely_object for center in region.centers]
            region_poly = cascaded_union(polygons)
            coords = [
                self.point_to_lnglat(point)
                for point in region_poly.exterior.coords
            ]
            obj.geom = MultiPolygon([Polygon(coords)])
            self.region_pre_save(obj, region, map_obj)

            obj.full_clean()
            obj.save()
            region.model = obj
            # new_objects.append(obj)

        # self.region_model.objects.bulk_create(new_objects)

        # Save region neighbors
        print('Save regions neighbors')
        checked = []
        for region in map_obj.regions:
            for neighbour in region.neighboir_regions:
                if neighbour not in checked:
                    region.model.neighbors.add(neighbour.model)

        # Export biomes
        print('Save biomes')
        self.cleanup_biome(map_obj)
        new_objects = []

        for center in map_obj.centers:
            obj = self.biome_model()
            center.model = obj
            obj.biome = center.biome
            obj.water = center.water
            obj.coast = center.coast
            obj.border = center.border
            obj.ocean = center.ocean
            obj.elevation = center.elevation
            obj.moisture = center.moisture
            obj.center = Point(*self.point_to_lnglat(center.point))
            obj.river = any(edge.river for edge in center.borders)
            if not center.water:
                obj.region = center.region.model

            coords = []
            for corner in center.corners:
                coords.append(self.point_to_lnglat(corner.point))
            # Sort coordinates. Should be sorted already, but let's check once more.
            coords.sort(key=lambda p: math.atan2(p[1] - obj.center.y, p[0] -
                                                 obj.center.x))
            coords.append(coords[0])

            obj.geom = MultiPolygon([Polygon(coords)])
            self.biome_pre_save(obj, center, map_obj)
            obj.full_clean()
            obj.save()
            new_objects.append(obj)

        # FIXME: Use bulk_create and change neighbors saving
        # self.model.objects.bulk_create(new_objects)

        # save neighbors
        print('Save biomes neighbors')
        checked = []
        for center in map_obj.centers:
            checked.append(center)
            for neighbour in center.neighbors:
                if neighbour not in checked:
                    center.model.neighbors.add(neighbour.model)

        # Export rivers
        print('Save rivers')
        self.cleanup_river(map_obj)
        new_objects = []

        for edge in map_obj.edges:
            if edge.river:
                obj = self.river_model()
                obj.width = edge.river
                p1 = self.point_to_lnglat(edge.corners[0].point)
                p2 = self.point_to_lnglat(edge.corners[1].point)
                obj.geom = MultiLineString(LineString(p1, p2))
                self.river_pre_save(obj, edge, map_obj)
                obj.full_clean()
                new_objects.append(obj)

        self.river_model.objects.bulk_create(new_objects)

        # Export cities
        print('Save cities')
        self.cleanup_city(map_obj)
        new_objects = []

        for region in map_obj.regions:
            for center in region.centers:
                obj = self.city_model()
                obj.biome = center.model
                obj.capital = (center == region.capital)
                obj.name = fake.city()
                obj.region = region.model
                obj.coords = Point(*self.point_to_lnglat(center.point))
                self.region_pre_save(obj, region, map_obj)
                obj.full_clean()
                new_objects.append(obj)

        self.city_model.objects.bulk_create(new_objects)
コード例 #55
0
    def handle(self, *args, **options):
        
        # Parse arguments.
        if len(args) < 2 or len(args) > 3:
            raise CommandError("Expected 2 or 3 arguments. Try running with --help.")

        shapefile_base = args[0]
        agency_name = args[1]
        srid = int(args[2]) if len(args) > 2 else None

        # Verify the agency name.
        try:
            agency = Agency.objects.get(agency_name=agency_name)
        except Agency.DoesNotExist:
            self.stderr.write("No agency found for '%s'.\n" % agency_name)
            if Agency.objects.all():
                self.stderr.write("Agency name must be one of:\n")
                for agency in Agency.objects.all():
                    self.stderr.write("- %s\n" % agency.agency_name)
            return
    
        # Read in the shapefile.
        self.stdout.write("Reading shapefile from '%s'.\n" % shapefile_base)
        sf = shapefile.Reader(shapefile_base)

        # Verify that there is a NAME field in this shapefile.
        # Ignore first field ('DeletionField' added by shapefile library).
        name_index = [f[0] for f in sf.fields[1:]].index('NAME')

        if not srid:
            # Try opening a .prj file to get the shapefile projection.
            prj_file = open(shapefile_base + '.prj')
            prj_text = prj_file.read()

            # Connect to API to get SRID from projection, if not provided.
            # Inspired by http://gis.stackexchange.com/a/7784
            self.stdout.write("Querying prj2epsg to find SRID.\n")
            url = 'http://prj2epsg.org/search.json'
            query = urlencode(
                {'exact': True,
                 'error': True,
                 'mode': 'wkt',
                 'terms': prj_text})
            response = urlopen(url, query)
            data = json.loads(response.read())
            srid = int(data['codes'][0]['code'])
            self.stdout.write("Using SRID %d.\n" % srid)

        for shaperecord in sf.shapeRecords():
            shape = shaperecord.shape
            record = shaperecord.record

            # Get name of the record.
            name = record[name_index]
            self.stdout.write("Finding patterns for shape '%s'.\n" % name)

            # Verify that this shape is a PolyLine (type 3).
            if 3 != shape.shapeType:
                continue  # skip shapes that are not polylines

            # Create the LineString with the appropriate SRID
            ls = LineString([list(pt) for pt in shape.points], srid=srid)

            # Transform into a common SRID (4326).
            ls.transform(4326)

            # Find the routes and patterns that are covered by this shape.
            # Consider all patterns where both the start and end stops are
            # within a certain distance to the shape.
            distance_threshold = 1609  # metres (about one mile)

            cursor = connection.cursor()

            # Find matching pattern ID, shape ID, and first and last stop locations.
            query = '''
                SELECT a.pattern_id, gp.shape_id, first_stop_location, last_stop_location
                FROM (SELECT pattern_id, gs.stop_id AS first_stop_id, location AS first_stop_location, stop_name AS first_stop_name
                      FROM gtfs_patternstop gps
                      INNER JOIN gtfs_stop gs ON gps.stop_id = gs.id
                      WHERE gps.order = 0) a
                INNER JOIN (SELECT gps.pattern_id, gs.stop_id AS last_stop_id, location AS last_stop_location, stop_name AS last_stop_name
                            FROM gtfs_patternstop gps
                            INNER JOIN (SELECT pattern_id, MAX(gps.order) max_order
                                        FROM gtfs_patternstop gps
                                        GROUP BY pattern_id) gps2
                                ON gps.pattern_id = gps2.pattern_id AND gps.order = gps2.max_order
                            INNER JOIN gtfs_stop gs ON gps.stop_id = gs.id) b
                    ON a.pattern_id = b.pattern_id
                INNER JOIN gtfs_pattern gp ON a.pattern_id = gp.id
                INNER JOIN gtfs_route gr ON gp.route_id = gr.id
                WHERE gr.agency_id = %s
                  AND ST_Distance_Sphere(first_stop_location, ST_Line_Interpolate_Point(ST_GeomFromEWKB(%s), ST_Line_Locate_Point(ST_GeomFromEWKB(%s), first_stop_location))) < %s
                  AND ST_Distance_Sphere(last_stop_location, ST_Line_Interpolate_Point(ST_GeomFromEWKB(%s), ST_Line_Locate_Point(ST_GeomFromEWKB(%s), last_stop_location))) < %s
                  AND (gr.route_short_name ILIKE %s OR gr.route_long_name ILIKE %s)
                  AND gp.shape_id IS NOT NULL'''
            args = [agency.id, ls.ewkb, ls.ewkb, distance_threshold, ls.ewkb, ls.ewkb, distance_threshold, name, name]

            cursor.execute(query, args)
            data = cursor.fetchall()

            for row in data:

                pattern_id = row[0]
                shape_id = row[1]
                first_stop_location = row[2]
                last_stop_location = row[3]
            
                # Cut the shape between the first and last stops.
                query = 'SELECT ST_Line_Substring(ST_GeomFromEWKB(%s), LEAST(ST_Line_Locate_Point(%s, %s), ST_Line_Locate_Point(%s, %s)), GREATEST(ST_Line_Locate_Point(%s, %s), ST_Line_Locate_Point(%s, %s)))'
                args = [ls.ewkb, ls.ewkb, first_stop_location, ls.ewkb, last_stop_location, ls.ewkb, first_stop_location, ls.ewkb, last_stop_location]
                cursor.execute(query, args)
                substring = cursor.fetchall()[0]

                # Update the database with the new partial shape.
                query = 'UPDATE gtfs_shape SET line=%s WHERE id=%s'
                args = [substring, shape_id]

                cursor.execute(query, args)
                transaction.commit_unless_managed()