Example #1
    def update_sheds(self):
        from maps import get_sheds
        sheds = get_sheds(self.id)

        self.shed_05 = MultiPolygon(GEOSGeometry(sheds[0.5]))
        self.shed_10 = MultiPolygon(GEOSGeometry(sheds[1.0]))
        self.shed_15 = MultiPolygon(GEOSGeometry(sheds[1.5]))
        self.shed_20 = MultiPolygon(GEOSGeometry(sheds[2.0]))

        g = self.shed_20.difference(self.shed_15)
        try:
            self.shed_20 = g
        except TypeError:
            if g.area == 0:
                self.shed_20 = None

        g = self.shed_15.difference(self.shed_10)
        try:
            self.shed_15 = g
        except TypeError:
            if g.area == 0:
                self.shed_15 = None

        g = self.shed_10.difference(self.shed_05)
        try:
            self.shed_10 = g
        except TypeError:
            if g.area == 0:
                self.shed_10 = None
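The try/except TypeError blocks above appear to rely on GeoDjango's field proxy rejecting assignment of a bare Polygon to a MultiPolygonField. A minimal standalone sketch of the same normalization, using explicit type checks instead of the exception handler (the outer/inner rings are made-up illustration data):

from django.contrib.gis.geos import MultiPolygon, Polygon

outer = Polygon(((0, 0), (4, 0), (4, 4), (0, 4), (0, 0)))
inner = Polygon(((1, 1), (3, 1), (3, 3), (1, 3), (1, 1)))

shed = outer.difference(inner)      # difference() may return a Polygon, not a MultiPolygon
if shed.empty or shed.area == 0:
    shed = None                     # nothing left after the subtraction
elif shed.geom_type == 'Polygon':
    shed = MultiPolygon(shed)       # wrap so it fits a MultiPolygonField

print(shed.geom_type if shed else None)   # MultiPolygon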
Example #2
    def collapse_zip_codes(self):
        # The ESRI ZIP Code layer breaks ZIP Codes up along county
        # boundaries, so we need to collapse them first before
        # proceeding

        if len(self.zipcodes) > 0:
            return

        for feature in self.layer:
            zipcode = feature.get(self.name_field)
            geom = feature.geom.geos
            if zipcode not in self.zipcodes:
                self.zipcodes[zipcode] = geom
            else:
                # If it's a MultiPolygon geom we're adding to our
                # existing geom, we need to "unroll" it into its
                # constituent polygons 
                if isinstance(geom, MultiPolygon):
                    subgeoms = list(geom)
                else:
                    subgeoms = [geom]
                existing_geom = self.zipcodes[zipcode]
                if not isinstance(existing_geom, MultiPolygon):
                    new_geom = MultiPolygon([existing_geom])
                    new_geom.extend(subgeoms)
                    self.zipcodes[zipcode] = new_geom
                else:
                    existing_geom.extend(subgeoms)
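A self-contained sketch of the same merge step with made-up data, showing how a MultiPolygon is "unrolled" into its member polygons before being appended to an existing geometry (GEOS collections support list operations such as extend):

from django.contrib.gis.geos import MultiPolygon, Polygon

existing = Polygon(((0, 0), (1, 0), (1, 1), (0, 1), (0, 0)))
incoming = MultiPolygon(Polygon(((2, 0), (3, 0), (3, 1), (2, 1), (2, 0))),
                        Polygon(((4, 0), (5, 0), (5, 1), (4, 1), (4, 0))))

# Unroll the incoming geometry into its constituent polygons
subgeoms = list(incoming) if isinstance(incoming, MultiPolygon) else [incoming]

# Promote the existing geometry to a MultiPolygon, then extend it in place
if not isinstance(existing, MultiPolygon):
    existing = MultiPolygon([existing])
existing.extend(subgeoms)

print(existing.num_geom)   # 3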
Example #3
 def create_country(coords, properties, name, polygon_type):
     if polygon_type == 'Polygon':
         multipolygon_latlng = MultiPolygon([Polygon(coord_set) for coord_set in coords], srid=4326)
     else:
         multipolygon_latlng = MultiPolygon([Polygon(coord_set[0]) for coord_set in coords], srid=4326)
     multipolygon_webmercator = multipolygon_latlng.transform(3857, clone=True)
     return Country(name=name, border_latlng=multipolygon_latlng, properties=properties, border_webmercator=multipolygon_webmercator)
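A short sketch of the same conversion with made-up GeoJSON-style coordinates; the Country model itself is omitted here, and transform() needs GDAL available:

from django.contrib.gis.geos import MultiPolygon, Polygon

# GeoJSON 'MultiPolygon' coordinates: a list of polygons, each a list of rings
coords = [
    [[(0, 0), (0, 1), (1, 1), (0, 0)]],
    [[(2, 2), (2, 3), (3, 3), (2, 2)]],
]
multipolygon_latlng = MultiPolygon(
    [Polygon(coord_set[0]) for coord_set in coords], srid=4326)
multipolygon_webmercator = multipolygon_latlng.transform(3857, clone=True)

print(multipolygon_latlng.srid, multipolygon_webmercator.srid)   # 4326 3857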
Example #4
    def save(self, verbose=False):
        # The ESRI ZIP Code layer breaks ZIP Codes up along county
        # boundaries, so we need to collapse them first before
        # proceeding
        zipcodes = {}
        for feature in self.layer:
            zipcode = feature.get('POSTAL')
            geom = feature.geom.geos
            if zipcode not in zipcodes:
                zipcodes[zipcode] = geom
            else:
                # If it's a MultiPolygon geom we're adding to our
                # existing geom, we need to "unroll" it into its
                # constituent polygons 
                if isinstance(geom, MultiPolygon):
                    subgeoms = list(geom)
                else:
                    subgeoms = [geom]
                existing_geom = zipcodes[zipcode]
                if not isinstance(existing_geom, MultiPolygon):
                    new_geom = MultiPolygon([existing_geom])
                    new_geom.extend(subgeoms)
                    zipcodes[zipcode] = new_geom
                else:
                    existing_geom.extend(subgeoms)

        sorted_zipcodes = sorted(zipcodes.iteritems(), key=lambda x: int(x[0]))
        now = datetime.datetime.now()
        num_created = 0
        for i, (zipcode, geom) in enumerate(sorted_zipcodes):
            if not geom.valid:
                geom = geom.buffer(0.0)
                if not geom.valid:
                    print >> sys.stderr, 'Warning: invalid geometry for %s' % zipcode
            geom.srid = 4326
            zipcode_obj, created = Location.objects.get_or_create(
                name = zipcode,
                normalized_name = zipcode,
                slug = zipcode,
                location_type = self.location_type,
                location = geom,
                centroid = geom.centroid,
                display_order = i,
                city = self.city,
                source = 'ESRI',
                area = geom.transform(3395, True).area,
                is_public = True,
                creation_date = now,
                last_mod_date = now,
            )
            if created:
                num_created += 1
            if verbose:
                print >> sys.stderr, '%s ZIP Code %s ' % (created and 'Created' or 'Already had', zipcode_obj.name)
        return num_created
Example #5
 def _make_square_polygon(self, x, y):
     d = 0.1
     polygon = MultiPolygon(
         Polygon(
             ((x, y),
              (x, y + d),
              (x + d, y + d),
              (x + d, y),
              (x, y)), srid=4326),
         srid=4326)
     polygon.transform(3857)
     return polygon
Example #6
 def test_spatial_fields(self,):
     bbox = Polygon.from_bbox((-7.96, 22.6, -8.14, 27.12))  # in africa
     the_geom = MultiPolygon(GEOSGeometry(bbox, srid=4326), srid=4326)
     the_geog = MultiPolygon(GEOSGeometry(bbox), srid=4326)
     the_geom_webmercator = the_geom.transform(ct=3857, clone=True)
     job = Job.objects.all()[0]
     self.assertIsNotNone(job)
     geom = job.the_geom
     geog = job.the_geog
     geom_web = job.the_geom_webmercator
     self.assertEqual(the_geom, geom)
     self.assertEqual(the_geog, geog)
     self.assertEqual(the_geom_webmercator, geom_web)
Example #7
    def set_geometry_from_government_unit(self):
        objs = self.government_unit_objects

        if objs:
            for g in [x for x in objs if getattr(x, 'geom', None)]:
                self.geom = MultiPolygon(self.geom.union(g.geom))
            self.save()
Example #8
    def test_can_create_serializer(self):
        mp = MapPoint.objects.get(pk=1)
        setattr(mp, "field1", "mp_field1")
        setattr(mp, "field2", "mp_field2")
        names = DatasetNameField(field1_name="field1",
                                 field2_name="field2",
                                 field3_name="field3")
        names.save()
        ds = Dataset(name="ds", names=names)
        ds.save()
        p1 = Polygon(((0, 0), (0, 1), (1, 1), (0, 0)))
        p2 = Polygon(((1, 1), (1, 2), (2, 2), (1, 1)))
        mpoly = MultiPolygon(p1, p2)
        polygon = MapPolygon(lat='42.7302',
                             lon='73.6788',
                             field1=1.0,
                             field2=2.0,
                             mpoly=mpoly,
                             dataset=ds)
        polygon.save()
        me = MapElement(dataset=ds, mappoint=mp, mappolygon=polygon)
        me.save()
        df = DataField(dataset=ds,
                       field_type='I',
                       field_name='int_data',
                       field_en='test')
        df.save()
        element = DataElement(mapelement=me, datafield=df, int_data=23)
        element.save()
        request = RequestFactory().put('/?data=all')
        self.user = User.objects.get(username='******')
        request.user = self.user
        # convert the HTTP Request object to a REST framework Request object
        self.request = APIView().initialize_request(request)
        serializer = TestSerializer(context={'request': self.request})
        expected_address = {
            "street": "",
            "city": "Troy",
            "state": "NY",
            "zipcode": "",
            "county": ""
        }
        self.assertEqual(serializer.get_address(me), expected_address)
        self.assertEqual(serializer.get_data(me)['test'], 23)
        self.assertEqual(serializer.get_geom(me)['type'], 'MultiPolygon')

        tag1 = Tag(dataset=ds, tag='tag1', approved=True, count=1)
        tag1.save()
        tag2 = Tag(dataset=ds, tag='tag2', approved=True, count=1)
        tag2.save()
        tagindiv1 = TagIndiv(tag=tag1, mapelement=me)
        tagindiv1.save()
        tagindiv2 = TagIndiv(tag=tag2, mapelement=me)
        tagindiv2.save()
        self.assertTrue('tag1' in serializer.get_tags(me))
        self.assertTrue('tag2' in serializer.get_tags(me))
Example #9
    def importwktcommunes(self):

        j = 0
        communes_text_save = 'communes sauvées'
        communes_text_error = 'communes pas sauvées'
        communes_errors = {}
        # sion : 2757528 champéry : 1685287, st-maurice : 2757524
        for i in range(1685264, 1685408):
            try:
                name_data = urlopen(
                    'http://www.openstreetmap.org/api/0.6/relation/' +
                    str(i)).read().decode('utf-8')
                doc = ET.fromstring(name_data)
                name = doc[0].findall('tag')[2].attrib['v'].replace(
                    '(VS)', '').replace(' ', '')
                wkt_data = urlopen(
                    'http://polygons.openstreetmap.fr/get_wkt.py?id=' +
                    str(i) + "&params=0").read().decode('utf-8')
                path = settings.BASE_DIR + '/associations/imports/geo/regionchild2/' + name.lower(
                ) + '.wkt'
                file = open(path, 'w+')
                file.write(wkt_data)
                file.close()
                boundaries = GEOSGeometry(wkt_data)
                if not isinstance(boundaries, MultiPolygon):
                    boundaries = MultiPolygon(GEOSGeometry(wkt_data))
            except:
                pass
            else:
                if RegionChild2.objects.filter(
                        name__in=[name.lower(), name]).exists():
                    # TODO: commune names are now capitalized
                    reg = RegionChild2.objects.get(
                        name__in=[name.lower(), name])

                    reg.boundaries = boundaries
                    print('on sauverait la commune ' + str(reg.name) +
                          ' avec le fichier ' + path)
                    print('on changerait le nom de la commune de ' +
                          str(reg.name) + ' pour ' + name)
                    communes_text_save += ', ' + name
                    reg.name = name
                    reg.save()
                    j += 1
                else:
                    print(
                        'La commune ' + name +
                        ' ne se trouve pas dans la bdd, prière de le faire à la main'
                    )
                    communes_errors[name] = {
                        'boundaries': boundaries,
                        'name': name,
                        'file': file,
                        'path': path
                    }
                    communes_text_error += ', ' + name
Example #10
    def handle(self, *args, **options):

        api_client = ApiClient()
        api_client.configuration.host = "https://deims.org/api"
        api = swagger_client.DefaultApi(api_client=api_client)
        sites = api.sites_get()

        for s in sites:
            site = api.sites_resource_id_get(resource_id=s.id.suffix)
            if not (set(['Italy', 'Slovenia', 'Croatia']) & set(site.attributes.geographic.country)):
                continue
            print(site.id.prefix, site.id.suffix, site.title, site.attributes.geographic.country)

            domain_area = None
            if site.attributes.geographic.boundaries is not None:
                domain_area = GEOSGeometry(site.attributes.geographic.boundaries, srid=4326)

                if domain_area and isinstance(domain_area, (LineString, MultiLineString)):
                    domain_area = domain_area.buffer(0.0001)

                if domain_area and isinstance(domain_area, Polygon):
                    domain_area = MultiPolygon(domain_area)

            obj, created = EcosSite.objects.get_or_create(
                suffix = site.id.suffix,
                defaults= {'location' : GEOSGeometry(site.attributes.geographic.coordinates, srid=4326)}
                
            )
            
            obj.data = json.loads(api_client.last_response.data) 
            obj.denomination = site.title
            obj.description = site.attributes.general.abstract
            obj.domain_area = domain_area 
            obj.website= site.id.prefix+site.id.suffix
            obj.last_update = site.changed
            obj.img = imgimport(site.id.suffix)
            obj.location = GEOSGeometry(site.attributes.geographic.coordinates, srid=4326)

            obj.save()

            if site.attributes.focus_design_scale.parameters is not None:
                #print(site.attributes.focus_design_scale.parameters.__class__, site.attributes.focus_design_scale.parameters.__dict__)
                #print(site.attributes.focus_design_scale.parameters)
                #print(site.attributes.focus_design_scale.parameters[0].label)
                #print(site.attributes.focus_design_scale.parameters[0].uri)

                preferred_label_en = site.attributes.focus_design_scale.parameters[0].label
                
                obj, created = Parameter.objects.get_or_create(
                    uri = site.attributes.focus_design_scale.parameters[0].uri,
                    defaults={'preferred_label_en': preferred_label_en}
                )

                preferred_label_en = preferred_label_en
Example #11
    def get_polygons(self, geom):
        result = GEOSGeometry(json.dumps(geom))

        if isinstance(result, Polygon):
            result = MultiPolygon(result)

        if self.srid:
            result.srid = self.srid

        return result
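The same promote-to-MultiPolygon idiom as a standalone sketch, reading a made-up GeoJSON dict (parsing GeoJSON input with GEOSGeometry relies on GDAL being installed):

import json

from django.contrib.gis.geos import GEOSGeometry, MultiPolygon, Polygon

geojson = {"type": "Polygon",
           "coordinates": [[[0, 0], [0, 1], [1, 1], [0, 0]]]}

result = GEOSGeometry(json.dumps(geojson))
if isinstance(result, Polygon):
    result = MultiPolygon(result)
result.srid = 4326

print(result.geom_type)   # MultiPolygon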
Example #12
 def test_geometry_is_empty_with_empty_geometry(self):
     """IF an empty GEOSGeometry object should be validated, THEN the validator shall raise a ValidationError with the message 'Empty geometry collections are not allowed.'."""
     raised = False
     try:
         geometry_is_empty(MultiPolygon())
     except ValidationError as e:
         raised = True
         self.assertEqual('Empty geometry collections are not allowed.', e.message, msg='ValidationError message is not as expected.')
     finally:
         self.assertIs(True, raised, msg='ValidationError was not raised')
Example #13
    def derive_location_from_related_plans(self, buffer: int = 5):
        """
        Derive unified location polygon based on related plan models.
        Buffer the individual points with N meters.

        :param buffer: Buffer radius
        """
        locations = self._get_related_locations()
        if len(locations) == 0:
            self.location = None
        else:
            location_polygons = MultiPolygon(
                [p.buffer(buffer) for p in self._get_related_locations()],
                srid=settings.SRID,
            )
            area = location_polygons.convex_hull

            self.location = MultiPolygon(area, srid=settings.SRID)
        self.save(update_fields=["location"])
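A minimal sketch of the buffer-then-convex-hull step with made-up points; srid=3879 is just a stand-in for whatever settings.SRID is in the original project:

from django.contrib.gis.geos import MultiPolygon, Point

locations = [Point(0, 0, srid=3879), Point(100, 0, srid=3879), Point(50, 80, srid=3879)]

# Buffer each point into a small disc, then take the convex hull of all discs
location_polygons = MultiPolygon([p.buffer(5) for p in locations], srid=3879)
area = location_polygons.convex_hull          # a single Polygon

location = MultiPolygon(area, srid=3879)
print(location.geom_type, location.num_geom)  # MultiPolygon 1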
Example #14
 def test_multipolygon_validation(self):
     p = Patient.objects.create(name='Joe')
     p.eav.zone = True
     self.assertRaises(ValidationError, p.save)
     poly1 = Polygon(((0.0, 0.0), (0.0, 50.0), (50.0, 50.0), (50.0, 0.0), (0.0, 0.0)))
     poly2 = Polygon(((50.0, 50.0), (50.0, 80.0), (80.0, 80.0), (80.0, 50.0), (50.0, 50.0)))
     multipoly = MultiPolygon((poly1, poly2))
     p.eav.zone = multipoly
     p.save()
     self.assertEqual(Patient.objects.get(pk=p.pk).eav.zone, multipoly)
Example #15
 def setUp(self):
     boundary = Boundary.objects.create(
         label='fooOK',
         source_file='foo.zip',
         status=Boundary.StatusTypes.COMPLETE)
     coords = ((0, 0), (0, 1), (1, 1), (1, 0), (0, 0))
     self.poly = BoundaryPolygon.objects.create(
         data={},
         geom=MultiPolygon(Polygon(LinearRing(coords))),
         boundary=boundary)
Example #16
    def __create_imagetiles__(self, *args, **kwargs):

        if self.imagelayer_file.path.split('.')[ -1 ] == 'tif':

            # create tiles for one image
            seconds, polygon = self.__create_imagetiles_one__( self.imagelayer_file.path, *args, **kwargs )

            # update layer
            self.geom                 = MultiPolygon([ polygon ])
            self.imagelayer_available = True

            # override again
            super(ImageLayer, self).save(*args, **kwargs)

        elif self.imagelayer_file.path.split('.')[ -1 ] == 'zip':

            # data
            m_polyset = []
            seconds   = 0

            # unzip file
            for path in unzip(self.imagelayer_file.path, '/tmp'):

                # create tiles for one image
                second, polygon = self.__create_imagetiles_one__(path, *args, **kwargs)
                print( polygon )

                # update data
                seconds += second
                m_polyset.append( polygon )

            # update layer
            self.geom = MultiPolygon( m_polyset )
            self.imagelayer_available = True

            print( seconds )

            # override again
            super(ImageLayer, self).save(*args, **kwargs)


        else:
            assert False
Example #17
    def test_boundary_constraint(self):
        b = Boundary.objects.create(
            geom=MultiPolygon(make_simple_polygon(0)),
            name='whatever',
            category='whatever',
            sort_order=1)

        inparams = search._parse_dict_value({'IN_BOUNDARY': b.pk})
        self.assertEqual(inparams,
                         {'__contained': b.geom})
Example #18
    def merge_features(self, features):
        polygons = []
        for feat in features:
            if isinstance(feat.geom.geos, MultiPolygon):
                multipoly = feat.geom.geos
                polygons = polygons + [poly for poly in multipoly]
            else:
                polygons.append(feat.geom.geos)

        return MultiPolygon(polygons)
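A self-contained version of the same flattening logic, fed plain GEOS geometries instead of GDAL features (the input list is invented for illustration):

from django.contrib.gis.geos import MultiPolygon, Polygon

feature_geoms = [
    Polygon(((0, 0), (1, 0), (1, 1), (0, 0))),
    MultiPolygon(Polygon(((2, 0), (3, 0), (3, 1), (2, 0))),
                 Polygon(((4, 0), (5, 0), (5, 1), (4, 0)))),
]

polygons = []
for geom in feature_geoms:
    if isinstance(geom, MultiPolygon):
        polygons.extend(poly for poly in geom)   # unroll the multipolygon
    else:
        polygons.append(geom)

merged = MultiPolygon(polygons)
print(merged.num_geom)   # 3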
Example #19
 def test_job_outside_region(self,):
     job = Job.objects.all()[0]
     bbox = Polygon.from_bbox((2.74, 47.66, 21.61, 60.24))  # outside any region
     the_geom = MultiPolygon(GEOSGeometry(bbox, srid=4326))
     job.the_geom = the_geom
     job.save()
     regions = Region.objects.filter(the_geom__intersects=job.the_geom).intersection(job.the_geom,
                                                                                     field_name='the_geom').order_by(
         '-intersection')
     self.assertEquals(0, len(regions))
Example #20
    def forwards(self, orm):
        # Adding field 'Instance.bounds'
        dummy_default_multi_polygon = MultiPolygon(
            Polygon(((0, 0), (1, 0), (1, 1), (0, 1), (0, 0))))

        db.add_column(
            u'treemap_instance',
            'bounds',
            self.gf('django.contrib.gis.db.models.fields.MultiPolygonField')(
                srid=3857, default=dummy_default_multi_polygon),
            keep_default=False)

        for instance in orm.Instance.objects.all():
            point = instance.center
            calculated_box = MultiPolygon(
                Polygon.from_bbox((point.x - 2500, point.y - 2500,
                                   point.x + 2500, point.y + 2500)))
            instance.bounds = calculated_box
            instance.save()
Example #21
 def setUp(self):
     pos_data = {
         'document': '12345',
         'owner_name': 'John',
         'trading_name': 'Doe',
         'coverage_area': MultiPolygon(Polygon((Point(0, 0), Point(1, 1), Point(0, 2), Point(0, 0)))),
         'address': Point(1, 0)
     }
     self.point_of_sale = PointOfSale(**pos_data)
     self.point_of_sale.save()
Example #22
 def _create_models(self):
     mommy.make(
         models.Garea,
         geom=MultiPolygon(Polygon(
             ((30, 20), (45, 40), (10, 40), (30, 20)))),
         name="Baranduin",
         code="ME07",
     )
     mommy.make(models.Station, geom=Point(x=35, y=20), name="Sarn Ford")
     mommy.make(models.Station, geom=Point(x=5, y=20), name="Mithlond")
Example #23
def convert_to_multipolygon(geom):
    """
    Convert a geometry into a MultiPolygon. An empty geometry is returned
    if conversion is not possible, for example when the input consists of
    points or lines.
    """
    cursor = connection.cursor()

    # Store this geom's srid
    srid = geom.srid

    # Setup empty response
    empty = MultiPolygon([], srid=srid)

    # Check if geom is empty or has no area (point & line)
    if geom.empty or geom.area == 0:
        return empty

    # Try to iteratively convert to valid multi polygon
    for i in range(10):
        # Check if conditions are met
        if geom.geom_type != 'MultiPolygon' or geom.valid_reason != 'Valid Geometry':
            # Run cleaning sequence
            try:
                sql = "SELECT ST_AsText(ST_CollectionExtract(ST_MakeValid('{wkt}'), 3))".format(
                    wkt=geom.wkt)
                cursor.execute(sql)
                geom = GEOSGeometry(cursor.fetchone()[0], srid=srid)
                if geom.geom_type != 'MultiPolygon':
                    geom = MultiPolygon(geom, srid=srid)
            # If cleaning sequence raises exception, return empty geometry
            except:
                return empty
        # If conditions are met, stop iterating
        else:
            break

    # Check if all conditions are satisfied after conversion
    if geom.empty or geom.area == 0 or geom.geom_type != 'MultiPolygon'\
            or geom.valid_reason != 'Valid Geometry':
        return empty
    else:
        return geom
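When PostGIS is not in reach, the buffer(0) trick is a common pure-GEOS fallback for the same kind of repair. A hedged sketch with an intentionally self-intersecting "bowtie" ring:

from django.contrib.gis.geos import GEOSGeometry, MultiPolygon

# A self-intersecting ring, invalid as a polygon
bowtie = GEOSGeometry('POLYGON((0 0, 2 2, 2 0, 0 2, 0 0))', srid=4326)
print(bowtie.valid)                     # False

cleaned = bowtie.buffer(0.0)            # buffer(0) often splits/repairs self-intersections
if cleaned.geom_type != 'MultiPolygon':
    cleaned = MultiPolygon(cleaned, srid=4326)

print(cleaned.valid, cleaned.geom_type)   # True MultiPolygon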
Example #24
 def test_create(self):
     category = mommy.make(models.GareaCategory)
     garea = models.Garea(
         name="Esgalduin",
         category=category,
         geom=MultiPolygon(Polygon(
             ((30, 20), (45, 40), (10, 40), (30, 20)))),
     )
     garea.save()
     self.assertEqual(models.Garea.objects.first().name, "Esgalduin")
Example #25
    def test_paths_link(self):
        p1 = PathFactory.create(geom=LineString((0, 0), (1, 1)))
        p2 = PathFactory.create(geom=LineString((1, 1), (3, 3)))
        p3 = PathFactory.create(geom=LineString((3, 3), (4, 4)))
        p4 = PathFactory.create(geom=LineString((4, 1), (6, 2), (4, 3)))

        c1 = City.objects.create(code='005177',
                                 name='Trifouillis-les-oies',
                                 geom=MultiPolygon(
                                     Polygon(((0, 0), (2, 0), (2, 4), (0, 4),
                                              (0, 0)),
                                             srid=settings.SRID)))
        City.objects.create(code='005179',
                            name='Trifouillis-les-poules',
                            geom=MultiPolygon(
                                Polygon(
                                    ((2, 0), (5, 0), (5, 4), (2, 4), (2, 0)),
                                    srid=settings.SRID)))

        # There should be automatic link after insert
        self.assertEqual(len(p1.cities), 1)
        self.assertEqual(len(p2.cities), 2)
        self.assertEqual(len(p3.cities), 1)
        self.assertEqual(len(p4.cities), 1)

        c1.geom = MultiPolygon(
            Polygon(((1.5, 0), (2, 0), (2, 4), (1.5, 4), (1.5, 0)),
                    srid=settings.SRID))
        c1.save()

        # Links should have been updated after geom update
        self.assertEqual(len(p1.cities), 0)
        self.assertEqual(len(p2.cities), 2)
        self.assertEqual(len(p3.cities), 1)
        self.assertEqual(len(p4.cities), 1)

        c1.delete()

        # Links should have been updated after delete
        self.assertEqual(len(p1.cities), 0)
        self.assertEqual(len(p2.cities), 1)
        self.assertEqual(len(p3.cities), 1)
        self.assertEqual(len(p4.cities), 1)
Example #26
 def create_polygon(self):
     poly_1 = Polygon(((0, 0), (0, 1), (1, 1), (0, 0)))
     poly_2 = Polygon(((1, 1), (1, 2), (2, 2), (1, 1)))
     multipoly = MultiPolygon(poly_1, poly_2)
     return MmtPolygon(
         name='test polygon',
         geom=multipoly,
         popup_image=self.test_img,
         banner_image=self.test_img,
         popup_audio_file=File(file=self.test_audio_file).save())
Example #27
    def test_helpers(self):
        trek = TrekFactory.create(no_path=True)
        p1 = PathFactory.create(geom=LineString((0, 0), (4, 4)))
        p2 = PathFactory.create(geom=LineString((4, 4), (8, 8)))
        poi = POIFactory.create(no_path=True)
        service = ServiceFactory.create(no_path=True)
        service.type.practices.add(trek.practice)
        PathAggregationFactory.create(topo_object=trek,
                                      path=p1,
                                      start_position=0.5)
        PathAggregationFactory.create(topo_object=trek, path=p2)
        PathAggregationFactory.create(topo_object=poi,
                                      path=p1,
                                      start_position=0.6,
                                      end_position=0.6)
        PathAggregationFactory.create(topo_object=service,
                                      path=p1,
                                      start_position=0.7,
                                      end_position=0.7)
        # /!\ District are automatically linked to paths at DB level
        d1 = DistrictFactory.create(geom=MultiPolygon(
            Polygon(((-2, -2), (3, -2), (3, 3), (-2, 3), (-2, -2)))))

        # Ensure related objects are accessible
        self.assertItemsEqual(trek.pois, [poi])
        self.assertItemsEqual(trek.services, [service])
        self.assertItemsEqual(poi.treks, [trek])
        self.assertItemsEqual(service.treks, [trek])
        self.assertItemsEqual(trek.districts, [d1])

        # Ensure there is no duplicates
        PathAggregationFactory.create(topo_object=trek,
                                      path=p1,
                                      end_position=0.5)
        self.assertItemsEqual(trek.pois, [poi])
        self.assertItemsEqual(trek.services, [service])
        self.assertItemsEqual(poi.treks, [trek])
        self.assertItemsEqual(service.treks, [trek])

        d2 = DistrictFactory.create(
            geom=MultiPolygon(Polygon(((3, 3), (9, 3), (9, 9), (3, 9), (3,
                                                                        3)))))
        self.assertItemsEqual(trek.districts, [d1, d2])
Example #28
class MultiPolygonFieldDefinitionTest(GeometryFieldDefinitionTestMixin,
                                      BaseModelDefinitionTestCase):
    field_definition_cls_name = 'MultiPolygonFieldDefinition'
    field_values = (
        MultiPolygon(
            Polygon(((0.0, 0.0), (0.0, 50.0), (50.0, 50.0), (50.0, 0.0),
                     (0.0, 0.0))),
            Polygon(((0.0, 0.0), (18, 50.0), (47.0, 55.0), (50.0, 0.0),
                     (0.0, 0.0))),
        ),
        MultiPolygon(
            Polygon(((0.0, 0.0), (0.0, 50.0), (50.0, 50.0), (50.0, 0.0),
                     (0.0, 0.0))),
            Polygon(((0.0, 0.0), (18, 50.0), (47.0, 55.0), (50.0, 0.0),
                     (0.0, 0.0))),
            Polygon(((0.0, 0.0), (0.0, 50.0), (50.0, 51.0), (50.0, 45),
                     (0.0, 0.0))),
        ),
    )
Example #29
    def run(self, result=None):
        if not result:
            result = PlanningUnit.objects.all()

        # PU Filtration occurs here
        result = self.run_filters(result)

        try:
            dissolved_geom = result.aggregate(Union('geometry'))
            if dissolved_geom:
                dissolved_geom = dissolved_geom['geometry__union']
            else:
                raise Exception(
                    "No planning units available with the current filters.")
        except:
            raise Exception(
                "No planning units available with the current filters.")

        if type(dissolved_geom) == MultiPolygon:
            self.geometry_dissolved = dissolved_geom
        else:
            try:
                self.geometry_dissolved = MultiPolygon(
                    dissolved_geom, srid=dissolved_geom.srid)
            except:
                raise Exception(
                    "Unable to dissolve the returned planning units into a single geometry"
                )

        self.active = True

        geom_clone = self.geometry_dissolved.clone()
        geom_clone.transform(2163)
        self.geometry_final_area = geom_clone.area
        self.planning_units = ','.join(
            str(i) for i in result.values_list('id', flat=True))

        if self.planning_units:
            self.satisfied = True
        else:
            self.satisfied = False

        return True
Example #30
def _geometry_collection_to_multipolygon(geometry_collection):
    assert isinstance(geometry_collection, GeometryCollection)
    polygons = []
    for geom in geometry_collection:
        if geom.dims < 2:
            # Just ignore everything that has dimension 0 or 1,
            # i.e. points, lines, or linestrings, multipoints, etc.
            continue
        polygons.extend(_to_multipolygon(geom))
    return MultiPolygon(polygons, srid=geometry_collection.srid)
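The `_to_multipolygon` helper used above is not shown; a standalone sketch of the overall idea, dropping anything with fewer than two dimensions from a GeometryCollection and collecting the rest into a MultiPolygon:

from django.contrib.gis.geos import (GeometryCollection, LineString,
                                     MultiPolygon, Point, Polygon)

collection = GeometryCollection(
    Point(0, 0),
    LineString((0, 0), (1, 1)),
    Polygon(((0, 0), (1, 0), (1, 1), (0, 0))),
    srid=4326,
)

polygons = []
for geom in collection:
    if geom.dims < 2:
        continue                         # skip points and lines
    if isinstance(geom, MultiPolygon):
        polygons.extend(list(geom))
    else:
        polygons.append(geom)

result = MultiPolygon(polygons, srid=collection.srid)
print(result.num_geom)   # 1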
Example #31
    def setUp(self):
        # Create two areas, and one areatype
        # create 3 signals, 1 for area A, 1 for area B and one outside
        self.area_type_district = AreaTypeFactory.create(code='district',
                                                         name='district')

        min_lon, min_lat, max_lon, max_lat = BBOX
        extent_lon = max_lon - min_lon
        extent_lat = max_lat - min_lat

        w = Polygon.from_bbox((min_lon, min_lat, min_lon + extent_lon * 0.5,
                               min_lat + extent_lat))
        e = Polygon.from_bbox((min_lon + extent_lon * 0.5, min_lat,
                               min_lon + extent_lon, min_lat + extent_lat))

        AreaFactory.create(name='west',
                           code='west',
                           _type=self.area_type_district,
                           geometry=MultiPolygon([w]))
        AreaFactory.create(name='east',
                           code='east',
                           _type=self.area_type_district,
                           geometry=MultiPolygon([e]))

        center_w = Point(
            (min_lon + 0.25 * extent_lon, min_lat + 0.5 * extent_lat))
        center_e = Point(
            (min_lon + 0.75 * extent_lon, min_lat + 0.5 * extent_lat))
        to_north = Point(
            (min_lon + 0.5 * extent_lon, min_lat + 2 * extent_lat))

        self.signal_west = SignalFactory.create(location__geometrie=center_w,
                                                location__area_type_code=None,
                                                location__area_code=None,
                                                location__area_name=None)
        self.signal_east = SignalFactory.create(location__geometrie=center_e,
                                                location__area_type_code=None,
                                                location__area_code=None,
                                                location__area_name=None)
        self.signal_north = SignalFactory.create(location__geometrie=to_north,
                                                 location__area_type_code=None,
                                                 location__area_code=None,
                                                 location__area_name=None)
Example #32
class AreaFactory(TaggableModelFactory):
    class Meta:
        model = Area

    name = factory.LazyFunction(lambda: fake.word())
    area_type = 'school-grounds'
    polygon = factory.LazyFunction(lambda: MultiPolygon(
        Polygon(((87.940101, 42.023135), (87.523661, 42.023135),
                 (87.523661, 41.644286), (87.940101, 41.644286),
                 (87.940101, 42.023135)))))
Example #33
def administrative_division2(administrative_division_type):
    division = AdministrativeDivision.objects.create(
        name_en='test division 2',
        type=administrative_division_type,
        ocd_id='ocd-division/test:2')
    coords = ((100, 100), (100, 300), (300, 300), (300, 100), (100, 100))
    AdministrativeDivisionGeometry.objects.create(division=division,
                                                  boundary=MultiPolygon(
                                                      [Polygon(coords)]))
    return division
Example #34
def fixup_intersection(geometry):
	# Intersection and simplify can give us back any sort of geometry,
	# but our shape columns must be MultiPolygons. Additionally,
	# intersection is also known to return invalid geometries. That's
	# unfortunate. We can test if it's valid, but there's no guaranteed
	# way to fix it.
	
	if geometry.geom_type == "GeometryCollection":
		polygons = []
		for p in geometry:
			mp = fixup_intersection(p)
			if mp: polygons.extend(mp)
	elif geometry.geom_type == "MultiPolygon":
		polygons = geometry
	elif geometry.geom_type == "Polygon":
		polygons = [geometry]
	elif geometry.geom_type == "LinearRing":
		polygons = [Polygon(geometry)]
	else:
		# Other geometries (point, line) are useless to us.
		return None
		
	valid_polygons = []
	for p in polygons:
		# Check if the resulting polygon is valid.
		
		# In order to prevent GEOS_NOTICEs which clutter output, do our own
		# check for whether the polygon's outer ring has at least three distinct
		# coordinates. If not, we can skip now because it is obviously not
		# correctable.
		if len(set(p[0])) < 3: continue
		
		if not p.valid:
			# Use buffer() as a trick to fix polygon.
			# http://workshops.opengeo.org/postgis-intro/validity.html
			try:
				p = p.buffer(0.0) # raises an error sometimes
				if not p.valid: raise Exception()
			except:
				print "Could not fix invalid polygon."
				continue
			print "Fixed invalid polygon! Yay!", p.area
			
		# If it's really small, we don't really care about this component anyway.
		# We shouldn't trust the area computation on an invalid shape, so we can't
		# do this earlier. It's possible buffer() gives back valid zero-area shapes
		# even if the input was supposed to be something real, so we might be
		# unwittingly eliminating wants-to-be-good polygons here.
		if p.area < .00000000001: continue
			
		valid_polygons.append(p)
		
	if len(valid_polygons) == 0: return None
		
	return MultiPolygon(valid_polygons)
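Why the function has to cope with arbitrary geometry types: intersection can hand back lines or points, not just polygons. A small illustration with made-up squares:

from django.contrib.gis.geos import MultiPolygon, Polygon

a = Polygon(((0, 0), (2, 0), (2, 2), (0, 2), (0, 0)))
b = Polygon(((2, 0), (4, 0), (4, 2), (2, 2), (2, 0)))   # touches a only along an edge
c = Polygon(((1, 1), (3, 1), (3, 3), (1, 3), (1, 1)))   # genuinely overlaps a

edge_only = a.intersection(b)
print(edge_only.geom_type)               # LineString: no areal overlap to keep

overlap = a.intersection(c)
if overlap.geom_type == 'Polygon':
    overlap = MultiPolygon(overlap)      # shape columns expect MultiPolygons
print(overlap.geom_type, overlap.area)   # MultiPolygon 1.0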
Example #35
    def read_ds(self, ds):
        """ Return the ENVISAT specific metadata items.
        """

        filename = ds.GetFileList()[0]

        path, data_file = split(filename)

        #assert(len(parts) >= 8)
        parts = path.split("/")[-7:]

        out_date = []
        for i in parts[1:4]:
            try:
                out_date.append(int(i))
            except:
                out_date.append(int(i[1:]))

        try:
            raw_date = [
                int(i[1:]) if i[0].isalpha() else int(i) for i in parts[1:4]
            ]
            raw_time = parts[4][1:] if parts[4].startswith("T") else parts[4]
        except Exception as e:
            raise TypeError("Timestring in file path can't be parsed: %s" % e)

        timestamp = datetime.combine(
            date(*raw_date),
            time(int(raw_time[:2]), int(raw_time[2:4]), int(raw_time[4:])))
        timestamp = make_aware(timestamp, utc)

        size = (ds.RasterXSize, ds.RasterYSize)
        gt = ds.GetGeoTransform()

        def gtrans(x, y):
            return gt[0] + x * gt[1] + y * gt[2], gt[3] + x * gt[4] + y * gt[5]

        vpix = [(0, 0), (0, size[1]), (size[0], 0), (size[0], size[1])]
        vx, vy = zip(*(gtrans(x, y) for x, y in vpix))

        return {
            "identifier": splitext(basename(filename))[0],
            "begin_time": timestamp,
            "end_time": timestamp,
            "range_type_name": parts[0],
            "footprint": MultiPolygon(
                Polygon.from_bbox((min(vx), min(vy), max(vx), max(vy)))),
            # "begin_time": parse_datetime(ds.GetMetadataItem("MPH_SENSING_START")),
            # "end_time": parse_datetime(ds.GetMetadataItem("MPH_SENSING_STOP"))
        }
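A trimmed sketch of just the footprint construction, with corner coordinates invented in place of the GDAL geotransform math:

from django.contrib.gis.geos import MultiPolygon, Polygon

# Corner coordinates as they might come out of gtrans() for each image corner
vx = (10.0, 10.0, 12.5, 12.5)
vy = (45.0, 47.0, 45.0, 47.0)

footprint = MultiPolygon(
    Polygon.from_bbox((min(vx), min(vy), max(vx), max(vy))))

print(footprint.extent)   # (10.0, 45.0, 12.5, 47.0)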
Example #36
    def test_replacing(self):
        command = LoadGeos()
        old_geo = {
            'geoid': '1111111111',
            'geo_type': Geo.TRACT_TYPE,
            'name': 'Geo in 1990',
            'year': '1990',
            'state': '11',
            'county': '111',
            'tract': '11111',
            'minlat': -1,
            'maxlat': 1,
            'minlon': -1,
            'maxlon': 1,
            'centlat': 0,
            'centlon': 0,
            'geom': MultiPolygon(
                Polygon(((0, 0), (0, 2), (-1, 2), (0, 0))),
                Polygon(((-4, -2), (-6, -1), (-2, -2), (-4, -2)))),
        }
        command.save_batch([old_geo])
        # Geo save worked
        self.assertEqual(1, Geo.objects.filter(geoid='1111111111').count())

        census = Census2010Sex(total_pop=100, male=45, female=55)
        census.geoid_id = '1111111111'
        census.save()
        # Census data worked
        self.assertEqual(1, Census2010Sex.objects.all().count())

        new_geo = old_geo.copy()
        new_geo['name'] = 'Geo in 2000'
        command.save_batch([new_geo])
        # check that both models still exist
        query = Geo.objects.filter(geoid='1111111111')
        self.assertEqual(1, query.count())
        self.assertEqual('Geo in 2000', query.get().name)
        self.assertEqual(1, Census2010Sex.objects.all().count())

        Geo.objects.all().delete()
        Census2010Sex.objects.all().delete()
Example #37
 def test_path_touching_land_layer(self):
     p1 = PathFactory.create(
         geom=LineString((3, 3), (4, 4), srid=settings.SRID))
     City.objects.create(code='005177',
                         name='Trifouillis-les-oies',
                         geom=MultiPolygon(
                             Polygon(
                                 ((0, 0), (2, 0), (2, 2), (0, 2), (0, 0)),
                                 srid=settings.SRID)))
     p1.geom = LineString((2, 2), (4, 4), srid=settings.SRID)
     p1.save()
Example #38
    def get_data_bounds_polygon(self):
        parsed_geojson = json.loads(self.data)
        def get_polygons(obj):
            polys = []
            if "type" in obj:
                if obj["type"] == "FeatureCollection":
                    for feature in obj["features"]:
                        extent_polys = get_polygons(feature)
                        polys.extend(extent_polys)
                elif obj["type"] == "Feature":
                    # process "geometry"
                    geom = GEOSGeometry(json.dumps(obj["geometry"]))
                    # get extent_poly of geom
                    extent_poly = Polygon.from_bbox(geom.extent)
                    polys.append(extent_poly)

                elif obj["type"] in ("Polygon", "LineString", "Point"):
                    # process "geometry"
                    geom = GEOSGeometry(json.dumps(obj))
                    # get extent_poly of geom
                    extent_poly = Polygon.from_bbox(geom.extent)
                    polys.append(extent_poly)
            return polys

        geojson_extent_polygons = []
        if isinstance(parsed_geojson, list):
            for obj in parsed_geojson:
                polygons = get_polygons(obj)
                geojson_extent_polygons.extend(polygons)
        elif "type" in parsed_geojson and parsed_geojson["type"] in ("FeatureCollection", "Feature", "Polygon", "LineString", "Point"):
            polygons = get_polygons(parsed_geojson)
            geojson_extent_polygons.extend(polygons)


        # combine the extent polygons and take their overall extent
        mploy = MultiPolygon(geojson_extent_polygons)
        mploy.srid = WGS84_SRID  # Expected...
        mploy.transform(METERS_SRID)
        poly = Polygon.from_bbox(mploy.extent)
        poly.srid  = METERS_SRID
        return poly
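A condensed sketch of the bounds logic with two invented GeoJSON geometries: take the bbox of each geometry, gather the bboxes into a MultiPolygon, and read off the overall extent (GeoJSON parsing via GEOSGeometry needs GDAL; the reprojection step is left out here):

import json

from django.contrib.gis.geos import GEOSGeometry, MultiPolygon, Polygon

features = [
    {"type": "Polygon",
     "coordinates": [[[4.0, 51.0], [5.0, 51.0], [5.0, 52.0], [4.0, 51.0]]]},
    {"type": "LineString",
     "coordinates": [[4.5, 51.5], [6.0, 53.0]]},
]

extent_polys = [Polygon.from_bbox(GEOSGeometry(json.dumps(obj)).extent)
                for obj in features]

bounds = MultiPolygon(extent_polys)
print(Polygon.from_bbox(bounds.extent).extent)   # (4.0, 51.0, 6.0, 53.0)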
Example #39
    def get_bounds(content, content_pois, content_areas):
        """ gets map bounds. first try to get content bounds,
            last will get content_pois extent """
        points = [c.main_location for c in content_pois if c.has_location()]

        areas = []
        for c in content_areas:
            if hasattr(c, 'location'):
                if getattr(c.location, 'borders', None):
                    areas.append(c.location.borders)
            elif getattr(c, 'borders', None):
                areas.append(c.borders)
        if points and areas:
            mp = MultiPoint(points)
            ap = MultiPolygon(areas)
            return ap.simplify().union(mp).extent
        elif len(points) > 1:
            mp = MultiPoint(points)
            return mp.extent
        elif points:
            # if we have just one point, expand the borders
            # to avoid too much zoom
            content = content_pois[0]
        elif areas:
            ap = MultiPolygon(areas)
            return ap.extent
        elif content and content.main_location:
            return (content.main_location.x - 0.002,
                    content.main_location.y - 0.002,
                    content.main_location.x + 0.002,
                    content.main_location.y + 0.002)
        else:
            # default bounding box to avoid google maps failures
            return (settings.DEFAULT_LONGITUDE - 1.0,
                    settings.DEFAULT_LATITUDE - 1.0,
                    settings.DEFAULT_LONGITUDE + 1.0,
                    settings.DEFAULT_LATITUDE + 1.0)
Example #40
def remove_sliver_polygons(geom, srid=WEB_MERCATOR_SRID, minarea_sqm=10):
    """Routine to remove sliver polygons from a multipolygon object"""

    # Transform to projected coordinate system
    clean_geom = geom.transform(srid, clone=True)

    # Split into components
    components = []
    while clean_geom:
        components.append(clean_geom.pop())

    # Filter components by size
    big_components = [x for x in components if x.area > minarea_sqm]

    # If small area was found, update geom with larger components
    if(len(big_components) < len(components)):
        geom = MultiPolygon(big_components, srid=srid)
        geom.transform(WEB_MERCATOR_SRID)

    # Make sure it's a proper MultiPolygon
    geom = convert_to_multipolygon(geom)

    # Return cleaned geometry
    return geom
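A standalone sketch of the size filter with made-up geometries already in a metric projection, so no reprojection (and no GDAL) is needed:

from django.contrib.gis.geos import MultiPolygon, Polygon

big = Polygon(((0, 0), (100, 0), (100, 100), (0, 100), (0, 0)))
sliver = Polygon(((200, 0), (200.001, 0), (200.001, 0.001), (200, 0)))
geom = MultiPolygon(big, sliver, srid=3857)

minarea_sqm = 10
big_components = [comp for comp in geom if comp.area > minarea_sqm]
if len(big_components) < len(geom):
    geom = MultiPolygon(big_components, srid=geom.srid)

print(geom.num_geom, geom.area)   # 1 10000.0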
Example #41
 def run(self):
     leaseblocks = LeaseBlock.objects.filter(prot_numb__in=self.leaseblock_ids.split(','))
     leaseblock_geoms = [lb.geometry for lb in leaseblocks]
     
     from django.contrib.gis.geos import MultiPolygon
     dissolved_geom = leaseblock_geoms[0]
     for geom in leaseblock_geoms:
         try:
             dissolved_geom = dissolved_geom.union(geom)
         except:
             pass
     
     if type(dissolved_geom) == MultiPolygon:
         self.geometry_actual = dissolved_geom
     else:
         self.geometry_actual = MultiPolygon(dissolved_geom, srid=dissolved_geom.srid)
     return True  
Example #42
    def run(self):
        leaseblocks = LeaseBlock.objects.filter(prot_numb__in=self.leaseblock_ids.split(','))

        if not leaseblocks.exists():
            # We can't return False, because we'll get a collection without
            # any lease blocks, which doesn't work on the client side.
            # Throw an exception instead.
            # TODO: Make the client handle the "selection didn't work" case.
            # This is most likely because there are no lease blocks in the db.
            raise Exception("No lease blocks available with the current selection.")

        dissolved_geom = leaseblocks.aggregate(Union('geometry'))
        if dissolved_geom:
            dissolved_geom = dissolved_geom['geometry__union']
        else:
            raise Exception("No lease blocks available with the current filters.")

        if type(dissolved_geom) == MultiPolygon:
            self.geometry_actual = dissolved_geom
        else:
            self.geometry_actual = MultiPolygon(dissolved_geom, srid=dissolved_geom.srid)
        return True  
Example #43
def _combine(geometries):
    """
    Try to combine geometries using as little computation as possible.

    If there are both lines and polygons, all lines are first buffered
    one by one into polygons, and then added to a MultiPolygon together
    with the other polygons.

    If there are only polygons, they are combined in a single
    multipolygon.

    If there are only lines, they are combined in a single multilinestring
    """
    if len(geometries) == 1:
        return geometries[0]

    lines = [g for g in geometries
             if isinstance(g, (LineString))]
    multilines = [g for g in geometries
             if isinstance(g, (MultiLineString))]
    polygons = [g for g in geometries
                if isinstance(g, (Polygon))]
    multipolygons = [g for g in geometries
                if isinstance(g, (MultiPolygon))]

    if polygons or multipolygons:
        if lines or multilines:
            # All kinds of stuff present
            lines.extend([l for ml in multilines for l in ml])
            print 'buffering lines'
            for l in lines:
                polygons.append(l.buffer(0.0001, 2))
        result = MultiPolygon(polygons)
        for mp in multipolygons:
            result.extend(mp)
    else:
        # Only lines or multilines
        result = MultiLineString(lines)
        for ml in multilines:
            result.extend(ml)

    return result
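A short usage sketch of the line-buffering branch with invented inputs (buffer(0.0001, 2) keeps the approximation coarse, matching the call above):

from django.contrib.gis.geos import LineString, MultiPolygon, Polygon

geometries = [
    Polygon(((0, 0), (1, 0), (1, 1), (0, 0))),
    LineString((2, 2), (3, 3)),
]

polygons = [g for g in geometries if isinstance(g, Polygon)]
lines = [g for g in geometries if isinstance(g, LineString)]

for line in lines:
    polygons.append(line.buffer(0.0001, 2))   # widen each line into a thin polygon

result = MultiPolygon(polygons)
print(result.num_geom)   # 2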
Example #44
class FireDepartment(RecentlyUpdatedMixin, models.Model):
    """
    Models Fire Departments.
    """

    DEPARTMENT_TYPE_CHOICES = [
        ('Volunteer', 'Volunteer'),
        ('Mostly Volunteer', 'Mostly Volunteer'),
        ('Career', 'Career'),
        ('Mostly Career', 'Mostly Career'),
    ]

    REGION_CHOICES = [
        ('Northeast', 'Northeast'),
        ('West', 'West'),
        ('South', 'South'),
        ('Midwest', 'Midwest'),
        (None, '')
    ]

    POPULATION_CLASSES = [
        (0, 'Population less than 2,500.'),
        (1, 'Population between 2,500 and 4,999.'),
        (2, 'Population between 5,000 and 9,999.'),
        (3, 'Population between 10,000 and 24,999.'),
        (4, 'Population between 25,000 and 49,999.'),
        (5, 'Population between 50,000 and 99,999.'),
        (6, 'Population between 100,000 and 249,999.'),
        (7, 'Population between 250,000 and 499,999.'),
        (8, 'Population between 500,000 and 999,999.'),
        (9, 'Population greater than 1,000,000.'),
    ]

    created = models.DateTimeField(auto_now=True)
    modified = models.DateTimeField(auto_now=True)
    fdid = models.CharField(max_length=10)
    name = models.CharField(max_length=100)
    headquarters_address = models.ForeignKey(Address, null=True, blank=True, related_name='firedepartment_headquarters')
    mail_address = models.ForeignKey(Address, null=True, blank=True)
    headquarters_phone = PhoneNumberField(null=True, blank=True)
    headquarters_fax = PhoneNumberField(null=True, blank=True)
    department_type = models.CharField(max_length=20, choices=DEPARTMENT_TYPE_CHOICES, null=True, blank=True)
    organization_type = models.CharField(max_length=75, null=True, blank=True)
    website = models.URLField(null=True, blank=True)
    state = models.CharField(max_length=2)
    region = models.CharField(max_length=20, choices=REGION_CHOICES, null=True, blank=True)
    geom = models.MultiPolygonField(null=True, blank=True)
    objects = CalculationManager()
    priority_departments = PriorityDepartmentsManager()
    dist_model_score = models.FloatField(null=True, blank=True, editable=False, db_index=True)

    risk_model_deaths = models.FloatField(null=True, blank=True, db_index=True,
                                          verbose_name='Predicted deaths per year.')

    risk_model_injuries = models.FloatField(null=True, blank=True, db_index=True,
                                            verbose_name='Predicted injuries per year.')

    risk_model_fires = models.FloatField(null=True, blank=True, db_index=True,
                                         verbose_name='Predicted number of fires per year.')

    risk_model_fires_size0 = models.FloatField(null=True, blank=True, db_index=True,
                                               verbose_name='Predicted number of size 0 fires.')

    risk_model_fires_size0_percentage = models.FloatField(null=True, blank=True,
                                                          verbose_name='Percentage of size 0 fires.')

    risk_model_fires_size1 = models.FloatField(null=True, blank=True, db_index=True,
                                               verbose_name='Predicted number of size 1 fires.')

    risk_model_fires_size1_percentage = models.FloatField(null=True, blank=True,
                                                          verbose_name='Percentage of size 1 fires.')

    risk_model_fires_size2 = models.FloatField(null=True, blank=True, db_index=True,
                                               verbose_name='Predicted number of size 2 fires.')

    risk_model_fires_size2_percentage = models.FloatField(null=True, blank=True,
                                                          verbose_name='Percentage of size 2 fires.')

    government_unit = RelatedObjectsDescriptor()
    population = models.IntegerField(null=True, blank=True)
    population_class = models.IntegerField(null=True, blank=True, choices=POPULATION_CLASSES)
    featured = models.BooleanField(default=False, db_index=True)

    class Meta:
        ordering = ('name',)
        index_together = [
            ['population', 'id', 'region'],
            ['population', 'region']
        ]

    @property
    def headquarters_geom(self):
        return getattr(self.headquarters_address, 'geom', None)

    @property
    def predicted_fires_sum(self):
        """
        Convenience method to sum
        """
        if self.risk_model_fires_size0 is None and self.risk_model_fires_size1 is None \
                and self.risk_model_fires_size2 is None:
            return

        return (self.risk_model_fires_size0 or 0) + (self.risk_model_fires_size1 or 0) + \
               (self.risk_model_fires_size2 or 0)

    @property
    def size2_and_greater_sum(self):
        """
        Convenience method to sum
        """
        if self.risk_model_fires_size1 is None and self.risk_model_fires_size2 is None:
            return

        return (self.risk_model_fires_size1 or 0) + (self.risk_model_fires_size2 or 0)

    @property
    def size2_and_greater_percentile_sum(self):
        """
        Convenience method to sum
        """
        if self.risk_model_fires_size1_percentage is None and self.risk_model_fires_size2_percentage is None:
            return

        return (self.risk_model_fires_size1_percentage or 0) + (self.risk_model_fires_size2_percentage or 0)

    @property
    def deaths_and_injuries_sum(self):

        if self.risk_model_deaths is None and self.risk_model_injuries is None:
            return

        return (self.risk_model_deaths or 0) + (self.risk_model_injuries or 0)

    @cached_property
    def population_class_stats(self):
        """
        Returns summary statistics for calculation fields in the same population class.
        """

        if not self.population_class:
            return []

        cache_key = 'population_class_{0}_stats'.format(self.population_class)
        cached = cache.get(cache_key)

        if cached:
            return cached

        fields = ['dist_model_score', 'risk_model_fires', 'risk_model_deaths_injuries_sum',
                  'risk_model_size1_percent_size2_percent_sum', 'residential_fires_avg_3_years']
        aggs = []

        for field in fields:
            aggs.append(Min(field))
            aggs.append(Max(field))
            aggs.append(Avg(field))

        results = self.population_metrics_table.objects.filter(population_class=self.population_class).aggregate(*aggs)
        cache.set(cache_key, results, timeout=60 * 60 * 24)
        return results

    def report_card_scores(self):
        if not self.population_metrics_table:
            return

        row = self.population_metrics_row

        from .managers import Ntile
        from django.db.models import When, Case, Q
        qs = self.population_metrics_table.objects.filter(risk_model_fires_quartile=row.risk_model_fires_quartile)
        qs = qs.annotate(**{'dist_quartile': Case(When(**{'dist_model_score__isnull': False, 'then': Ntile(4, output_field=models.IntegerField(), order_by='dist_model_score')}), output_field=models.IntegerField(), default=None)})
        qs = qs.annotate(**{'size2_plus_quartile': Case(When(**{'risk_model_size1_percent_size2_percent_sum_quartile__isnull': False, 'then': Ntile(4, output_field=models.IntegerField(), order_by='risk_model_size1_percent_size2_percent_sum_quartile')}), output_field=models.IntegerField(), default=None)})
        qs = qs.annotate(**{'deaths_injuries_quartile': Case(When(**{'risk_model_deaths_injuries_sum_quartile__isnull': False, 'then': Ntile(4, output_field=models.IntegerField(), order_by='risk_model_deaths_injuries_sum_quartile')}), output_field=models.IntegerField(), default=None)})
        qs = qs.filter(id=self.id)
        return qs


    @property
    def population_metrics_table(self):
        """
        Returns the appropriate population metrics table for this object.
        """
        try:
            return get_model('firestation.PopulationClass{0}Quartile'.format(self.population_class))
        except LookupError:
            return None

    @cached_property
    def population_metrics_row(self):
        """
        Returns the matching row from the population metrics table.
        """

        population_table = self.population_metrics_table

        if not population_table:
            return

        return population_table.objects.get(id=self.id)

    @property
    def government_unit_objects(self):
        """
        Memoize the government_unit generic key lookup.
        """
        if not getattr(self, '_government_unit_objects', None):
            self._government_unit_objects = self.government_unit.all().generic_objects()

        return self._government_unit_objects

    @property
    def fips(self):
        objs = self.government_unit_objects

        if objs:
            return [obj.fips for obj in objs if hasattr(obj, 'fips')]

        return []

    @property
    def geom_area(self):
        """
        Project the department's geometry into the North America Lambert Conformal Conic projection.
        Returns the area in km2.
        """
        if self.geom:
            try:
                return self.geom.transform(102009, clone=True).area / 1000000
            except:
                return

    @cached_property
    def nfirs_deaths_and_injuries_sum(self):
        return self.nfirsstatistic_set.filter(Q(metric='civilian_casualties') | Q(metric='firefighter_casualties'),
                                              fire_department=self,
                                              year__gte=2010).aggregate(Avg('count'))

    @cached_property
    def residential_fires_3_year_avg(self):

        if self.population_metrics_row:
            return self.population_metrics_row.residential_fires_avg_3_years

        return self.nfirsstatistic_set.filter(fire_department=self,
                                              metric='residential_structure_fires',
                                              year__gte=2010).aggregate(Avg('count'))

    def get_population_class(self):
        """
        Returns the population class of a department based on NFPA community sizes categories as an integer.

        9: > 1,000,000
        8: (500000, 999999)
        7: (250000, 499999)
        6: (100000, 249999)
        5: (50000, 99999)
        4: (25000, 49999)
        3: (10000, 24999)
        2: (5000, 9999)
        1: (2500, 4999)
        0: < 2500
        """
        if self.population is None:
            return

        if self.population < 2500:
            return 0

        if self.population >= 1000000:
            return 9

        community_sizes = [
                (500000, 999999),
                (250000, 499999),
                (100000, 249999),
                (50000, 99999),
                (25000, 49999),
                (10000, 24999),
                (5000, 9999),
                (2500, 4999)]
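        # Illustration (hypothetical inputs): a department protecting 62,000
        # people falls in the (50000, 99999) bucket, so the loop below returns
        # class 5; one protecting 1,200 people already returned 0 above.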

        for clazz, min_max in zip(reversed(range(1, 9)), community_sizes):
            if min_max[0] <= self.population <= min_max[1]:
                return clazz

    @property
    def similar_departments(self, ignore_regions_min=1000000):
        """
        Identifies similar departments based on the protected population size and region.

        Note: accessed as a property, so ignore_regions_min always takes its default value.
        """

        params = {}

        if self.population >= 1000000:
            params['population__gte'] = 1000000

        elif self.population < 2500:
            params['population__lt'] = 2500

        else:
            community_sizes = [
                (500000, 999999),
                (250000, 499999),
                (100000, 249999),
                (50000, 99999),
                (25000, 49999),
                (10000, 24999),
                (5000, 9999),
                (2500, 4999)]

            for lower_bound, upper_bound in community_sizes:
                if lower_bound <= self.population <= upper_bound:
                    params['population__lte'] = upper_bound
                    params['population__gte'] = lower_bound

        similar = FireDepartment.objects.filter(**params)\
            .exclude(id=self.id)\
            .extra(select={'difference': "abs(population - %s)"}, select_params=[self.population])\
            .extra(order_by=['difference'])

        # Large departments may not have similar departments in their region.
        if self.population < ignore_regions_min:
            similar = similar.filter(region=self.region)

        return similar

    @property
    def thumbnail_name(self):
        return slugify(' '.join(['us', self.state, self.name])) + '.jpg'

    @property
    def thumbnail_name_no_marker(self):
        return slugify(' '.join(['us', self.state, self.name, 'no marker'])) + '.jpg'

    @property
    def thumbnail(self):
        return 'https://s3.amazonaws.com/firecares-static/department-thumbnails/{0}'.format(self.thumbnail_name)

    @property
    def thumbnail_no_marker(self):
        return 'https://s3.amazonaws.com/firecares-static/department-thumbnails/{0}' \
            .format(self.thumbnail_name_no_marker)

    def generate_thumbnail(self, marker=True):
        geom = None

        if self.geom:
            geom = self.geom.centroid
        elif self.headquarters_address and self.headquarters_address.geom:
            geom = self.headquarters_address.geom
        else:
            return '/static/firestation/theme/assets/images/content/property-1.jpg'

        if marker and geom:
            marker = 'pin-l-embassy+0074D9({geom.x},{geom.y})/'.format(geom=geom)
        else:
            # avoid interpolating the boolean (e.g. "False") into the URL
            marker = ''

        return 'http://api.tiles.mapbox.com/v4/garnertb.mmlochkh/{marker}' \
               '{geom.x},{geom.y},8/500x300.png?access_token={access_token}'.format(marker=marker,
                                                                                    geom=geom,
                                                                                    access_token=getattr(settings, 'MAPBOX_ACCESS_TOKEN', ''))

    def set_geometry_from_government_unit(self):
        objs = self.government_unit_objects

        if objs:
            for g in [x for x in objs if getattr(x, 'geom', None)]:
                self.geom = MultiPolygon(self.geom.union(g.geom))
            self.save()

    def set_population_from_government_unit(self):
        """
        Stores the population of government units on the FD object to speed up querying.
        """
        objs = self.government_unit_objects

        if objs:

            for gov_unit in objs:
                pop = getattr(gov_unit, 'population', None)

                if pop is not None:
                    if self.population is None:
                        self.population = 0

                    self.population += pop
        else:
            self.population = None

        self.save()

    @classmethod
    def get_histogram(cls, field, bins=400):
        hist = histogram(list(cls.objects.filter(**{'{0}__isnull'.format(field): False})
                         .values_list(field, flat=True)), bins=bins)
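        # Shape of the result (hypothetical numbers), assuming `histogram`
        # follows the numpy (counts, bin_edges) convention: a JSON array of
        # [bin_left_edge, count] pairs, e.g. '[[0,12],[250,7],...]'.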
        return json.dumps(zip(hist[1], hist[0]), separators=(',', ':'))

    def set_region(self, region):
        validate_choice(FireDepartment.REGION_CHOICES)(region)
        self.region = region
        self.save()

    @cached_property
    def description(self):
        """
        A text description of the department used for displaying on the client side.
        """
        try:
            name = self.name

            if not self.name.lower().endswith('department') and not self.name.lower().endswith('district'):
                name += ' fire department'

            return "The {name} is a {department_type} department located in the {object.region} NFPA region and headquartered in " \
                   "{object.headquarters_address.city}, {object.headquarters_address.state_province}."\
                .format(name=name,
                        department_type=self.department_type.lower(),
                        object=self).strip()
        except:
            return 'No description for Fire Department'

    def residential_structure_fire_counts(self):
        return self.nfirsstatistic_set.filter(metric='residential_structure_fires')\
            .extra(select={
                    'year_max': 'SELECT MAX(COUNT) FROM firestation_nfirsstatistic b WHERE b.year = firestation_nfirsstatistic.year and b.metric=firestation_nfirsstatistic.metric'
                   })\
            .extra(select={
                    'year_min': 'SELECT MIN(COUNT) FROM firestation_nfirsstatistic b WHERE b.year = firestation_nfirsstatistic.year and b.metric=firestation_nfirsstatistic.metric'
                   })

    @classmethod
    def load_from_usfa_csv(cls):
        """
        Loads Fire Departments from http://apps.usfa.fema.gov/census-download.
        """
        us, _ = Country.objects.get_or_create(name='United States of America', iso_code='US')

        with open(os.path.join(os.path.dirname(__file__), 'scripts/usfa-census-national.csv'), 'r') as csvfile:

            # This only runs once, since there isn't a good key to identify duplicates
            if not cls.objects.all().count():
                reader = csv.DictReader(csvfile)
                counter = 0
                for row in reader:
                    # only run once.
                    hq_address_params = {}
                    hq_address_params['address_line1'] = row.get('HQ Addr1')
                    hq_address_params['address_line2'] = row.get('HQ Addr2')
                    hq_address_params['city'] = row.get('HQ City')
                    hq_address_params['state_province'] = row.get('HQ State')
                    hq_address_params['postal_code'] = row.get('HQ Zip')
                    hq_address_params['country'] = us
                    headquarters_address, _ = Address.objects.get_or_create(**hq_address_params)
                    headquarters_address.save()

                    mail_address_params = {}
                    mail_address_params['address_line1'] = row.get('Mail Addr1')
                    mail_address_params['address_line2'] = row.get('Mail Addr2') or row.get('Mail PO Box')
                    mail_address_params['city'] = row.get('Mail City')
                    mail_address_params['state_province'] = row.get('Mail State')
                    mail_address_params['postal_code'] = row.get('Mail Zip')
                    mail_address_params['country'] = us
                    mail_address, _ = Address.objects.get_or_create(**mail_address_params)
                    mail_address.save()

                    params = {}
                    params['fdid'] = row.get('FDID')
                    params['name'] = row.get('Fire Dept Name')
                    params['headquarters_phone'] = row.get('HQ Phone')
                    params['headquarters_fax'] = row.get('HQ Fax')
                    params['department_type'] = row.get('Dept Type')
                    params['organization_type'] = row.get('Organization Type')
                    params['website'] = row.get('Website')
                    params['headquarters_address'] = headquarters_address
                    params['mail_address'] = mail_address
                    params['state'] = row.get('HQ State')

                    cls.objects.create(**params)
                    counter += 1

                assert counter == cls.objects.all().count()

    @cached_property
    def slug(self):
        return slugify(self.name)

    def get_absolute_url(self):
        return reverse('firedepartment_detail_slug', kwargs=dict(pk=self.id, slug=self.slug))

    def find_jurisdiction(self):
        from firecares.usgs.models import CountyorEquivalent, IncorporatedPlace, UnincorporatedPlace

        counties = CountyorEquivalent.objects.filter(state_name='Virginia')
        for county in counties:
            incorporated = IncorporatedPlace.objects.filter(geom__intersects=county.geom)
            unincoporated = UnincorporatedPlace.objects.filter(geom__intersects=county.geom)
            station = FireStation.objects.filter(geom__intersects=county.geom)

            print 'County', county.name
            print 'Incorporated Place', incorporated.count()
            print 'Unincorporated Place', unincoporated.count()
            print 'Stations:', station

    def __unicode__(self):
        return self.name
Ejemplo n.º 45
0
    def post(self, request, *args, **kwargs):
        if request.is_ajax():

            cur_shp_id = kwargs['pk']
            cur_shp = Shapefile.objects.get(id=cur_shp_id)

            # [A] check if features for this shp already exist in helper table
            if cur_shp.feature_set.count() == HelperDitchesNumber.objects \
                    .filter(shapefile_id=cur_shp_id) \
                    .count():
                cur_feat = HelperDitchesNumber.objects.filter(
                    shapefile_id=cur_shp_id)
            else:
                # create helping features and fill in perimeter for each
                cur_shp_geom = get_geos_geometry(cur_shp)
                cur_shp_geom.set_srid(4326)
                cur_shp_geom.transform(3857)
                for feature in cur_shp_geom:
                    if feature.geom_type == 'Polygon':
                        feature = MultiPolygon(feature)

                    # get perimeter in the Shapefile's projection; clone first
                    # so `feature` itself stays in web mercator for the row below
                    proj_feature = feature.clone()
                    proj_feature.set_srid(cur_shp_geom.srid)
                    proj_feature.transform(cur_shp.proj)

                    new_feat = HelperDitchesNumber(
                        poly=feature,
                        shapefile_id=cur_shp_id,
                        perimeter=proj_feature.length)
                    new_feat.save()

                cur_shp.stat_ditch_comp = True
                cur_shp.save()

                cur_feat = HelperDitchesNumber.objects.filter(
                    shapefile_id=cur_shp_id)

            # [B] check if this shapefile has custom class number defined
            if cur_shp.classes:
                class_num = cur_shp.classes
            else:
                class_num = settings.GEOSTAT_SETTINGS[
                    'jenk_natural_breaks_classes']

            # [C] get all perimeters as list
            perimeters = cur_feat.values_list('perimeter', flat=True)
            perim_list = []
            for x in perimeters:
                perim_list.append(x)

            # [D] calculate Jenks Natural Breaks, save in shapefile and features
            jnb_classes_list = get_jenks_breaks(perim_list, int(class_num))
            Shapefile.objects.filter(id=cur_shp_id) \
                .update(jnb=jnb_classes_list)

            # [E] fill in the class for each feature of the helper layer
            for feature in cur_feat:
                class_val = classify(feature.perimeter, jnb_classes_list)
                feature.class_n = class_val
                feature.save()

            context = context_results(cur_shp_id)
            return render_to_response(
                'appgeostat/shp_detail_table.html', context)
Ejemplo n.º 46
0
class School(models.Model):
    """ School """
    name = models.CharField(max_length=200)
    slug = models.SlugField(max_length=200, blank=True, null=True)

    schid = models.CharField('School ID', max_length=8, blank=True, null=True, unique=True)
    address = models.CharField(max_length=150, blank=True, null=True)
    town_mail = models.CharField(max_length=25, blank=True, null=True)
    town = models.CharField(max_length=25, blank=True, null=True)
    state = models.CharField(max_length=2, blank=True, null=True)
    zip = models.CharField(max_length=10, blank=True, null=True)
    principal = models.CharField(max_length=50, blank=True, null=True)
    phone = models.CharField(max_length=15, blank=True, null=True)
    fax = models.CharField(max_length=15, blank=True, null=True)
    grades = models.CharField(max_length=70, blank=True, null=True)
    schl_type = models.CharField(max_length=3, blank=True, null=True)
    districtid = models.ForeignKey('District', blank=True, null=True)

    survey_incentive = models.TextField(blank=True, null=True)
    survey_active = models.BooleanField('Is survey active?')

    # GeoDjango
    geometry = models.PointField(srid=26986)
    shed_05 = models.MultiPolygonField(srid=26986, null=True, blank=True)
    shed_10 = models.MultiPolygonField(srid=26986, null=True, blank=True)
    shed_15 = models.MultiPolygonField(srid=26986, null=True, blank=True)
    shed_20 = models.MultiPolygonField(srid=26986, null=True, blank=True)
    objects = CustomManager()

    def __unicode__(self):
        return self.name

    @property
    def district(self):
        return self.districtid

    @permalink
    def get_absolute_url(self):
        return ('survey_school_form', None, {'school_slug': self.slug, 'district_slug': self.districtid.slug})

    def get_intersections(self):
        #school_circle = self.geometry.buffer(5000)
        #intersections = Intersection.objects.filter(geometry__intersects=school_circle)
        relation = SchoolTown.objects.get(schid=self.schid)
        intersections = Intersection.objects.filter(town_id=relation.town_id)
        return intersections

    def update_sheds(self):
        from maps import get_sheds
        sheds = get_sheds(self.id)

        self.shed_05 = MultiPolygon(GEOSGeometry(sheds[0.5]))
        self.shed_10 = MultiPolygon(GEOSGeometry(sheds[1.0]))
        self.shed_15 = MultiPolygon(GEOSGeometry(sheds[1.5]))
        self.shed_20 = MultiPolygon(GEOSGeometry(sheds[2.0]))
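        # Each shed becomes a ring: the difference of successive travel-time
        # polygons. Assigning a non-MultiPolygon result (e.g. an empty
        # GeometryCollection) to the MultiPolygonField proxy presumably raises
        # TypeError, which is what the try/except blocks below rely on.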

        g = self.shed_20.difference(self.shed_15)
        try:
            self.shed_20 = g
        except TypeError:
            if g.area == 0:
                self.shed_20 = None

        g = self.shed_15.difference(self.shed_10)
        try:
            self.shed_15 = g
        except TypeError:
            if g.area == 0:
                self.shed_15 = None

        g = self.shed_10.difference(self.shed_05)
        try:
            self.shed_10 = g
        except TypeError:
            if g.area == 0:
                self.shed_10 = None


    class Meta:
        ordering = ['name']
Ejemplo n.º 47
0
class LeaseBlockSelection(Analysis):
    leaseblock_ids = models.TextField()
    description = models.TextField(null=True, blank=True)
    geometry_actual = models.MultiPolygonField(srid=settings.GEOMETRY_DB_SRID,
        null=True, blank=True, verbose_name="Lease Block Selection Geometry")
    
    def serialize_attributes(self):
        blocks = LeaseBlock.objects.filter(prot_numb__in=self.leaseblock_ids.split(','))

        def mean(data):
            return sum(data) / float(len(data))

        if not blocks.exists():
            return {'event': 'click',
                    'attributes': [{'title': 'Number of blocks', 'data': 0}],
                    'report_values': {}}

        report_values = {
            'wind-speed': {
                'min': self.reduce(min,
                       [b.min_wind_speed_rev for b in blocks], digits=3, offset=-0.125),
                'max': self.reduce(max,
                       [b.max_wind_speed_rev for b in blocks], digits=3, offset=0.125),
                'avg': self.reduce(mean,
                       [b.avg_wind_speed for b in blocks], digits=3),
                'selection_id': self.uid },

            'distance-to-substation': {
                'min': self.reduce(min,
                       [b.substation_min_distance for b in blocks], digits=0),
                'max': self.reduce(max,
                       [b.substation_max_distance for b in blocks], digits=0),
                'avg': self.reduce(mean,
                       [b.substation_mean_distance for b in blocks], digits=1),
                'selection_id': self.uid },

            'distance-to-awc': {
                'min': self.reduce(min,
                       [b.awc_min_distance for b in blocks], digits=0),
                'max': self.reduce(max,
                       [b.awc_max_distance for b in blocks], digits=0),
                'avg': self.reduce(mean,
                       [b.awc_avg_distance for b in blocks], digits=1),
                'selection_id': self.uid },

            'distance-to-shipping': {
                'min': self.reduce(min,
                       [b.tsz_min_distance for b in blocks], digits=0),
                'max': self.reduce(max,
                       [b.tsz_max_distance for b in blocks], digits=0),
                'avg': self.reduce(mean,
                       [b.tsz_mean_distance for b in blocks], digits=1),
                'selection_id': self.uid },

            'distance-to-shore': {
                'min': self.reduce(min,
                       [b.min_distance for b in blocks], digits=0),
                'max': self.reduce(max,
                       [b.max_distance for b in blocks], digits=0),
                'avg': self.reduce(mean,
                       [b.avg_distance for b in blocks], digits=1),
                'selection_id': self.uid },

            'depth': {
                # note: accounting for the issue in which max_depth
                # is actually a lesser depth than min_depth
                'min': -1 * self.reduce(max,
                       [b.max_depth for b in blocks], digits=0, handle_none=0),
                'max': -1 * self.reduce(min,
                       [b.min_depth for b in blocks], digits=0, handle_none=0),
                'avg': -1 * self.reduce(mean,
                       [b.avg_depth for b in blocks], digits=0, handle_none=0),
                'selection_id': self.uid }}

        attrs = (
            ('Average Wind Speed Range',
                '%(min)s to %(max)s m/s' % report_values['wind-speed']),
            ('Average Wind Speed',
                '%(avg)s m/s' % report_values['wind-speed']),
            ('Distance to Coastal Substation',
                '%(min)s to %(max)s miles' % report_values['distance-to-substation']),
            ('Average Distance to Coastal Substation',
                 '%(avg)s miles' % report_values['distance-to-substation']),
            ('Distance to Proposed AWC Hub',
                '%(min)s to %(max)s miles' % report_values['distance-to-awc']),
            ('Average Distance to Proposed AWC Hub',
                '%(avg)s miles' % report_values['distance-to-awc']),
            ('Distance to Ship Routing Measures',
                '%(min)s to %(max)s miles' % report_values['distance-to-shipping']),
            ('Average Distance to Ship Routing Measures',
                '%(avg)s miles' % report_values['distance-to-shipping']),
            ('Distance to Shore',
                '%(min)s to %(max)s miles' % report_values['distance-to-shore']),
            ('Average Distance to Shore',
                '%(avg)s miles' % report_values['distance-to-shore']),
            ('Depth',
                '%(min)s to %(max)s meters' % report_values['depth']),
            ('Average Depth',
                '%(avg)s meters' % report_values['depth']),
            ('Number of blocks',
                self.leaseblock_ids.count(',') + 1)
        )

        attributes = []
        for t, d in attrs:
            attributes.append({'title': t, 'data': d})

        return {'event': 'click',
                'attributes': attributes,
                'report_values': report_values}
    
    @staticmethod
    def reduce(func, data, digits=None, filter_null=True, handle_none='Unknown', offset=None):
        """
        self.reduce: LeaseBlock's custom reduce
        why not the built-in reduce()? 
            handles rounding, null values, practical defaults.

        Input:
            func : function that aggregates data to a single value
            data : list of values
        Returns: a single value or a sensible default 
                 with some presentation logic intermingled

        In the case of `None` in your list,
        you can either filter them out with `filter_null` (default) or
        you can bail and use `handle_none` 
            which either raises an exception or returns a default "null" value

        Rounding and offsetting by constant values are also handled 
        for backwards compatibility. 
        """
        if filter_null:
            # Filter the incoming data to remove "nulls"
            # remove anything that is not a number
            data = [x for x in data if isinstance(x, (int, long, float, complex))]

        # Deal with any remaining None values
        if not data or None in data:
            if isinstance(handle_none, Exception):
                # We raise the exception to be handled upstream
                raise handle_none
            else:
                # bail and return the `handle_none` object
                # used for presentation or upstream logic ("Unknown", 0 or None)
                return handle_none

        # Rounding and offsetting
        agg = func(data)
        if offset:
            agg = agg + offset
        if isinstance(digits, int):
            agg = round(agg, digits)

        return agg
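    # Worked examples of the helper above (hypothetical inputs):
    #   reduce(min, [3.2, None, 2.9], digits=3, offset=-0.125) -> 2.775
    #   reduce(max, [], handle_none=0)                          -> 0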
    
    def run(self):
        leaseblocks = LeaseBlock.objects.filter(prot_numb__in=self.leaseblock_ids.split(','))

        if not leaseblocks.exists():
            # We can't return False, because we'll get a collection without
            # any lease blocks, which doesn't work on the client side.
            # Throw an exception instead.
            # TODO: Make the client handle the "selection didn't work" case.
            # This is most likely because there are no lease blocks in the db.
            raise Exception("No lease blocks available with the current selection.")

        dissolved_geom = leaseblocks.aggregate(Union('geometry'))
        if dissolved_geom:
            dissolved_geom = dissolved_geom['geometry__union']
        else:
            raise Exception("No lease blocks available with the current filters.")

        if type(dissolved_geom) == MultiPolygon:
            self.geometry_actual = dissolved_geom
        else:
            self.geometry_actual = MultiPolygon(dissolved_geom, srid=dissolved_geom.srid)
        return True  
    
    def geojson(self, srid):
        props = get_properties_json(self)
        props['absolute_url'] = self.get_absolute_url()
        json_geom = self.geometry_actual.transform(srid, clone=True).json
        return get_feature_json(json_geom, json.dumps(props))
    
    class Options:
        verbose_name = 'Lease Block Selection'
        form = 'scenarios.forms.LeaseBlockSelectionForm'
        form_template = 'selection/form.html'
Ejemplo n.º 48
0
    def footprint_geom(self):
        # TODO: Check if extent of footprint
        e = self.geo_reference
        mp = MultiPolygon(Polygon.from_bbox((e.minx, e.miny, e.maxx, e.maxy)))
        mp.srid = e.srid
        return mp
Ejemplo n.º 49
0
def map_tile(request, layer_slug, boundary_slug, tile_zoom, tile_x, tile_y, format):
    if not has_imaging_library: raise Http404("Cairo is not available.")
    
    layer = get_object_or_404(MapLayer, slug=layer_slug)
    
    # Load basic parameters.
    try:
        size = int(request.GET.get('size', '256' if format not in ('json', 'jsonp') else '64'))
        if size not in (64, 128, 256, 512, 1024): raise ValueError()
        
        srs = int(request.GET.get('srs', '3857'))
    except ValueError:
        raise Http404("Invalid parameter.")
        
    db_srs, out_srs = get_srs(srs)
    
    # Get the bounding box for the tile, in the SRS of the output.
    
    try:
        tile_x = int(tile_x)
        tile_y = int(tile_y)
        tile_zoom = int(tile_zoom)
    except ValueError:
        raise Http404("Invalid parameter.")
    
    # Guess the world size. We need to know the size of the world in
    # order to locate the bounding box of any viewport at zoom levels
    # greater than zero.
    if "radius" not in request.GET:
        p = Point( (-90.0, 0.0), srid=db_srs.srid )
        p.transform(out_srs)
        world_left = p[0]*2
        world_top = -world_left
        world_size = -p[0] * 4.0
    else:
        p = Point((0,0), srid=out_srs.srid )
        p.transform(db_srs)
        p1 = Point([p[0] + 1.0, p[1] + 1.0], srid=db_srs.srid)
        p.transform(out_srs)
        p1.transform(out_srs)
        world_size = math.sqrt(abs(p1[0]-p[0])*abs(p1[1]-p[1])) * float(request.GET.get('radius', '50'))
        world_left = p[0] - world_size/2.0
        world_top = p[1] + world_size/2.0
    tile_world_size = world_size / math.pow(2.0, tile_zoom)
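    # Rough worked example, assuming the database SRS is geographic (4326),
    # the default srs=3857 output, and no "radius" parameter: world_size comes
    # out near 40075016.7 m (the Web Mercator extent), so at tile_zoom=2 each
    # tile spans roughly 10018754.2 m of world coordinates.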

    p1 = Point( (world_left + tile_world_size*tile_x, world_top - tile_world_size*tile_y) )
    p2 = Point( (world_left + tile_world_size*(tile_x+1), world_top - tile_world_size*(tile_y+1)) )
    bbox = Polygon( ((p1[0], p1[1]),(p2[0], p1[1]),(p2[0], p2[1]),(p1[0], p2[1]),(p1[0], p1[1])), srid=out_srs.srid )
    
    # A function to convert world coordinates in the output SRS into
    # pixel coordinates.
       
    blon1, blat1, blon2, blat2 = bbox.extent
    bx = float(size)/(blon2-blon1)
    by = float(size)/(blat2-blat1)
    def viewport(coord):
        # Convert the world coordinates to image coordinates according to the bounding box
        # (in output SRS).
        return float(coord[0] - blon1)*bx, (size-1) - float(coord[1] - blat1)*by
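    # e.g. the bbox's lower-left corner (blon1, blat1) maps to pixel
    # (0, size-1) and the upper-right corner maps to roughly (size, -1):
    # the y axis is flipped relative to world coordinates.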

    # Convert the bounding box to the database SRS.

    db_bbox = bbox.transform(db_srs, clone=True)
    
    # What is the width of a pixel in the database SRS? If it is smaller than
    # SIMPLE_SHAPE_TOLERANCE, load the simplified geometry from the database.
    
    shape_field = 'shape'
    pixel_width = (db_bbox.extent[2]-db_bbox.extent[0]) / size / 2
    if pixel_width > boundaries_settings.SIMPLE_SHAPE_TOLERANCE:
        shape_field = 'simple_shape'
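    # For instance, assuming the boundary shapes are stored in a geographic
    # SRS (degrees), a 256-px tile covering about 1 degree gives a pixel_width
    # of roughly 1/256/2 ~= 0.002 degrees, so 'simple_shape' is used only when
    # SIMPLE_SHAPE_TOLERANCE is below that.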

    # Query for any boundaries that intersect the bounding box.
    
    boundaries = Boundary.objects.filter(set=layer.boundaryset, shape__intersects=db_bbox)\
        .values("id", "slug", "name", "label_point", shape_field)
    if boundary_slug: boundaries = boundaries.filter(slug=boundary_slug)
    boundary_id_map = dict( (b["id"], b) for b in boundaries )
    
    if len(boundaries) == 0:
        if format == "svg":
            raise Http404("No boundaries here.")
        elif format in ("png", "gif"):
            # Send a 1x1 transparent image. Google is OK getting 404s for map tile images
            # but OpenLayers isn't. Maybe cache the image?
            im = cairo.ImageSurface(cairo.FORMAT_ARGB32, 1, 1)
            ctx = cairo.Context(im)
            buf = StringIO()
            im.write_to_png(buf)
            v = buf.getvalue()
            if format == "gif": v = convert_png_to_gif(v)
            r = HttpResponse(v, content_type='image/' + format)
            r["Content-Length"] = len(v)
            return r
        elif format == "json":
            # Send an empty "UTF-8 Grid"-like response.
            return HttpResponse('{"error":"nothing-here"}', content_type="application/json")
        elif format == "jsonp":
            # Send an empty "UTF-8 Grid"-like response.
            return HttpResponse(request.GET.get("callback", "callback") +  '({"error":"nothing-here"})', content_type="text/javascript")
    
    # Query for layer style information and then set it on the boundary objects.
    
    styles = layer.boundaries.filter(boundary__in=boundary_id_map.keys())
    for style in styles:
        boundary_id_map[style.boundary_id]["style"] = style
    
    # Create the image buffer.
    if format in ('png', 'gif'):
        im = cairo.ImageSurface(cairo.FORMAT_ARGB32, size, size)
    elif format == 'svg':
        buf = StringIO()
        im = cairo.SVGSurface(buf, size, size)
    elif format in ('json', 'jsonp'):
        # This is going to be a "UTF-8 Grid"-like response, but we generate that
        # info by first creating an actual image, with colors coded by index to
        # represent which boundary covers which pixels.
        im = cairo.ImageSurface(cairo.FORMAT_RGB24, size, size)

    # Color helpers.
    def get_rgba_component(c):
        return c if isinstance(c, float) else c/255.0
    def get_rgba_tuple(clr, alpha=.25):
        # Colors are specified as tuples/lists with 3 (RGB) or 4 (RGBA)
        # components. Components that are float values must be in the
        # range 0-1, while all other values are in the range 0-255.
        # Because .gif does not support partial transparency, alpha values
        # are forced to 1.
        return (get_rgba_component(clr[0]), get_rgba_component(clr[1]), get_rgba_component(clr[2]),
            get_rgba_component(clr[3]) if len(clr) == 4 and format != 'gif' else (alpha if format != 'gif' else 1.0))
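    # e.g. get_rgba_tuple((255, 0, 0)) -> (1.0, 0.0, 0.0, 0.25) for png/svg
    # output, while gif output forces the alpha to 1.0.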

    # Create the drawing surface.
    ctx = cairo.Context(im)
    ctx.select_font_face(maps_settings.MAP_LABEL_FONT, cairo.FONT_SLANT_NORMAL, cairo.FONT_WEIGHT_NORMAL)
    
    if format in ('json', 'jsonp'):
        # For the UTF-8 Grid response, turn off anti-aliasing since the color we draw to each pixel
        # is a code for what is there.
        ctx.set_antialias(cairo.ANTIALIAS_NONE)
    
    def max_extent(shape):
        a, b, c, d = shape.extent
        return max(c-a, d-b)
    
    # Transform the boundaries to output coordinates.
    draw_shapes = []
    for bdry in boundaries:
        if not "style" in bdry: continue # Boundary had no corresponding MapLayerBoundary
        
        shape = bdry[shape_field]
        
        # Simplify to the detail that could be visible in the output. Although
        # simplification may be a little expensive, drawing a more complex
        # polygon is even worse.
        try:
            shape = shape.simplify(pixel_width, preserve_topology=True)
        except: # GEOSException
            pass # try drawing original
        
        # Make sure the results are all MultiPolygons for consistency.
        if shape.__class__.__name__ == 'Polygon':
            shape = MultiPolygon((shape,), srid=db_srs.srid)
        else:
            # Be sure to override SRS (for Google, see above). This code may
            # never execute?
            shape = MultiPolygon(list(shape), srid=db_srs.srid)

        # Is this shape too small to be visible?
        ext_dim = max_extent(shape)
        if ext_dim < pixel_width:
            continue

        # Convert the shape to the output SRS.
        shape.transform(out_srs)
        
        draw_shapes.append( (len(draw_shapes), bdry, shape, ext_dim) )
        
    # Draw shading, for each linear ring of each polygon in the multipolygon.
    for i, bdry, shape, ext_dim in draw_shapes:
        if not bdry["style"].color and format not in ('json', 'jsonp'): continue
        for polygon in shape:
            for ring in polygon: # should just be one since no shape should have holes?
                color = bdry["style"].color
                
                if format in ('json', 'jsonp'):
                    # We're returning a "UTF-8 Grid" indicating which feature is at
                    # each pixel location on the grid. In order to compute the grid,
                    # we draw to an image surface with a distinct color for each feature.
                    # Then we convert the pixel data into the UTF-8 Grid format.
                    ctx.set_source_rgb(*[ (((i+1)/(256**exp)) % 256)/255.0 for exp in xrange(3) ])
                
                elif isinstance(color, (tuple, list)):
                    # Specify a 3/4-tuple (or list) for a solid color.
                    ctx.set_source_rgba(*get_rgba_tuple(color))
                        
                elif isinstance(color, dict):
                    # Specify a dict of the form { "color1": (R,G,B), "color2": (R,G,B) } to
                    # create a solid fill of color1 plus smaller stripes of color2.
                    if color.get("color", None) != None:
                        ctx.set_source_rgba(*get_rgba_tuple(color["color"]))
                    elif color.get("color1", None) != None and color.get("color2", None) != None:
                        pat = cairo.LinearGradient(0.0, 0.0, size, size)
                        for x in xrange(0,size, 32): # divisor of the size so gradient ends at the end
                            pat.add_color_stop_rgba(*([float(x)/size] + list(get_rgba_tuple(color["color1"], alpha=.3))))
                            pat.add_color_stop_rgba(*([float(x+28)/size] + list(get_rgba_tuple(color["color1"], alpha=.3))))
                            pat.add_color_stop_rgba(*([float(x+28)/size] + list(get_rgba_tuple(color["color2"], alpha=.4))))
                            pat.add_color_stop_rgba(*([float(x+32)/size] + list(get_rgba_tuple(color["color2"], alpha=.4))))
                        ctx.set_source(pat)
                    else:
                        continue # skip fill
                else:
                    continue # Unknown color data structure.
                ctx.new_path()
                for pt in ring.coords:
                    ctx.line_to(*viewport(pt))
                ctx.fill()
                
    # Draw outlines, for each linear ring of each polygon in the multipolygon.
    for i, bdry, shape, ext_dim in draw_shapes:
        if format in ('json', 'jsonp'): continue
        if ext_dim < pixel_width * 3: continue # skip outlines if too small
        color = bdry["style"].color
        for polygon in shape:
            for ring in polygon: # should just be one since no shape should have holes?
                ctx.new_path()
                for pt in ring.coords:
                    ctx.line_to(*viewport(pt))
                    
                if not isinstance(color, dict) or not "border" in color or not "width" in color["border"]:
                    if ext_dim < pixel_width * 60:
                        ctx.set_line_width(1)
                    else:
                        ctx.set_line_width(2.5)
                else:
                    ctx.set_line_width(color["border"]["width"])
                    
                if not isinstance(color, dict) or not "border" in color or not "color" in color["border"]:
                    ctx.set_source_rgba(.3,.3,.3, .75)  # grey, semi-transparent
                else:
                    ctx.set_source_rgba(*get_rgba_tuple(color["border"]["color"], alpha=.75))
                ctx.stroke_preserve()
                
    # Draw labels.
    for i, bdry, shape, ext_dim in draw_shapes:
        if format in ('json', 'jsonp'): continue
        if ext_dim < pixel_width * 20: continue
        
        color = bdry["style"].color
        if isinstance(color, dict) and "label" in color and color["label"] == None: continue
        
        # Get the location of the label stored in the database, or fall back to
        # GDAL routine point_on_surface to get a point quickly.
        if bdry["style"].label_point:
            # Override the SRS on the point (for Google, see above). Then transform
            # it to world coordinates.
            pt = Point(tuple(bdry["style"].label_point), srid=db_srs.srid)
            pt.transform(out_srs)
        elif bdry["label_point"]:
            # Same transformation as above.
            pt = Point(tuple(bdry["label_point"]), srid=db_srs.srid)
            pt.transform(out_srs)
        else:
            # No label_point is specified so try to find one by using the
            # point_on_surface to find a point that is in the shape and
            # in the viewport's bounding box.
            try:
                pt = bbox.intersection(shape).point_on_surface
            except:
                # Don't know why this would fail. Bad geometry of some sort.
                # But we really don't want to leave anything unlabeled so
                # try the center of the bounding box.
                pt = bbox.centroid
                if not shape.contains(pt):
                    continue
        
        # Transform to world coordinates and ensure it is within the bounding box.
        if not bbox.contains(pt):
            # If it's not in the bounding box and the shape occupies most of this
            # bounding box, try moving the point to somewhere in the current tile.
            try:
                inters = bbox.intersection(shape)
                if inters.area < bbox.area/3: continue
                pt = inters.point_on_surface
            except:
                continue
        pt = viewport(pt)
        
        txt = bdry["name"]
        if isinstance(bdry["style"].metadata, dict): txt = bdry["style"].metadata.get("label", txt)
        if ext_dim > size * pixel_width:
            ctx.set_font_size(18)
        else:
            ctx.set_font_size(12)
        x_off, y_off, tw, th = ctx.text_extents(txt)[:4]
        
        # Is it within the rough bounds of the shape and definitely the bounds of this tile?
        if tw < ext_dim/pixel_width/5 and th < ext_dim/pixel_width/5 \
            and pt[0]-x_off-tw/2-4 > 0 and pt[1]-th-4 > 0 and pt[0]-x_off+tw/2+7 < size and pt[1]+6 < size:
            # Draw the background rectangle behind the text.
            ctx.set_source_rgba(0,0,0,.55)  # black, some transparency
            ctx.new_path()
            ctx.line_to(pt[0]-x_off-tw/2-4,pt[1]-th-4)
            ctx.rel_line_to(tw+9, 0)
            ctx.rel_line_to(0, +th+8)
            ctx.rel_line_to(-tw-9, 0)
            ctx.fill()
            
            # Now a drop shadow (also is partially behind the first rectangle).
            ctx.set_source_rgba(0,0,0,.3)  # black, some transparency
            ctx.new_path()
            ctx.line_to(pt[0]-x_off-tw/2-4,pt[1]-th-4)
            ctx.rel_line_to(tw+11, 0)
            ctx.rel_line_to(0, +th+10)
            ctx.rel_line_to(-tw-11, 0)
            ctx.fill()
            
            # Draw the text.
            ctx.set_source_rgba(1,1,1,1)  # white
            ctx.move_to(pt[0]-x_off-tw/2,pt[1])
            ctx.show_text(txt)
                
    if format in ("png", "gif"):
        # Convert the image buffer to raw bytes.
        buf = StringIO()
        im.write_to_png(buf)
        v = buf.getvalue()
        if format == "gif": v = convert_png_to_gif(v)
        
        # Form the response.
        r = HttpResponse(v, content_type='image/' + format)
        r["Content-Length"] = len(v)
    
    elif format == "svg":
        im.finish()
        v = buf.getvalue()
        r = HttpResponse(v, content_type='image/svg+xml')
        r["Content-Length"] = len(v)
    
    elif format in ('json', 'jsonp'):
        # Get the bytes, which are RGBA sequences.
        buf1 = list(im.get_data())
        
        # Convert the 4-byte sequences back into integers that refer back to
        # the boundary list. Count the number of pixels for each shape.
        shapeidx = []
        shapecount = { }
        for i in xrange(0, size*size):
            b = ord(buf1[i*4+2])*(256**0) + ord(buf1[i*4+1])*(256**1) + ord(buf1[i*4+0])*(256**2)
            shapeidx.append(b)
            if b > 0: shapecount[b] = shapecount.get(b, 0) + 1
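        # Each decoded b should invert the per-channel RGB fill encoding used
        # when drawing above: it recovers i+1 for draw_shapes[i], and 0 means
        # no boundary covered that pixel.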
            
        # Assign low unicode code points to the most frequently occurring pixel values,
        # except always map zero to character 32.
        shapecode1 = { }
        shapecode2 = { }
        for k, count in sorted(shapecount.items(), key = lambda kv : kv[1]):
            b = len(shapecode1) + 32 + 1
            if b >= 34: b += 1
            if b >= 92: b += 1
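            # (the two skips above leave out '"' (34) and '\' (92), presumably
            # so the grid characters never need JSON escaping, as in the
            # UTF-8 Grid convention)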
            shapecode1[k] = b
            shapecode2[b] = draw_shapes[k-1]
            
        buf = ''
        if format == 'jsonp': buf += request.GET.get("callback", "callback") + "(\n"
        buf += '{"grid":['
        for row in xrange(size):
            if row > 0: buf += ",\n         "
            buf += json.dumps(u"".join(unichr(shapecode1[k] if k != 0 else 32) for k in shapeidx[row*size:(row+1)*size]))
        buf += "],\n"
        buf += ' "keys":' + json.dumps([""] + [shapecode2[k][1]["slug"] for k in sorted(shapecode2)], separators=(',', ':')) + ",\n"
        buf += ' "data":' + json.dumps(dict( 
                    (shapecode2[k][1]["slug"], {
                            "name": shapecode2[k][1]["name"],
                    })
                    for k in sorted(shapecode2)), separators=(',', ':'))
        buf += "}"
        if format == 'jsonp': buf += ")"
        
        if format == "json":
            r = HttpResponse(buf, content_type='application/json')
        else:
            r = HttpResponse(buf, content_type='text/javascript')
    
    return r
Ejemplo n.º 50
0
class LeaseBlockSelection(Analysis):
    #input_a = models.IntegerField()
    #input_b = models.IntegerField()
    #output_sum = models.IntegerField(blank=True, null=True)
    leaseblock_ids = models.TextField()
    description = models.TextField(null=True, blank=True)
    #leaseblocks = models.ManyToManyField("LeaseBlock", null=True, blank=True)
    geometry_actual = models.MultiPolygonField(srid=settings.GEOMETRY_DB_SRID, null=True, blank=True, verbose_name="Lease Block Selection Geometry")
    
    @property
    def serialize_attributes(self):
        from general.utils import format
        attributes = []
        report_values = {}
        leaseblocks = LeaseBlock.objects.filter(prot_numb__in=self.leaseblock_ids.split(','))
        if (len(leaseblocks) > 0): 
            #get wind speed range
            try:
                min_wind_speed = format(self.get_min_wind_speed(leaseblocks),3)
                max_wind_speed = format(self.get_max_wind_speed(leaseblocks),3)
                wind_speed_range = '%s to %s m/s' %(min_wind_speed, max_wind_speed)
            except:
                min_wind_speed = 'Unknown'
                max_wind_speed = 'Unknown'
                wind_speed_range = 'Unknown'
            attributes.append({'title': 'Average Wind Speed Range', 'data': wind_speed_range})
            try:
                avg_wind_speed = format(self.get_avg_wind_speed(leaseblocks),3)
                avg_wind_speed_output = '%s m/s' %avg_wind_speed
            except:
                avg_wind_speed = 'Unknown'
                avg_wind_speed_output = 'Unknown'
            attributes.append({'title': 'Average Wind Speed', 'data': avg_wind_speed_output})
            report_values['wind-speed'] = {'min': min_wind_speed, 'max': max_wind_speed, 'avg': avg_wind_speed, 'selection_id': self.uid}
            
            #get distance to coastal substation
            min_distance_to_substation = format(self.get_min_distance_to_substation(leaseblocks), 0)
            max_distance_to_substation = format(self.get_max_distance_to_substation(leaseblocks), 0)
            distance_to_substation_range = '%s to %s miles' %(min_distance_to_substation, max_distance_to_substation)
            attributes.append({'title': 'Distance to Coastal Substation', 'data': distance_to_substation_range})
            avg_distance_to_substation = format(self.get_avg_distance_to_substation(leaseblocks), 1)
            avg_distance_to_substation_output = '%s miles' %avg_distance_to_substation
            attributes.append({'title': 'Average Distance to Coastal Substation', 'data': avg_distance_to_substation_output})
            report_values['distance-to-substation'] = {'min': min_distance_to_substation, 'max': max_distance_to_substation, 'avg': avg_distance_to_substation, 'selection_id': self.uid}
                        
            #get distance to awc range
            min_distance_to_awc = format(self.get_min_distance_to_awc(leaseblocks), 0)
            max_distance_to_awc = format(self.get_max_distance_to_awc(leaseblocks), 0)
            distance_to_awc_range = '%s to %s miles' %(min_distance_to_awc, max_distance_to_awc)
            attributes.append({'title': 'Distance to Proposed AWC Hub', 'data': distance_to_awc_range})
            avg_distance_to_awc = format(self.get_avg_distance_to_awc(leaseblocks), 1)
            avg_distance_to_awc_output = '%s miles' %avg_distance_to_awc
            attributes.append({'title': 'Average Distance to Proposed AWC Hub', 'data': avg_distance_to_awc_output})
            report_values['distance-to-awc'] = {'min': min_distance_to_awc, 'max': max_distance_to_awc, 'avg': avg_distance_to_awc, 'selection_id': self.uid}
            
            #get distance to shipping lanes
            min_distance_to_shipping = format(self.get_min_distance_to_shipping(leaseblocks), 0)
            max_distance_to_shipping = format(self.get_max_distance_to_shipping(leaseblocks), 0)
            miles_to_shipping = '%s to %s miles' %(min_distance_to_shipping, max_distance_to_shipping)
            attributes.append({'title': 'Distance to Shipping Lanes', 'data': miles_to_shipping})
            avg_distance_to_shipping = format(self.get_avg_distance_to_shipping(leaseblocks),1)
            avg_distance_to_shipping_output = '%s miles' %avg_distance_to_shipping
            attributes.append({'title': 'Average Distance to Shipping Lanes', 'data': avg_distance_to_shipping_output})
            report_values['distance-to-shipping'] = {'min': min_distance_to_shipping, 'max': max_distance_to_shipping, 'avg': avg_distance_to_shipping, 'selection_id': self.uid}
            
            #get distance to shore range
            min_distance = format(self.get_min_distance(leaseblocks), 0)
            max_distance = format(self.get_max_distance(leaseblocks), 0)
            distance_to_shore = '%s to %s miles' %(min_distance, max_distance)
            attributes.append({'title': 'Distance to Shore', 'data': distance_to_shore})
            avg_distance = format(self.get_avg_distance(leaseblocks),1)
            avg_distance_output = '%s miles' %avg_distance
            attributes.append({'title': 'Average Distance to Shore', 'data': avg_distance_output})
            report_values['distance-to-shore'] = {'min': min_distance, 'max': max_distance, 'avg': avg_distance, 'selection_id': self.uid}
            
            #get depth range
            min_depth = format(self.get_min_depth(leaseblocks), 0)
            max_depth = format(self.get_max_depth(leaseblocks), 0)
            depth_range = '%s to %s meters' %(min_depth, max_depth)
            if min_depth == 0 or max_depth == 0:
                depth_range = 'Unknown'
            attributes.append({'title': 'Depth', 'data': depth_range})
            avg_depth = format(self.get_avg_depth(leaseblocks), 0)
            avg_depth_output = '%s meters' %avg_depth
            if avg_depth == 0:
                avg_depth_output = 'Unknown'
            attributes.append({'title': 'Average Depth', 'data': avg_depth_output})
            report_values['depth'] = {'min': min_depth, 'max': max_depth, 'avg': avg_depth, 'selection_id': self.uid}
            '''
            if self.input_filter_ais_density:
                attributes.append({'title': 'Excluding Areas with High Ship Traffic', 'data': ''})
            '''    
                
            attributes.append({'title': 'Number of Leaseblocks', 'data': self.leaseblock_ids.count(',')+1})
        else:
            attributes.append({'title': 'Number of Leaseblocks', 'data': 0})
        return { 'event': 'click', 'attributes': attributes, 'report_values': report_values }
    
    def get_min_wind_speed(self, leaseblocks):
        min_wind_speed = leaseblocks[0].min_wind_speed_rev
        for lb in leaseblocks:
            if lb.min_wind_speed_rev < min_wind_speed:
                min_wind_speed = lb.min_wind_speed_rev
        return min_wind_speed - .125
                
    def get_max_wind_speed(self, leaseblocks):
        max_wind_speed = leaseblocks[0].max_wind_speed_rev
        for lb in leaseblocks:
            if lb.max_wind_speed_rev > max_wind_speed:
                max_wind_speed = lb.max_wind_speed_rev
        return max_wind_speed + .125
          
    def get_avg_wind_speed(self, leaseblocks):
        total = 0
        for lb in leaseblocks:
            total += lb.min_wind_speed_rev
            total += lb.max_wind_speed_rev
        if total > 0:
            return total / (len(leaseblocks) * 2)
        else:
            return 0
    
    def get_min_distance(self, leaseblocks): 
        min_distance = leaseblocks[0].min_distance
        for lb in leaseblocks:
            if lb.min_distance < min_distance:
                min_distance = lb.min_distance
        return min_distance 
    
    def get_max_distance(self, leaseblocks):
        max_distance = leaseblocks[0].max_distance
        for lb in leaseblocks:
            if lb.max_distance > max_distance:
                max_distance = lb.max_distance
        return max_distance 
          
    def get_avg_distance(self, leaseblocks):
        total = 0
        for lb in leaseblocks:
            total += lb.avg_distance
        if total > 0:
            return total / (len(leaseblocks))
        else:
            return 0
    
    # note: accounting for the issue in which min_depth is actually a greater depth than max_depth 
    def get_max_depth(self, leaseblocks): 
        min_depth = leaseblocks[0].min_depth
        for lb in leaseblocks:
            if lb.min_depth < min_depth:
                min_depth = lb.min_depth
        return -min_depth
    
    # note: accounting for the issue in which max_depth is actually a lesser depth than min_depth
    def get_min_depth(self, leaseblocks):
        max_depth = leaseblocks[0].max_depth
        for lb in leaseblocks:
            if lb.max_depth > max_depth:
                max_depth = lb.max_depth
        return -max_depth
          
    def get_avg_depth(self, leaseblocks):
        total = 0
        for lb in leaseblocks:
            total += lb.avg_depth
        if total != 0:
            avg = -total / (len(leaseblocks))
            return avg
        else:
            return 0
    
    def get_min_distance_to_substation(self, leaseblocks): 
        substation_min_distance = leaseblocks[0].substation_min_distance
        for lb in leaseblocks:
            if lb.substation_min_distance < substation_min_distance:
                substation_min_distance = lb.substation_min_distance
        return substation_min_distance
    
    def get_max_distance_to_substation(self, leaseblocks):
        substation_max_distance = leaseblocks[0].substation_max_distance
        for lb in leaseblocks:
            if lb.substation_max_distance > substation_max_distance:
                substation_max_distance = lb.substation_max_distance
        return substation_max_distance
          
    def get_avg_distance_to_substation(self, leaseblocks):
        total = 0
        for lb in leaseblocks:
            total += lb.substation_mean_distance
        if total != 0:
            avg = total / len(leaseblocks)
            return avg
        else:
            return 0
    
    def get_min_distance_to_awc(self, leaseblocks): 
        awc_min_distance = leaseblocks[0].awc_min_distance
        for lb in leaseblocks:
            if lb.awc_min_distance < awc_min_distance:
                awc_min_distance = lb.awc_min_distance
        return awc_min_distance
    
    def get_max_distance_to_awc(self, leaseblocks):
        awc_max_distance = leaseblocks[0].awc_max_distance
        for lb in leaseblocks:
            if lb.awc_max_distance > awc_max_distance:
                awc_max_distance = lb.awc_max_distance
        return awc_max_distance
          
    def get_avg_distance_to_awc(self, leaseblocks):
        total = 0
        for lb in leaseblocks:
            total += lb.awc_avg_distance
        if total != 0:
            avg = total / len(leaseblocks)
            return avg
        else:
            return 0
    
    def get_min_distance_to_shipping(self, leaseblocks): 
        tsz_min_distance = leaseblocks[0].tsz_min_distance
        for lb in leaseblocks:
            if lb.tsz_min_distance < tsz_min_distance:
                tsz_min_distance = lb.tsz_min_distance
        return tsz_min_distance
    
    def get_max_distance_to_shipping(self, leaseblocks):
        tsz_max_distance = leaseblocks[0].tsz_max_distance
        for lb in leaseblocks:
            if lb.tsz_max_distance > tsz_max_distance:
                tsz_max_distance = lb.tsz_max_distance
        return tsz_max_distance
          
    def get_avg_distance_to_shipping(self, leaseblocks):
        total = 0
        for lb in leaseblocks:
            total += lb.tsz_mean_distance
        if total != 0:
            return total / len(leaseblocks)
        else:
            return 0
    
    
    def run(self):
        leaseblocks = LeaseBlock.objects.filter(prot_numb__in=self.leaseblock_ids.split(','))
        leaseblock_geoms = [lb.geometry for lb in leaseblocks]
        
        from django.contrib.gis.geos import MultiPolygon
        dissolved_geom = leaseblock_geoms[0]
        for geom in leaseblock_geoms:
            try:
                dissolved_geom = dissolved_geom.union(geom)
            except:
                pass
        
        if type(dissolved_geom) == MultiPolygon:
            self.geometry_actual = dissolved_geom
        else:
            self.geometry_actual = MultiPolygon(dissolved_geom, srid=dissolved_geom.srid)
        return True  
    
    def geojson(self, srid):
        props = get_properties_json(self)
        props['absolute_url'] = self.get_absolute_url()
        json_geom = self.geometry_actual.transform(srid, clone=True).json
        return get_feature_json(json_geom, json.dumps(props))
    
    class Options:
        verbose_name = 'Lease Block Selection'
        form = 'scenarios.forms.LeaseBlockSelectionForm'
        form_template = 'selection/form.html'
Ejemplo n.º 51
0
    def post(self, request, *args, **kwargs):
        if request.is_ajax():
            cur_shp_id = kwargs['pk']
            cur_shp = Shapefile.objects.get(id=cur_shp_id)

            # check if values for ditches and compounds have already been
            # calculated; if not, raise a warning in the interface
            if not cur_shp.stat_comp_acc:
                # iterate on all open compounds
                for compound in cur_shp.helpercompoundsarea_set \
                        .filter(type='compound', open=True):
                    # get sides and relative lengths as dictionary
                    sides = get_side_dict(compound, 3857)
                    # get longest side in area polygon as a LineString
                    access_linestr = max(sides, key=sides.get)

                    # get access length as a projected value; clone first so
                    # the access_linestr stored below stays in its original SRS
                    proj_access_linestr = access_linestr.clone()
                    proj_access_linestr.transform(cur_shp.proj)

                    # get the centroid of the access side
                    feature_centroid = compound.poly.centroid

                    # get compound's farthest point from centroid
                    max_point = Point(compound.poly.convex_hull.extent[2],
                                      compound.poly.convex_hull.extent[3],
                                      srid=3857)
                    radius = max_point.distance(feature_centroid)

                    # draw cardinal points around the compound every 45 degrees,
                    # and rotate them by 12 degrees to align perpendicularly to N
                    cardinal_pts = get_round_vertex(
                        45,
                        radius,
                        feature_centroid.x,
                        feature_centroid.y,
                        3857,
                        12)

                    # create "cake slices" using cardinal points
                    polygon_list = []
                    for i, item in enumerate(cardinal_pts):
                        points = (feature_centroid.coords,
                                  item.coords,
                                  cardinal_pts[i - 1].coords,
                                  feature_centroid.coords)
                        polygon_list.append(Polygon(points, srid=3857))
                    sectors = MultiPolygon(polygon_list, srid=3857)

                    # get access side centroid
                    access_centroid = access_linestr.centroid
                    access_centroid.transform(3857)

                    # find the sector containing the access centroid; its index
                    # in the MultiPolygon encodes the access direction
                    direction = None
                    for i, sector in enumerate(sectors):
                        if sector.contains(access_centroid):
                            direction = i
                            break

                    # save the access LineString in a separate table
                    new_compound_access = HelperCompoundsAccess(
                        shapefile_id=cur_shp_id,
                        comp=compound,
                        poly=access_linestr,
                        length=proj_access_linestr.length,
                        orientation=direction
                    )
                    new_compound_access.save()

                cur_shp.stat_comp_acc = True
                cur_shp.save()

            results = context_results(cur_shp_id)
            return render_to_response(self.template_name, {'context': results})
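
The sector construction above relies on the get_round_vertex helper, which is not shown here. A minimal sketch of the same "cake slice" idea with the point generation written inline (angles, radius, and coordinates are illustrative assumptions, not the original helper):

import math
from django.contrib.gis.geos import Point, Polygon, MultiPolygon

def make_sectors(centroid, radius, step_deg=45, offset_deg=12, srid=3857):
    # Points on a circle around the centroid, one every step_deg degrees,
    # rotated by offset_deg degrees.
    angles = [math.radians(a + offset_deg) for a in range(0, 360, step_deg)]
    pts = [Point(centroid.x + radius * math.cos(a),
                 centroid.y + radius * math.sin(a), srid=srid) for a in angles]

    # Each sector is a triangle: centroid -> point i -> previous point -> centroid.
    slices = []
    for i, pt in enumerate(pts):
        ring = (centroid.coords, pt.coords, pts[i - 1].coords, centroid.coords)
        slices.append(Polygon(ring, srid=srid))
    return MultiPolygon(slices, srid=srid)

sectors = make_sectors(Point(0, 0, srid=3857), radius=100)
print(len(sectors))  # 8 sectors of 45 degrees each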
Example no. 52
    def _import_division(self, muni, div, type_obj, syncher, parent_dict, feat):
        #
        # Geometry
        #
        geom = feat.geom
        if not geom.srid:
            geom.srid = GK25_SRID
        if geom.srid != PROJECTION_SRID:
            ct = CoordTransform(SpatialReference(geom.srid), SpatialReference(PROJECTION_SRID))
            geom.transform(ct)
        # geom = geom.geos.intersection(parent.geometry.boundary)
        geom = geom.geos
        if geom.geom_type == 'Polygon':
            geom = MultiPolygon(geom, srid=geom.srid)

        #
        # Attributes
        #
        attr_dict = {}
        lang_dict = {}
        for attr, field in div['fields'].items():
            if isinstance(field, dict):
                # Languages
                d = {}
                for lang, field_name in field.items():
                    val = feat[field_name].as_string()
                    # If the name is in all caps, fix capitalization.
                    if not re.search('[a-z]', val):
                        val = val.title()
                    d[lang] = val.strip()
                lang_dict[attr] = d
            else:
                val = feat[field].as_string()
                attr_dict[attr] = val.strip()

        origin_id = attr_dict['origin_id']
        del attr_dict['origin_id']

        if 'parent' in div:
            if 'parent_id' in attr_dict:
                parent = parent_dict[attr_dict['parent_id']]
                del attr_dict['parent_id']
            else:
                # If no parent id is available, determine the parent
                # heuristically: pick the candidate whose geometry (almost)
                # completely contains this division's geometry.
                parents = []
                for parent in parent_dict.values():
                    parent_geom = parent.geometry.boundary
                    if not geom.intersects(parent_geom):
                        continue
                    area = (geom - parent.geometry.boundary).area
                    if area > 1e-6:
                        continue
                    parents.append(parent)
                parents = sorted(parents)
                if not parents:
                    raise Exception("No parent found for %s" % origin_id)
                elif len(parents) > 1:
                    raise Exception("Too many parents for %s" % origin_id)
                parent = parents[0]
        elif 'parent_ocd_id' in div:
            try:
                parent = AdministrativeDivision.objects.get(ocd_id=div['parent_ocd_id'])
            except AdministrativeDivision.DoesNotExist:
                parent = None
        else:
            parent = muni.division

        if 'parent' in div and parent:
            full_id = "%s-%s" % (parent.origin_id, origin_id)
        else:
            full_id = origin_id
        obj = syncher.get(full_id)
        if not obj:
            obj = AdministrativeDivision(origin_id=origin_id, type=type_obj)

        validity_time_period = div.get('validity')
        if validity_time_period:
            obj.start = validity_time_period.get('start')
            obj.end = validity_time_period.get('end')
            if obj.start:
                obj.start = datetime.strptime(obj.start, '%Y-%m-%d').date()
            if obj.end:
                obj.end = datetime.strptime(obj.end, '%Y-%m-%d').date()

        if div.get('no_parent_division', False):
            muni = None

        obj.parent = parent
        obj.municipality = muni

        for attr in attr_dict.keys():
            setattr(obj, attr, attr_dict[attr])
        for attr in lang_dict.keys():
            for lang, val in lang_dict[attr].items():
                obj.set_current_language(lang)
                setattr(obj, attr, val)

        if 'ocd_id' in div:
            assert (parent and parent.ocd_id) or 'parent_ocd_id' in div
            if parent:
                if div.get('parent_in_ocd_id', False):
                    args = {'parent': parent.ocd_id}
                else:
                    args = {'parent': muni.division.ocd_id}
            else:
                args = {'parent': div['parent_ocd_id']}
            val = attr_dict['ocd_id']
            args[div['ocd_id']] = val
            obj.ocd_id = ocd.make_id(**args)
            self.logger.debug("%s" % obj.ocd_id)
        obj.save()
        syncher.mark(obj)

        try:
            geom_obj = obj.geometry
        except AdministrativeDivisionGeometry.DoesNotExist:
            geom_obj = AdministrativeDivisionGeometry(division=obj)

        geom_obj.boundary = geom
        geom_obj.save()
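
The reprojection at the top of _import_division can be exercised in isolation; a minimal sketch assuming illustrative SRID values (the actual GK25_SRID and PROJECTION_SRID constants are presumably defined elsewhere in the importer):

from django.contrib.gis.gdal import CoordTransform, SpatialReference
from django.contrib.gis.geos import Point

GK25_SRID = 3879         # assumed fallback for geometries with no SRID
PROJECTION_SRID = 3067   # assumed target projection

pt = Point(25496750, 6672000, srid=GK25_SRID)  # made-up coordinate
ct = CoordTransform(SpatialReference(pt.srid), SpatialReference(PROJECTION_SRID))
pt.transform(ct)          # reprojects the geometry in place
print(pt.srid)            # now in the target projection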