Example #1
    def handle(self, *args, **options):
        fname = options.get('file', None)
        if not fname:
            print('filename missing')
            return
        project = ProjectLocatie.objects.get(pk=1) # Texel
        user = User.objects.get(pk=1) # admin

        meetpunten = set()
        with open(fname) as csvfile:
            reader = csv.DictReader(csvfile)
            for row in reader:
                waarnemer_id = row['wnid']
                submitter = row['waarnemer']
                sample_id= row['locatie']
                lon = float(row['lon'])
                lat = float(row['lat'])
                oms = row['omschrijving']
                diep = row['boven/onder']
                ec = float(row['EC'])

                if diep == 'b': # above (shallow)
                    diep = 'Ondiep'
                elif diep in ('d', 'o'): # below or deep
                    diep = 'Diep'
                date = parser.parse(row['datetime'])

                location = Point(lon,lat,srid=4326)
                location.transform(28992)
                name = 'MP%s.%s' % (waarnemer_id, sample_id)
                try:
                    waarnemer = Waarnemer.objects.get(pk=waarnemer_id)
                    waarnemer.akvoname = submitter.lower()
                    waarnemer.save()
                    meetpunt = waarnemer.meetpunt_set.get(name=name)
                    meetpunt.identifier = uuid.uuid4()
                    meetpunt.location = location
                    meetpunt.description = oms
                    meetpunt.save()
                except Waarnemer.DoesNotExist:
                    logger.error('Waarnemer not found: %s' % waarnemer_id)
                    continue
                except Meetpunt.DoesNotExist:
                    meetpunt=waarnemer.meetpunt_set.create(name=name,projectlocatie=project,location=location,description=oms)
                    logger.info('Meetpunt {mp} created.'.format(mp=meetpunt))
                    
                waarnemingen = meetpunt.waarneming_set.filter(naam='EC_'+diep,datum=date)
                if waarnemingen.exists():
                    logger.warning('Replacing EC observations for {locatie}, date={date}'.format(locatie=meetpunt.name, date=date))
                    waarnemingen.delete()
                meetpunt.waarneming_set.create(naam='EC_'+diep if diep else 'EC', waarnemer=waarnemer, datum=date, device='acacia', waarde=ec, opmerking='', eenheid='uS/cm' )
                logger.debug('EC_{diep}, {date}, EC={ec}'.format(diep=diep, date=date, ec=ec))

                meetpunten.add(meetpunt)
                
        updateSeries(meetpunten, user)                
        
        #cartodb = CartoDb.objects.get(name='Texel Meet')
        #cartodb.runsql('DELETE FROM waarnemingen')
        #updateCartodb(cartodb, meetpunten)        
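A minimal standalone sketch of the reprojection step applied to every CSV row above, assuming GeoDjango with GEOS/GDAL installed; the sample coordinates are made up and only illustrate the WGS84 (EPSG:4326) to Dutch RD New (EPSG:28992) conversion:

from django.contrib.gis.geos import Point

location = Point(4.85, 53.05, srid=4326)  # lon, lat (roughly Texel)
location.transform(28992)                 # in-place reprojection via GDAL
print(location.x, location.y)             # RD New easting/northing in metres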
Example #2
def geodata(request):
    if request.method == 'GET':
        try:
            x = float(request.GET['x'])
            y = float(request.GET['y'])
            p = float(request.GET['p'])
        except (KeyError, ValueError):
            # bail out early instead of falling through with an undefined point
            return HttpResponseBadRequest('x, y and p query parameters are required')
        pt = Point(x, y, srid=4326)
        pt.transform(32628)
        geojson = Serializer().serialize(
            # Lampadaire.objects.all()[:100],
            Lampadaire.objects.filter(geom__dwithin=(pt, D(m=p))))
        return HttpResponse(
            geojson,
            content_type="application/json"
        )
    elif request.method == 'POST':
        # s = request.POST.get('statut')
        s = request.POST['statut']
        id_lampe = float(request.POST['id'])
        sama_lampe = get_object_or_404(Lampadaire, gid=id_lampe)
        sama_lampe.states = s
        sama_lampe.save()
        geojson = Serializer().serialize(Lampadaire.objects.filter(gid=id_lampe))
        return HttpResponse(
            geojson,
            content_type="application/json"
        )
Example #3
def query(request):
    x = float(request.REQUEST['x'])
    y = float(request.REQUEST['y'])
    pnt = GEOSPoint(x,y,srid=4326)

    # Query all the vector polygon layers
    polygons = Feature.objects.filter(geom__bboverlaps=pnt).filter(geom__intersects=pnt)
    results = {}
    for poly in polygons:
        attribs = Attribute.objects.filter(feature=poly.pk)
        for attrib in attribs:
            results['%s :: %s' % (poly.layer, attrib.key)] = attrib.value
 
    # Query all the raster layers
    rasters = Raster.objects.all()
    for rast in rasters:
        ds = gdal.Open(str(rast.filepath))
        numbands = ds.RasterCount
        srs = SpatialReference(ds.GetProjection())
        pnt_proj = pnt.transform(srs,clone=True)
        for band in range(1,numbands+1):
            val = getRasterValue(pnt_proj.x, pnt_proj.y, ds, band)
            if val:
                results["%s :: %s" % (rast.layer, "Band %s" % band)] = val
    
    return render_to_response('query.html', {'x': x, 'y':y, 'results': results}) 
Example #4
    def handle(self, *args, **options):
        constituencies = {}
        for c in Constituency.objects.all():
            constituencies[reduceName(c.name)] = c
        for b in Boundary.objects.all():
            b.delete()
        for f in ['boundaries/data/england.kml', 
                  'boundaries/data/wales.kml', 
                  'boundaries/data/scotland.kml', 
                  'boundaries/data/northern_ireland.kml']:
            places = parse(f).getElementsByTagName("Placemark")
            for place in places:
                name = place.getElementsByTagName("name")[0].childNodes[0].toxml()
                v = []
                for coords in place.getElementsByTagName("coordinates"):
                    points = []
                    north = - google_dist
                    south = google_dist
                    east = - google_dist
                    west = google_dist
                    for coord in coords.childNodes[0].toxml().split(" "):
                        s = coord.split(",")
                        if len(s) == 3:
                            x, y = [float(c) for c in coord.split(",")][:2]
                            p = Point(x, y, srid=4326)
                            p.transform(900913)
                            gx, gy = p.coords
                            if gy > north: north = gy
                            if gy < south: south = gy                    
                            if gx > east: east = gx
                            if gx < west: west = gx
                            points.append((gx, gy))
                    for z in range(maxZoom + 1):
                        pixelsize2 = ((2 * google_dist / 256) / (2 ** z)) ** 2
                        u = []
                        previousX = 1e20
                        previousY = 1e20
                        for x, y in points:
                            if z == maxZoom:
                                u.append("(%f, %f)" % (x, y))
                            elif (x - previousX) ** 2 + (y - previousY) ** 2 > pixelsize2:
                                u.append("(%f, %f)" % (x, y))
                                previousX, previousY = x, y
                        if z != maxZoom and (previousX, previousY) != (x, y):
                            u.append("(%f, %f)" % (x, y))
                        if len(u) > 3:
                            constituency = constituencies[reduceName(name)] #Need to use this function due to slight name mismatches
                            boundary="[%s]" % reduce(lambda x, y: "%s, %s" %(x, y), u).strip()
                            b=Boundary(zoom = z,
                                       constituency = constituency, 
                                       boundary=boundary,
                                       east = east,
                                       west = west,
                                       north = north,
                                       south = south)
                            try:
                                b.save()
                            except: 
 #                               print boundary
                                pass
Example #5
def findFeature(shapefile, longitude, latitude, buffer=20000, distance=2):
    pt = Point(longitude, latitude)
    circle = pt.buffer(buffer)
    radius = calcSearchRadius(latitude, longitude, distance)

##    if shapefile.geom_type in ["Point", "3D Point"]:
##        try:
##            return Feature.objects.filter(geom_point__intersects=circle, shapefile=shapefile)[:1].get()
##        except Feature.DoesNotExist:
##            return None
    if shapefile.geom_type in ["LineString", "3D LineString", "MultiLineString", "3D MultiLineString"]:
        try:
            return Feature.objects.filter(geom_multilinestring__intersects=circle, shapefile=shapefile)[:1].get()
        except Feature.DoesNotExist:
            return None
    elif shapefile.geom_type in ["Polygon", "3D Polygon", "MultiPolygon", "3D MultiPolygon" ]:
        try:
            return Feature.objects.get(geom_multipolygon__contains=pt, shapefile=shapefile)
        except Feature.DoesNotExist:
            return None
    elif shapefile.geom_type in ["Point", "MultiPoint", "3D MultiPoint"]:
        try:
            return Feature.objects.filter(geom_multipoint__intersects=circle, shapefile=shapefile)[:1].get()
        except Feature.DoesNotExist:
            return None
    elif shapefile.geom_type in ["GeometryCollection", "3D GeometryCollection"]:
        try:
            return Feature.objects.get(geom_geometrycollection__dwithin=(pt, radius))
        except Feature.DoesNotExist:
            return None
    else:
        print("Unsupported geometry: " + shapefile.geom_type)
        return None
Example #6
    def search(self, point, srid=None, bbox=None, width=None, height=None, resolution=None, icon_radius_px=8):
        source_srid = 4326
        # Note: Django GIS' Point.distance does NOT warn when using mixed SRID.
        # We have to manually transform first :-(
        point = point.transform(source_srid, clone=True)

        # Calculate radius, when a bbox is passed.
        if srid and bbox and width and height and not resolution:
            p1 = Point(bbox[0:2], srid=srid).transform(source_srid, clone=True)
            p2 = Point(bbox[2:4], srid=srid).transform(source_srid, clone=True)
            diagonal_dist_units = p1.distance(p2)
            diagonal_dist_px = (width**2+height**2)**0.5
            resolution = diagonal_dist_units / diagonal_dist_px
        else:
            onedim = Point((resolution, resolution), srid=srid).transform(source_srid, clone=True)
            resolution = onedim[0]
        radius = icon_radius_px * resolution

        result = []
        locations = self.nodes.filter_by_property('is_location').get()
        for location in locations:
            point2 = Point((location.location_x, location.location_y), srid=source_srid)
            distance = point.distance(point2)
            if distance < radius:
                info = {
                    'distance': distance,
                    'location': location,
                }
                result.append(info)
        result.sort(key=lambda item: item['distance'])
        return result
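The note at the top of this method deserves a small illustration (a sketch, not part of the original code): GEOS distance() compares raw coordinate values and does not reproject, so mixing SRIDs silently yields a meaningless number. Assuming GeoDjango with GEOS/GDAL installed:

from django.contrib.gis.geos import Point

a = Point(24.94, 60.17, srid=4326)                   # lon/lat in degrees
b = Point(2776000, 8438000, srid=3857)               # web mercator, metres
naive = a.distance(b)                                # mixes degrees with metres
proper = a.transform(3857, clone=True).distance(b)   # both sides in metres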
Example #7
 def dehydrate_location(self, bundle):
     loc = bundle.data['location']
     coords = loc['coordinates']
     pnt = Point(coords[0], coords[1], srid=PROJECTION_SRID)
     pnt.transform(4326)
     loc['coordinates'] = [pnt.x, pnt.y]
     return loc
Example #8
    def test_srid(self):
        "Testing the SRID property and keyword."
        # Testing SRID keyword on Point
        pnt = Point(5, 23, srid=4326)
        self.assertEqual(4326, pnt.srid)
        pnt.srid = 3084
        self.assertEqual(3084, pnt.srid)
        self.assertRaises(ctypes.ArgumentError, pnt.set_srid, "4326")

        # Testing SRID keyword on fromstr(), and on Polygon rings.
        poly = fromstr(self.geometries.polygons[1].wkt, srid=4269)
        self.assertEqual(4269, poly.srid)
        for ring in poly:
            self.assertEqual(4269, ring.srid)
        poly.srid = 4326
        self.assertEqual(4326, poly.shell.srid)

        # Testing SRID keyword on GeometryCollection
        gc = GeometryCollection(Point(5, 23), LineString((0, 0), (1.5, 1.5), (3, 3)), srid=32021)
        self.assertEqual(32021, gc.srid)
        for i in range(len(gc)):
            self.assertEqual(32021, gc[i].srid)

        # GEOS may get the SRID from HEXEWKB
        # 'POINT(5 23)' at SRID=4326 in hex form -- obtained from PostGIS
        # using `SELECT GeomFromText('POINT (5 23)', 4326);`.
        hex = "0101000020E610000000000000000014400000000000003740"
        p1 = fromstr(hex)
        self.assertEqual(4326, p1.srid)

        p2 = fromstr(p1.hex)
        self.assertIsNone(p2.srid)
        p3 = fromstr(p1.hex, srid=-1)  # -1 is intended.
        self.assertEqual(-1, p3.srid)
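A clarifying sketch related to the assertions above (assumes GeoDjango with GEOS/GDAL available): assigning to .srid only relabels a geometry, while transform() actually reprojects its coordinates.

from django.contrib.gis.geos import Point

pnt = Point(5, 23, srid=4326)
pnt.srid = 3084                                           # coords stay (5, 23); only metadata changes
wm = Point(5, 23, srid=4326).transform(3857, clone=True)  # coords change
print(pnt.coords, wm.coords)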
Example #9
 def pull_PSI_update(self):
     """
         Pulls PSI Data
     """
     r = requests.get(self.PSI_URL)
     if (r.status_code == 200):
         root = ElementTree.fromstring(r.content)
         haze_object = {}
         pt = Point(0, 0)
         for region in root.iter('region'):
             for child in region:
                 if child.tag == 'id':
                     haze_object['districtname'] = child.text
                 elif child.tag == 'latitude':
                     pt.y = float(child.text)
                 elif child.tag == 'longitude':
                     pt.x = float(child.text)
                 elif child.tag == 'record':
                     for reading in child:
                         if reading.get('type') == 'NPSI':
                             haze_object['PSI'] = int(reading.get('value'))
                         elif reading.get('type') == 'NPSI_PM25':
                             haze_object['PM25'] = int(reading.get('value'))
                         elif reading.get('type') == 'NPSI_PM10':
                             haze_object['PM10'] = int(reading.get('value'))
             haze_object['location'] = pt
             w, created = Haze.objects.get_or_create(
                 districtname=haze_object['districtname'], defaults=haze_object)
         w.save()
         return True
     else:
         print(r.status_code)
         return False
Example #10
 def filter_geom(self, src, val):
     lng, lat = val
     if not lng or not lat:
         raise ValueImportError("Empty geometry")
     geom = Point(float(lng), float(lat), srid=4326)  # WGS84
     geom.transform(settings.SRID)
     return geom
Example #11
 def _topologypoint(cls, lng, lat, kind=None, snap=None):
     """
     Receives a point (lng, lat) with API_SRID, and returns
     a topology objects with a computed path aggregation.
     """
     from .models import Path, PathAggregation
     from .factories import TopologyFactory
     # Find closest path
     point = Point(lng, lat, srid=settings.API_SRID)
     point.transform(settings.SRID)
     if snap is None:
         closest = Path.closest(point)
         position, offset = closest.interpolate(point)
     else:
         closest = Path.objects.get(pk=snap)
         position, offset = closest.interpolate(point)
         offset = 0
     # We can now instantiate a Topology object
     topology = TopologyFactory.create(no_path=True, kind=kind, offset=offset)
     aggrobj = PathAggregation(topo_object=topology,
                               start_position=position,
                               end_position=position,
                               path=closest)
     aggrobj.save()
     point = Point(point.x, point.y, srid=settings.SRID)
     topology.geom = point
     topology.save()
     return topology
Example #12
def condense_to_same_route(R, S):
    """Preprocess to create routes with all shared points on each route
    Keyword arguments:
    R -- LineString route
    S -- LineString route
    This method should be commutative.
    """
    S_trans = S.clone()
    S_trans.set_srid(4326)
    S_trans.transform(900913)
    R_trans = R.clone()
    R_trans.set_srid(4326)
    R_trans.transform(900913)
    # Add R points to S
    R_in_S = []
    for pair in R:
        pt = Point(pair, srid=4326)
        pt.transform(900913)
        dist = pt.distance(S_trans)
        if dist > FUZZY_DIST:
            continue
        S_len = len(S_trans)
        for index, S_start in enumerate(S_trans):
            if index == S_len - 1:
                break
            S_end = S_trans[index + 1]
            s_line = LineString([S_start, S_end])
            s_line_dist = pt.distance(s_line)
            # TODO: should this be fuzzy equal?
            if s_line_dist == dist:
                R_in_S.append({'add_after':index,'value':pair})
Example #13
def transform_point(x, y, from_proj=None, to_proj=None):
    """Transform x and y from from_proj to to_proj. Return a Point
    with the right srid set.

    Possible values of from_proj and to_proj are "google", "rd"
    and "wgs84".

    If from_proj or to_proj aren't given, the "projection" Setting
    is used.  It makes no sense to give neither."""

    if to_proj is None:
        to_srs = Setting.get('projection', 'EPSG:900913')
        to_srid = string_to_srid[srs_to_string[to_srs]]
        to_proj = Proj(srs_to_mapnik_projection[to_srs])
    elif to_proj not in string_to_srs:
        raise ValueError("Value '%s' of to_proj invalid." % to_proj)
    else:
        to_srid = string_to_srid[to_proj]
        to_proj = Proj(srs_to_mapnik_projection[string_to_srs[to_proj]])

    if from_proj is None:
        from_proj = Setting.get('projection', 'EPSG:900913')
        from_proj = Proj(srs_to_mapnik_projection[from_proj])
    elif from_proj not in string_to_srs:
        raise ValueError("Value '%s' of from_proj invalid." % from_proj)
    else:
        from_proj = Proj(srs_to_mapnik_projection[string_to_srs[from_proj]])

    p = Point(*transform(from_proj, to_proj, x, y))
    p.srid = to_srid
    return p
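A hedged usage sketch based only on the docstring above; it assumes the module-level string_to_srid / srs_to_string / srs_to_mapnik_projection tables and the Setting model that this snippet does not show:

p = transform_point(155000, 463000, from_proj='rd', to_proj='wgs84')
print(p.srid, p.x, p.y)   # 4326, approx. 5.387, 52.155 (Amersfoort)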
Example #14
 def filter_geom(self, src, val):
     if val['type'] == "Point":
         geom = Point(val['coordinates'], srid=4326)  # WGS84
     else:
         geom = Polygon(val['coordinates'][0], srid=4326)  # WGS84
     geom.transform(settings.SRID)
     return geom
Example #15
    def _clean_buildings(self):
        """
        Buildings can be specified in the KML either as polygons or as simple
        points (pins). Points are converted here into small circles (polygons),
        because all the other code counts on polygons.
        """
        # parse the KML as usual
        url, err = self._common_clean('buildings', ['polygon', 'point'])
        if not err:

            # now split the result into two piles: polygons and points
            points = [i for i in self.buildings_data if i['type'] == 'point']
            polys = [i for i in self.buildings_data if i['type'] != 'point']

            # convert the points into tiny polygons (small circles)
            out = []
            ct1 = CoordTransform(SpatialReference('WGS84'), SpatialReference(102065))
            ct2 = CoordTransform(SpatialReference(102065), SpatialReference('WGS84'))
            for point in points:
                _point = Point(point['coordinates'][0]['lon'], point['coordinates'][0]['lat'], srid=4326)
                m_point = _point.transform(ct1, clone=True)
                m_point2 = m_point.buffer(3)
                m_point2.transform(ct2)
                point['coordinates'] = [dict(zip(['lon', 'lat'], i)) for i in m_point2.coords[0]]
                out.append(point)

            # return the original polygons plus our points converted to circles
            self.buildings_data = polys + out

        return url, err
Example #16
 def test00_GEOSIndexException(self):
     'Testing Geometry IndexError'
     p = Point(1, 2)
     for i in range(-2, 2):
         p._checkindex(i)
     self.assertRaises(IndexError, p._checkindex, 2)
     self.assertRaises(IndexError, p._checkindex, -3)
Example #17
    def _get_query_geometry(query_obj):
        if query_obj.geometry is None or query_obj.geometry == '':
            return None

        geom = None
        query_obj.geometrytype = query_obj.geometrytype or DEFAULT_GEOMETRY_TYPE
        if query_obj.geometrytype == 'esriGeometryPoint':
            try:
                xy = list(map(float, query_obj.geometry.split(",")))
            except:
                xy = DynamicObject(json.loads(query_obj.geometry))
                xy = [xy.x, xy.y]
            geom = Point(*xy)
        elif query_obj.geometrytype == 'esriGeometryPolygon':
            poly = DynamicObject(json.loads(query_obj.geometry))
            geom = Polygon(*poly.rings)
        elif query_obj.geometrytype == 'esriGeometryPolyline':
            line = DynamicObject(json.loads(query_obj.geometry))
            geom = LineString(*line.paths)
        elif query_obj.geometrytype == 'esriGeometryEnvelope':
            try:
                bbox = list(map(float, query_obj.geometry.split(",")))
            except:
                bbox = DynamicObject(json.loads(query_obj.geometry))
                bbox = [bbox.xmin, bbox.ymin, bbox.xmax, bbox.ymax]
            geom = Polygon.from_bbox(bbox)

        # set the projection on the geometry
        if geom and query_obj.insr:
            geom.srid = int(query_obj.insr)
            if geom.srid == 102100:
                geom.srid = 900913
        return geom
Example #18
def create_facility_from_medicine_submission(submission):
    from devices.models import FacilitySubmission
    content = submission.content
    coordinates = content.section_general.gps

    name = content.section_general.facility_name

    lat, lng, _, acc = coordinates.split()
    #if float(acc) > 50:
    #    raise ValueError('GPS accuracy too low.')
    point = Point(float(lng), float(lat), srid=4326)
    point.transform(900913)

    facility = Facility(
        name=name,
        longitude=float(lng),
        latitude=float(lat),
        point=point,
    )
    facility.save()
    
    
    FacilitySubmission.objects.create(
        submission=submission,
        facility=facility
    )
    
    return facility
Example #19
    def handle(self, *args, **options):
        """
        Import practice eastings and northings, from HSCIC data.
        """
        if not options["filename"]:
            print("Please supply a filename")
        else:
            self.IS_VERBOSE = False
            if options["verbosity"] > 1:
                self.IS_VERBOSE = True

            gridall = csv.reader(open(options["filename"], "rU"))
            postcodes = {}
            for row in gridall:
                postcode = row[1].replace(" ", "").strip()
                postcodes[postcode] = [row[36], row[37]]

            wgs84 = SpatialReference(4326)
            bng = SpatialReference(27700)
            trans = CoordTransform(bng, wgs84)

            practices = Practice.objects.all().reverse()
            for practice in practices:
                practice.location = None
                postcode = practice.postcode.replace(" ", "").strip()
                if postcode in postcodes:
                    lng = postcodes[postcode][0]
                    lat = postcodes[postcode][1]
                    pnt = Point(int(lng), int(lat), srid=27700)
                    pnt.transform(trans)
                    practice.location = pnt
                practice.save()
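The command above builds one CoordTransform and reuses it for every practice; calling pnt.transform(4326) instead would recreate the underlying transformation objects on each call. A minimal sketch of the same pattern, assuming GeoDjango with GDAL installed:

from django.contrib.gis.gdal import CoordTransform, SpatialReference
from django.contrib.gis.geos import Point

bng_to_wgs84 = CoordTransform(SpatialReference(27700), SpatialReference(4326))
for easting, northing in [(530000, 180000), (385000, 290000)]:
    pnt = Point(easting, northing, srid=27700)
    pnt.transform(bng_to_wgs84)   # reuses the prepared transform
    print(pnt.x, pnt.y)           # lon, lat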
Example #20
    def get_queryset(self):
        queryset = super(AdministrativeDivisionViewSet, self).get_queryset()
        filters = self.request.QUERY_PARAMS

        if 'type' in filters:
            types = filters['type'].strip().split(',')
            is_name = False
            for t in types:
                # If the given type is not digits, assume it's a type name
                if not re.match(r'^[\d]+$', t):
                    is_name = True
                    break
            if is_name:
                queryset = queryset.filter(type__type__in=types)
            else:
                queryset = queryset.filter(type__in=types)

        if 'lat' in filters and 'lon' in filters:
            try:
                lat = float(filters['lat'])
                lon = float(filters['lon'])
            except ValueError:
                raise ParseError("'lat' and 'lon' need to be floating point numbers")
            point = Point(lon, lat, srid=DEFAULT_SRID)
            if DEFAULT_SRID != DATABASE_SRID:
                ct = CoordTransform(SpatialReference(DEFAULT_SRID),
                                    SpatialReference(DATABASE_SRID))
                point.transform(ct)
            geometries = AdministrativeDivisionGeometry.objects.filter(boundary__contains=point)
            queryset = queryset.filter(geometry__in=geometries).distinct()

        if 'input' in filters:
            queryset = queryset.filter(name__icontains=filters['input'].strip())

        if 'ocd_id' in filters:
            # Divisions can be specified with form:
            # division=helsinki/kaupunginosa:kallio,vantaa/äänestysalue:5
            d_list = filters['ocd_id'].lower().split(',')
            ocd_id_list = []
            for division_path in d_list:
                if division_path.startswith('ocd-division'):
                    muni_ocd_id = division_path
                else:
                    ocd_id_base = r'[\w0-9~_.-]+'
                    match_re = r'(%s)/([\w_-]+):(%s)' % (ocd_id_base, ocd_id_base)
                    m = re.match(match_re, division_path, re.U)
                    if not m:
                        raise ParseError("'ocd_id' must be of form 'muni/type:id'")

                    arr = division_path.split('/')
                    muni_ocd_id = make_muni_ocd_id(arr.pop(0), '/'.join(arr))
                ocd_id_list.append(muni_ocd_id)

            queryset = queryset.filter(ocd_id__in=ocd_id_list)

        if 'geometry' in filters:
            queryset = queryset.select_related('geometry')
        queryset = queryset.select_related('type')

        return queryset
Example #21
def set_location(request):
    try:
        place_slug = request.GET.get('location', 'all')        
        tracker.add_event('set-location', {'location': place_slug})
        result = geocode(place_slug)
        if result: 
            try:
                user_point = Point(*result[1])
                pilot_city = City.objects.get(slug="fbn-pilot")
                request.session['fbn_pilot'] = user_point.within(pilot_city.enclosing_geometry)
            except:
                pass
            if request.city and not Point(*result[1]).within(request.city.enclosing_geometry):
                return HttpResponse(json.dumps({"status": False, 'reason': "NoCity"}), mimetype="application/json")  #redirect if place not within city limits
            request.session['bingeo'] = result[1]
            display_place_slug = place_slug.split(",")[0]
            request.session["place_slug"], request.session["display_place_slug"], request.session["delivery"], request.session["pick_up"] = place_slug, display_place_slug , True , False
        else:
            return HttpResponse(json.dumps({'reason': "NoPlace"}), mimetype="application/json")
        if "/no-such-place/" in request.referer:
            return HttpResponse(json.dumps({'reason': "NoPlace"}), mimetype="application/json")
        if "/not-in-city/" in request.referer:
            return HttpResponse(json.dumps({'reason': "NoCity"}), mimetype="application/json")
    except IndexError:
        return HttpResponse(json.dumps({'reason': "NoPlace"}), mimetype="application/json")
    return HttpResponse(json.dumps({'display_name': request.session['place_slug']}), mimetype="application/json")
Example #22
    def test_point_geom_3d(self):
        """
           +
          / \
         / X \
        +     +
        """
        p1 = PathFactory.create(geom=LineString((0, 0, 1000), (4, 4, 2000)))
        p2 = PathFactory.create(geom=LineString((4, 4, 2000), (8, 0, 0)))

        poi = Point(3, 1, srid=settings.SRID)
        position, distance = Path.interpolate(p1, poi)
        self.assertTrue(almostequal(0.5, position))
        self.assertTrue(almostequal(-1.414, distance))
        # Verify that deserializing this, we obtain the same original coordinates
        # (use lat/lng as in forms)
        poi.transform(settings.API_SRID)
        poitopo = Topology.deserialize({'lat': poi.y, 'lng': poi.x})
        # Computed topology properties match original interpolation
        self.assertTrue(almostequal(0.5, poitopo.aggregations.all()[0].start_position))
        self.assertTrue(almostequal(-1.414, poitopo.offset))
        # Resulting geometry
        self.assertTrue(almostequal(3, poitopo.geom.x))
        self.assertTrue(almostequal(1, poitopo.geom.y))
        self.assertTrue(almostequal(0, poitopo.geom.z))
Example #23
 def test_point_geom_not_moving(self):
     """
     Modify path, point not moving
     +                  +
     |                  |
      \     X          /        X
      /                \
     |                  |
     +                  +
     """
     p1 = PathFactory.create(geom=LineString((0, 0, 0),
                                             (0, 5, 0),
                                             (5, 10, 0),
                                             (0, 15, 0),
                                             (0, 20, 0)))
     poi = Point(10, 10, srid=settings.SRID)
     poi.transform(settings.API_SRID)
     poitopo = Topology.deserialize({'lat': poi.y, 'lng': poi.x})
     self.assertEqual(0.5, poitopo.aggregations.all()[0].start_position)
     self.assertTrue(almostequal(-5, poitopo.offset))
     # It should have kept its position !
     self.assertTrue(almostequal(10, poitopo.geom.x))
     self.assertTrue(almostequal(10, poitopo.geom.y))
     # Change path, it should still be in the same position
     p1.geom = LineString((0, 0, 0),
                          (0, 5, 0),
                          (-5, 10, 0),
                          (0, 15, 0),
                          (0, 20, 0))
     p1.save()
     poitopo.reload()
     self.assertTrue(almostequal(10, poitopo.geom.x))
     self.assertTrue(almostequal(10, poitopo.geom.y))
Example #24
    def handle(self, *args, **options):
        gml_file = options['gml_file']
        print('Parsing xml file ...')
        total, streets = get_streets(gml_file)
        print('Inserting into database ...')
        for (gml_id, street) in tqdm(streets, total=total):
            values = {}

            #print('Inserting {} ...'.format(gml_id))

            # I think that is an error in the xml parsing library
            if not street['positions'][0]:
                print('Could not get position for {}'.format(gml_id))
                continue

            a, b = street['positions'][0].split()
            point = Point(float(a), float(b), srid=25833)
            point.transform(4326)
            values['point'] = point
            values['strname'] = street['street_names'][0]
            values['hsnr'] = ''.join(street['street_numbers'][0])
            values['search_name'] = self._get_search_name(
                values['strname'], values['hsnr'])
            values['plz'] = street['postal_codes'][0]

            bezirk_name = street['names'][-1]
            values['bezirk'] = Bezirk.objects.get(name=bezirk_name)
            values['gml_id'] = gml_id

            addr, created = Address.objects.update_or_create(
                gml_id=gml_id, defaults=values)
            action = 'created' if created else 'updated'
Example #25
    def validate_geom(self):
        x = self.cleaned.get(fields.trees.POINT_X, None)
        y = self.cleaned.get(fields.trees.POINT_Y, None)

        # Note, this shouldn't really happen since main
        # file validation will fail, but better safe than sorry
        if x is None or y is None:
            self.append_error(errors.MISSING_FIELD,
                              (fields.trees.POINT_X, fields.trees.POINT_Y))
            return False

        # Simple validation
        # longitude must be between -180 and 180
        # latitude must be between -90 and 90
        if abs(x) > 180 or abs(y) > 90:
            self.append_error(errors.INVALID_GEOM,
                              (fields.trees.POINT_X, fields.trees.POINT_Y))
            return False

        p = Point(x, y, srid=4326)
        p.transform(3857)

        if self.import_event.instance.bounds.contains(p):
            self.cleaned[fields.trees.POINT] = p
        else:
            self.append_error(errors.GEOM_OUT_OF_BOUNDS,
                              (fields.trees.POINT_X, fields.trees.POINT_Y))
            return False

        return True
Example #26
    def test_position_converted_to_lat_lon(self):
        Country.objects.all().delete()

        country1 = CountryFactory.build()
        country1.geometry = MultiPolygon([
            Polygon([
                Point(10, 10, srid=settings.WGS84_SRID),
                Point(11, 10, srid=settings.WGS84_SRID),
                Point(11, 11, srid=settings.WGS84_SRID),
                Point(10, 11, srid=settings.WGS84_SRID),
                Point(10, 10, srid=settings.WGS84_SRID),
            ]),
        ])
        country1.save()

        country2 = CountryFactory.build()
        country2.geometry = MultiPolygon([
            Polygon([
                Point(20, 20, srid=settings.WGS84_SRID),
                Point(21, 20, srid=settings.WGS84_SRID),
                Point(21, 21, srid=settings.WGS84_SRID),
                Point(20, 21, srid=settings.WGS84_SRID),
                Point(20, 20, srid=settings.WGS84_SRID),
            ]),
        ])
        country2.save()

        # Create a sample point in Country 1
        sample_point = Point(10.5, 10.5, srid=settings.WGS84_SRID)

        # Convert its coordinates into the projected spatial system
        sample_point.transform(settings.PROJECTION_SRID)

        observation_data = {
            'items': [
                {
                    'time': time.time(),
                    'tile_easting_m': sample_point.coords[0],
                    'tile_northing_m': sample_point.coords[1],
                    'observations': 100,
                },
            ],
        }

        payload = json.dumps(observation_data)

        response = self.client.post(
            reverse('contributions-create'),
            payload,
            content_type='application/json',
            HTTP_AUTHORIZATION='Bearer asdf',
        )

        self.assertEqual(response.status_code, 201)

        sample_point.transform(settings.WGS84_SRID)

        contribution = Contribution.objects.get()
        self.assertEqual(contribution.point, sample_point)
Example #27
    def test_deprecated_point_coordinate_getters_setters(self):
        p = Point(1, 2, 3)
        self.assertEqual((p.get_x(), p.get_y(), p.get_z()), (p.x, p.y, p.z))

        p.set_x(3)
        p.set_y(2)
        p.set_z(1)
        self.assertEqual((p.x, p.y, p.z), (3, 2, 1))
Example #28
def coord_convert(x,y,srcsrid=4326,tgtsrid=3740):
    gcoord = SpatialReference(srcsrid)
    mycoord = SpatialReference(tgtsrid)
    trans = CoordTransform(gcoord, mycoord)
    pt = Point(float(x), float(y), srid=srcsrid)
    try: pt.transform(trans)
    except: return (np.nan,np.nan)
    return (pt.x,pt.y)
Example #29
    def test_has_itree_region_with_intersects(self):
        p1 = Point(0, 0)
        instance = make_instance(point=p1)
        instance.save()

        ITreeRegion.objects.create(geometry=MultiPolygon((p1.buffer(10))))

        self.assertEqual(instance.has_itree_region(), True)
Example #30
def coordConvert(lat, lng, sridFrom, sridTo):
    fromCoord = SpatialReference(sridFrom)
    toCoord = SpatialReference(sridTo)
    trans = CoordTransform(fromCoord, toCoord)

    pnt = Point(lat, lng, srid=sridFrom)
    pnt.transform(trans)
    return pnt
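Usage sketch for the helper above (an observation, not from the original project): GEOS Point takes positional arguments as (x, y), i.e. (lng, lat), so despite the parameter names the first argument ends up as x. A correct WGS84 to web mercator call therefore passes longitude first:

pnt = coordConvert(-77.03, 38.89, 4326, 3857)   # lng, lat despite the signature
print(pnt.x, pnt.y)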
Example #31
from models import Req
from django.core.cache import cache
from django.conf import settings

import json, datetime, base64

from emailverification.models import BouncedEmail

import us

if settings.GEOIP_DB_PATH:
    from django.contrib.gis.geoip import GeoIP
    from django.contrib.gis.geos import Point
    geo_ip_db = GeoIP(settings.GEOIP_DB_PATH)
    washington_dc = Point(-77.0300, 38.8900)

# http://whois.arin.net/rest/org/ISUHR/nets
HOUSE_NET_RANGES = (
    ("143.231.0.0", "143.231.255.255"),
    ("137.18.0.0", "137.18.255.255"),
    ("143.228.0.0", "143.228.255.255"),
    ("12.185.56.0", "12.185.56.7"),
    ("12.147.170.144", "12.147.170.159"),
    ("74.119.128.0", "74.119.131.255"),
)
# http://whois.arin.net/rest/org/USSAA/nets
SENATE_NET_RANGES = (("156.33.0.0", "156.33.255.255"), )
# http://whois.arin.net/rest/org/EXOP/nets
EOP_NET_RANGES = (
    ("165.119.0.0", "165.119.255.255"),
    ("198.137.240.0", "198.137.241.255"),
Example #32
def import_csv_data(apps, schema_editor):
    # We can't import the models directly as they may be a newer
    # version than this migration expects. We use the historical versions.
    Venue = apps.get_model('venue', 'Venue')
    BusinessType = apps.get_model('venue', 'BusinessType')

    # Build a mapping between business type and description.
    business_type_description_lookup = {
        "Community Centre": "This business is a Community Centre",
        "Health Centre": "This business is a Health Centre",
        "Youth Club": "This business is a Youth Club",
        "Library": "This business is a Library",
        "GP": "This business is a GP",
        "Public Toilet": "This business is a Public Toilet",
        "Foodbank": "This business is a Foodbank",
        "Other": "This business is undefined"
    }

    # Build a mapping between business type and BusinessType objects.
    business_type_object_lookup = {}
    for label, description in business_type_description_lookup.items():
        business_type_object_lookup[label] = BusinessType(
            label=label, description=description)
        business_type_object_lookup[label].save()

    def try_date_from_string(string):
        parts = string.split(":")
        if len(string) > 0 and len(parts) == 2:
            try:
                return datetime.time(int(parts[0]), int(parts[1]))
            except ValueError:
                print("Error parsing string: ", string, " to int")
                return None
        else:
            return None

    def timestr(string):
        if string:
            return string
        return None

    with open('/code/venue/initial_venue_data.csv') as csv_file:
        csv_reader = csv.DictReader(csv_file, delimiter=',')

        for row in csv_reader:
            # Parse the opening times into datetime objects.
            venue = Venue.objects.create(
                name=row['NAME_VENUE'],
                description=row['DESCRIPTION'],
                address_line_1=row['ADDRESS1'],
                address_line_2=row['ADDRESS2'],
                address_line_3=row['ADDRESS3'],
                city=row['CITY'],
                postcode=row['POSTCODE'],
                country=row['COUNTRY'],
                location=Point(float(row['LNG']), float(row['LAT'])),
                show_on_website=row['SHOWN_ON_MAP'] == "TRUE",
                phone=row['PHONE_VENUE'],
                email=row['EMAIL_VENUE'],
                website=row['WEBSITE'],
                twitter=row['TWITTER'],
                facebook=row['FACEBOOK'],
                contact_name=row['NAME_MAIN_CONTACT'],
                contact_phone=row['PHONE_MAIN_CONTACT'],
                contact_email=row['EMAIL_MAIN_CONTACT'],
                toilet=row['TOILET'] == 'TRUE',
                wheelchair_access=row['WHEELCHAIR_ACCESS'] == 'TRUE',
                business_type=business_type_object_lookup[
                    row['BUSINESS_TYPE']],
                product_location=row['PRODUCT_LOCATION'],
                stock=row['STOCK'] == 'TRUE',
                opening_hours=row['OPENING_HOURS'] == 'TRUE',
                monday_open=timestr(row['MON_OPEN']),
                monday_close=timestr(row['MON_CLOSE']),
                tuesday_open=timestr(row['TUE_OPEN']),
                tuesday_close=timestr(row['TUE_CLOSE']),
                wednesday_open=timestr(row['WED_OPEN']),
                wednesday_close=timestr(row['WED_CLOSE']),
                thursday_open=timestr(row['THU_OPEN']),
                thursday_close=timestr(row['THU_CLOSE']),
                friday_open=timestr(row['FRI_OPEN']),
                friday_close=timestr(row['FRI_CLOSE']),
                saturday_open=timestr(row['SAT_OPEN']),
                saturday_close=timestr(row['SAT_CLOSE']),
                sunday_open=timestr(row['SUN_OPEN']),
                sunday_close=timestr(row['SUN_CLOSE']),
            )
            venue.save()
Example #33
    def import_city(self):
        if 'filenames' in settings.files['city']:
            filenames = settings.files['city']['filenames']
            for index in range(len(filenames)):
                self.download('city', key_index=index)
        else:
            self.download('city')

        data = self.get_data('city')

        total = sum(1 for _ in data)

        data = self.get_data('city')

        self.build_country_index()
        self.build_region_index()

        for item in tqdm(data, disable=self.options.get('quiet'), total=total, desc="Importing cities"):
            if not self.call_hook('city_pre', item):
                continue

            if item['featureCode'] not in city_types:
                continue

            try:
                city_id = int(item['geonameid'])
            except KeyError:
                self.logger.warning("City has no geonameid: {} -- skipping".format(item))
                continue
            except ValueError:
                self.logger.warning("City has non-numeric geonameid: {} -- skipping".format(item['geonameid']))
                continue

            defaults = {
                'name': item['name'],
                'kind': item['featureCode'],
                'name_std': item['asciiName'],
                'location': Point(float(item['longitude']), float(item['latitude'])),
                'population': int(item['population']),
                'timezone': item['timezone'],
            }

            try:
                defaults['elevation'] = int(item['elevation'])
            except (KeyError, ValueError):
                pass

            country_code = item['countryCode']
            try:
                country = self.country_index[country_code]
                defaults['country'] = country
            except KeyError:
                self.logger.warning("City: %s: Cannot find country: '%s' -- skipping",
                                    item['name'], country_code)
                continue

            region_code = item['admin1Code']
            try:
                region_key = country_code + "." + region_code
                region = self.region_index[region_key]
                defaults['region'] = region
            except KeyError:
                self.logger.debug('SKIP_CITIES_WITH_EMPTY_REGIONS: %s', str(SKIP_CITIES_WITH_EMPTY_REGIONS))
                if SKIP_CITIES_WITH_EMPTY_REGIONS:
                    self.logger.debug("%s: %s: Cannot find region: '%s' -- skipping",
                                      country_code, item['name'], region_code)
                    continue
                else:
                    defaults['region'] = None

            subregion_code = item['admin2Code']
            try:
                subregion = self.region_index[country_code + "." + region_code + "." + subregion_code]
                defaults['subregion'] = subregion
            except KeyError:
                try:
                    with transaction.atomic():
                        defaults['subregion'] = Subregion.objects.get(
                            Q(name=subregion_code) |
                            Q(name=subregion_code.replace(' (undefined)', '')),
                            region=defaults['region'])
                except Subregion.DoesNotExist:
                    try:
                        with transaction.atomic():
                            defaults['subregion'] = Subregion.objects.get(
                                Q(name_std=subregion_code) |
                                Q(name_std=subregion_code.replace(' (undefined)', '')),
                                region=defaults['region'])
                    except Subregion.DoesNotExist:
                        if subregion_code:
                            self.logger.debug("%s: %s: Cannot find subregion: '%s'",
                                              country_code, item['name'], subregion_code)
                        defaults['subregion'] = None

            city, created = City.objects.update_or_create(id=city_id, defaults=defaults)

            if not self.call_hook('city_post', city, item):
                continue

            self.logger.debug("%s city: %s",
                              "Added" if created else "Updated", city)
Example #34
 def calculate_walk_start(self):
     return Point(srid=self.route.srid, x=self.route[0][0], y=self.route[0][1],)
Example #35
def mergeDLog2Incid(dlogDict, ocResult, nowDate):
    '''Combine dlog dictionary with OakCrime match including existing ocResult incident
    PREFER ocResult fields
    if ocResult==None, fill only fields available in dlog
    '''

    newOC = OakCrime()

    # NB: include froot as part of dlogSrc
    dlogSrc = 'DLog_' + nowDate + '_' + dlogDict['froot']

    if ocResult == None:
        # New incident indicated to save() by null idx
        newOC.idx = None
        # NB: missing existing ocResult, use cid from dlog
        newOC.opd_rd = dlogDict['rptno']
        # NB:  oidx must be non-null! - 171205
        newOC.oidx = 0

        # NB: parseOPDLog_PDForm.regularizeIncidTbl() can't regularize bad dates!
        dlogDate = dlogDict['reg_date']
        # NB: parse_OPDLog_PDForm.regularizeIncidTbl() only includes good times
        if 'reg_time' not in dlogDict or dlogDict['reg_time'] == '':
            dlogTime = time()
        else:
            dlogTime = dlogDict['reg_time']

        newDateTime = awareDT(datetime.combine(dlogDate, dlogTime))
        newOC.cdateTime = newDateTime

        newOC.desc = ''  # NB: no description in logs, nature used for pclist
        newOC.ctype = ''
        if 'reg_pc' in dlogDict:
            newOC.crimeCat = classifyPC(dlogDict['reg_pc'])
        else:
            newOC.crimeCat = ''

        newOC.beat = dlogDict['reg_beat']
        newOC.addr = dlogDict['location1'].upper()

        newOC.xlng = float(
            dlogDict['XLng']) if dlogDict['XLng'] != '' else None
        newOC.ylat = float(
            dlogDict['YLat']) if dlogDict['YLat'] != '' else None

        if newOC.xlng != None and newOC.ylat != None:
            try:
                newpt = Point(newOC.xlng, newOC.ylat, srid=SRS_default)
                newpt.transform(SRS_10N)
                newOC.point = newpt
            except Exception as e:
                print(
                    'mergeDLog2Incid: cant make point for dlog?! %s %s %s\n\t%s'
                    % (newOC.opd_rd, newOC.xlng, newOC.ylat, e))
                newOC.point = None

        newOC.source = dlogSrc

    else:
        incid = ocResult['incid']
        # NB: existing ocResult, use cid from it
        # 		and make sure to steal its primary key!
        newOC.idx = incid.idx
        newOC.opd_rd = incid.opd_rd
        newOC.oidx = incid.oidx

        ## PREFER all ocResult fields

        newOC.cdateTime = incid.cdateTime

        if incid.desc != '':
            newOC.desc = incid.desc
        else:
            newOC.desc = ''

        if newOC.ctype != '':
            newOC.ctype = incid.ctype
        else:
            newOC.ctype = ''

        if incid.beat != '':
            newOC.beat = incid.beat
        else:
            newOC.beat = dlogDict['reg_beat']

        if incid.addr != '':
            newOC.addr = incid.addr
        else:
            newOC.addr = dlogDict['location1'].upper()

        if incid.xlng != None:
            newOC.xlng = incid.xlng
        elif dlogDict['XLng'] != '':
            newOC.xlng = float(dlogDict['XLng'])
        else:
            newOC.xlng = None

        if incid.ylat != None:
            newOC.ylat = incid.ylat
        elif dlogDict['YLat'] != '':
            newOC.ylat = float(dlogDict['YLat'])
        else:
            newOC.ylat = None

        if incid.point != None:
            newOC.point = incid.point
        elif newOC.xlng != None and newOC.ylat != None:
            try:
                newpt = Point(newOC.xlng, newOC.ylat, srid=SRS_default)
                newpt.transform(SRS_10N)
                newOC.point = newpt
            except Exception as e:
                print(
                    'mergeDLog2Incid: cant add point from dlog?! %s %s %s\n\t%s'
                    % (incid.opd_rd, newOC.xlng, newOC.ylat, e))
                newOC.point = None
        else:
            newOC.point = None

        # 2do: new classify(newOC.ctype,newOC.desc, PC)

        # NB: prefer previous crimeCat, then (re-)try to classify based on ctype,desc
        # 		finally exploit dlog reg_pc

        if incid.crimeCat != '':
            newOC.crimeCat = incid.crimeCat
        elif incid.ctype != '' or incid.desc != '':
            cc = classify(incid.ctype, incid.desc)
            if cc == '' and 'reg_pc' in dlogDict:
                cc = classifyPC(dlogDict['reg_pc'])
            newOC.crimeCat = cc
        else:
            newOC.crimeCat = ''

        newOC.source = incid.source + '+' + dlogSrc

    # 2do: Retrain to produce pseudo-UCR, pseudo-PC
    newOC.ucr = ''

    ## 2do: Geo-locate wrt/ zip, beat, census tract
    newOC.zip = None
    newOC.geobeat = None
    newOC.ctractGeoID = None

    # add dlog features
    newOC.dlogData = True
    # 2do HACK: parse_OPDLog_PDForm.regularizeIncidTbl() doesn't always provide these fields(:
    newOC.lossList = dlogDict['reg_loss'] if ('reg_loss' in dlogDict) else []
    # NB: parse_OPDLog_PDForm.regularizeIncidTbl() only includes 'reg_gsw' from some injuries
    newOC.gswP = 'reg_gsw' in dlogDict
    newOC.weapon = dlogDict['reg_weapon'] if ('reg_weapon' in dlogDict) else ''
    newOC.callout = dlogDict['reg_callout'] if ('reg_callout'
                                                in dlogDict) else 'no'
    newOC.ncustody = dlogDict['reg_ncustody'] if ('reg_ncustody'
                                                  in dlogDict) else 0
    newOC.nsuspect = dlogDict['reg_nsuspect'] if ('reg_nsuspect'
                                                  in dlogDict) else 0
    newOC.nvictim = dlogDict['reg_nvictim'] if ('reg_nvictim'
                                                in dlogDict) else 0
    newOC.nhospital = dlogDict['reg_nhospital'] if ('reg_nhospital'
                                                    in dlogDict) else 0
    # 2do HACK: parse_OPDLog_PDForm.regularizeIncidTbl()  WHY WOULD reg_ro and reg_pc be missing?!
    newOC.roList = dlogDict['reg_ro'] if ('reg_ro' in dlogDict) else []
    newOC.pcList = dlogDict['reg_pc'] if ('reg_pc' in dlogDict) else []

    return newOC
Example #36
 def test_distance_order_by(self):
     qs = SouthTexasCity.objects.annotate(distance=Distance('point', Point(3, 3, srid=32140))).order_by(
         'distance'
     ).values_list('name', flat=True).filter(name__in=('San Antonio', 'Pearland'))
     self.assertSequenceEqual(qs, ['San Antonio', 'Pearland'])
Example #37
 def test_line_locate_point(self):
     pos_expr = functions.LineLocatePoint(
         LineString((0, 0), (0, 3), srid=4326), Point(0, 1, srid=4326))
     self.assertAlmostEqual(
         State.objects.annotate(pos=pos_expr).first().pos, 0.3333333)
Example #38
        try:
            pp._getXYCData(strideFlag=False,
                           latlonFlag=True,
                           returnIDs=returnIDs,
                           sampleFlag=sampleFlag)
        except PPDatabaseException as e:
            if platform or startDatetime or endDatetime:
                raise NoPPDataException(
                    "No (%s, %s) data from (%s) between %s and %s" %
                    (xParm, yParm, platform, startDatetime, endDatetime))
            else:
                raise NoPPDataException("No (%s, %s) data returned" %
                                        (xParm, yParm))

        for lon, lat in zip(pp.lon, pp.lat):
            points.append(Point(lon, lat))

        if returnIDs:
            return pp.x_id, pp.y_id, pp.x, pp.y, points
        else:
            return pp.x, pp.y, points

    def _getActivityExtent(self, platform=None):
        '''
        Get details of the Activities that the platform(s) ha{s,ve}.  Set those details to member variables and
        also return them as a tuple.  Polymorphic: if platform is a list or tuple return spatial temporal
        extent for all of the platforms.
        '''
        # Get start and end datetimes, color and geographic extent of the activity
        # If multiple platforms, use them all to get overall start & end times and extent and set color to black
        if platform:
Example #39
def api_album_nearest(request):
    form = ApiAlbumNearestForm(request.data)
    profile = request.user.profile
    content = {'state': str(int(round(time.time() * 1000)))}
    photos = []
    if form.is_valid():
        album = form.cleaned_data["id"]
        if album:
            content["title"] = album.name
            photos_qs = album.photos.all()
            for sa in album.subalbums.exclude(atype=Album.AUTO):
                photos_qs = photos_qs | sa.photos.filter()
        else:
            photos_qs = Photo.objects.all()
        lat = round(form.cleaned_data["latitude"], 4)
        lon = round(form.cleaned_data["longitude"], 4)
        ref_location = Point(lon, lat)
        if form.cleaned_data["range"]:
            nearby_range = form.cleaned_data["range"]
        else:
            nearby_range = API_DEFAULT_NEARBY_PHOTOS_RANGE
        album_nearby_photos = photos_qs.filter(
            lat__isnull=False,
            lon__isnull=False,
            rephoto_of__isnull=True,
            geography__distance_lte=(ref_location, D(
                m=nearby_range))).distance(ref_location).annotate(
                    rephoto_count=Count('rephotos')).order_by(
                        'distance')[:API_DEFAULT_NEARBY_MAX_PHOTOS]
        for p in album_nearby_photos:
            date = None
            if p.date:
                iso = p.date.isoformat()
                date_parts = iso.split('T')[0].split('-')
                date = date_parts[2] + '-' + date_parts[1] + '-' + date_parts[0]
            elif p.date_text:
                date = p.date_text
            photos.append({
                "id": p.id,
                "image": request.build_absolute_uri(
                    reverse("project.ajapaik.views.image_thumb",
                            args=(p.id, ))) + '[DIM]/',
                "width": p.width,
                "height": p.height,
                "title": p.description,
                "date": date,
                "author": p.author,
                "source": {
                    'name': p.source.description + ' ' + p.source_key,
                    'url': p.source_url
                } if p.source else {
                    'url': p.source_url
                },
                "latitude": p.lat,
                "longitude": p.lon,
                "rephotos": p.rephoto_count,
                "uploads": p.rephotos.filter(user=profile).count(),
            })
        content["photos"] = photos
    else:
        content["error"] = 2

    return Response(content)
Example #40
def getBestMatch(dlog,
                 dlogCID,
                 logStream,
                 cidFilter=False,
                 requireMajorCrime=True):
    '''query existing OakCrime database for exact opd_rd match,
    then approx dateTime+location similarity
      - logStream: non-None is a stream to write matching detailed log
      - cidFilter: pre-filter against matches with CID > MaxCIDDiff
      - requireMajorCrime: only consider incidents with ctype in MajorCrimes list
    ASSUME dlog contains date and xlng,ylat
    '''

    CloseRadius = 1000  # 1km
    DateRange = 7

    LocationScale = 0.5
    DateScale = 0.5

    if 'reg_date' not in dlog or dlog['reg_date'] == None:
        # nmissDate += 1
        return 'missDate'
    else:
        dlogDate = dlog['reg_date']

    if 'reg_time' not in dlog or dlog['reg_time'] == '':
        dlogTime = time()
    else:
        dlogTime = dlog['reg_time']

    dlogDateTime = awareDT(datetime.combine(dlogDate, dlogTime))

    # ASSUME 4am cutoff ala 29 Sept 17 "0400hrs / Friday (29SEP17) - 0400hrs / Saturday (30SEP17)"
    # 		ie goes 4 hours into next day's incidents
    if dlogTime.hour <= 4:
        dlogDateTime += timedelta(days=1)

    minDate = (dlogDateTime - timedelta(days=DateRange))
    maxDate = (dlogDateTime + timedelta(days=DateRange))

    if not('location1' in dlog and dlog['location1'] != '' and \
      'XLng' in dlog and dlog['XLng'] != ''):
        # nmissGC += 1
        return 'missGC'

    dlXLng = dlog['XLng']
    dlYLat = dlog['YLat']

    dlPt = Point(dlXLng, dlYLat, srid=SRS_default)

    result = OakCrime.objects.filter(cdateTime__gte=minDate) \
           .filter(cdateTime__lte=maxDate) \
           .exclude(point__isnull=True) \
           .filter(point__distance_lte=(dlPt, D(m=CloseRadius)))

    matchTbl = {}
    for i, incid in enumerate(result):

        opd_rd = incid.opd_rd
        match = {'cid': opd_rd}

        idDist = editdistance.eval(opd_rd, dlogCID)

        if cidFilter and idDist > MaxCIDDiff:
            continue

        match['idDist'] = idDist
        cdateTime = incid.cdateTime
        # match['cdate'] = cdate
        dateDiff = cdateTime - dlogDateTime
        dateDiffSeconds = dateDiff.total_seconds()
        dayDiff = float(dateDiffSeconds) / 60 / 60 / 24
        match['dayDiff'] = dayDiff

        XLng = incid.xlng
        YLat = incid.ylat

        if XLng is None or YLat is None:
            print('getBestMatch: missing coord in matching incid?! dlog: %s %s %s ; %s %s %s' % \
              (dlogCID,dlXLng,dlYLat,opd_rd,XLng,YLat))
            continue

        incidPt = incid.point

        distDegree = incidPt.distance(dlPt)  # degrees!

        # EarthEquatorialRadius = 6378000
        Degree2Meter = 111195  # EarthEquatorialRadius * Pi / 180
        distMeter = distDegree * Degree2Meter

        distw = distWgt(distMeter, CloseRadius)

        match['dist'] = distMeter

        datew = dateDiffWgt(dayDiff, DateRange)

        matchScore = LocationScale * distw + DateScale * datew

        ## 181223: PatrolLogs generally only report crimes in PC2CrimeCatTbl
        # NB: only keeping first PCode in match
        # NB: the loop variable pc is never used, so this simply marks the
        #     incident as a major crime whenever dlog has at least one reg_pc
        #     entry and incid.ctype is in MajorCrimes
        majorCrime = False
        for pc in dlog['reg_pc']:
            if incid.ctype in MajorCrimes:
                majorCrime = True
                break
        match['majorCrime'] = majorCrime

        match['mscore'] = matchScore

        # include all of OakCrime incident features
        match['incid'] = incid

        matchTbl[opd_rd] = match

        if logStream:
            # dRptNo,dLoc,dxlng,dylat,dDT,dPC,iCID,iAddr,ixlng,iylat,iDT,iCC,iCType,iDesc,matchScore,idDist,distMeter,dayDiff,majorCrime

            logFlds = [dlog['rptno'],dlog['location1'],dlXLng,dlYLat,dlogDateTime,dlog['reg_pc'], \
               incid.opd_rd,incid.addr,incid.xlng,incid.ylat,incid.cdateTime,incid.crimeCat,incid.ctype,incid.desc, \
               matchScore,idDist,distMeter,dayDiff,majorCrime]
            logStrFlds = ['"' + str(f) + '"' for f in logFlds]
            outline = ','.join(logStrFlds)
            logStream.write(outline + '\n')

    allMatch = list(matchTbl.keys())
    bestMatch = None
    bestMatchScore = 0.

    # select exact match CID result,
    for opd_rd in allMatch:
        match = matchTbl[opd_rd]
        # NB: parse_OPDLoPDF.mergeDailyLogs() adds suffix to cid for duplicate rptno
        # exact match of either allowed
        if opd_rd == dlogCID or opd_rd == dlog['rptno']:
            bestMatch = match
            break

    # or best-matching
    if not bestMatch:
        majorCrimes = []
        if requireMajorCrime:
            for opd_rd in allMatch:
                match = matchTbl[opd_rd]
                if match['majorCrime']:
                    majorCrimes.append(opd_rd)

            # NB: only match against majorCrime if it is unique
            if len(majorCrimes) > 1:
                # print('getBestMatch: multiple majorCrime match!',majorCrimes)
                pass
            elif len(majorCrimes) == 1:
                bestMatch = matchTbl[majorCrimes[0]]
        else:
            for opd_rd in allMatch:
                match = matchTbl[opd_rd]
                if match['mscore'] > bestMatchScore:
                    bestMatch = match
                    bestMatchScore = match['mscore']

    if bestMatch and logStream:
        # dRptNo,dLoc,dxlng,dylat,dDT,dPC,iCID,iAddr,ixlng,iylat,iDT,iCC,iCType,iDesc,matchScore,idDist,distMeter,dayDiff,majorCrime
        incid = bestMatch['incid']
        # NB: prefix best's CID with a star, and log the best match's own
        #     scores rather than leftovers from the last loop iteration
        logFlds = [dlog['rptno'],dlog['location1'],dlXLng,dlYLat,dlogDateTime,dlog['reg_pc'], \
           '*' + incid.opd_rd,incid.addr,incid.xlng,incid.ylat,incid.cdateTime,incid.crimeCat,incid.ctype,incid.desc, \
           bestMatch['mscore'],bestMatch['idDist'],bestMatch['dist'],bestMatch['dayDiff'],bestMatch['majorCrime']]
        logStrFlds = ['"' + str(f) + '"' for f in logFlds]
        outline = ','.join(logStrFlds)
        logStream.write(outline + '\n')

    return bestMatch
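distWgt() and dateDiffWgt() are not shown in this example. A minimal sketch, assuming both are simple linear decays that reach zero at CloseRadius metres and DateRange days respectively, of how the combined score is formed:

def distWgt(distMeter, closeRadius):
    # hypothetical weight: 1.0 at zero distance, 0.0 at closeRadius or beyond
    return max(0.0, 1.0 - distMeter / float(closeRadius))

def dateDiffWgt(dayDiff, dateRange):
    # hypothetical weight: 1.0 on the same day, 0.0 at +/- dateRange days
    return max(0.0, 1.0 - abs(dayDiff) / float(dateRange))

# With LocationScale = DateScale = 0.5 as above, an incident 250 m away and
# 3.5 days apart scores 0.5 * 0.75 + 0.5 * 0.5 = 0.625.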
Beispiel #41
0
 def _load_city_data(self):
     for name, pnt_data in city_data:
         City3D.objects.create(name=name, point=Point(*pnt_data, srid=4326))
Beispiel #42
0
#
#   ebpub is distributed in the hope that it will be useful,
#   but WITHOUT ANY WARRANTY; without even the implied warranty of
#   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#   GNU General Public License for more details.
#
#   You should have received a copy of the GNU General Public License
#   along with ebpub.  If not, see <http://www.gnu.org/licenses/>.
#

from django.test import TestCase
from django.contrib.gis.geos import Point
from ebpub.metros.models import Metro
import logging

pt_in_chicago = Point(
    (-87.68489561595398, 41.852929331184384))  # point in center of Chicago
pt_in_chi_bbox = Point(
    (-87.83384627077956, 41.85365447332586
     ))  # point just west of Chicago's border but due south of O'Hare
pt_in_lake_mi = Point(
    (-86.99514699540548, 41.87468001919902))  # point way out in Lake Michigan


class MetroTest(TestCase):
    fixtures = ['metros.json']

    def test_point_in_metro(self):
        # Tests finding a metro with a point contained by its boundary
        self.assertEqual(
            Metro.objects.containing_point(pt_in_chicago).name, 'Chicago')
Beispiel #43
0
 def create_vehicle_location(self, item):
     return VehicleLocation(
         latlong=Point(item.vehicle.position.longitude, item.vehicle.position.latitude)
     )
Beispiel #44
0
    def import_district(self):
        self.download('city')
        data = self.get_data('city')

        total = sum(1 for _ in data)

        data = self.get_data('city')

        self.build_country_index()
        self.build_region_index()
        self.build_hierarchy()

        city_index = {}
        for obj in tqdm(City.objects.all(),
                        disable=self.options.get('quiet'),
                        total=City.objects.all().count(),
                        desc="Building city index"):
            city_index[obj.id] = obj

        for item in tqdm(data, disable=self.options.get('quiet'), total=total, desc="Importing districts"):
            if not self.call_hook('district_pre', item):
                continue

            _type = item['featureCode']
            if _type not in district_types:
                continue

            defaults = {
                'name': item['name'],
                'name_std': item['asciiName'],
                'location': Point(float(item['longitude']), float(item['latitude'])),
                'population': int(item['population']),
            }

            if hasattr(District, 'code'):
                defaults['code'] = item['admin3Code']

            geonameid = int(item['geonameid'])

            # Find city
            city = None
            try:
                city = city_index[self.hierarchy[geonameid]]
            except KeyError:
                self.logger.debug("District: %d %s: Cannot find city in hierarchy, using nearest", geonameid, defaults['name'])
                city_pop_min = 100000
                # We try to find the closest city using the native database
                # .distance(...) query; if that fails we fall back to a
                # two-degree bounding-box search (MySQL has no support for it,
                # nor does Spatialite with SRID 4326).
                try:
                    if django_version < (1, 9):
                        city = City.objects.filter(population__gt=city_pop_min)\
                                   .distance(defaults['location'])\
                                   .order_by('distance')[0]
                    else:
                        city = City.objects.filter(
                            location__distance_lte=(defaults['location'], D(km=1000))
                        ).annotate(
                            distance=Distance('location', defaults['location'])
                        ).order_by('distance').first()
                except City.DoesNotExist as e:
                    self.logger.warning(
                        "District: %s: DB backend does not support native '.distance(...)' query "
                        "falling back to two degree search",
                        defaults['name']
                    )
                    search_deg = 2
                    min_dist = float('inf')
                    bounds = Envelope(
                        defaults['location'].x - search_deg, defaults['location'].y - search_deg,
                        defaults['location'].x + search_deg, defaults['location'].y + search_deg)
                    for c in City.objects.filter(population__gt=city_pop_min).filter(
                            location__intersects=bounds.wkt):
                        dist = geo_distance(defaults['location'], c.location)
                        if dist < min_dist:
                            min_dist = dist
                            city = c
            else:
                self.logger.debug("Found city in hierarchy: %s [%d]", city.name, geonameid)

            if not city:
                self.logger.warning("District: %s: Cannot find city -- skipping", defaults['name'])
                continue

            defaults['city'] = city

            try:
                with transaction.atomic():
                    district = District.objects.get(city=defaults['city'], name=defaults['name'])
            except District.DoesNotExist:
                # If the district doesn't exist, create it with the geonameid
                # as its id
                district, created = District.objects.update_or_create(id=item['geonameid'], defaults=defaults)
            else:
                # Since the district already exists, but doesn't have its
                # geonameid as its id, we need to update all of its attributes
                # *except* for its id
                for key, value in defaults.items():
                    setattr(district, key, value)
                district.save()
                created = False

            if not self.call_hook('district_post', district, item):
                continue

            self.logger.debug("%s district: %s", "Added" if created else "Updated", district)
Beispiel #45
0
 def coordinates(self):
     return Point(self.longitude, self.latitude)
Beispiel #46
0
from .models import Shop, Link, Link5
from .forms import ContactForm
import json
from shapely.geometry import LineString as ShLineString, mapping
from django.views import generic
from django.views.generic.base import TemplateView
from django.http import JsonResponse
from django.core.serializers import serialize
from django.shortcuts import render
from django.contrib.auth.models import User
from django.contrib.gis.geos import Point
from django.contrib.gis.db.models.functions import Distance

longitude = 17.166573
latitude = 48.172558

user_location = Point(longitude, latitude, srid=4326)


class Home(generic.ListView):
    model = Shop
    context_object_name = 'shops'
    queryset = Shop.objects.annotate(
        distance=Distance('location', user_location)).order_by('distance')[0:6]
    template_name = 'shops/index.html'


class MapView(TemplateView):
    template_name = "shops/map.html"

    def get_context_data(self, **kwargs):
        """Return the view context data."""
Beispiel #47
0
 def test_distance_function_raw_result(self):
     distance = Interstate.objects.annotate(
         d=Distance(Point(0, 0, srid=4326), Point(0, 1, srid=4326)),
     ).first().d
     self.assertEqual(distance, 1)
Beispiel #48
0
 def setUp(self):
     Peak.objects.create(name="Peak1", altitude=0, location=Point(0, 0))
     Peak.objects.create(name="Peak2", altitude=0, location=Point(10, 10))
     Peak.objects.create(name="PeakOut", altitude=0, location=Point(50, 50))
Beispiel #49
0
 def test_aswkt(self):
     wkt = City.objects.annotate(wkt=functions.AsWKT(Point(
         1, 2, srid=4326)), ).first().wkt
     self.assertEqual(
         wkt, 'POINT (1.0 2.0)' if connection.ops.oracle else 'POINT(1 2)')
Beispiel #50
0
def asset_record_position(request, asset, mission):
    """
    Record the current position of an asset.

    Only allows recording of the assets position by the owner.
    Accepts get requests because some assets are very basic.

    Return the last command that applies to an object
    """
    lat = ''
    lon = ''
    fix = None
    alt = None
    heading = None

    if request.method == 'GET':
        lat = request.GET.get('lat')
        lon = request.GET.get('lon')
        fix = request.GET.get('fix')
        alt = request.GET.get('alt')
        heading = request.GET.get('heading')
    elif request.method == 'POST':
        lat = request.POST.get('lat')
        lon = request.POST.get('lon')
        fix = request.POST.get('fix')
        alt = request.POST.get('alt')
        heading = request.POST.get('heading')
    else:
        return HttpResponseBadRequest("Unsupport method")

    point = None
    try:
        point = Point(float(lon), float(lat))
    except (ValueError, TypeError):
        pass

    try:
        fix = int(fix)
    except (TypeError, ValueError):
        fix = None
    try:
        heading = int(heading)
    except (TypeError, ValueError):
        heading = None
    try:
        alt = float(alt)
    except (TypeError, ValueError):
        alt = None

    if point:
        AssetPointTime(asset=asset,
                       point=point,
                       creator=request.user,
                       alt=alt,
                       heading=heading,
                       fix=fix,
                       mission=mission).save()
    else:
        return HttpResponseBadRequest("Invalid lat/lon (%s,%s)" % (lat, lon))

    asset_command = AssetCommand.last_command_for_asset(asset, mission)
    if asset_command:
        data = {
            'action': asset_command.command,
            'action_txt': asset_command.get_command_display(),
            'reason': asset_command.reason,
            'issued': asset_command.issued,
        }
        if asset_command.position:
            data['latitude'] = asset_command.position.y
            data['longitude'] = asset_command.position.x
        return JsonResponse(data)

    return HttpResponse("Continue")
Beispiel #51
0
class Zona(models.Model):
    """
        Representan la division de una ciudad en hexagonos
        ej. hay multiples bahias en una zona
    """
    coordenadas = models.PointField(geography=True, default=Point(0.0, 0.0))
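A mutable default such as Point(0.0, 0.0) is shared by every instance that falls back to it; Django's usual recommendation is a callable default. A sketch of that variant (the field name comes from the model above, srid=4326 is an assumption):

from django.contrib.gis.db import models
from django.contrib.gis.geos import Point


def default_coordenadas():
    # a fresh Point per instance instead of one shared default object
    return Point(0.0, 0.0, srid=4326)


class Zona(models.Model):
    coordenadas = models.PointField(geography=True, default=default_coordenadas)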
Beispiel #52
0
 def save(self, *args, **kwargs):
     try:
         self.point = Point(self.address.lon, self.address.lat)
     except:  # noqa
         self.point = Point(0., 0.)
     super().save(*args, **kwargs)
Beispiel #53
0
    def create(self, request, **kwargs):

        serializer = self.get_serializer(data=request.data)

        latitude = self.request.POST.get('latitude', None)
        longitude = self.request.POST.get('longitude', None)

        if serializer.is_valid():

            state = get_object_or_404(State, pk=1)

            upload_files = request.FILES.getlist('upload_image')

            if latitude is None:
                return Response({"latitude": "Not have latitude"},
                                status=status.HTTP_400_BAD_REQUEST)

            if longitude is None:
                return Response({"longitude": "Not have longitude"},
                                status=status.HTTP_400_BAD_REQUEST)

            race = self.request.POST.get('race', None)

            if race is None and request.data['category'] != 1:
                race = get_object_or_404(Race, pk=10000)
                product = serializer.save(seller=request.user.userdetail,
                                          location=Point(
                                              float(longitude),
                                              float(latitude)),
                                          active=True,
                                          state=state,
                                          race=race)
            else:
                product = serializer.save(seller=request.user.userdetail,
                                          location=Point(
                                              float(longitude),
                                              float(latitude)),
                                          active=True,
                                          state=state)

            session = Session(
                aws_access_key_id='AKIAJYDV7TEBJS6JWEEQ',
                aws_secret_access_key='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'
            )

            s3 = session.resource('s3')
            bucket = s3.Bucket('walladog')

            for index in range(len(upload_files)):

                uuid_id = uuid.uuid4()
                up_file = upload_files[index]
                key_file = str(uuid_id) + ".jpeg"
                bucket.put_object(ACL='public-read',
                                  Key=key_file,
                                  Body=up_file,
                                  ContentType='image/jpeg')

                photo_url = "https://s3.amazonaws.com/walladog/" + str(
                    uuid_id) + ".jpeg"
                photo_thumbnail_url = "https://s3.amazonaws.com/walladog/thumbnails/" + str(
                    uuid_id) + ".png"
                image_product = Image(name=str(uuid_id),
                                      product=product,
                                      photo_url=photo_url,
                                      photo_thumbnail_url=photo_thumbnail_url)
                image_product.save()

            serialize_list = ProductsListSerializer(product)

            return Response(serialize_list.data,
                            status=status.HTTP_201_CREATED)

        else:
            return Response(serializer.errors,
                            status=status.HTTP_400_BAD_REQUEST)
Beispiel #54
0
def load_cashfin(cashfin_file, verbose=False):
    with open(cashfin_file, 'r') as f:
        reader = csv.reader(f, delimiter="\t")
        next(reader)
        skip_lookup = False
        if CashFin.objects.count() == 0:
            skip_lookup = True
        for row in reader:
            pin = '{:0>14}'.format(int(Decimal(row[1])))
            doc = row[2]
            date_doc = spss_to_posix(row[3])
            date_rec = spss_to_posix(row[4])
            try:
                year = int(row[5])
            except:
                year = None
            try:
                amount_prime = float(row[6])
            except:
                amount_prime = None
            likely_distressed = True if int(row[7]) == 1 else False
            likely_cash = True if int(row[8]) == 1 else False
            buyer = row[9].strip()
            buyer_type = row[10].strip()
            seller = row[11].strip()
            seller_type = row[12].strip()
            apt = row[13].strip()
            direction = row[14].strip()
            houseno = row[15].strip()
            street = row[16].strip()
            suffix = row[17].strip()
            addr_final = row[18].strip()
            city_final = row[19].strip()
            try:
                lat_y = float(row[20])
            except:
                lat_y = None
            try:
                long_x = float(row[21])
            except:
                long_x = None
            try:
                tract_fix = float(row[22])
            except:
                tract_fix = None
            no_tract_info = True if int(row[23]) == 1 else False
            try:
                ca_num = int(row[24])
            except:
                ca_num = None
            ca_name = row[25].strip()
            place = row[26].strip()
            gisdate = row[27].strip()
            try:
                ptype_id = int(row[28])
            except:
                ptype_id = None
            try:
                residential = int(row[29])
            except:
                residential = None
            loc = None if row[20] == '' else Point(
                (Decimal(row[21]), Decimal(row[20])))
            # Collect the field values once and reuse them both for the lookup
            # and, failing that, for creating a new record.
            fields = dict(
                pin=pin, doc=doc, date_doc=date_doc, date_rec=date_rec,
                year=year, amount_prime=amount_prime,
                likely_distressed=likely_distressed, likely_cash=likely_cash,
                buyer=buyer, buyer_type=buyer_type,
                seller=seller, seller_type=seller_type,
                apt=apt, direction=direction, houseno=houseno, street=street,
                suffix=suffix, addr_final=addr_final, city_final=city_final,
                lat_y=lat_y, long_x=long_x, tract_fix=tract_fix,
                no_tract_info=no_tract_info, ca_num=ca_num, ca_name=ca_name,
                place=place, gisdate=gisdate, ptype_id=ptype_id,
                residential=residential, loc=loc)
            try:
                if skip_lookup:
                    raise Exception('no lookup')
                cashfin = CashFin.objects.get(**fields)
            except:
                cashfin = CashFin(**fields)
            cashfin.save()
Beispiel #55
0
    def test_openwisp(self):
        """ test OpenWisp synchronizer """
        layer = Layer.objects.external()[0]
        layer.new_nodes_allowed = False
        layer.save()
        layer = Layer.objects.get(pk=layer.pk)

        external = LayerExternal(layer=layer)
        external.synchronizer_path = 'nodeshot.interop.sync.synchronizers.OpenWisp'
        external._reload_schema()
        external.url = '%s/openwisp-georss.xml' % TEST_FILES_PATH
        external.full_clean()
        external.save()

        output = capture_output(management.call_command, ['sync', 'vienna'],
                                kwargs={'verbosity': 0})

        # ensure following text is in output
        self.assertIn('43 nodes added', output)
        self.assertIn('0 nodes changed', output)
        self.assertIn('43 total external', output)
        self.assertIn('43 total local', output)

        # start checking DB too
        nodes = layer.node_set.all()

        # ensure all nodes have been imported
        self.assertEqual(nodes.count(), 43)

        # check one particular node has the data we expect it to have
        node = Node.objects.get(slug='podesta1-ced')
        self.assertEqual(node.name, 'Podesta1 CED')
        self.assertEqual(node.address, 'Test WISP')
        point = Point(8.96166, 44.4185)
        self.assertTrue(node.geometry.equals(point))
        self.assertEqual(node.updated.strftime('%Y-%m-%d'), '2013-07-10')
        self.assertEqual(node.added.strftime('%Y-%m-%d'), '2011-08-24')

        # --- with the following step we expect some nodes to be deleted --- #

        external.url = '%s/openwisp-georss2.xml' % TEST_FILES_PATH
        external.full_clean()
        external.save()

        output = capture_output(management.call_command, ['sync', 'vienna'],
                                kwargs={'verbosity': 0})

        # ensure following text is in output
        self.assertIn('5 nodes unmodified', output)
        self.assertIn('38 nodes deleted', output)
        self.assertIn('0 nodes changed', output)
        self.assertIn('5 total external', output)
        self.assertIn('5 total local', output)

        # ensure all nodes have been imported
        self.assertEqual(nodes.count(), 5)

        # check one particular node has the data we expect it to have
        node = Node.objects.get(slug='lercari2-42')
        self.assertEqual(node.name, 'Lercari2 42')
        self.assertEqual(node.address, 'Test WISP')
        point = Point(8.96147, 44.4076)
        self.assertTrue(node.geometry.equals(point))
        self.assertEqual(node.updated.strftime('%Y-%m-%d'), '2013-07-10')
        self.assertEqual(node.added.strftime('%Y-%m-%d'), '2013-06-14')
Beispiel #56
0
    def _getMeasuredPPData(self, startDatetime, endDatetime, platform, xParm,
                           yParm):
        '''
        Use the SQL template to retrieve the X and Y values and other ancillary information from the database
        for the passed in platform, xParm and yParm names.
        '''
        # SQL template copied from STOQS UI Parameter-Parameter -> sql tab
        sql_template = '''SELECT DISTINCT stoqs_measurement.depth,
                mp_x.datavalue AS x, mp_y.datavalue AS y,
                ST_X(stoqs_measurement.geom) AS lon, ST_Y(stoqs_measurement.geom) AS lat,
                stoqs_instantpoint.timevalue 
            FROM stoqs_activity
            INNER JOIN stoqs_platform ON stoqs_platform.id = stoqs_activity.platform_id
            INNER JOIN stoqs_instantpoint ON stoqs_instantpoint.activity_id = stoqs_activity.id
            INNER JOIN stoqs_measurement ON stoqs_measurement.instantpoint_id = stoqs_instantpoint.id
            INNER JOIN stoqs_measurement m_y ON m_y.instantpoint_id = stoqs_instantpoint.id
            INNER JOIN stoqs_measuredparameter mp_y ON mp_y.measurement_id = m_y.id
            INNER JOIN stoqs_parameter p_y ON mp_y.parameter_id = p_y.id
            INNER JOIN stoqs_measurement m_x ON m_x.instantpoint_id = stoqs_instantpoint.id
            INNER JOIN stoqs_measuredparameter mp_x ON mp_x.measurement_id = m_x.id
            INNER JOIN stoqs_parameter p_x ON mp_x.parameter_id = p_x.id
            WHERE (p_x.name = '{pxname}')
                AND (p_y.name = '{pyname}')
                {platform_clause}
                {time_clause}
                {depth_clause}
                {day_night_clause}
            ORDER BY stoqs_instantpoint.timevalue '''

        # Get connection to database; self.args.database must be defined in privateSettings
        cursor = connections[self.args.database].cursor()

        # Apply platform constraint if specified
        platformSQL = ''
        if platform:
            platformSQL += "AND stoqs_platform.name IN ('%s')" % platform

        # Apply time constraints if specified
        timeSQL = ''
        if startDatetime:
            timeSQL += "AND stoqs_instantpoint.timevalue >= '%s' " % startDatetime
        if endDatetime:
            timeSQL += "AND stoqs_instantpoint.timevalue <= '%s'" % endDatetime

        # Apply depth constraints if specified
        depthSQL = ''
        if self.args.minDepth:
            depthSQL += 'AND stoqs_measurement.depth >= %f ' % self.args.minDepth
        if self.args.maxDepth:
            depthSQL += 'AND stoqs_measurement.depth <= %f' % self.args.maxDepth

        # Apply SQL where clause to restrict to just do day or night measurements
        daytimeHours = (17, 22)
        nighttimeHours = (5, 10)
        dnSQL = ''
        if self.args.daytime:
            dnSQL = "AND date_part('hour', stoqs_instantpoint.timevalue) > %d AND date_part('hour', stoqs_instantpoint.timevalue) < %d" % daytimeHours
        if self.args.nighttime:
            dnSQL = "AND date_part('hour', stoqs_instantpoint.timevalue) > %d AND date_part('hour', stoqs_instantpoint.timevalue) < %d" % nighttimeHours

        sql = sql_template.format(pxname=xParm,
                                  pyname=yParm,
                                  platform_clause=platformSQL,
                                  time_clause=timeSQL,
                                  depth_clause=depthSQL,
                                  day_night_clause=dnSQL)
        if self.args.verbose > 1:
            print "sql =", sql

        x = []
        y = []
        points = []
        cursor.execute(sql)
        for row in cursor:
            x.append(float(row[1]))
            y.append(float(row[2]))
            points.append(Point(float(row[3]), float(row[4])))

        if not points:
            raise NoPPDataException(
                "No (%s, %s) data from (%s) between %s and %s" %
                (xParm, yParm, platform, startDatetime, endDatetime))

        return x, y, points
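The clauses above are interpolated straight into the SQL string. A minimal sketch (an alternative to, not part of, the code above) of building the same optional time constraint with placeholders, so the values can be passed to cursor.execute(sql, params) and quoted by the database driver:

def build_time_clause(startDatetime=None, endDatetime=None):
    # returns an SQL fragment with %s placeholders plus the matching parameters
    clause, params = '', []
    if startDatetime:
        clause += "AND stoqs_instantpoint.timevalue >= %s "
        params.append(startDatetime)
    if endDatetime:
        clause += "AND stoqs_instantpoint.timevalue <= %s "
        params.append(endDatetime)
    return clause, params

# usage sketch:
#   time_clause, params = build_time_clause(startDatetime, endDatetime)
#   cursor.execute(sql_with_placeholders, params)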
Beispiel #57
0
from django.conf import settings
from django.contrib.gis.geos import LineString, MultiPolygon, Point, Polygon
from rest_framework.test import APITestCase

from traffic_control.models import Lifecycle, MountType, Owner, TrafficControlDeviceType
from users.models import User

test_point = Point(10.0, 10.0, srid=settings.SRID)
test_point_2 = Point(0.0, 0.0, srid=settings.SRID)
test_point_3 = Point(100.0, 100.0, srid=settings.SRID)
test_point_4 = Point(-44.3, 60.1, srid=4326)
test_line = LineString((0.0, 0.0), (50.0, 0.0), srid=settings.SRID)
test_line_2 = LineString((20.0, 20.0), (30.0, 30.0), srid=settings.SRID)
test_line_3 = LineString((40.0, 40.0), (60.0, 60.0), srid=settings.SRID)
test_line_4 = LineString((500.0, 500.0), (500.0, 550.0), srid=settings.SRID)
test_polygon = Polygon(
    ((0.0, 0.0), (0.0, 50.0), (50.0, 50.0), (50.0, 0.0), (0.0, 0.0)),
    srid=settings.SRID)
test_polygon_2 = Polygon(
    (
        (1000.0, 1000.0),
        (1000.0, 1050.0),
        (1050.0, 1050.0),
        (1050.0, 1000.0),
        (1000.0, 1000.0),
    ),
    srid=settings.SRID,
)
test_polygon_3 = Polygon(
    (
        (100.0, 100.0),
Beispiel #58
0
def near_place_list(request):
    lat = float(request.GET.get('lat'))
    lng = float(request.GET.get('lng'))
    point = Point(lng, lat)
    return Place.objects.filter(point__distance_lt=(point, Distance(km=1)))
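A related sketch (not the code above): annotating each Place with its distance so the result can also be ordered nearest-first. It assumes Place.point is a geography-enabled PointField, so distances are computed in metres:

from django.contrib.gis.db.models.functions import Distance as DistanceFunc
from django.contrib.gis.geos import Point
from django.contrib.gis.measure import D


def near_place_list_ordered(request):
    lat = float(request.GET.get('lat'))
    lng = float(request.GET.get('lng'))
    point = Point(lng, lat, srid=4326)
    return (Place.objects
            .filter(point__dwithin=(point, D(km=1)))
            .annotate(distance=DistanceFunc('point', point))
            .order_by('distance'))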
Beispiel #59
0
    def import_postal_code(self):
        self.download('postal_code')
        data = self.get_data('postal_code')

        total = sum(1 for _ in data)

        data = self.get_data('postal_code')

        self.build_country_index()
        self.build_region_index()
        if VALIDATE_POSTAL_CODES:
            self.build_postal_code_regex_index()

        districts_to_delete = []

        query_statistics = [0 for i in range(8)]
        num_existing_postal_codes = PostalCode.objects.count()
        if num_existing_postal_codes == 0:
            self.logger.debug("Zero postal codes found - using only-create "
                              "postal code optimization")
        for item in tqdm(data, disable=self.options.get('quiet'), total=total, desc="Importing postal codes"):
            if not self.call_hook('postal_code_pre', item):
                continue

            country_code = item['countryCode']
            if country_code not in settings.postal_codes and 'ALL' not in settings.postal_codes:
                continue

            try:
                code = item['postalCode']
            except KeyError:
                self.logger.warning("Postal code has no code: {} -- skipping".format(item))
                continue

            # Find country
            try:
                country = self.country_index[country_code]
            except KeyError:
                self.logger.warning("Postal code '%s': Cannot find country: %s -- skipping", code, country_code)
                continue

            # Validate postal code against the country
            code = item['postalCode']
            if VALIDATE_POSTAL_CODES and self.postal_code_regex_index[country_code].match(code) is None:
                self.logger.warning("Postal code didn't validate: {} ({})".format(code, country_code))
                continue

            reg_name_q = Q(region_name__iexact=item['admin1Name'])
            subreg_name_q = Q(subregion_name__iexact=item['admin2Name'])
            dst_name_q = Q(district_name__iexact=item['admin3Name'])

            if hasattr(PostalCode, 'region'):
                reg_name_q |= Q(region__code=item['admin1Code'])

            if hasattr(PostalCode, 'subregion'):
                subreg_name_q |= Q(subregion__code=item['admin2Code'])

            if hasattr(PostalCode, 'district') and hasattr(District, 'code'):
                dst_name_q |= Q(district__code=item['admin3Code'])

            try:
                location = Point(float(item['longitude']),
                                 float(item['latitude']))
            except ValueError:
                location = None

            if len(item['placeName']) >= 200:
                self.logger.warning("Postal code name has more than 200 characters: {}".format(item))

            if num_existing_postal_codes > 0:
                postal_code_args = (
                    {
                        'args': (reg_name_q, subreg_name_q, dst_name_q),
                        'country': country,
                        'code': code,
                        'location': location,
                    }, {
                        'args': (reg_name_q, subreg_name_q, dst_name_q),
                        'country': country,
                        'code': code,
                    }, {
                        'args': (reg_name_q, subreg_name_q, dst_name_q),
                        'country': country,
                        'code': code,
                        'name__iexact': re.sub("'", '', item['placeName']),
                    }, {
                        'args': tuple(),
                        'country': country,
                        'region__code': item['admin1Code'],
                    }, {
                        'args': tuple(),
                        'country': country,
                        'code': code,
                        'name': item['placeName'],
                        'region__code': item['admin1Code'],
                        'subregion__code': item['admin2Code'],
                    }, {
                        'args': tuple(),
                        'country': country,
                        'code': code,
                        'name': item['placeName'],
                        'region__code': item['admin1Code'],
                        'subregion__code': item['admin2Code'],
                        'district__code': item['admin3Code'],
                    }, {
                        'args': tuple(),
                        'country': country,
                        'code': code,
                        'name': item['placeName'],
                        'region_name': item['admin1Name'],
                        'subregion_name': item['admin2Name'],
                    }, {
                        'args': tuple(),
                        'country': country,
                        'code': code,
                        'name': item['placeName'],
                        'region_name': item['admin1Name'],
                        'subregion_name': item['admin2Name'],
                        'district_name': item['admin3Name'],
                    }
                )

                # We do this so we don't have to deal with exceptions being thrown
                # in the middle of transactions
                for args_dict in postal_code_args:
                    num_pcs = PostalCode.objects.filter(
                        *args_dict['args'],
                        **{k: v for k, v in args_dict.items() if k != 'args'})\
                        .count()
                    if num_pcs == 1:
                        pc = PostalCode.objects.get(
                            *args_dict['args'],
                            **{k: v for k, v in args_dict.items() if k != 'args'})
                        break
                    elif num_pcs > 1:
                        pcs = PostalCode.objects.filter(
                            *args_dict['args'],
                            **{k: v for k, v in args_dict.items() if k != 'args'})
                        self.logger.debug("item: {}\nresults: {}".format(item, pcs))
                        # Raise a MultipleObjectsReturned exception
                        PostalCode.objects.get(
                            *args_dict['args'],
                            **{k: v for k, v in args_dict.items() if k != 'args'})
                else:
                    self.logger.debug("Creating postal code: {}".format(item))
                    pc = PostalCode(
                        country=country,
                        code=code,
                        name=item['placeName'],
                        region_name=item['admin1Name'],
                        subregion_name=item['admin2Name'],
                        district_name=item['admin3Name'])
            else:
                self.logger.debug("Creating postal code: {}".format(item))
                pc = PostalCode(
                    country=country,
                    code=code,
                    name=item['placeName'],
                    region_name=item['admin1Name'],
                    subregion_name=item['admin2Name'],
                    district_name=item['admin3Name'])

            if pc.region_name != '':
                try:
                    with transaction.atomic():
                        pc.region = Region.objects.get(
                            Q(name_std__iexact=pc.region_name) |
                            Q(name__iexact=pc.region_name),
                            country=pc.country)
                except Region.DoesNotExist:
                    pc.region = None
            else:
                pc.region = None

            if pc.subregion_name != '':
                try:
                    with transaction.atomic():
                        pc.subregion = Subregion.objects.get(
                            Q(region__name_std__iexact=pc.region_name) |
                            Q(region__name__iexact=pc.region_name),
                            Q(name_std__iexact=pc.subregion_name) |
                            Q(name__iexact=pc.subregion_name),
                            region__country=pc.country)
                except Subregion.DoesNotExist:
                    pc.subregion = None
            else:
                pc.subregion = None

            if pc.district_name != '':
                try:
                    with transaction.atomic():
                        pc.district = District.objects.get(
                            Q(city__region__name_std__iexact=pc.region_name) |
                            Q(city__region__name__iexact=pc.region_name),
                            Q(name_std__iexact=pc.district_name) |
                            Q(name__iexact=pc.district_name),
                            city__country=pc.country)
                except District.MultipleObjectsReturned as e:
                    self.logger.debug("item: {}\ndistricts: {}".format(
                        item,
                        District.objects.filter(
                            Q(city__region__name_std__iexact=pc.region_name) |
                            Q(city__region__name__iexact=pc.region_name),
                            Q(name_std__iexact=pc.district_name) |
                            Q(name__iexact=pc.district_name),
                            city__country=pc.country).values_list('id', flat=True)))
                    # If they're both part of the same city
                    if District.objects.filter(Q(city__region__name_std__iexact=pc.region_name) |
                                               Q(city__region__name__iexact=pc.region_name),
                                               Q(name_std__iexact=pc.district_name) |
                                               Q(name__iexact=pc.district_name),
                                               city__country=pc.country)\
                               .values_list('city').distinct().count() == 1:
                        # Use the one with the lower ID
                        pc.district = District.objects.filter(
                            Q(city__region__name_std__iexact=pc.region_name) |
                            Q(city__region__name__iexact=pc.region_name),
                            Q(name_std__iexact=pc.district_name) |
                            Q(name__iexact=pc.district_name),
                            city__country=pc.country).order_by('city__id').first()

                        districts_to_delete.append(District.objects.filter(
                            Q(city__region__name_std__iexact=pc.region_name) |
                            Q(city__region__name__iexact=pc.region_name),
                            Q(name_std__iexact=pc.district_name) |
                            Q(name__iexact=pc.district_name),
                            city__country=pc.country).order_by('city__id').last().id)
                    else:
                        raise e
                except District.DoesNotExist:
                    pc.district = None
            else:
                pc.district = None

            if pc.district is not None:
                pc.city = pc.district.city
            else:
                pc.city = None

            try:
                pc.location = Point(float(item['longitude']), float(item['latitude']))
            except Exception as e:
                self.logger.warning("Postal code %s (%s) - invalid location ('%s', '%s'): %s",
                                    pc.code, pc.country, item['longitude'],
                                    item['latitude'], str(e))
                pc.location = None

            pc.save()

            if not self.call_hook('postal_code_post', pc, item):
                continue

            self.logger.debug("Added postal code: %s, %s", pc.country, pc)

        if num_existing_postal_codes > 0 and max(query_statistics) > 0:
            width = int(math.log10(max(query_statistics)))

            stats_str = ""
            for i, count in enumerate(query_statistics):
                stats_str = "{{}}\n{{:>2}} [{{:>{}}}]: {{}}".format(width)\
                    .format(stats_str, i, count,
                            ''.join(['=' for i in range(count)]))

                self.logger.info("Postal code query statistics:\n{}".format(stats_str))

        if districts_to_delete:
            self.logger.debug('districts to delete:\n{}'.format(districts_to_delete))
Beispiel #60
0
class CollectPointSerializer(CountryFieldMixin, serializers.ModelSerializer):
    """採集地点モデル用シリアライザ"""

    user = serializers.HiddenField(default=serializers.CurrentUserDefault())
    country = CountryField(required=False)
    location = PointField(default=Point(0.0, 0.0, srid=4326))
    longitude = serializers.DecimalField(required=False,
                                         max_digits=9,
                                         decimal_places=6,
                                         read_only=True)
    latitude = serializers.DecimalField(required=False,
                                        max_digits=9,
                                        decimal_places=6,
                                        read_only=True)
    coordinate_precision = serializers.FloatField(required=False)
    minimum_elevation = serializers.FloatField(required=False)
    maximum_elevation = serializers.FloatField(required=False)
    minimum_depth = serializers.FloatField(required=False)
    maximum_depth = serializers.FloatField(required=False)
    image1 = Base64ImageField(required=False)
    image2 = Base64ImageField(required=False)
    image3 = Base64ImageField(required=False)
    image4 = Base64ImageField(required=False)
    image5 = Base64ImageField(required=False)

    class Meta:
        model = CollectPoint
        fields = '__all__'
        read_only_fields = ('created_at', 'id')
        extra_kwargs = {
            'contient': {
                'validators':
                # message: "only half-width (ASCII) alphanumerics and symbols allowed"
                [RegexValidator(r'^[!-~ ]+$', message='半角英数記号のみ使用可')]
            },
            'island_group': {
                'validators': [
                    # message: "only half-width alphanumerics, symbols and accented characters allowed"
                    RegexValidator(r'^[!-~ À-ÖØ-öø-ÿāīūēōȳĀĪŪĒŌȲ]+$',
                                   message='半角英数記号およびアクセント記号付き文字のみ使用可')
                ]
            },
            'island': {
                'validators': [
                    RegexValidator(r'^[!-~ À-ÖØ-öø-ÿāīūēōȳĀĪŪĒŌȲ]+$',
                                   message='半角英数記号およびアクセント記号付き文字のみ使用可')
                ]
            },
            'state_provice': {
                'validators': [
                    RegexValidator(r'^[!-~ À-ÖØ-öø-ÿāīūēōȳĀĪŪĒŌȲ]+$',
                                   message='半角英数記号およびアクセント記号付き文字のみ使用可')
                ]
            },
            'county': {
                'validators': [
                    RegexValidator(r'^[!-~ À-ÖØ-öø-ÿāīūēōȳĀĪŪĒŌȲ]+$',
                                   message='半角英数記号およびアクセント記号付き文字のみ使用可')
                ]
            },
            'municipality': {
                'validators': [
                    RegexValidator(r'^[!-~ À-ÖØ-öø-ÿāīūēōȳĀĪŪĒŌȲ]+$',
                                   message='半角英数記号およびアクセント記号付き文字のみ使用可')
                ]
            },
        }
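The longitude/latitude fields above are declared read-only, but nothing shown here fills them. One possible sketch of deriving them from the stored Point on output; this is purely illustrative, and the real project may compute them elsewhere (e.g. as model properties):

class CollectPointSerializerSketch(CollectPointSerializer):
    def to_representation(self, instance):
        # expose the stored Point also as plain longitude/latitude numbers
        data = super().to_representation(instance)
        if instance.location:
            data['longitude'] = round(instance.location.x, 6)
            data['latitude'] = round(instance.location.y, 6)
        return data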