Example #1
    def test_encode_pairs(self):
        pairs = (
            (38.5, -120.2),
            (40.7, -120.95),
            (43.252, -126.453),
            (40.7, -120.95),
        )
        expected_encoding = '_p~iF~ps|U_ulLnnqC_mqNvxq`@~lqNwxq`@', 'BBBB'
        self.assertEqual(encode_pairs(pairs), expected_encoding)

        pairs = (
            (37.4419, -122.1419),
            (37.4519, -122.1519),
            (37.4619, -122.1819),
        )
        expected_encoding = 'yzocFzynhVq}@n}@o}@nzD', 'B@B'
        self.assertEqual(encode_pairs(pairs), expected_encoding)
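
The tests above pin down the interface the rest of these examples rely on: encode_pairs takes a sequence of (lat, lon) pairs and returns a two-tuple of the encoded polyline string and the per-point zoom-level string. A minimal sketch of a standalone call, assuming glineenc is importable as in the later examples:

    from glineenc import encode_pairs

    points, levels = encode_pairs([(38.5, -120.2), (40.7, -120.95)])
    # points is the encoded polyline string, levels the matching level string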
Example #2
    def path(self, lat1, lng1, lat2, lng2, transfer_penalty=0, walking_speed=1.0, hill_reluctance=20, narrative=True, jsoncallback=None):
        
        t0 = time.time()
        origin = "osm-%s"%self.osmdb.nearest_node( lat1, lng1 )[0]
        dest = "osm-%s"%self.osmdb.nearest_node( lat2, lng2 )[0]
        endpoint_find_time = time.time()-t0
        
        print origin, dest
        
        t0  = time.time()
        wo = WalkOptions()
        #wo.transfer_penalty=transfer_penalty
        #wo.walking_speed=walking_speed
        wo.walking_speed=4
        wo.walking_overage = 0
        wo.hill_reluctance = 20
        wo.turn_penalty = 15 
        
        edgepayloads = self.ch.shortest_path( origin, dest, State(1,0), wo )
        
        wo.destroy()
        
        route_find_time = time.time()-t0
        
        t0 = time.time()
        names = []
        geoms = []
        
        profile = Profile()
        total_dist = 0
        total_elev = 0
        
        if narrative:
            names, total_dist = get_full_route_narrative( self.osmdb, edgepayloads )
        
        for edgepayload in edgepayloads:
            geom, profile_seg = self.shortcut_cache.get( edgepayload.external_id )
            
            #geom = get_ep_geom( self.osmdb, edgepayload )
            #profile_seg = get_ep_profile( self.profiledb, edgepayload )

            geoms.extend( geom )
            profile.add( profile_seg )
            
        route_desc_time = time.time()-t0

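        # geoms holds (lon, lat) pairs, while encode_pairs expects (lat, lon), hence the swap below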
        ret = json.dumps( (names, 
                           encode_pairs( [(lat, lon) for lon, lat in geoms] ), 
                           profile.concat(300),
                           { 'route_find_time':route_find_time,
                             'route_desc_time':route_desc_time,
                             'endpoint_find_time':endpoint_find_time,},
                           { 'total_dist':total_dist,
                             'total_elev':total_elev}) )
        if jsoncallback:
            return "%s(%s)"%(jsoncallback,ret)
        else:
            return ret
Example #3
def database_add(request):
    """
    Загрузка координат из *.kml в БД
    """
    #Кооординаты Cornfield

    if request.method == 'POST':
            form = AddField(request.POST, request.FILES)
            if form.is_valid():
                all_geometry = Cornfield.objects.filter(use_user = request.user)

                #fname = 'KML/' + request.FILES['append_field'].name[:-4] + "_" + str(request.user) + "_" + datetime.now().strftime("%d_%m_%Y_%H.%M.%S") + ".kml"
                fname = 'KML/' + str(request.user) + "_" + datetime.now().strftime("%d_%m_%Y_%H.%M.%S") + ".kml"
                f = open(os.path.join(MEDIA_ROOT, fname), 'wb+')
                for chunk in request.FILES['append_field'].chunks():
                       f.write(chunk)
                f.close()
                try:
                    ds = DataSource(os.path.join(MEDIA_ROOT, fname))
                    #ds = DataSource(os.path.join(MEDIA_ROOT, 'ag_fields3.kml'))
                    layer = ds[0]
                    i = 0
                    for feat in layer:

                      geom = feat.geom
                      #print geom
                      if geom:
                        new_line = []
                        for line in geom:
                            new_point = []
                            for point in line:
                                new_point.append(Point(point[0],point[1]))
                            new_point.append(new_point[0])
                            new_line = LinearRing(new_point)
                            if not new_line.ring:
                                new_point.append(new_point[0])
                                new_line = LinearRing(new_point)
                        name = "e%d"%i
                        poly = Polygon(new_line)
                        polygon_equals = True
                        for geom in all_geometry:
                            if poly.equals_exact(geom.mpoly[0], 0.001):
                                polygon_equals = False
                                break
                        if polygon_equals:
                            c = Cornfield(use_user = request.user, name_field=name, area = round(poly.area*1000000,2), mpoly = MultiPolygon(poly),
                                mpoly_coding_paths = encode_pairs(new_line)[0], mpoly_coding_levels = encode_pairs(new_line)[1])
                            c.save()
                            i+=1
                except:
                    pass


                return redirect('show_map')
Example #4
def get_polys(path):
    "Returns a dict of tuples: shape_id -> (encoded polyline, encoded levels)"

    i = open(path)
    r = csv.reader(i)
    r.next()  # skip the header row

    last_shape = None
    coords = []
    polys = {}

    for row in r:
        if row[0] != last_shape:
            # starting a new shape: flush the previous one before collecting
            if last_shape is not None:
                polys[last_shape] = encode_pairs(coords)
            last_shape = row[0]
            coords = []
        # append every row's point, including the first point of each shape
        coords.append((float(row[1]), float(row[2])))

    polys[last_shape] = encode_pairs(coords)
    i.close()

    return polys
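
The reader above expects each data row to carry a shape id in column 0 and a latitude/longitude pair in columns 1 and 2, the same order as the first three columns of a GTFS shapes.txt. A hypothetical call (the path is illustrative only):

    polys = get_polys("shapes.txt")  # {shape_id: (encoded polyline, encoded levels)}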
Example #5
 def contour(self, lat, lon, year, month, day, hour, minute, second, cutoff, step=60*15, encoded=False, speed=0.85):
     if step is not None and step < 600:
         raise Exception( "Step cannot be less than 600 seconds" )
     
     starttime = TimeHelpers.localtime_to_unix( year, month, day, hour, minute, second, "America/Los_Angeles" )
     
     #=== get osm vertex ==
     print( "getting nearest vertex" )
     
     #find osmid of origin intersection
     t0 = time.time()
     range = 0.001
     bbox = (lon-range, lat-range, lon+range, lat+range)
     candidates = self.index.intersection( bbox )
     vlabel, vlat, vlon, vdist = self.osmdb.nearest_of( lat, lon, candidates )
     t1 = time.time()
     print( "done, took %s seconds"%(t1-t0) )
     
     #vlabel, vlat, vlon, vdist = self.osmdb.nearest_node( lat, lon )
     
     if vlabel is None:
         return json.dumps( "NO NEARBY INTERSECTION" )
     
     print( "found - %s"%vlabel )
     
     contours = self._contour( "osm"+vlabel, starttime, cutoff, step, speed )
     
     if encoded:
         encoded_contours = []
         for contour in contours:
             encoded_contour = []
             for ring in contour:
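                 # ring vertices come as (lon, lat); swap to the (lat, lon) order encode_pairs expects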
                 encoded_contour.append( encode_pairs( [(lat,lon) for lon,lat in ring] ) )
             encoded_contours.append( encoded_contour )
             
         contours = encoded_contours
     
     return json.dumps( contours )
Example #6
def parsegpx(database):
    session=database.db_session()
    db_country=database.db_country
    db_country_shapes=database.db_country_shapes
    tree = etree.parse('borders_world_proper.gpx')
    root = tree.getroot()
    segments=root.getiterator("rte")
    i=0
    countrylist=list()
    for segment in segments:
        latlonlist=list()
        isocode=segment.find('cmt').text
        print isocode
        try:
            q = session.query(db_country).filter(db_country.iso_numcode==isocode)
            country=q.one()
            print country.iso_countryname
#            q = session.query(db_country_shapes).filter(db_country_shapes.iso==country.iso_numcode)
            for trkpt in segment:
               i=i+1
               try:
                   lat=trkpt.attrib['lat']
                   lon=trkpt.attrib['lon']
                   latlonlist.append((float(lat),float(lon)))
               except KeyError:
                   pass
            gencpoly=glineenc.encode_pairs(latlonlist)
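            # double any backslashes in the encoded polyline; the same escaped form is stored below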
            q = session.query(db_country_shapes).filter(and_(db_country_shapes.country_id==isocode,db_country_shapes.gencpoly_pts==gencpoly[0].replace('\\','\\\\')))
            # q.one() raises unless exactly one row matches, so test for a single existing shape
            if q.count()==1:
                country_shape=q.one()
                print country_shape.id,country_shape.iso_numcode
            else:
                session.add(db_country_shapes(country.iso_numcode,gencpoly[0].replace('\\','\\\\'),gencpoly[1]))
                session.commit()
        except Exception:
            print 'Error:'+str(isocode)
            print segment.find('name').text
Example #7
def get_encoded_ep_geom( osmdb, edgepayload ):
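    # get_ep_geom yields (lon, lat) pairs; swap to (lat, lon), the order encode_pairs expects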
    return encode_pairs( [(lat, lon) for lon, lat in get_ep_geom( osmdb, edgepayload )] )
Example #8
 def test_encode_one_pair(self):
     pairs = [(38.5, -120.2)]
     expected_encoding = '_p~iF~ps|U', 'B'
     self.assertEqual(encode_pairs(pairs), expected_encoding)
Example #9
def gpx2database(trackpath,wteapi_key,database,trk_color,lat,lon,createdate):
    print 'FUNCTION GPX2DATABASE'
    session=database.db_session()
    db_track=database.db_track
    db_trackpoint=database.db_trackpoint
    db_country=database.db_country
    trk_ptnum=dict()
    trk_ptnum[0]=0
    trk_distance=dict()
    trk_distance[0]=0
    trk_span=dict()
    trk_span[0]=timedelta(hours=0,minutes=0,seconds=0)
    trkptlist=list()
    latlonlist=list()
    
    if os.listdir(trackpath)==[]:
        print 'No Trackfile, checking for single trackpoint'
        if lat and lon:
            country=get_country(lat,lon,database)
            location=talk2flickr.findplace(lat,lon,11)
            q = session.query(db_trackpoint).filter(and_(db_trackpoint.latitude==lat,db_trackpoint.longitude==lon,db_trackpoint.timestamp==createdate))
            if q.count()>0:
                print 'Trackpoint already exists: '+str(q.one().id)
            else:
                tz_detail=get_timezone(database,lat,lon,createdate,wteapi_key)
                session.add(db_trackpoint(None,tz_detail.id,country.iso_numcode,lat,lon,None,None,None,None,None,createdate,True,location))
                session.commit()
            if q.count() == 1:
                trackpoint=q.one()
                infomarker_id=trackpoint.id
    else:
        for gpxfile in os.listdir(trackpath):
            if gpxfile.lower().endswith('.gpx'):
                tree = etree.parse(trackpath+gpxfile)
                gpx_ns = "http://www.topografix.com/GPX/1/1"
                ext_ns = "http://gps.wintec.tw/xsd/"
                root = tree.getroot()
                fulltrack = root.getiterator("{%s}trk"%gpx_ns)
                trackSegments = root.getiterator("{%s}trkseg"%gpx_ns)
                i=1
                for trk in fulltrack:
                    print 'gpxfile trk no.' + str(i)
                    track_desc=trk.find('{%s}desc'% gpx_ns).text #get the desc-tag from the gpx-file
                    trk_ptnum[i]=trk_ptnum[i-1]+int(track_desc.split()[3][:-1])	     #cut out the value from the string e.g. "Total track points: 112."
                    trk_rspan=track_desc.split()[6][:-1]	     #cut out the value from the string e.g. "Total time: 0h18m25s."
                    trk_distance[i]=trk_distance[i-1]+float(track_desc.split()[8][:-2])	     #cut out the value from the string e.g. "Journey: 4.813Km"
                    trk_tspan=re.compile(r'(?P<h>\d+)h(?P<m>\d+)m(?P<s>\d+)s').match(trk_rspan) #find the values of h,m,s and add them to "groups"
                    trk_span[i]=trk_span[i-1]+timedelta(hours=int(trk_tspan.group("h")), minutes=int(trk_tspan.group("m")),seconds=int(trk_tspan.group("s"))) #get the values from groups "h","m","s" and save them in a timeformat
                    i=i+1
                    
                for trackSegment in trackSegments:
                    for trackPoint in trackSegment:
                        lat=trackPoint.attrib['lat']
                        lon=trackPoint.attrib['lon']
                        altitude=trackPoint.find('{%s}ele'% gpx_ns).text
                        time=trackPoint.find('{%s}time'% gpx_ns).text.replace('T',' ')[:-1] #replace the "T" with " " and remove the "Z" from the end of the string
                        desc=trackPoint.find('{%s}desc'% gpx_ns).text.split(', ') #split the description to get "speed" and "direction"-values
                        velocity=0
                        direction=0
                        for value in desc:
                            if value.split('=')[0] == 'Speed':
                                velocity=value.split('=')[1][:-4]
                            elif value.split('=')[0] == 'Course':
                                direction=value.split('=')[1][:-4]
                        try:
                            temperature=trackPoint.find("{%s}extensions/{%s}TrackPointExtension/{%s}Temperature" % (gpx_ns,ext_ns,ext_ns)).text
                            pressure=trackPoint.find("{%s}extensions/{%s}TrackPointExtension/{%s}Pressure" % (gpx_ns,ext_ns,ext_ns)).text
                        except AttributeError:
                            temperature=None
                            pressure=None
                        #print lat,lon,time
                        trkptlist.append((lat,lon,altitude,velocity,temperature,direction,pressure,time))
                        latlonlist.append((float(lat),float(lon)))
       
         
        #get the last value of each "desc"-segment, this value represents the total from the several gpx-files
        trk_ptnumtotal=trk_ptnum[i-1]
        trk_distancetotal=trk_distance[i-1]
        trk_spantotal=trk_span[i-1]
        print 'Total Trackpoints found: '+str(trk_ptnumtotal)
        
        #create an encoded polyline from the latitude-longitude-list
        gencpoly=glineenc.encode_pairs(latlonlist)
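        # gencpoly is a (points, levels) tuple; backslashes in the points string are doubled below before comparing/storing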
        
        trkpt_firsttimestamp=trkptlist[0][7] #first timestamp in the trackpoint-list
        query_track=session.query(db_track).filter(and_(db_track.date==trkpt_firsttimestamp,db_track.trkptnum==trk_ptnumtotal,db_track.distance==trk_distancetotal,db_track.timespan==trk_spantotal,db_track.gencpoly_pts==gencpoly[0].replace('\\','\\\\'),db_track.gencpoly_levels==gencpoly[1]))
        if query_track.count() == 1:
            for detail in query_track.all():
                track_detail=detail
                print 'track found - id:'+ str(track_detail.id)# + ' - details:' + str(track_detail)
        elif query_track.count() > 1:
            for detail in query_track.all():
                track_detail=detail
                print 'more than one track found! - id:'#+ str(track_detail.id) + ' - details:' + str(track_detail)
        else:
            session.add(db_track(trkpt_firsttimestamp,trk_ptnumtotal,trk_distancetotal,trk_spantotal,gencpoly[0].replace('\\','\\\\'),gencpoly[1],trk_color,None,None,None,None))
            session.commit()
            for detail in query_track.all():
                track_detail=detail
                print 'track created! - id:'+ str(track_detail.id)# + ' - details:' + str(track_detail)
    
        i=0
        print "\nAdding trackpoints to database:\n"
        pb=progress.progressbarClass(track_detail.trkptnum,"=")
        for trkpt in trkptlist:
            lat,lon,altitude,velocity,temperature,direction,pressure,time=trkptlist[i]
            query_trackpoint=session.query(db_trackpoint).filter(and_(db_trackpoint.track_id==track_detail.id,db_trackpoint.latitude==lat,db_trackpoint.longitude==lon,db_trackpoint.timestamp==time))
            if query_trackpoint.count() == 1:
                for detail in query_trackpoint.all():
                    trkpt_detail=detail
                    #print 'Trackpoint already exists - id:'+ str(trkpt_detail.id) + ' details:' + str(trkpt_detail)
            elif query_trackpoint.count() > 1:
                for detail in query_trackpoint.all():
                    trkpt_detail=detail
                    print 'trackpoint duplicate found! - id:'+ str(trkpt_detail.id) + ' - details:' + str(trkpt_detail)
            else:
                #trackpoints are unique, insert them now
                session.add(db_trackpoint(track_detail.id,None,None,lat,lon,float(altitude),velocity,temperature,direction,pressure,time,False,None))
                session.commit()
                for detail in query_trackpoint.all():
                    trkpt_detail=detail
                    #print 'trackpoint added! - id:'+ str(trkpt_detail.id) + ' - details:' + str(trkpt_detail)
            #in the middle of the track, we set the current infomarker.trackpoint_id to true as this is our infomarker-point
            if i==track_detail.trkptnum/2:
                for column in query_trackpoint.all():
                    column.infomarker=True
                    session.commit()
                    infomarker_id=trkpt_detail.id
            pb.progress(i)
            i=i+1
        print "infomarker_id="+str(infomarker_id)
    return infomarker_id	
Example #10
    for pt in pts:
        line.append((float(pt.get('lat')),float(pt.get('lon'))))
    data.append(line)

center=[0.,0.]
totN=0
pointsText=""
for i,line in enumerate(data):    
    pairs = [(pt[0],pt[1]) for pt in line]
    # no sense keeping more than 2K pairs:
    if len(pairs)>maxPairs:
        skip = len(pairs)//maxPairs
        pairs = pairs[:len(pairs):skip]
    txtPairs=["%.6f %.6f"%(x,y) for x,y in pairs]
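    # keep a zlib-compressed, base64-encoded copy of the raw pairs alongside the encoded polyline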
    ptsPkl = base64.b64encode(zlib.compress(str(txtPairs).encode('utf-8')))
    encoding,levels = glineenc.encode_pairs(pairs)
    encoding=encoding.replace('\\','\\\\')
    for pt in line:
        latit = pt[0]
        longit = pt[1]
        center[0] += latit
        center[1] += longit
        totN+=1
    if len(data)==1:
      pointsText+="""var polyline_%(idx)d=new google.maps.Polyline.fromEncoded({opacity:"%(lineOpacity).1f",color:"%(lineColor)s",weight:%(lineWidth)d,points:"%(encoding)s",levels:"%(levels)s",zoomFactor:32,numLevels:4}); var ptsArchive="%(ptsPkl)s"; /*decode from python with: zlib.decompress(base64.b64decode(ptsArchive))*/"""%locals()
    else:
      pointsText+="""var polyline_%(idx)d_%(i)d=new google.maps.Polyline.fromEncoded({opacity:"%(lineOpacity).1f",color:"%(lineColor)s",weight:%(lineWidth)d,points:"%(encoding)s",levels:"%(levels)s",zoomFactor:32,numLevels:4}); var ptsArchive_%(i)d="%(ptsPkl)s"; /*decode from python with: zlib.decompress(base64.b64decode(ptsArchive))*/"""%locals()
        

latit,longit=center
latit /= totN
Example #11
    def path(self,
             lat1,
             lng1,
             lat2,
             lng2,
             transfer_penalty=0,
             walking_speed=1.0,
             hill_reluctance=20,
             narrative=True,
             jsoncallback=None):

        t0 = time.time()
        origin = "osm-%s" % self.osmdb.nearest_node(lat1, lng1)[0]
        dest = "osm-%s" % self.osmdb.nearest_node(lat2, lng2)[0]
        endpoint_find_time = time.time() - t0

        print origin, dest

        t0 = time.time()
        wo = WalkOptions()
        #wo.transfer_penalty=transfer_penalty
        #wo.walking_speed=walking_speed
        wo.walking_speed = 4
        wo.walking_overage = 0
        wo.hill_reluctance = 20
        wo.turn_penalty = 15

        edgepayloads = self.ch.shortest_path(origin, dest, State(1, 0), wo)

        wo.destroy()

        route_find_time = time.time() - t0

        t0 = time.time()
        names = []
        geoms = []

        profile = Profile()
        total_dist = 0
        total_elev = 0

        if narrative:
            names, total_dist = get_full_route_narrative(
                self.osmdb, edgepayloads)

        for edgepayload in edgepayloads:
            geom, profile_seg = self.shortcut_cache.get(
                edgepayload.external_id)

            #geom = get_ep_geom( self.osmdb, edgepayload )
            #profile_seg = get_ep_profile( self.profiledb, edgepayload )

            geoms.extend(geom)
            profile.add(profile_seg)

        route_desc_time = time.time() - t0

        ret = json.dumps(
            (names, encode_pairs([(lat, lon) for lon, lat in geoms]),
             profile.concat(300), {
                 'route_find_time': route_find_time,
                 'route_desc_time': route_desc_time,
                 'endpoint_find_time': endpoint_find_time,
             }, {
                 'total_dist': total_dist,
                 'total_elev': total_elev
             }))
        if jsoncallback:
            return "%s(%s)" % (jsoncallback, ret)
        else:
            return ret
Example #12
import md_hoods
import MySQLdb
from decodegmap import encode_points, decode_points
from glineenc import encode_pairs


def flatten(pts):
    x = []
    for lat, long in pts:
        x.append(lat)
        x.append(long)
    return x


# open a single connection and commit once; the original per-row connections
# were never committed except the last one, so their inserts were lost
conn = MySQLdb.connect(host="localhost", user="******", db="sqft")
cur = conn.cursor()
for name, pts in md_hoods.hoods.iteritems():
    b, l = encode_pairs(pts)
    cur.execute(
        """insert into zillow_neighborhoods(name, encodedBorder, encodedLevels)
           values (%s, %s, %s)""", (name, b, l))
cur.close()
conn.commit()
Example #13
from glineenc import encode_pairs
from cPickle import dump

h = [x.strip().split("\t") for x in file("csa_latlon.csv").readlines()]
hoods = {}
for _, lon, lat, __, ___, hood, ____, _____ in h[1:]:
    hoods.setdefault(hood, []).append((float(lat), float(lon)))

borders = {}
for hood, pairs in hoods.iteritems():
    borders[hood] = encode_pairs(pairs)

dump(borders, file("bnia_hoods.pkl", "wb"))  # pickle output should be written in binary mode
Example #14
import os
from lxml import etree
import glineenc


latlonlist=list()
trackpath='/srv/trackdata/bydate/2009-09-01/trackfile/'


for gpxfile in os.listdir(trackpath):
    if gpxfile.lower().endswith('.gpx'):
        tree = etree.parse(trackpath+gpxfile)
        gpx_ns = "http://www.topografix.com/GPX/1/1"
        ext_ns = "http://gps.wintec.tw/xsd/"
        root = tree.getroot()
        fulltrack = root.getiterator("{%s}trk"%gpx_ns)
        trackSegments = root.getiterator("{%s}trkseg"%gpx_ns)
        
        for trackSegment in trackSegments:
            for trackPoint in trackSegment:
                lat=trackPoint.attrib['lat']
                lon=trackPoint.attrib['lon']
                latlonlist.append((float(lat),float(lon)))
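        # note: latlonlist is never reset, so each file's encoding also includes the points from earlier files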
        gencpoly=glineenc.encode_pairs(latlonlist)
        print gencpoly[0].replace('\\','\\\\')
        print gencpoly[1]