Example #1
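    # Save the trip-editing form: validate the trip name, update the user's
    # real name and each waypoint's name, and for rows with a submitted
    # date-of-flight also update the waypoint's Stay (date, departure time,
    # persons, fuel/fuel adjustment) and its terrain-derived altitude.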
    def save(self):
        #print "Saving tripname:",request.params
        if not self.validate(exception=False,
                             tripname=request.params.get('tripname', False)):
            return ""
        try:
            waypoints = meta.Session.query(Waypoint).filter(
                sa.and_(Waypoint.user == tripuser(),
                        Waypoint.trip == c.trip.trip)).order_by(
                            Waypoint.ordering).all()
            #print "REquest:",request.params
            c.userobj.realname = request.params.get('realname',
                                                    c.userobj.realname)

            for idx, way in enumerate(waypoints):
                dof_s = "date_of_flight_%d" % (way.id, )
                dep_s = "departure_time_%d" % (way.id, )
                fuel_s = "fuel_%d" % (way.id, )
                persons_s = "persons_%d" % (way.id, )

                name_s = "name%d" % (way.id, )
                way.waypoint = request.params.get(name_s, way.waypoint)

                if dof_s in request.params:
                    #possibly add new stay
                    if not way.stay:
                        #print "Adding stay: ord/id",way.ordering,way.id
                        way.stay = Stay(tripuser(), c.trip.trip, way.id)
                    if re.match(ur"\d{4}-?\d{2}\-?\d{2}",
                                request.params.get(dof_s, '')):
                        way.stay.date_of_flight = request.params.get(dof_s, '')
                    else:
                        way.stay.date_of_flight = ''

                    if re.match(ur"\d{2}:?\d{2}",
                                request.params.get(dep_s, '')):
                        way.stay.departure_time = request.params.get(dep_s, '')
                    else:
                        way.stay.departure_time = ''

                    try:
                        way.stay.nr_persons = int(request.params[persons_s])
                    except Exception:
                        way.stay.nr_persons = None
                    way.stay.fuel = None
                    way.stay.fueladjust = None
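                    # A fuel value prefixed with "+" or "-" is stored as an
                    # adjustment (fueladjust); a bare number is stored as an
                    # absolute amount (fuel).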
                    try:
                        fuelstr = request.params.get(fuel_s, '').strip()
                        if fuelstr.startswith("+") or fuelstr.startswith("-"):
                            way.stay.fueladjust = float(fuelstr)
                        else:
                            way.stay.fuel = float(fuelstr)
                    except Exception:
                        pass
                    way.altitude = unicode(
                        int(
                            get_terrain_elev.get_terrain_elev(
                                mapper.from_str(way.pos))))
                else:
Example #2
def get_pos_elev(latlon):
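    # Use a nearby airfield's published elevation when the position lies within
    # roughly 0.25 arc minutes (Manhattan distance in degrees) of it; otherwise
    # fall back to the terrain elevation model.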
    for airf in cache.get_airfields():
        #print "Considering:",airf
        apos = mapper.from_str(airf['pos'])
        dx = apos[0] - latlon[0]
        dy = apos[1] - latlon[1]
        if abs(dx) + abs(dy) < 0.25 * 1.0 / 60.0 and 'elev' in airf:
            return airf['elev']
    return get_terrain_elev(latlon)
Example #3
def parse_airfields():
    out = []
    for item in csv.reader(open("fplan/extract/denmark.airfields.csv")):
        print item
        icao, empty, ICAO, name, d1, d2, pos, elev, owner, phone, d4, d5, webside = item
        if not pos[-1] in ['E', 'W']:
            pos = pos + "E"
        print "ICAO:", icao
        assert icao.upper() == ICAO
        name = coding(name)
        lat, lon = mapper.from_str(mapper.parsecoord(pos))
        nasaelev = get_terrain_elev((lat, lon))
        if elev == '':
            elev = nasaelev
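        # 9999 appears to be get_terrain_elev's "no data" value; only sanity-check
        # the published elevation against it when real terrain data exists.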
        if nasaelev != 9999:
            assert abs(float(elev) - nasaelev) < 100
        ad = dict(icao=ICAO,
                  name=name,
                  pos=mapper.to_str((lat, lon)),
                  date="2010-01-01T00:00:00Z",
                  elev=int(elev))
        out.append(ad)
    return out
Example #4
def osm_airfields_parse():
    nameicao = parse_info()
    f = open("adnames.txt", "w")
    for name, icao in sorted(nameicao, key=lambda x: x[0]):
        f.write("%s: %s\n" % (icao.encode('utf8'), name.encode('utf8')))
    f.close()
    ads = []
    hits = 0
    misses = []

    name2icao = []
    for name, icao in nameicao:
        name2icao.append((normalize(name), name, icao))

    dupecheck = set()

    for lon, lat, name in csv.reader(open("fplan/extract/aerodromes.txt")):
        name = unicode(name, "utf8")
        if not name.strip(): continue

        icao = None
        n1 = normalize(name)
        if frozenset(n1) in dupecheck:
            continue
        dupecheck.add(frozenset(n1))
        lastquality = 0
        lastname = None
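        # Fuzzy-match the OSM aerodrome name against the ICAO name list:
        # normalize() apparently returns a set of tokens/characters, and a match
        # requires the overlap to cover (nearly) all of the shorter name while
        # improving on the best match so far.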
        for n2, iname, iicao in name2icao:
            #print repr(n1),"==",repr(n2)
            minlen = min(len(n1), len(n2))
            if minlen > 2:
                minlen = minlen - 1
            quality = len(n1.intersection(n2))
            if quality >= minlen and quality > lastquality:
                if icao != None and icao != iicao:
                    print "For name:", name, "Previous match:,", icao, "New match:", iicao, "Name:", iname, "last name:", lastname, "quality:", quality, "last:", lastquality, "minlen:", minlen
                    #assert icao==None
                icao = iicao
                lastquality = quality
                lastname = iname
        #print "Ap with name:",name
        if icao:
            hits += 1
        else:
            icao = 'ZZZZ'
            misses.append(name)
            #print "Missed:",name
        d = dict(icao=icao,
                 name=name,
                 pos=mapper.to_str((float(lat), float(lon))),
                 elev=int(gte.get_terrain_elev((float(lat), float(lon)))))

        if hits % 10 == 0:
            print "Hits: %d, Misses: %d, Perc: %.1f" % (
                hits, len(misses), 100.0 * (float(hits) /
                                            (float(hits) + len(misses))))
        ads.append(d)
    print "Misses:"
    f = open("missedads.txt", "w")
    for miss in sorted(misses):
        f.write((miss + " - " + repr(normalize(miss)) + u"\n").encode('utf8'))
    f.close()
    f = open("foundads.txt", "w")
    for norm, name, icao in name2icao:
        f.write((name + " - " + repr(norm) + u"\n").encode('utf8'))
    f.close()
    print "Hits: ", hits
    print "Misses: ", len(misses)

    return ads
Example #5
def extract_airfields(filtericao=lambda x:True,purge=True):
    #print getxml("/AIP/AD/AD 1/ES_AD_1_1_en.pdf")
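    # Scrape the Swedish AIP: locate the aerodrome directory in ES_AD_1_1, then
    # pull positions, elevations, runway thresholds, ATS frequencies, airspace
    # and charts from each larger aerodrome's AD 2 documents.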
    ads=[]
    p=Parser("/AIP/AD/AD 1/ES_AD_1_1_en.pdf")
    points=dict()
    startpage=None
    for pagenr in xrange(p.get_num_pages()):
        page=p.parse_page_to_items(pagenr)
        if page.count("Aerodrome directory"):
            startpage=pagenr
            break
    if startpage==None:
        raise Exception("Couldn't find aerodrome directory in file")
    #print "Startpage: %d"%(startpage,)
    #nochartf=open("nochart.txt","w")
    for pagenr in xrange(startpage,p.get_num_pages()):
        row_y=[]
        page=p.parse_page_to_items(pagenr)
        allines=[x for x in (page.get_lines(page.get_partially_in_rect(0,0,15,100))) if x.strip()]
        for item,next in zip(allines,allines[1:]+[""]):
            #print "item:",item
            
            m=re.match(ur"^\s*[A-ZÅÄÖ]{3,}(?:/.*)?\b.*",item)
            if m:
                #print "Candidate, next is:",next
                if re.match(r"^\s*[A-Z]{4}\b.*",next):
                    #print "Matched:",item
                    #print "y1:",item.y1                    
                    row_y.append(item.y1)
        for y1,y2 in zip(row_y,row_y[1:]+[100.0]):
            #print "Extacting from y-range: %f-%f"%(y1,y2)
            items=list(page.get_partially_in_rect(0,y1-0.25,5.0,y2+0.25,ysort=True))
            if len(items)>=2:
                #print "Extract items",items
                ad=dict(name=unicode(items[0].text).strip(),
                        icao=unicode(items[1].text).strip()                    
                        )
                #print "Icao:",ad['icao']
                assert re.match(r"[A-Z]{4}",ad['icao'])
                if not filtericao(ad): continue
                if len(items)>=3:
                    #print "Coord?:",items[2].text
                    m=re.match(r".*(\d{6}N)\s*(\d{7}E).*",items[2].text)
                    if m:
                        lat,lon=m.groups()
                        ad['pos']=parse_coords(lat,lon)           
                        #print "Items3:",items[3:]   
                        elev=re.findall(r"(\d{1,5})\s*ft"," ".join(t.text for t in items[3:]))
                        #print "Elev:",elev
                        assert len(elev)==1
                        ad['elev']=int(elev[0])                        
                                     
                ads.append(ad)

        
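    # Aerodromes whose directory entry had no coordinates (apparently the larger
    # ones with their own AD 2 chapter) are scraped in detail from their AD 2
    # PDFs below.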
    big_ad=set()        
    for ad in ads:
        if 'pos' not in ad:
            big_ad.add(ad['icao'])
            
    for ad in ads:        
        icao=ad['icao']
        if icao in big_ad:            
            if icao in ['ESIB','ESNY','ESCM','ESPE']:
                continue                    
            
            try:
                p=Parser("/AIP/AD/AD 2/%s/ES_AD_2_%s_6_1_en.pdf"%(icao,icao))
            except:
                p=Parser("/AIP/AD/AD 2/%s/ES_AD_2_%s_6-1_en.pdf"%(icao,icao))

            ad['aipvacurl']=p.get_url()
            for pagenr in xrange(p.get_num_pages()):
                page=p.parse_page_to_items(pagenr)
                
                """
                for altline in exitlines:
                    m=re.match(r"(\w+)\s+(\d+N)\s*(\d+E.*)",altline)
                    if not m: continue
                    name,lat,lon=m.groups()
                    try:
                        coord=parse_coords(lat,lon)
                    except Exception:
                        continue
                    points.append(dict(name=name,pos=coord))
                """
                
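                # Two passes over each VAC page: first look for "holding point"
                # headings, then for "entry/exit point" headings, and parse the
                # named coordinates listed underneath each heading.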
                for kind in xrange(2):
                    if kind==0:
                        hits=page.get_by_regex(r"H[Oo][Ll][Dd][Ii][Nn][Gg]")
                        kind="holding point"
                    if kind==1:
                        hits=page.get_by_regex(r"[Ee]ntry.*[Ee]xit.*point")                    
                        kind="entry/exit point"
                    if len(hits)==0: continue
                    for holdingheading in hits:

                        items=sorted(page.get_partially_in_rect(holdingheading.x1+2.0,holdingheading.y2+0.1,holdingheading.x1+0.5,100),
                            key=lambda x:x.y1)
                        items=[x for x in items if not x.text.startswith(" ")]
                        #print "Holding items:",items
                        for idx,item in enumerate(items):
                            print "Holding item",item
                            y1=item.y1
                            if idx==len(items)-1:
                                y2=100
                            else:
                                y2=items[idx+1].y1
                            items2=[x for x in page.get_partially_in_rect(item.x1+1,y1+0.3,item.x1+40,y2-0.1) if x.x1>=item.x1-0.25 and x.y1>=y1-0.05 and x.y1<y2-0.05]
                            s=(" ".join(page.get_lines(items2))).strip()
                            print "Holding lines:",repr(page.get_lines(items2))
                            #if s.startswith("ft Left/3"): #Special case for ESOK
                            #    s,=re.match("ft Left/3.*?([A-Z]{4,}.*)",s).groups()
                            #m=re.match("ft Left/\d+.*?([A-Z]{4,}.*)",s)
                            #if m:
                            #    s,=m.groups()
                                
                            if s.startswith("LjUNG"): #Really strange problem with ESCF
                                s=s[0]+"J"+s[2:]
                            if s.lower().startswith("holding"):
                                sl=s.split(" ",1)
                                if len(sl)>1:
                                    s=sl[1]
                            s=s.strip()
                            if kind=="entry/exit point" and s.startswith("HOLDING"):
                                continue #reached HOLDING-part of VAC
                                
                            #Check for other headings
                            #Fixup strange formatting of points in some holding items: (whitespace between coord and 'E')                            
                            s=re.sub(ur"(\d+)\s*(N)\s*(\d+)\s*(E)",lambda x:"".join(x.groups()),s)

                            m=re.match(r"([A-Z]{2,}).*?(\d+N)\s*(\d+E).*",s)
                            if not m:                                
                                m=re.match(r".*?(\d+N)\s*(\d+E).*",s) 
                                if not m:
                                    continue
                                assert m
                                lat,lon=m.groups()
                                #skavsta
                                if icao=="ESKN":
                                    if s.startswith(u"Hold north of T"):
                                        name="NORTH"
                                    elif s.startswith(u"Hold south of B"):
                                        name="SOUTH"                     
                                    else:
                                        assert 0
                                #add more specials here            
                                else:
                                    continue
                            else:
                                name,lat,lon=m.groups()
                            try:
                                coord=parse_coords(lat,lon)
                            except Exception:
                                print "Couldn't parse:",lat,lon
                                continue
                            #print name,lat,lon,mapper.format_lfv(*mapper.from_str(coord))
                            
                            if name.count("REMARK") or len(name)<=2:
                                print "Suspicious name: ",name
                                #sys.exit(1)
                                continue
                            points[icao+' '+name]=dict(name=icao+' '+name,icao=icao,pos=coord,kind=kind)


    #for point in points.items():
    #    print point


    #sys.exit(1)

    def fixhex11(s):
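        # The AD 2 text PDFs apparently contain stray control characters that
        # break text extraction; replace everything below 0x20 except
        # TAB/LF/CR with spaces.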
        out=[]
        for c in s:
            i=ord(c)
            if i>=0x20:
                out.append(c)
                continue
            if i in [0x9,0xa,0xd]:
                out.append(c)
                continue
            out.append(' ')
            
        return "".join(out)
        
    for ad in ads:
        icao=ad['icao']
        if icao in big_ad:
            #print "Parsing ",icao
            p=Parser("/AIP/AD/AD 2/%s/ES_AD_2_%s_en.pdf"%(icao,icao),loadhook=fixhex11)
            ad['aiptexturl']=p.get_url()
            firstpage=p.parse_page_to_items(0)
            te="\n".join(firstpage.get_all_lines())                        
            #print te
            coords=re.findall(r"ARP.*(\d{6}N)\s*(\d{7}E)",te)
            if len(coords)>1:
                raise Exception("First page of airport info (%s) does not contain exactly ONE set of coordinates"%(icao,))
            if len(coords)==0:
                print "Couldn't find coords for ",icao
            #print "Coords:",coords
            ad['pos']=parse_coords(*coords[0])

            elev=re.findall(r"Elevation.*?(\d{1,5})\s*ft",te,re.DOTALL)
            if len(elev)>1:
                raise Exception("First page of airport info (%s) does not contain exactly ONE elevation in ft"%(icao,))
            if len(elev)==0:
                print "Couldn't find elev for ",icao                
            ad['elev']=int(elev[0])
            freqs=[]
            found=False
            thrs=[]
            #uprint("-------------------------------------")
            for pagenr in xrange(p.get_num_pages()):
                page=p.parse_page_to_items(pagenr)
                #uprint("Looking on page %d"%(pagenr,))
                if 0: #opening hours are no longer stored in a separate document for any airports. No need to detect which any more (since none are).
                    for item in page.get_by_regex(".*OPERATIONAL HOURS.*"):
                        lines=page.get_lines(page.get_partially_in_rect(0,item.y2+0.1,100,100))
                        for line in lines:
                            things=["ATS","Fuelling","Operating"]
                            if not line.count("AIP SUP"): continue
                            for thing in things:
                                if line.count(thing):
                                    ad['aipsup']=True
                        
                    
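                # In the RUNWAY PHYSICAL CHARACTERISTICS table the threshold
                # latitude and longitude appear on consecutive text lines; the
                # runway designator is read from the same rows.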
                for item in page.get_by_regex(".*\s*RUNWAY\s*PHYSICAL\s*CHARACTERISTICS\s*.*"):
                    #uprint("Physical char on page")
                    lines=page.get_lines(page.get_partially_in_rect(0,item.y2+0.1,100,100))
                    seen_end_rwy_text=False
                    for line,nextline in izip(lines,lines[1:]+[None]):
                        #uprint("MAtching: <%s>"%(line,))
                        if re.match(ur"AD\s+2.13",line): break
                        if line.count("Slope of"): break
                        if line.lower().count("end rwy:"): seen_end_rwy_text=True
                        if line.lower().count("bgn rwy:"): seen_end_rwy_text=True
                        m=re.match(ur".*(\d{6}\.\d+)[\s\(\)\*]*(N).*",line)
                        if not m:continue
                        m2=re.match(ur".*(\d{6,7}\.\d+)\s*[\s\(\)\*]*(E).*",nextline)                            
                        if not m2:continue
                        latd,n=m.groups()
                        lond,e=m2.groups()
                        assert n=="N"
                        assert e=="E"
                        lat=latd+n
                        lon=lond+e
                        rwytxts=page.get_lines(page.get_partially_in_rect(0,line.y1+0.05,12,nextline.y2-0.05))
                        uprint("Rwytxts:",rwytxts)
                        rwy=None
                        for rwytxt in rwytxts:
                            #uprint("lat,lon:%s,%s"%(lat,lon))
                            #uprint("rwytext:",rwytxt)
                            m=re.match(ur"\s*(\d{2}[LRCM]?)\b.*",rwytxt)
                            if m:
                                assert rwy==None
                                rwy=m.groups()[0]
                        if rwy==None and seen_end_rwy_text:
                            continue
                        print "Cur airport:",icao
                        already=False
                        assert rwy!=None
                        seen_end_rwy_text=False
                        for thr in thrs:
                            if thr['thr']==rwy:
                                raise Exception("Same runway twice on airfield:"+icao)
                        thrs.append(dict(pos=mapper.parse_coords(lat,lon),thr=rwy))
            assert len(thrs)>=2
            for pagenr in xrange(0,p.get_num_pages()):
                page=p.parse_page_to_items(pagenr)                                              
                
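                # Collect radio frequencies from the ATS COMMUNICATION
                # FACILITIES table; the emergency frequency 121.5 MHz is
                # deliberately skipped.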
                matches=page.get_by_regex(r".*ATS\s+COMMUNICATION\s+FACILITIES.*")
                #print "Matches of ATS COMMUNICATION FACILITIES on page %d: %s"%(pagenr,matches)
                if len(matches)>0:
                    commitem=matches[0]
                    curname=None
                    
                    callsign=page.get_by_regex_in_rect(ur"Call\s*sign",0,commitem.y1,100,commitem.y2+8)[0]
                    
                    
                    for idx,item in enumerate(page.get_lines(page.get_partially_in_rect(callsign.x1-0.5,commitem.y1,100,100),fudge=0.3,order_fudge=15)):
                        if item.strip()=="":
                            curname=None
                        if re.match(".*RADIO\s+NAVIGATION\s+AND\s+LANDING\s+AIDS.*",item):
                            break
                        #print "Matching:",item
                        m=re.match(r"(.*?)\s*(\d{3}\.\d{1,3})\s*MHz.*",item)
                        #print "MHZ-match:",m
                        if not m: continue
                        #print "MHZ-match:",m.groups()
                        who,sfreq=m.groups()
                        freq=float(sfreq)
                        if abs(freq-121.5)<1e-4:
                            if who.strip():
                                curname=who
                            continue #Ignore emergency frequency, it is understood
                        if not who.strip():
                            if curname==None: continue
                        else:
                            curname=who
                        freqs.append((curname.strip().rstrip("/"),freq))


            for pagenr in xrange(0,p.get_num_pages()):
                page=p.parse_page_to_items(pagenr)                                              
                                
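                # Parse the ATS AIRSPACE section: gather the lateral-limit
                # coordinate strings per subspace name, then resolve each
                # subspace's ceiling/floor from the vertical limits column.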
                matches=page.get_by_regex(r".*ATS\s*AIRSPACE.*")
                #print "Matches of ATS_AIRSPACE on page %d: %s"%(pagenr,matches)
                if len(matches)>0:
                    heading=matches[0]
                    desigitem,=page.get_by_regex("Designation and lateral limits")
                    vertitem,=page.get_by_regex("Vertical limits")
                    airspaceclass,=page.get_by_regex("Airspace classification")
                    
                    lastname=None
                    subspacelines=dict()
                    subspacealts=dict()
                    for idx,item in enumerate(page.get_lines(page.get_partially_in_rect(desigitem.x2+1,desigitem.y1,100,vertitem.y1-1))):
                        
                        if item.count("ATS airspace not established"):
                            assert idx==0
                            break
                            
                        if item.strip()=="": continue
                        m=re.match(r"(.*?)(\d{6}N\s+.*)",item)
                        if m:
                            name,coords=m.groups()                            
                            name=name.strip()
                        else:
                            name=item.strip()
                            coords=None
                        if name:
                            lastname=name
                        if coords:
                            subspacelines.setdefault(lastname,[]).append(coords)
                        assert lastname
                    lastname=None

                    #print "Spaces:",subspacelines
                    #print "ICAO",ad['icao']
                    #altlines=page.get_lines(page.get_partially_in_rect(vertitem.x2+1,vertitem.y1,100,airspaceclass.y1-0.2))
                    
                    #print "Altlines:",altlines
                    subspacealts=dict()
                    subspacekeys=subspacelines.keys()
                        
                    allaltlines=" ".join(page.get_lines(page.get_partially_in_rect(vertitem.x1+0.5,vertitem.y1+0.5,100,airspaceclass.y1-0.2)))
                    single_vertlim=False
                    totalts=list(mapper.parse_all_alts(allaltlines))
                    #print "totalts:",totalts 
                    if len(totalts)==2:
                        single_vertlim=True
                    
                    for subspacename in subspacekeys:
                        ceil=None
                        floor=None
                        subnames=[subspacename]
                        if subspacename.split(" ")[-1].strip() in ["TIA","TIZ","CTR","CTR/TIZ"]:
                            subnames.append(subspacename.split(" ")[-1].strip())
                        #print "Parsing alts for ",subspacename,subnames
                        try:                        
                            for nametry in subnames:
                                if single_vertlim: #there's only one subspace, parse all of vertical limits field for this single one.
                                    items=[vertitem]
                                else:
                                    items=page.get_by_regex_in_rect(nametry,vertitem.x2+1,vertitem.y1,100,airspaceclass.y1-0.2)
                                for item in items: 
                                    alts=[]
                                    for line in page.get_lines(page.get_partially_in_rect(item.x1+0.5,item.y1+0.5,100,airspaceclass.y1-0.2)):
                                        #print "Parsing:",line
                                        line=line.replace(nametry,"").lower().strip()
                                        parsed=list(mapper.parse_all_alts(line))
                                        if len(parsed):
                                            alts.append(mapper.altformat(*parsed[0]))
                                        if len(alts)==2: break
                                    if alts:
                                        #print "alts:",alts
                                        ceil,floor=alts
                                        raise StopIteration
                        except StopIteration:
                            pass
                        assert ceil and floor
                        subspacealts[subspacename]=dict(ceil=ceil,floor=floor)             
                        
                    spaces=[]                                        
                    for spacename in subspacelines.keys():
                        altspacename=spacename
                        #print "Altspacename: %s, subspacesalts: %s"%(altspacename,subspacealts)
                        space=dict(
                            name=spacename,
                            ceil=subspacealts[altspacename]['ceil'],
                            floor=subspacealts[altspacename]['floor'],
                            points=parse_coord_str(" ".join(subspacelines[spacename])),
                            freqs=list(set(freqs))
                            )
                        
                        if True:
                            vs=[]
                            for p in space['points']:
                                x,y=mapper.latlon2merc(mapper.from_str(p),13)
                                vs.append(Vertex(int(x),int(y)))                    
                            p=Polygon(vvector(vs))
                            if p.calc_area()<=30*30:
                                pass#print space
                                pass#print "Area:",p.calc_area()
                            assert p.calc_area()>30*30
                            #print "Area: %f"%(p.calc_area(),)
                        
                        spaces.append(space)
                        #print space
                    ad['spaces']=spaces
                    found=True
                if found:
                    break
            assert found                            
            ad['runways']=rwy_constructor.get_rwys(thrs)
                            
                            
            #Now find any ATS-airspace
    chartblobnames=[]
    for ad in ads:        
        icao=ad['icao']
        if icao in big_ad:          
            parse_landing_chart.help_plc(ad,"/AIP/AD/AD 2/%s/ES_AD_2_%s_2-1_en.pdf"%(icao,icao),
                            icao,ad['pos'],"se",variant="")
            parse_landing_chart.help_plc(ad,"/AIP/AD/AD 2/%s/ES_AD_2_%s_6-1_en.pdf"%(icao,icao),
                            icao,ad['pos'],"se",variant="vac")

            parse_landing_chart.help_plc(ad,"/AIP/AD/AD 2/%s/ES_AD_2_%s_2-3_en.pdf"%(icao,icao),
                            icao,ad['pos'],"se",variant="parking")
            
            
            #aip_text_documents.help_parse_doc(ad,"/AIP/AD/AD 2/%s/ES_AD_2_%s_6_1_en.pdf"%(icao,icao),
            #            icao,"se",title="General Information",category="general")
                                    
            
            aip_text_documents.help_parse_doc(ad,"/AIP/AD/AD 2/%s/ES_AD_2_%s_en.pdf"%(icao,icao),
                        icao,"se",title="General Information",category="general")
            
                  

    
    #if purge:
    #    parse_landing_chart.purge_old(chartblobnames,country="se")        
    
    #sys.exit(1)

    for extra in extra_airfields.extra_airfields:
        if filtericao(extra):
            ads.append(extra)
    print
    print
    for k,v in sorted(points.items()):
        print k,v,mapper.format_lfv(*mapper.from_str(v['pos']))
        
    #print "Num points:",len(points)
    
    origads=list(ads)    
    for flygkartan_id,name,lat,lon,dummy in csv.reader(open("fplan/extract/flygkartan.csv"),delimiter=";"):
        found=None
        lat=float(lat)
        lon=float(lon)
        if type(name)==str:
            name=unicode(name,'utf8')
        mercf=mapper.latlon2merc((lat,lon),13)
        for a in origads:
            merca=mapper.latlon2merc(mapper.from_str(a['pos']),13)
            dist=math.sqrt((merca[0]-mercf[0])**2+(merca[1]-mercf[1])**2)
            if dist<120:
                found=a
                break
        if found:
            found['flygkartan_id']=flygkartan_id
        else:
            d=dict(
                    icao='ZZZZ',
                    name=name,
                    pos=mapper.to_str((lat,lon)),
                    elev=int(get_terrain_elev((lat,lon))),
                    flygkartan_id=flygkartan_id)
            if filtericao(d):
                ads.append(d)
                    
    minor_ad_charts=extra_airfields.minor_ad_charts
        
                    
    for ad in ads:     
        if ad['name'].count(u"Långtora"):            
            ad['pos']=mapper.to_str(mapper.from_aviation_format("5944.83N01708.20E"))
            
        if ad['name'] in minor_ad_charts:
            charturl=minor_ad_charts[ad['name']]
            arp=ad['pos']
            if 'icao' in ad and ad['icao'].upper()!='ZZZZ':
                icao=ad['icao'].upper()
            else:
                icao=ad['fake_icao']
                
            parse_landing_chart.help_plc(ad,charturl,icao,arp,country='raw',variant="landing")
            """
            assert icao!=None
            lc=parse_landing_chart.parse_landing_chart(
                    charturl,
                    icao=icao,
                    arppos=arp,country="raw")
            assert lc
            if lc:
                ad['adcharturl']=lc['url']
                ad['adchart']=lc
            """
            
    #print ads
    for ad in ads:
        print "%s: %s - %s (%s ft) (%s)"%(ad['icao'],ad['name'],ad['pos'],ad['elev'],ad.get('flygkartan_id','inte i flygkartan'))
        for space in ad.get('spaces',[]):
            for freq in space.get('freqs',[]):
                print "   ",freq
        #if 'spaces' in ad:
        #    print "   spaces: %s"%(ad['spaces'],)
        #if 'aiptext' in ad:
        #    print "Aip texts:",ad['aiptext']
        #else:
        #    print "No aiptext"
    print "Points:"
    for point in sorted(points.values(),key=lambda x:x['name']):
        print point
        
    f=codecs.open("extract_airfields.regress.txt","w",'utf8')    
    for ad in ads:
        r=repr(ad)
        d=md5.md5(r).hexdigest()
        f.write("%s - %s - %s\n"%(ad['icao'],ad['name'],d))
    f.close()
    f=codecs.open("extract_airfields.regress-details.txt","w",'utf8')    
    for ad in ads:
        r=repr(ad)
        f.write(u"%s - %s - %s\n"%(ad['icao'],ad['name'],r))
    f.close()
    
    return ads,points.values()
Example #6
    def get_airspace(self):
        utcnow = datetime.utcnow()
        try:
            zoomlevel = int(request.params['zoom'])
        except Exception:
            zoomlevel = session.get('zoom', 5)
        lat = float(request.params.get('lat'))
        lon = float(request.params.get('lon'))
        clickmerc = mapper.latlon2merc((lat, lon), zoomlevel)
        user = session.get('user', None)
        out = []
        spaces = chain(get_airspaces(lat, lon),
                       userdata.get_airspaces(lat, lon, user))
        print "Spaces:", spaces

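        # Show a small date stamp for airspace data older than two days, and
        # just the year once it is older than about a year.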
        def anydate(s):
            if not 'date' in s: return ""
            d = s['date']
            age = utcnow - d
            if age > timedelta(367):
                return "<span style=\"font-size:10px\">[%d]</span>" % (
                    d.year, )
            if age > timedelta(2):
                return "<span style=\"font-size:10px\">[%d%02d%02d]</span>" % (
                    d.year, d.month, d.day)
            return ""

        spacelist = list(spaces)  # materialize: the chain() generator is iterated twice below (airspaces and sectors)
        spaces = u"".join(u"<li><b>%s</b>%s: %s - %s%s</li>" %
                          (space['name'], anydate(space), space['floor'],
                           space['ceiling'], format_freqs(space['freqs']))
                          for space in sorted(spacelist, key=sort_airspace_key)
                          if space['type'] != 'sector')

        #sectors=u"".join(u"<li><b>%s</b>%s: %s - %s%s</li>"%(
        #        space['name'],anydate(space),space['floor'],space['ceiling'],format_freqs(space['freqs'])) for space in sorted(
        #            spacelist,key=sort_airspace_key) if space['type']=='sectoasdfr')

        try:
            sectors = u"".join(
                u"<li><b>%s</b>%s: %s - %s%s</li>" %
                (space['name'], anydate(space), space['floor'],
                 space['ceiling'], format_freqs(space['freqs']))
                for space in sorted(spacelist, key=sort_airspace_key)
                if space['type'] == 'sector')
            if sectors != "":
                sectors = "<b>Sectors:</b><ul>" + sectors + "</ul>"
        except:
            print traceback.format_exc()
            sectors = ""

        if spaces == "":
            spaces = "No airspace found"

        mapviewurl = h.url_for(controller="mapview", action="index")

        notamlist = chain(get_notam_areas(lat, lon),
                          get_notampoints(lat, lon, zoomlevel))
        notams = dict([(n['notam'].strip(), (n['notam_ordinal'],
                                             n['notam_line']))
                       for n in notamlist])

        notamareas = "".join(
            "<li>%s <b><u><a href=\"javascript:navigate_to('%s#notam')\">Link</a></u></b></li>"
            % (text,
               h.url_for(controller="notam",
                         action="show_ctx",
                         backlink=mapviewurl,
                         notam=notam,
                         line=line)) for text, (notam, line) in notams.items())
        if notamareas != "":
            notamareas = "<b>Area Notams:</b><ul>" + notamareas + "</ul>"

        aip_sup_strs = "".join([
            "<li>%s <a href=\"%s\">link</a></li>" %
            (x['name'], x['url'].replace(" ", "%20"))
            for x in get_aip_sup_areas(lat, lon)
        ])
        if aip_sup_strs:
            aip_sup_strs = "<b>AIP SUP:</b><ul>" + aip_sup_strs + "</ul>"

        obstbytype = dict()
        for obst in chain(get_obstacles(lat, lon, zoomlevel),
                          userdata.get_obstacles(lat, lon, zoomlevel, user)):
            obstbytype.setdefault(obst['kind'], []).append(obst)
            print "processing", obst
        obstacles = []
        if len(obstbytype):
            for kind, obsts in sorted(obstbytype.items()):
                obstacles.append("<b>" + kind + ":</b>")
                obstacles.append(u"<ul>")
                for obst in obsts:
                    obstacles.append(u"<li><b>%s</b>: %s ft</li>" %
                                     (obst['name'], obst['elev']))
                obstacles.append(u"</ul>")

        tracks = []
        if session.get('showtrack', None) != None:
            track = session.get('showtrack')
            #print "%d points"%(len(track.points))
            mindiff = 1e30
            found = dict()
            hdg = 0
            speed = 0
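            # Find the GPS-track segment closest to the clicked point (in
            # mercator pixels) and show its stats if it is within 10 pixels.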
            clickvec = Vertex(int(clickmerc[0]), int(clickmerc[1]))
            if len(track.points) > 0 and len(track.points[0]) == 2:
                pass  #Old style track, not supported anymore
            else:
                for a, b in izip(track.points, track.points[1:]):
                    merc = mapper.latlon2merc(a[0], zoomlevel)
                    nextmerc = mapper.latlon2merc(b[0], zoomlevel)
                    l = Line(Vertex(int(merc[0]), int(merc[1])),
                             Vertex(int(nextmerc[0]), int(nextmerc[1])))
                    diff = l.approx_dist(clickvec)

                    if diff < mindiff:
                        mindiff = diff
                        found = (a, b)

                if mindiff < 10:
                    tracks.append(
                        u"<b>GPS track:</b><ul><li>%(when)s - %(altitude)d ft hdg:%(heading)03d spd: %(speed)d kt</li></ul>"
                        % (get_stats(*found)))

        airports = []
        fields = list(
            chain(get_airfields(lat, lon, zoomlevel),
                  userdata.get_airfields(lat, lon, zoomlevel, user)))
        if len(fields):
            airports.append("<b>Airfield:</b><ul>")
            for airp in fields:
                #print "clicked on ",airp
                linksstr = ""
                links = []
                if 'flygkartan_id' in airp:
                    links.append(('http://www.flygkartan.se/0%s' %
                                  (airp['flygkartan_id'].strip(), ),
                                  'www.flygkartan.se'))
                if 'aiptexturl' in airp:
                    links.append((airp['aiptexturl'], 'AIP Text'))
                if 'aipvacurl' in airp:
                    links.append(
                        (airp['aipvacurl'], 'AIP Visual Approach Chart'))

                if False and 'aipsup' in airp:
                    #Using AIP SUP for opening hours has stopped.
                    links.append((extracted_cache.get_se_aip_sup_hours_url(),
                                  "AIP SUP Opening Hours"))
                weather = ""
                if airp.get('icao', 'ZZZZ').upper() != 'ZZZZ':
                    icao = airp['icao'].upper()
                    metar = metartaf.get_metar(icao)
                    taf = metartaf.get_taf(icao)
                    weather = "<table>"

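                    # Colour-code METAR/TAF cells by data age: grey when fresh,
                    # yellow when getting stale, red when old; colfac scales the
                    # thresholds (TAFs are allowed to be older than METARs).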
                    def colorize(item, colfac=1):
                        if item == None:
                            col = "ffffff"
                            agestr = ""
                        else:
                            age = metartaf.get_data_age(item)
                            if age == None:
                                col = "ffffff"
                                agestr = ""
                            elif age < timedelta(0, 60 * 35 * colfac):
                                col = "c5c5c5"
                                agestr = "%d minutes" % (int(age.seconds / 60))
                            elif age < timedelta(0, 60 * 60 * colfac):
                                col = "ffff30"
                                agestr = "%d minutes" % (int(age.seconds / 60))
                            else:
                                col = "ff3030"
                                if age < timedelta(2):
                                    if age < timedelta(0, 3600 * 1.5):
                                        agestr = "%d minutes" % int(
                                            age.seconds / 60)
                                    else:
                                        agestr = "%d hours" % int(
                                            0.5 + (age.seconds) / 3600.0)
                                else:
                                    agestr = "%d days" % (int(age.days))

                        return "style=\"background:#" + col + "\" title=\"" + agestr + " old.\""

                    if taf and taf.text:
                        weather += "<tr valign=\"top\"><td>TAF:</td><td " + colorize(
                            taf, 5) + ">" + taf.text + "</td></tr>"
                    if metar and metar.text:
                        weather += "<tr valign=\"top\"><td>METAR:</td><td " + colorize(
                            metar) + ">" + metar.text + "</td></tr>"

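                    # Look up still-active NOTAMs whose category matches this
                    # ICAO code, newest first, together with how many times the
                    # current user has acknowledged each NOTAM line.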
                    ack_cnt = meta.Session.query(
                        NotamAck.appearnotam, NotamAck.appearline,
                        sa.func.count('*').label('acks')).filter(
                            NotamAck.user == session.get(
                                'user', None)).group_by(
                                    NotamAck.appearnotam,
                                    NotamAck.appearline).subquery()
                    notams = meta.Session.query(
                        NotamUpdate, ack_cnt.c.acks, Notam.downloaded
                    ).outerjoin(
                        (ack_cnt,
                         sa.and_(
                             NotamUpdate.appearnotam == ack_cnt.c.appearnotam,
                             NotamUpdate.appearline == ack_cnt.c.appearline))
                    ).outerjoin(
                        (Notam,
                         Notam.ordinal == NotamUpdate.appearnotam)).order_by(
                             sa.desc(Notam.downloaded)).filter(
                                 sa.and_(
                                     NotamUpdate.disappearnotam == sa.null(),
                                     NotamUpdate.category.like(icao.upper() +
                                                               "/%"))).all()
                    print "notams:", repr(notams)
                    if notams:
                        nots = []
                        for notam, ack, downloaded in notams:
                            nots.append(
                                "<div style=\"border:1px solid;margin:3px;border-color:#B8B8B8;padding:3px;\">%s</div>"
                                % (cgi.escape(notam.text)))

                        weather += "<tr valign=\"top\"><td colspan=\"2\">NOTAM:</td></tr><tr><td colspan=\"2\">%s</td></tr>" % (
                            "".join(nots))

                    weather += "</table>"
                    meta.Session.flush()
                    meta.Session.commit()

                if len(links) > 0 and 'icao' in airp:
                    linksstr = helpers.foldable_links(airp['icao'] + "links",
                                                      links)

                rwys = []
                if 'runways' in airp:
                    for rwy in airp['runways']:
                        ends = []
                        for end in rwy['ends']:
                            ends.append(end['thr'])
                        surf = ""
                        if 'surface' in rwy:
                            surf = "(" + rwy['surface'] + ")"
                        rwys.append("/".join(ends) + surf)

                if len(rwys):
                    rwys = ["<b> Runways</b>: "] + [", ".join(rwys)]
                remark = ""
                if airp.get('remark'):
                    remark = "<div style=\"border:1px solid;margin:3px;border-color:#B8B8B8;padding:3px;background-color:#ffffe0\"><b>Remark:</b> " + cgi.escape(
                        airp['remark']) + "</div>"

                airports.append(u"<li><b>%s</b> - %s%s%s%s%s</li>" %
                                (airp.get('icao', 'ZZZZ'), airp['name'],
                                 linksstr, remark, " ".join(rwys), weather))
            airports.append("</ul>")

        sigpoints = []
        sigps = list(
            chain(get_sigpoints(lat, lon, zoomlevel),
                  userdata.get_sigpoints(lat, lon, zoomlevel, user)))
        if len(sigps):
            sigpoints.append("<b>Sig. points</b><ul>")
            for sigp in sigps:
                sigpoints.append(
                    u"<li><b>%s</b>(%s)</li>" %
                    (sigp['name'], sigp.get('kind', 'unknown point')))
            sigpoints.append("</ul>")

        firs = []
        for fir in list(get_firs(
            (lat, lon))) + list(userdata.get_firs(lat, lon, user)):
            if 'icao' in fir:
                firs.append("%s (%s)" % (fir['name'], fir['icao']))
        if not firs:
            firs.append("Unknown")

        variation = '?'
        terrelev = get_terrain_elev((lat, lon), zoomlevel)
        try:
            varf = geomag.calc_declination((lat, lon), utcnow,
                                           (terrelev + 1000))
            variation = u"%+.1f°" % (varf, )
        except Exception:
            pass

        weather = ""
        try:
            when, valid, fct = gfs_weather.get_prognosis(datetime.utcnow())
            qnh = fct.get_qnh(lat, lon)
            out = [
                "<b>Weather</b><br/>Forecast: %sZ, valid: %sZ<br />" %
                (when.strftime("%Y-%m-%d %H:%M"), valid.strftime("%H:%M"))
            ]
            try:
                out.append("Surface wind: %.0f deg, %.1f knots<br />" %
                           fct.get_surfacewind(lat, lon))
                out.append("Surface RH: %.0f%%<br />" %
                           (fct.get_surfacerh(lat, lon), ))
            except:
                print traceback.format_exc()
            out.append("<ul>")
            for fl, dir, st, temp in fct.get_winds(lat, lon):
                out.append("<li>FL%02d: %03d deg, %.1fkt, %.1f &#176;C" %
                           (int(fl), int(dir), float(st), temp))
            out.append("</ul>QNH: %d<br/><br/>" % (qnh, ))
            weather = "".join(out)
        except Exception:
            print traceback.format_exc()

        return "<b>Airspace:</b><ul><li><b>FIR:</b> %s</li>%s</ul>%s%s%s%s%s%s%s<br/>%s<b>Terrain: %s ft, Var: %s</b>" % (
            ", ".join(firs), spaces, sectors, aip_sup_strs, "".join(obstacles),
            "".join(airports), "".join(tracks), "".join(sigpoints), notamareas,
            weather, terrelev, variation)
Пример #11
0
    def get_airspace(self):
        utcnow=datetime.utcnow()
        try:
            zoomlevel=int(request.params['zoom'])
        except Exception:
            zoomlevel=session.get('zoom',5)
        lat=float(request.params.get('lat'))
        lon=float(request.params.get('lon'))
        clickmerc=mapper.latlon2merc((lat,lon),zoomlevel)
        user=session.get('user',None)
        out=[]
        spaces=chain(get_airspaces(lat,lon),userdata.get_airspaces(lat,lon,user))
        print "Spaces:",spaces
        def anydate(s):
            if not 'date' in s: return ""
            d=s['date']
            age=utcnow-d            
            if age>timedelta(367):
                return "<span style=\"font-size:10px\">[%d]</span>"%(d.year,)
            if age>timedelta(2):                
                return "<span style=\"font-size:10px\">[%d%02d%02d]</span>"%(
                        d.year,d.month,d.day)
            return ""
        spacelist=spaces
        spaces=u"".join(u"<li><b>%s</b>%s: %s - %s%s</li>"%(
                space['name'],anydate(space),space['floor'],space['ceiling'],format_freqs(space['freqs'])) for space in sorted(
                    spacelist,key=sort_airspace_key) if space['type']!='sector')


        #sectors=u"".join(u"<li><b>%s</b>%s: %s - %s%s</li>"%(
        #        space['name'],anydate(space),space['floor'],space['ceiling'],format_freqs(space['freqs'])) for space in sorted(
        #            spacelist,key=sort_airspace_key) if space['type']=='sectoasdfr')

        try:
            sectors=u"".join(u"<li><b>%s</b>%s: %s - %s%s</li>"%(
                   space['name'],anydate(space),space['floor'],space['ceiling'],format_freqs(space['freqs'])) for space in sorted(
                  spacelist,key=sort_airspace_key) if space['type']=='sector')
            if sectors!="":
                sectors="<b>Sectors:</b><ul>"+sectors+"</ul>"
        except:
            print traceback.format_exc()
            sectors=""

        if spaces=="":
            spaces="No airspace found"

            
        mapviewurl=h.url_for(controller="mapview",action="index")

        
        notamlist=chain(get_notam_areas(lat,lon),get_notampoints(lat,lon,zoomlevel))
        notams=dict([(n['notam'].strip(),(n['notam_ordinal'],n['notam_line']) ) for n in notamlist])

        
        notamareas="".join("<li>%s <b><u><a href=\"javascript:navigate_to('%s#notam')\">Link</a></u></b></li>"%(
            text,h.url_for(controller="notam",action="show_ctx",backlink=mapviewurl,notam=notam,line=line)) for text,(notam,line) in notams.items())
        if notamareas!="":
            notamareas="<b>Area Notams:</b><ul>"+notamareas+"</ul>"

        aip_sup_strs="".join(["<li>%s <a href=\"%s\">link</a></li>"%(x['name'],x['url'].replace(" ","%20")) for x in get_aip_sup_areas(lat,lon)])
        if aip_sup_strs:
            aip_sup_strs="<b>AIP SUP:</b><ul>"+aip_sup_strs+"</ul>"
         
        obstbytype=dict()
        for obst in chain(get_obstacles(lat,lon,zoomlevel),userdata.get_obstacles(lat,lon,zoomlevel,user)):
            obstbytype.setdefault(obst['kind'],[]).append(obst)
            print "processing",obst
        obstacles=[]
        if len(obstbytype):
            for kind,obsts in sorted(obstbytype.items()):
                obstacles.append("<b>"+kind+":</b>")
                obstacles.append(u"<ul>")
                for obst in obsts:
                    obstacles.append(u"<li><b>%s</b>: %s ft</li>"%(obst['name'],obst['elev'])) 
                obstacles.append(u"</ul>")

        tracks=[]
        if session.get('showtrack',None)!=None:                
            track=session.get('showtrack')
            #print "%d points"%(len(track.points))
            mindiff=1e30
            found=dict()
            hdg=0
            speed=0
            clickvec=Vertex(int(clickmerc[0]),int(clickmerc[1]))
            if len(track.points)>0 and len(track.points[0])==2:                
                pass #Old style track, not supported anymore
            else:
                for a,b in izip(track.points,track.points[1:]): 
                    merc=mapper.latlon2merc(a[0],zoomlevel)
                    nextmerc=mapper.latlon2merc(b[0],zoomlevel)
                    l=Line(Vertex(int(merc[0]),int(merc[1])),Vertex(int(nextmerc[0]),int(nextmerc[1])))
                    diff=l.approx_dist(clickvec)                

                    if diff<mindiff:
                        mindiff=diff
                        found=(a,b)

                if mindiff<10:
                    tracks.append(u"<b>GPS track:</b><ul><li>%(when)s - %(altitude)d ft hdg:%(heading)03d spd: %(speed)d kt</li></ul>"%(get_stats(*found)))
                                                  

        airports=[]
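        #List nearby airfields (including user-defined ones) with external links,
        #runways, remarks, METAR/TAF and any NOTAMs filed under the field's ICAO category.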
        fields=list(chain(get_airfields(lat,lon,zoomlevel),userdata.get_airfields(lat,lon,zoomlevel,user)))
        if len(fields):
            airports.append("<b>Airfield:</b><ul>")
            for airp in fields:
                #print "clicked on ",airp
                linksstr=""
                links=[]
                if 'flygkartan_id' in airp:
                    links.append(('http://www.flygkartan.se/0%s'%(airp['flygkartan_id'].strip(),),
                      'www.flygkartan.se'))
                if 'aiptexturl' in airp:
                    links.append((airp['aiptexturl'],
                      'AIP Text'))
                if 'aipvacurl' in airp:
                    links.append((airp['aipvacurl'],
                      'AIP Visual Approach Chart'))
                    
                if False and 'aipsup' in airp:
                    #Using AIP SUP for opening hours has stopped.
                    links.append((
                        extracted_cache.get_se_aip_sup_hours_url(),
                        "AIP SUP Opening Hours"))
                weather=""
                if airp.get('icao','ZZZZ').upper()!='ZZZZ':
                    icao=airp['icao'].upper()
                    metar=metartaf.get_metar(icao)
                    taf=metartaf.get_taf(icao)
                    weather="<table>"
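                    #colorize(): return a style/tooltip attribute that colour-codes a
                    #METAR/TAF cell by data age (grey=fresh, yellow=ageing, red=stale);
                    #colfac widens the thresholds for TAFs, which stay valid longer.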
                    
                    def colorize(item,colfac=1):
                        if item is None:
                            col="ffffff"
                            agestr=""
                        else:
                            age=metartaf.get_data_age(item)
                            if age is None:
                                col="ffffff"
                                agestr=""                                
                            elif age<timedelta(0,60*35*colfac):
                                col="c5c5c5"
                                agestr="%d minutes"%(int(age.seconds/60))
                            elif age<timedelta(0,60*60*colfac):
                                col="ffff30"
                                agestr="%d minutes"%(int(age.seconds/60))
                            else:
                                col="ff3030"
                                if age<timedelta(2):
                                    if age<timedelta(0,3600*1.5):
                                        agestr="%d minutes"%int(age.seconds/60)
                                    else:
                                        agestr="%d hours"%int(0.5+(age.seconds)/3600.0)
                                else:
                                    agestr="%d days"%(int(age.days))
                            
                        return "style=\"background:#"+col+"\" title=\""+agestr+" old.\""
                
                    if taf and taf.text:
                        weather+="<tr valign=\"top\"><td>TAF:</td><td "+colorize(taf,5)+">"+taf.text+"</td></tr>"
                    if metar and metar.text:
                        weather+="<tr valign=\"top\"><td>METAR:</td><td "+colorize(metar)+">"+metar.text+"</td></tr>"
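                    #Fetch non-disappeared NOTAM updates in this field's ICAO category,
                    #newest first, together with a per-user count of acknowledgements.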
                        
                        
                    ack_cnt = meta.Session.query(
                        NotamAck.appearnotam,NotamAck.appearline,
                        sa.func.count('*').label('acks')).filter(
                            NotamAck.user==session.get('user',None)).group_by(
                                NotamAck.appearnotam,NotamAck.appearline).subquery()
                    notams=meta.Session.query(NotamUpdate,ack_cnt.c.acks,Notam.downloaded).outerjoin(
                        (ack_cnt,sa.and_(
                            NotamUpdate.appearnotam==ack_cnt.c.appearnotam,
                            NotamUpdate.appearline==ack_cnt.c.appearline))).outerjoin(
                        (Notam,Notam.ordinal==NotamUpdate.appearnotam)
                         ).order_by(sa.desc(Notam.downloaded)).filter(
                                sa.and_(NotamUpdate.disappearnotam==sa.null(),
                                        NotamUpdate.category.like(icao.upper()+"/%")
                                        )).all()
                    print "notams:",repr(notams)
                    if notams:
                        nots=[]
                        for notam,ack,downloaded in notams:
                            nots.append("<div style=\"border:1px solid;margin:3px;border-color:#B8B8B8;padding:3px;\">%s</div>"%(cgi.escape(notam.text)))
                        
                        weather+="<tr valign=\"top\"><td colspan=\"2\">NOTAM:</td></tr><tr><td colspan=\"2\">%s</td></tr>"%("".join(nots))
                    
                    weather+="</table>"
                    meta.Session.flush()
                    meta.Session.commit()
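                #Assemble the per-airfield entry: foldable link list, runway
                #designators with surface, remark box and the weather table.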
                    
                if len(links)>0 and 'icao' in airp:                    
                    linksstr=helpers.foldable_links(airp['icao']+"links",links)

                rwys=[]
                if 'runways' in airp:
                    for rwy in airp['runways']:
                        ends=[]                        
                        for end in rwy['ends']:
                            ends.append(end['thr'])
                        surf=""
                        if 'surface' in rwy:
                            surf="("+rwy['surface']+")"
                        rwys.append("/".join(ends)+surf)
                            
                if len(rwys):                    
                    rwys=["<b> Runways</b>: "]+[", ".join(rwys)]
                remark=""
                if airp.get('remark'):
                    remark="<div style=\"border:1px solid;margin:3px;border-color:#B8B8B8;padding:3px;background-color:#ffffe0\"><b>Remark:</b> "+cgi.escape(airp['remark'])+"</div>"
                
                
                
                airports.append(u"<li><b>%s</b> - %s%s%s%s%s</li>"%(airp.get('icao','ZZZZ'),airp['name'],linksstr,remark," ".join(rwys),weather))
            airports.append("</ul>")
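        #Significant points near the click, each shown with its kind.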
        
        sigpoints=[]
        sigps=list(chain(get_sigpoints(lat,lon,zoomlevel),userdata.get_sigpoints(lat,lon,zoomlevel,user)))
        if len(sigps):
            sigpoints.append("<b>Sig. points</b><ul>")
            for sigp in sigps:
                sigpoints.append(u"<li><b>%s</b>(%s)</li>"%(sigp['name'],sigp.get('kind','unknown point')))
            sigpoints.append("</ul>")
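        #FIRs covering the position; shown as "Unknown" if none are found.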
       

        firs=[]        
        for fir in list(get_firs((lat,lon)))+list(userdata.get_firs(lat,lon,user)):
            if 'icao' in fir:
                firs.append("%s (%s)"%(fir['name'],fir['icao']))        
        if not firs:
            firs.append("Unknown")
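        #Magnetic variation at roughly 1000 ft above terrain elevation;
        #left as '?' if the geomag calculation fails.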
            
        variation='?'
        terrelev=get_terrain_elev((lat,lon),zoomlevel)
        try:
            varf=geomag.calc_declination((lat,lon),utcnow,(terrelev+1000))
            variation=u"%+.1f°"%(varf,)
        except Exception:
            pass
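        #GFS forecast for the current time: surface wind and relative humidity,
        #winds/temperature aloft per flight level, and QNH.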
        
        weather=""
        try:
            when,valid,fct=gfs_weather.get_prognosis(datetime.utcnow())
            qnh=fct.get_qnh(lat,lon)
            out=["<b>Weather</b><br/>Forecast: %sZ, valid: %sZ<br />"%(when.strftime("%Y-%m-%d %H:%M"),valid.strftime("%H:%M"))]
            try:                
                out.append("Surface wind: %.0f deg, %.1f knots<br />"%fct.get_surfacewind(lat,lon))
                out.append("Surface RH: %.0f%%<br />"%(fct.get_surfacerh(lat,lon),))
            except Exception:
                print traceback.format_exc()
            out.append("<ul>")
            for fl,dir,st,temp in fct.get_winds(lat,lon):
                out.append("<li>FL%02d: %03d deg, %.1fkt, %.1f &#176;C</li>"%(int(fl),int(dir),float(st),temp))
            out.append("</ul>QNH: %d<br/><br/>"%(qnh,))
            weather="".join(out)
        except Exception:
            print traceback.format_exc()
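        #Compose the final HTML snippet: FIRs, airspaces, sectors, AIP SUPs, obstacles,
        #airfields, GPS track, significant points, area NOTAMs, weather and terrain/variation.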
        
        return ("<b>Airspace:</b><ul><li><b>FIR:</b> %s</li>%s</ul>%s%s%s%s%s%s%s<br/>%s"
                "<b>Terrain: %s ft, Var: %s</b>")%(
                    ", ".join(firs),spaces,sectors,aip_sup_strs,"".join(obstacles),
                    "".join(airports),"".join(tracks),"".join(sigpoints),notamareas,
                    weather,terrelev,variation)