def get_aipdata(cachefile="aipdata.cache", generate_if_missing=False):
    global aipdata
    global aipdatalookup
    global loaded_aipdata_cachefiledate
    global last_timestamp_check
    lock.acquire()
    try:
        # Fast path: aipdata is already loaded in memory and matches the current version.
        if aipdata and os.path.exists(cachefile) and aipdata.get('version', None) == version:
            # At most every 15 seconds, check whether the cache file on disk has a newer
            # timestamp than the loaded data, and reload it if so.
            if datetime.utcnow() - last_timestamp_check > timedelta(0, 15) and os.path.exists(cachefile):
                last_timestamp_check = datetime.utcnow()
                filestamp = get_filedate(cachefile)
                print "Timestamp of loaded aipdata: %s, Timestamp of aipdata on disk: %s" % (
                    loaded_aipdata_cachefiledate, filestamp)
                if filestamp != loaded_aipdata_cachefiledate:
                    try:
                        print "Loading new aipdata"
                        newaipdata = pickle.load(open(cachefile))
                        if newaipdata.get('version', None) != version:
                            raise Exception("Bad aipdata version")
                        loaded_aipdata_cachefiledate = get_filedate(cachefile)
                        newaipdatalookup = gen_bsptree_lookup(newaipdata)
                        aipdata, aipdatalookup = newaipdata, newaipdatalookup
                        return aipdata
                    except Exception, cause:
                        print "Tried to load new aipdata from disk, but failed"
            return aipdata

        # Nothing loaded yet (or wrong version): try to load the cache file from disk.
        try:
            aipdata = pickle.load(open(cachefile))
            aipdatalookup = gen_bsptree_lookup(aipdata)
            if aipdata.get('version', None) != version:
                raise Exception("Bad aipdata version")
            loaded_aipdata_cachefiledate = get_filedate(cachefile)
            return aipdata
        except Exception, cause:
            if not generate_if_missing:
                raise Exception(
                    "You must supply generate_if_missing-parameter for aip-data parsing and generation to happen")

        # Cache missing or unusable: regenerate the aipdata from the per-country parsers.
        airspaces = []
        bad_airfields = []
        airfields = []
        sig_points = []
        obstacles = []
        seenpoints = dict()

        def sig_points_extend(points):
            # Add points, skipping exact name/position duplicates.
            for point in points:
                name, pos = point['name'], point['pos']
                samename = seenpoints.setdefault(name, [])
                already = False
                for s in samename:
                    if s['pos'] == pos:
                        already = True
                samename.append(point)
                if not already:
                    sig_points.append(point)

        a = True
        if 0:  #poland
            ads, spaces = ep_parse_airfields()
            airfields.extend(ads)
            airspaces.extend(spaces)
            airspaces.extend(ep_parse_tma())
            airspaces.extend(ep_parse_tra())
        if 0:  #lithuania
            ads, spaces = ey_parse_airfields()
            airspaces.extend(spaces)
            airfields.extend(ads)
            sig_points.extend(ey_parse_sigpoints())
            airspaces.extend(ey_parse_tma())
        if 0:  #estonia
            ads, spaces = ee_parse_airfields2()
            airfields.extend(ads)
            airspaces.extend(spaces)
            sig_points.extend(ee_parse_sigpoints2())
            #airspaces.extend(ee_parse_restrictions())
            airspaces.extend(ee_parse_tma2())
            airspaces.extend(ee_parse_r_and_tsa2())
        if 0:  #latvia
            #airspaces.extend(ee_parse_restrictions())
            #airspaces.extend(ev_parse_tma())
            #airspaces.extend(ev_parse_r())
            #obstacles.extend(ev_parse_obst())
            evads, evspaces = ev_parse_airfields()  # defines evads/evspaces used by the two extends below
            sig_points.extend(ev_parse_sigpoints())
            airspaces.extend(evspaces)
            airfields.extend(evads)

        if True:  #not is_devcomp():
            class SpaceLoader(object):
                def parse_trusted_userdata(self):
                    "Data added by users, only trusted users"
                    return userdata.get_trusted_data()

                def parse_osm_airfields(self):
                    return dict(bad_airfields=osm_airfields.osm_airfields_parse())

                def parse_latvian_tma(self):
                    "latvian tma"
                    return dict(airspaces=ev_parse_tma())

                def parse_latvian_r(self):
                    "latvian r"
                    return dict(airspaces=ev_parse_r())

                def parse_latvian_obst(self):
                    "latvian obst"
                    return dict(obstacles=ev_parse_obst())

                def parse_latvian_sigpoints(self):
                    "latvian sig points"
                    return dict(sig_points=ev_parse_sigpoints())

                def parse_latvian_airfields(self):
                    "latvian ad"
                    evads, evspaces = ev_parse_airfields()
                    return dict(airspaces=evspaces, airfields=evads)

                def parse_estonian_airfields(self):
                    "Estonian Airfields"
                    ads, spaces = ee_parse_airfields2()
                    return dict(airspaces=spaces, airfields=ads)

                def parse_estonian_sigpoints(self):
                    "Estonian sig points"
                    return dict(sig_points=ee_parse_sigpoints2())

                def parse_estonian_tma(self):
                    "Estonian TMA"
                    return dict(airspaces=ee_parse_tma2())

                def parse_estonian_r_and_tsa(self):
                    "Estonian R and TSA"
                    return dict(airspaces=ee_parse_r_and_tsa2())

                def parse_norwegian_obstacles(self):
                    "Norwegian obstacles"
                    return dict(obstacles=no_obstacles())

                #def parse_denmark(self):
                #    "denmark"
                #    raise Exception("Currently disabled")
                #if not is_devcomp() or a:  #denmark
                #    denmark = parse_denmark()
                #    return dict(airspaces=denmark['airspace'],
                #                airfields=denmark['airfields'])

                #def fi_parse_tma(self): "Finnish TMA"; return dict(airspaces=fi_parse_tma())
                #def fi_parse_sigpoints(self): "Finnish significant points"; return dict(sig_points=fi_parse_sigpoints())

                def fi_parse_obstacles(self):
                    "Finnish obstacles"
                    return dict(obstacles=fi_parse_obstacles())

                def fi_parse_parse_airfields(self):
                    "Finnish major airfields"
                    fi_airfields, fi_spaces, fi_ad_points = fi_parse_airfields()
                    return dict(airfields=fi_airfields, airspaces=[])

                #def fi_parse_restrictions(self): "Finnish R-areas"; return dict(airspaces=fi_parse_restrictions())
                #def fi_parse_small_airfields(self): "Finnish small airfields"; return dict(airfields=fi_parse_small_airfields())

                def fi_parse_new(self):
                    "Finnish data"
                    spaces, points = new_finland.load_finland()
                    return dict(airspaces=spaces, sig_points=points)

                def se_parse_airfields(self):
                    "Swedish Major airports"
                    se_airfields, se_points = extract_airfields()
                    return dict(airfields=se_airfields, sig_points=se_points)

                def se_parse_sigpoints(self):
                    "Swedish significant points"
                    return dict(sig_points=parse_sig_points())

                def se_parse_tma(self):
                    "Swedish TMA"
                    return dict(airspaces=parse_all_tma())

                def se_parse_r(self):
                    "Swedish R/D-areas"
                    return dict(airspaces=parse_r_areas())

                def se_parse_mountain(self):
                    "Swedish mountain area"
                    return dict(airspaces=parse_mountain_area())

                def se_parse_obstacles(self):
                    "Swedish obstacles"
                    return dict(obstacles=parse_obstacles())

                def se_parse_segel(self):
                    "Swedish Segelsektorer"
                    return dict(airspaces=extract_segel())
        else:
            class SpaceLoader(object):
                def fi_parse_new(self):
                    "Finnish data"
                    spaces, points = new_finland.load_finland()
                    return dict(airspaces=spaces, sig_points=points)

        def run_space_loader(loader):
            # Run every public parse method of the loader, pickling each result under
            # data/aipdata/ and falling back to the previous pickle if a parser fails.
            # A per-method status report is written to data/aipdata/result.json.
            if not os.path.exists("data/aipdata"):
                os.makedirs("data/aipdata")
            report = dict()
            for method in [x for x in dir(loader) if not x.startswith("_")]:
                filename = os.path.join("data/aipdata", method + ".pickle")
                result = "Unknown"
                msg = None
                try:
                    m = getattr(loader, method)
                    d = m()
                    temp = open(filename + ".temp", "w")
                    pickle.dump(d, temp)
                    temp.close()
                    os.rename(filename + ".temp", filename)
                    result = "Loaded new"
                except Exception, cause:
                    msg = traceback.format_exc()
                    try:
                        d = pickle.load(open(filename))
                        result = "Used backup"
                    except Exception:
                        d = dict()
                        result = "Backup restore failed"
                report[method] = dict(method=method,
                                      what=m.__doc__,
                                      result=result,
                                      msg=msg,
                                      date=utcdatetime2stamp_inexact(datetime.utcnow()))
                now = datetime.utcnow()
                for k, v in d.items():
                    # Stamp every returned item with a date, and never let it lie in the future.
                    for x in v:
                        if not 'date' in x:
                            x['date'] = now
                        else:
                            if x['date'] > now:
                                x['date'] = now
                    if k == "airspaces":
                        #print "Method:", method, v
                        #for av in v:
                        #    print
                        #    print
                        #    print av
                        #    assert 'type' in av
                        airspaces.extend(v)
                    elif k == "airfields":
                        airfields.extend(v)
                    elif k == "bad_airfields":
                        bad_airfields.extend(v)
                    elif k == "sig_points":
                        sig_points.extend(v)
                    elif k == "obstacles":
                        obstacles.extend(v)
                    else:
                        raise Exception("Bad return value from SpaceLoader:%s" % ((k, v), ))

            f = open("data/aipdata/result.json", "w")
            json.dump(report, f)
            f.close()
def get_aipdata(cachefile="aipdata.cache",generate_if_missing=False): global aipdata global aipdatalookup global loaded_aipdata_cachefiledate global last_timestamp_check lock.acquire() try: if aipdata and os.path.exists(cachefile) and aipdata.get('version',None)==version: if datetime.utcnow()-last_timestamp_check>timedelta(0,15) and os.path.exists(cachefile): last_timestamp_check=datetime.utcnow() filestamp=get_filedate(cachefile) print "Timestamp of loaded aipdata: %s, Timestamp of aipdata on disk: %s"%(loaded_aipdata_cachefiledate,filestamp) if filestamp!=loaded_aipdata_cachefiledate: try: print "Loading new aipdata" newaipdata=pickle.load(open(cachefile)) if newaipdata.get('version',None)!=version: raise Exception("Bad aipdata version") loaded_aipdata_cachefiledate=get_filedate(cachefile); newaipdatalookup=gen_bsptree_lookup(newaipdata) aipdata,aipdatalookup=newaipdata,newaipdatalookup return aipdata except Exception,cause: print "Tried to load new aipdata from disk, but failed" return aipdata try: aipdata=pickle.load(open(cachefile)) aipdatalookup=gen_bsptree_lookup(aipdata) if aipdata.get('version',None)!=version: raise Exception("Bad aipdata version") loaded_aipdata_cachefiledate=get_filedate(cachefile); return aipdata except Exception,cause: if not generate_if_missing: raise Exception("You must supply generate_if_missing-parameter for aip-data parsing and generation to happen") airspaces=[] bad_airfields=[] airfields=[] sig_points=[] obstacles=[] seenpoints=dict() def sig_points_extend(points): for point in points: name,pos=point['name'],point['pos'] samename=seenpoints.setdefault(name,[]) already=False for s in samename: if s['pos']==pos: already=True samename.append(point) if not already: sig_points.append(point) a=True if 0: #poland ads,spaces=ep_parse_airfields() airfields.extend(ads) airspaces.extend(spaces) airspaces.extend(ep_parse_tma()) airspaces.extend(ep_parse_tra()) if 0: #lithuania ads,spaces=ey_parse_airfields() airspaces.extend(spaces) airfields.extend(ads) sig_points.extend(ey_parse_sigpoints()) airspaces.extend(ey_parse_tma()) if 0: #estonia ads,spaces=ee_parse_airfields2() airfields.extend(ads) airspaces.extend(spaces) sig_points.extend(ee_parse_sigpoints2()) #airspaces.extend(ee_parse_restrictions()) airspaces.extend(ee_parse_tma2()) airspaces.extend(ee_parse_r_and_tsa2()) if 0: #latvia #airspaces.extend(ee_parse_restrictions()) #airspaces.extend(ev_parse_tma()) #airspaces.extend(ev_parse_r()) #obstacles.extend(ev_parse_obst()) sig_points.extend(ev_parse_sigpoints()) airspaces.extend(evspaces) airfields.extend(evads) if True: #not is_devcomp(): class SpaceLoader(object): def parse_trusted_userdata(self): "Data added by users, only trusted users" return userdata.get_trusted_data() def parse_osm_airfields(self): return dict(bad_airfields=osm_airfields.osm_airfields_parse()) def parse_latvian_tma(self): "latvian tma" return dict(airspaces=ev_parse_tma()) def parse_latvian_r(self): "latvian r" return dict(airspaces=ev_parse_r()) def parse_latvian_obst(self): "latvian obst" return dict(obstacles=ev_parse_obst()) def parse_latvian_sigpoints(self): "latvian sig points" return dict(sig_points=ev_parse_sigpoints()) def parse_latvian_airfields(self): "latvian ad" evads,evspaces=ev_parse_airfields() return dict(airspaces=evspaces,airfields=evads) def parse_estonian_airfields(self): "Estonian Airfields" ads,spaces=ee_parse_airfields2() return dict(airspaces=spaces,airfields=ads) def parse_estonian_sigpoints(self): "Estonian sig points" return 
dict(sig_points=ee_parse_sigpoints2()) def parse_estonian_tma(self): "Estonian TMA" return dict(airspaces=ee_parse_tma2()) def parse_estonian_r_and_tsa(self): "Estonian R and TSA" return dict(airspaces=ee_parse_r_and_tsa2()) def parse_norwegian_obstacles(self): "Norwegian obstacles" return dict(obstacles=no_obstacles()) #def parse_denmark(self): # "denmark" # raise Exception("CUrrently disabled") #if not is_devcomp() or a: #denmark # denmark=parse_denmark() # return dict(airspaces=denmark['airspace'], # airfields=denmark['airfields']) #def fi_parse_tma(self):"Finnish TMA";return dict(airspaces=fi_parse_tma()) #def fi_parse_sigpoints(self): "Finnish significant points";return dict(sig_points=fi_parse_sigpoints()) def fi_parse_obstacles(self): "Finnish obstacles";return dict(obstacles=fi_parse_obstacles()) def fi_parse_parse_airfields(self): "Finnish major airfields" fi_airfields,fi_spaces,fi_ad_points=fi_parse_airfields() return dict(airfields=fi_airfields,airspaces=[]) #def fi_parse_restrictions(self):"Finnish R-areas";return dict(airspaces=fi_parse_restrictions()) #def fi_parse_small_airfields(self):"Finnish small airfields";return dict(airfields=fi_parse_small_airfields()) def fi_parse_new(self): "Finnish data" spaces,points=new_finland.load_finland() return dict(airspaces=spaces,sig_points=points) def se_parse_airfields(self): "Swedish Major airports" se_airfields,se_points=extract_airfields() return dict(airfields=se_airfields,sig_points=se_points) def se_parse_sigpoints(self):"Swedish significant points";return dict(sig_points=parse_sig_points()) def se_parse_tma(self): "Swedish TMA" return dict(airspaces=parse_all_tma()) def se_parse_r(self):"Swedish R/D-areas";return dict(airspaces=parse_r_areas()) def se_parse_mountain(self):"Swedish mountain area";return dict(airspaces=parse_mountain_area()) def se_parse_obstacles(self):"Swedish obstacles";return dict(obstacles=parse_obstacles()) def se_parse_segel(self): "Swedish Segelsektorer" return dict(airspaces=extract_segel()) else: class SpaceLoader(object): def fi_parse_new(self): "Finnish data" spaces,points=new_finland.load_finland() return dict(airspaces=spaces,sig_points=points) def run_space_loader(loader): if not os.path.exists("data/aipdata"): os.makedirs("data/aipdata") report=dict() for method in [x for x in dir(loader) if not x.startswith("_")]: filename=os.path.join("data/aipdata",method+".pickle") result="Unknown" msg=None try: m=getattr(loader,method) d=m() temp=open(filename+".temp","w") pickle.dump(d,temp) temp.close() os.rename( filename+".temp", filename) result="Loaded new" except Exception,cause: msg=traceback.format_exc() try: d=pickle.load(open(filename)) result="Used backup" except Exception: d=dict() result="Backup restore failed" report[method]=dict(method=method,what=m.__doc__,result=result,msg=msg,date= utcdatetime2stamp_inexact(datetime.utcnow())) now=datetime.utcnow() for k,v in d.items(): for x in v: if not 'date' in x: x['date']=now else: if x['date']>now: x['date']=now if k=="airspaces": #print "Method:",method,v #for av in v: #print #print #print av #assert 'type' in av airspaces.extend(v) elif k=="airfields": airfields.extend(v) elif k=="bad_airfields": bad_airfields.extend(v) elif k=="sig_points": sig_points.extend(v) elif k=="obstacles": obstacles.extend(v) else: raise Exception("Bad return value from SpaceLoader:%s"%((k,v),)) f=open("data/aipdata/result.json","w") json.dump(report,f) f.close()
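
# A minimal usage sketch, assuming the module-level state referenced above
# (lock, version, aipdata, aipdatalookup, loaded_aipdata_cachefiledate,
# last_timestamp_check) is initialised elsewhere in this module and that the
# generation path continues below the section shown here. With a valid
# aipdata.cache on disk this simply loads (or reloads) the cached data.
if __name__ == "__main__":
    data = get_aipdata(cachefile="aipdata.cache")
    print "aipdata version: %s" % (data.get('version', None), )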