def main():
    """CGI entry point: render the gmapgridoverlay HTML template for an area.

    The APUB site comes from the SITE environment variable or --site.
    The area id is taken from (in priority order) the CGI 'areaid' field,
    the --areaid command line argument, or the domain's first viewport.
    Prints the filled-in template to stdout.
    """
    import html  # stdlib replacement for cgi.escape (removed in Python 3.8)

    domain = Domain()

    # Parse command line arguments
    parser = argparse.ArgumentParser(description=__doc__)
    utils.add_standard_command_options(parser)
    parser.add_argument(
        '--site',
        action='store', dest='site',
        help='APUB-site',
    )
    parser.add_argument(
        '--areaid',
        action='store', dest='areaid',
        help='Area id'
    )
    args = parser.parse_args()

    # Environment variable takes precedence over the command line option.
    site = os.environ.get("SITE", None) or args.site
    if site is None:
        log.error("No apub site specified")
        sys.exit(1)

    with open(
        path.join('/var/www/html', site, 'gmapgridoverlay.htm')
    ) as html_template:
        template = Template(html_template.read())

    # NOTE(review): rf is not referenced below -- presumably ControlFile
    # reads/validates the resource file on construction; confirm before
    # removing.
    rf = ControlFile(
        path.join(
            os.environ["RSRCDIR"],
            "apub." + site + ".gmapgridoverlay.rf"
        ),
        "HP Roman8"
    )

    form = cgi.FieldStorage()
    viewports = domain.listViewports()
    areaid = form.getfirst('areaid', None)
    if areaid is not None:
        # Escape untrusted CGI input. quote=False matches the behaviour of
        # the removed cgi.escape (escapes &, <, > but not quotes).
        areaid = html.escape(areaid, quote=False)
    areaid = areaid or args.areaid or viewports[0]

    viewport = ViewPort()
    viewport.read(code=areaid)
    proj = get_proj4(viewport.proj)
    data = get_latlon_bounds(
        viewport.xmin(), viewport.ymin(),
        viewport.xmax(), viewport.ymax(),
        proj
    )
    print(template.substitute(data))
def main():
    """Export a result field as a colormapped PNG.

    Pipeline: read field per macro -> write cached GeoTIFF -> gdalwarp to a
    VRT in the input projection -> apply color relief -> translate to PNG.
    A previously generated PNG is reused when caching is enabled and valid.
    """
    # Parse command line arguments
    parser = argparse.ArgumentParser(description=__doc__)
    utils.add_standard_command_options(parser)
    parser.add_argument(
        '-i', required=True,
        action='store', dest='macro',
        help='Macro specifying result',
    )
    parser.add_argument(
        '-t', '--timestamp',
        action="store", dest='timestamp',
        type=lambda s: datetime.datetime.strptime(s, '%y%m%d%H'),
        help="Hour to export as 'YYMMDDhh, " +
        "default is first timestamp of result'"
    )
    parser.add_argument(
        "--srs",
        action="store", dest="srs",
        help="Output SRS alias (from projections.rf)"
    )
    parser.add_argument(
        '--alpha',
        action='store', dest='alpha', type=int,
        help='Output alpha value (0-255)', default=0
    )
    parser.add_argument(
        '--factor',
        action='store', dest='factor',
        help="Refinement factor for resampling",
        type=int, default=2
    )
    parser.add_argument(
        '--no-cache',
        action='store_false', dest='cache',
        help='Do not use cached images.'
    )
    parser.add_argument(
        '--norm-area',
        action='store_true', dest='norm_area',
        help='Normalize field by area (e.g. g/(s*km2))'
    )
    args = parser.parse_args()

    # Resolve the field definition, colormap and data from the macro.
    field_def = get_field_definition_from_macro(args.macro)
    levels, rgba = get_colormap_from_disp_macro(args.macro, args.alpha)
    colormap = colormap_from_levels(levels, rgba)
    field = get_field(field_def, args.timestamp)

    # Input projection is taken from the viewport of the field's area.
    viewport = ViewPort(field_def.area)
    viewport.read()
    inproj = get_proj4(viewport.proj)

    cache_gtiff = create_cache_path(field_def, args.timestamp, '.tif')
    png = cache_gtiff[:-4] + '.png'
    if args.cache and valid_cache(field_def, args.macro, png):
        log.info('Using cached png: %s' % png)
        # NOTE(review): exit status 1 on a cache hit looks like an error
        # code on a success path -- confirm callers depend on this value
        # before changing it to 0.
        sys.exit(1)

    # Write the raw field as a GeoTIFF, then warp/colorize via VRTs.
    field2GDAL(
        field_def, field, cache_gtiff, proj=inproj
    )
    warp_vrt = cache_gtiff[:-4] + '_warped.vrt'
    warp(cache_gtiff, warp_vrt, inproj)
    rgba_vrt = cache_gtiff[:-4] + '_rgba.vrt'
    color_relief(warp_vrt, rgba_vrt, colormap)
    translate(rgba_vrt, png, 'PNG')
def main():
    """Cut out one edb per viewport containing the ships that visited it.

    For each requested viewport a new edb named <code>_<year>_<suffix> is
    created under the target user, the search keys and emission factors are
    copied, and only the sources accepted by includeShip() are written.
    Returns 0 on success, 1 on invalid options.
    """
    # ----------- Setting up and using option parser -----------------------
    parser = OptionParser(usage=usage, version=version)
    parser.add_option("-u", '--user',
                      action="store", dest="user",
                      help="Name of target edb user")
    parser.add_option("-e", "--edb",
                      action="store", dest="edb",
                      help="Name of target edb")
    parser.add_option("-v", "--viewports",
                      action="store", dest="viewports",
                      help="Comma-separated list of area id's to be "
                      "cut out, default is all")
    parser.add_option("-y", "--year",
                      action="store", dest="year",
                      help="Cut out for given year")
    parser.add_option("-l", "--loglevel",
                      action="store", dest="loglevel", default=2,
                      help="Sets the loglevel (0-3 where 3=full logging)")
    parser.add_option("-s", "--suffix",
                      action="store", dest="suffix", default="v1",
                      help="Sets suffix to names of generated edb's to "
                      "support version management, default is 'v1'")
    (options, args) = parser.parse_args()

    # -------------------- Init logger -----------------------
    rootLogger = logger.RootLogger(level=options.loglevel)
    global log
    log = rootLogger.getLogger(sys.argv[0])

    # ----------------- Validating options -------------------
    if options.user is None:
        log.error("Need to specify -u <user>")
        return 1
    if options.edb is None:
        log.error("Need to specify -e <edb>")
        return 1
    if options.year is None:
        log.error("Need to specify -y <year>")
        return 1
    if len(options.suffix) > 4:
        log.error("Limit your suffix length to 4 characters")
        return 1
    if len(options.year) != 4:
        log.error("Year should be given with four digits")
        return 1

    dmn = Domain()

    # Resolve the viewports to cut out (all of the domain's by default).
    if options.viewports is not None:
        viewportIds = options.viewports.split(",")
    else:
        viewportIds = dmn.listViewports()
    viewports = []
    for vpId in viewportIds:
        vp = ViewPort()
        vp.read(path.join(dmn.wndPath(), "modell.par"), vpId)
        viewports.append(vp)

    # Read everything once from the source edb; it is reused per viewport.
    edb = Edb(dmn, options.user, options.edb)
    log.info("Reading sourcedb...")
    sourcedb = Sourcedb(edb)
    sourcedb.read()

    log.info("Reading emfacdb...")
    emfacdb = Emfacdb(edb)
    emfacdb.read()

    log.info("Reading subdb...")
    subdb = Subdb(edb)
    subdb.read()

    edbDotRsrc = edb.rsrcPath()

    for vpInd, vp in enumerate(viewports):
        targetEdbName = vp.code + "_" + options.year + "_" + options.suffix
        tEdb = Edb(dmn, options.user, targetEdbName)
        # Never overwrite an existing target edb -- skip it instead.
        if tEdb.exists():
            log.info("Edb %s already exists, remove first to update"
                     % targetEdbName)
            continue
        tEdb.create(edbRsrc=edbDotRsrc)
        log.info("Created empty edb %s" % targetEdbName)

        subdb.setEdb(tEdb)
        subdb.write()
        log.info("Wrote searchkeys")

        emfacdb.setEdb(tEdb)
        emfacdb.write()
        log.info("Wrote emfacdb")

        tSourcedb = Sourcedb(tEdb)
        log.info("Cutting out viewport %s (%i/%i)"
                 % (vp.code, vpInd + 1, len(viewports)))
        for srcInd, src in enumerate(sourcedb.sources):
            if includeShip(src, vp.code, src["Y1"], options.year):
                log.debug("Ship %i/%i included in %s"
                          % (srcInd + 1, len(sourcedb.sources), tEdb.name))
                tSourcedb.sources.append(src)
        tSourcedb.write()
        # Typo fix: message previously read "exatracted".
        log.info("Wrote extracted sources to %s" % tEdb.name)
        tEdb.setDesc(
            "This edb has been extracted from %s under user %s, "
            % (edb.name, edb.user) +
            "and includes all ships that have visited the map area "
            "%s (%s) during %s\n" % (vp.code, vp.name, options.year)
        )
    log.info("Finished!")
    return 0