def _main(argv):
    """Main part of the program"""
    gConfigFile = flConfigFile()
    logFile = flConfigFile(".log")
    verbose = False
    logger = logging.getLogger()

    try:
        opts, args = getopt.getopt(argv, "hc:l:v",
                                   ["help", "config=", "logfile=", "verbose"])
    except getopt.GetoptError:
        _ShowSyntax()
        sys.exit(2)

    for opt, arg in opts:
        if opt in ("-h", "--help"):
            _ShowSyntax()
            sys.exit(2)
        elif opt in ("-c", "--config"):
            gConfigFile = arg
        elif opt in ("-l", "--logfile"):
            logFile = arg
        elif opt in ("-v", "--verbose"):
            verbose = True

    # Command line values act as defaults; settings in the configuration
    # file take precedence if they are present.
    flStartLog(cnfGetIniValue(gConfigFile, "Logging", "LogFile", logFile),
               cnfGetIniValue(gConfigFile, "Logging", "LogLevel", "INFO"),
               cnfGetIniValue(gConfigFile, "Logging", "Verbose", verbose))

    # Input track data and interpolation settings:
    inputFile = cnfGetIniValue(gConfigFile, "Input", "File")
    logger.info("Processing %s" % inputFile)
    source = cnfGetIniValue(gConfigFile, "Input", "Source")
    delta = cnfGetIniValue(gConfigFile, "Output", "Delta", 0.1)

    (nid, newtime, newdates, nLon, nLat, nthetaFm,
     nvFm, npCentre, npEnv, nrMax) = interpolateTrack(gConfigFile, inputFile,
                                                      source, delta)

    # Write the interpolated track to a csv file:
    outputFile = cnfGetIniValue(gConfigFile, "Output", "File")
    logger.info("Saving interpolated data to %s" % (outputFile))
    fh = open(outputFile, "w")
    for i in xrange(len(newtime)):
        fh.write("%d,%5.1f,%s,%6.2f,%6.2f,%6.2f,%6.2f,%7.2f,%7.2f,%5.1f\n"
                 % (nid[i], newtime[i],
                    newdates[i].strftime("%Y-%m-%d %H:%M"),
                    nLon[i], nLat[i], nthetaFm[i], nvFm[i],
                    npCentre[i], npEnv[i], nrMax[i]))
    fh.close()
    logger.info("Completed %s" % (sys.argv[0]))
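# A minimal example configuration for _main() above -- a sketch only. The
# section and option names are taken from the cnfGetIniValue() calls; the
# file names and the source value are hypothetical placeholders.
#
#   [Logging]
#   LogFile = interpolate_track.log
#   LogLevel = INFO
#   Verbose = False
#
#   [Input]
#   File = tracks.csv
#   Source = BoM
#
#   [Output]
#   File = tracks_interp.csv
#   Delta = 0.1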
def run():
    """
    Run the process, handling any parallelisation.
    """
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument("-c", "--config", help="Configuration file", type=str)
    parser.add_argument("-i", "--inputfile",
                        help="Input DEM file (ascii format)", type=str)
    parser.add_argument("-o", "--output", help="Output path", type=str)
    parser.add_argument("-v", "--verbose", action="store_true",
                        help=("Verbose output (not available when invoking "
                              "parallel run)"))
    args = parser.parse_args()

    logfile = 'topomult.log'
    loglevel = 'INFO'

    if args.verbose:
        verbose = args.verbose
    else:
        verbose = False

    if args.config:
        cfg = ConfigParser.ConfigParser()
        cfg.read(args.config)
        input_file = cfg.get('Input', 'Filename')
        output_path = cfg.get('Output', 'Path')
        logfile = cfg.get('Logging', 'LogFile')
        loglevel = cfg.get('Logging', 'LogLevel')
        # Read as a boolean so that 'Verbose = False' in the configuration
        # file is not treated as a (true) non-empty string:
        verbose = cfg.getboolean('Logging', 'Verbose')

    # Command line options override the configuration file:
    if args.inputfile:
        input_file = args.inputfile
    if args.output:
        output_path = args.output

    attemptParallel()
    if pp.size() > 1 and pp.rank() > 0:
        logfile += '-' + str(pp.rank())
        verbose = False  # stop output to console from worker processes

    flStartLog(logfile, loglevel, verbose)

    pp.barrier()
    work(input_file, output_path,
         ['n', 's', 'e', 'w', 'ne', 'nw', 'se', 'sw'])
    pp.barrier()

    pp.finalize()
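# Example usage -- a sketch only. The script name 'topomult.py' and the MPI
# launcher are assumptions (attemptParallel() and the pp.size()/pp.rank()
# calls above imply an MPI-style run); the same options work for a serial
# run without the launcher.
#
#   mpirun -np 4 python topomult.py -c topomult.ini
#
# where topomult.ini provides the options read through ConfigParser in run():
#
#   [Input]
#   Filename = dem.asc
#
#   [Output]
#   Path = ./output
#
#   [Logging]
#   LogFile = topomult.log
#   LogLevel = INFO
#   Verbose = False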
    # Create the output path if it does not already exist:
    if not isdir(abspath(output_path)):
        try:
            os.makedirs(abspath(output_path))
        except OSError:
            print "Cannot create output path: {0}".format(output_path)
            raise

    # Load the exposure file:
    shapes, fields, records = parseShapefile(shape_file)

    # Group the records into zones and aggregate the loss for each zone:
    features = AggUtils.loadZonesFromRecords(records, fields,
                                             featureid, zonefield)
    aggregateLoss(records, fields, features, featureid, output_path)


if __name__ == '__main__':
    if len(sys.argv) == 1:
        showSyntax()
    __STARTTIME__ = datetime.now()
    LOG = flStartLog(log_file=flConfigFile('.log'), log_level='INFO',
                     verbose=True, datestamp=True)
    main()
def main():
    """
    Main section of the script - process command line arguments
    and call other functions to process the data
    """
    flStartLog(log_file=flConfigFile('.log'), log_level='INFO',
               verbose=True, datestamp=True)
    LOG.info("Parsing command line arguments")

    parser = argparse.ArgumentParser()
    parser.add_argument('-c', '--costs',
                        help=('csv format file containing the cost data '
                              'for building types and land-use groupings'))
    parser.add_argument('-o', '--outputpath',
                        help='Path to folder for storing the output')
    parser.add_argument('-s', '--shapefile',
                        help=('Path (including extension) of the shape '
                              'file holding the zone features to process '
                              '(e.g. land-use parcels, meshblocks, etc.)'))
    parser.add_argument('-v', '--vulnerability',
                        help=('csv format file containing the mean, sigma '
                              'and scale values for building '
                              'vulnerability curves'))
    args = parser.parse_args()

    cost_file = args.costs
    output_path = args.outputpath
    shape_file = args.shapefile
    vulnerability_file = args.vulnerability

    # Create the output path (and a 'plots' subdirectory) if needed:
    if not isdir(abspath(output_path)):
        try:
            os.makedirs(abspath(output_path))
        except OSError:
            print "Cannot create output path: {0}".format(output_path)
            raise

    if not isdir(abspath(pjoin(output_path, 'plots'))):
        try:
            os.makedirs(pjoin(abspath(output_path), 'plots'))
        except OSError:
            print "Cannot create output path: {0}".format(
                pjoin(output_path, 'plots'))
            raise

    # Name of the output shape file, with no extension. A date string is
    # generated here so it could be appended to show when the file was
    # created:
    curdate = datetime.now()
    curdatestr = curdate.strftime('%Y%m%d%H%M')
    output_file = pjoin(abspath(output_path),
                        "event_loss")  # _{0}".format(curdatestr))

    # Load the exposure file and its projection:
    shapes, fields, records = parseShapefile(shape_file)
    spatial_ref = getProjection(shape_file)

    # Load the vulnerability file:
    building_types = parseVulnerability(vulnerability_file)

    # Process damage and write the results out to another shapefile:
    output_fields, output_records = processDamage(fields, records,
                                                  building_types, cost_file)
    writeShapefile(output_file, output_fields, shapes, output_records)
    writeProjectionFile(spatial_ref, output_file)

    # Do the damage state calculations:
    for state in ['slight', 'moderate', 'extensive', 'complete']:
        output_fields, output_records = processFragility(fields, records,
                                                         building_types,
                                                         state)

    output_fields, output_records = calculatePopulation(output_fields,
                                                        output_records,
                                                        building_types)
    output_file = pjoin(abspath(output_path), "event_damage_states")
    writeShapefile(output_file, output_fields, shapes, output_records)
    writeProjectionFile(spatial_ref, output_file)

    pop_fields, pop_records = dropDamageStateFields(building_types,
                                                    output_fields,
                                                    output_records)
    output_file = pjoin(abspath(output_path), "event_pop_affect")
    writeShapefile(output_file, pop_fields, shapes, pop_records)
    writeProjectionFile(spatial_ref, output_file)
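# Example invocation -- a sketch only. The script name 'event_loss.py' and
# the input file names are placeholders; the flags match the argparse
# definitions in main() above.
#
#   python event_loss.py -c building_costs.csv \
#                        -s zones.shp \
#                        -v vulnerability_curves.csv \
#                        -o ./output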
def _process(argv):
    """
    A wrapper function to provide an interface between the command line
    args and the actual plotField function. This function reads settings
    from a configuration file and then passes those arguments to the
    plotField function.
    """
    if len(argv) == 0:
        _usage()
        sys.exit(2)

    logLevel = 'INFO'
    verbose = False
    configFile = flConfigFile()

    try:
        opts, args = getopt.getopt(argv, "c:hl:v",
                                   ["config=", "help", "loglevel=", "verbose"])
    except getopt.GetoptError:
        _usage()
        sys.exit(2)

    for opt, arg in opts:
        if opt in ("-h", "--help"):
            _usage()
            sys.exit(2)
        elif opt in ("-c", "--config"):
            configFile = arg
        elif opt in ("-l", "--loglevel"):
            logLevel = arg
        elif opt in ("-v", "--verbose"):
            verbose = True

    flStartLog(cnfGetIniValue(configFile, 'Logging', 'LogFile',
                              flConfigFile('.log')),
               cnfGetIniValue(configFile, 'Logging', 'LogLevel', logLevel),
               cnfGetIniValue(configFile, 'Logging', 'Verbose', verbose))

    # Input data:
    inputFile = cnfGetIniValue(configFile, 'Input', 'File')
    inputFormat = cnfGetIniValue(configFile, 'Input', 'Format',
                                 os.path.splitext(inputFile)[-1])
    varname = cnfGetIniValue(configFile, 'Input', 'Variable', '')
    record = cnfGetIniValue(configFile, 'Input', 'Record', 0)
    lvl = cnfGetIniValue(configFile, 'Input', 'Level', 0)

    # Output settings - the default is to use the input filename, with
    # the extension replaced by the image format.
    # Smoothing is optional: set it to the number of grid points to
    # smooth over (the reciprocal of the data resolution in degrees is
    # recommended).
    imgfmt = cnfGetIniValue(configFile, 'Output', 'Format', 'png')
    outputFile = cnfGetIniValue(configFile, 'Output', 'File',
                                "%s.%s" % (os.path.splitext(inputFile)[0],
                                           imgfmt))
    smoothing = cnfGetIniValue(configFile, 'Output', 'Smoothing', False)
    cmapName = cnfGetIniValue(configFile, 'Output', 'ColourMap', 'gist_ncar')
    label = cnfGetIniValue(configFile, 'Output', 'Label', '')
    mask = cnfGetIniValue(configFile, 'Output', 'MaskLand', False)
    maskocean = cnfGetIniValue(configFile, 'Output', 'MaskOcean', False)
    fill = cnfGetIniValue(configFile, 'Output', 'FillContours', True)
    title = cnfGetIniValue(configFile, 'Plot', 'Title', None)

    # Load the data:
    if inputFormat == '.txt':
        # Attempt to load the dataset:
        try:
            lon, lat, data = grid.grdRead(inputFile)
        except Exception:
            logger.critical("Cannot load input file: %s" % inputFile)
            raise
    elif inputFormat == '.nc':
        try:
            ncobj = nctools.ncLoadFile(inputFile)
            lon = nctools.ncGetDims(ncobj, 'lon')
            lat = nctools.ncGetDims(ncobj, 'lat')
            data = nctools.ncGetData(ncobj, varname)
            mv = getattr(ncobj.variables[varname], '_FillValue')
            ncobj.close()
        except Exception:
            logger.critical("Cannot load input file: %s" % inputFile)
            raise
        # Select the requested record (and level, for 4-d variables):
        if len(shape(data)) == 3:
            data = data[record, :, :]
        elif len(shape(data)) == 4:
            data = data[record, lvl, :, :]
        # Create a masked array using the fill value:
        datamask = (data == mv)
        data = ma.array(data, mask=datamask)
    else:
        logger.critical("Unknown data format")
        raise IOError("Unknown data format: %s" % inputFormat)

    # Set defaults for the extent of the map to match the data in the
    # input file:
    llLon = min(lon)
    urLon = max(lon)
    llLat = min(lat)
    urLat = max(lat)
    res = 'l'
    dl = 10.
    # Domain settings - can override the default settings:
    domain = cnfGetIniValue(configFile, 'Domain', 'Name', None)
    if domain is not None:
        llLon = cnfGetIniValue(configFile, domain, 'LowerLeftLon', min(lon))
        llLat = cnfGetIniValue(configFile, domain, 'LowerLeftLat', min(lat))
        urLon = cnfGetIniValue(configFile, domain, 'UpperRightLon', max(lon))
        urLat = cnfGetIniValue(configFile, domain, 'UpperRightLat', max(lat))
        res = cnfGetIniValue(configFile, domain, 'Resolution', res)
        dl = cnfGetIniValue(configFile, domain, 'GridInterval', dl)

    [x, y] = meshgrid(lon, lat)

    # Set the scale:
    scaleMin = cnfGetIniValue(configFile, 'Output', 'ScaleMin', 0)
    scaleMax = cnfGetIniValue(configFile, 'Output', 'ScaleMax', 101)
    scaleInt = cnfGetIniValue(configFile, 'Output', 'ScaleInt', 10)
    levels = arange(scaleMin, scaleMax, scaleInt)

    plotField(x, y, data, llLon, llLat, urLon, urLat, res, dl, levels,
              cmapName, smoothing, title=title, xlab='Longitude',
              ylab='Latitude', clab=label, maskland=mask,
              maskocean=maskocean, outputFile=outputFile, fill=fill)

    logger.info("Completed %s" % sys.argv[0])
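# A minimal example configuration for _process() above -- a sketch only. The
# section and option names come from the cnfGetIniValue() calls; the values
# shown (file names, variable name, title, domain extents) are hypothetical
# placeholders. For netCDF input the [Input] File and Variable options are
# needed; everything else falls back to the defaults in the code.
#
#   [Logging]
#   LogFile = plotfield.log
#   LogLevel = INFO
#
#   [Input]
#   File = gust.nc
#   Variable = vmax
#   Record = 0
#
#   [Output]
#   Format = png
#   ColourMap = gist_ncar
#   ScaleMin = 0
#   ScaleMax = 101
#   ScaleInt = 10
#
#   [Plot]
#   Title = Maximum gust wind speed
#
#   [Domain]
#   Name = MyDomain
#
#   [MyDomain]
#   LowerLeftLon = 105.
#   LowerLeftLat = -45.
#   UpperRightLon = 160.
#   UpperRightLat = -5.
#   Resolution = l
#   GridInterval = 10.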