Example no. 1
0
def main():
    """Import point sources from a csv file into an Airviro edb.

    For every csv row a company, a facility and a source record are
    built according to the column mappings and default values given in
    the controlfile (the single positional argument).  Geo codes,
    activity codes, substances and units are validated against the edb
    resources before output is written to <outfile> (sources),
    <outfile>.companydb and <outfile>.facilitydb.

    Returns 0 on success, 1 on usage errors; exits with status 1 on
    validation errors.
    """
    #-----------Setting up and using option parser-----------------------
    parser = OptionParser(usage=usage, version=version)

    parser.add_option("-u", "--user",
                      action="store", dest="user",
                      help="Name of target edb user")

    parser.add_option("-e", "--edb",
                      action="store", dest="edb",
                      help="Name of target edb")

    parser.add_option("-i", "--infile",
                      action="store", dest="infile",
                      help="Input csv file")

    # const=logging.DEBUG: without an explicit const, store_const would
    # store None and basicConfig would get level=None when -v is given
    parser.add_option("-v", dest="loglevel",
                      action="store_const", const=logging.DEBUG,
                      default=get_loglevel(),
                      help="produce verbose output")

    parser.add_option("-t", "--template",
                      action="store", dest="cf", default=None,
                      help="Generate default controlfile")

    parser.add_option("-o", "--outfile",
                      action="store", dest="outfile", default=None,
                      help="Name of outfiles (without extension)")

    parser.add_option("-d", "--delimiter",
                      action="store", dest="delimiter", default="\t",
                      help="Delimiter used in csv-file")

    parser.add_option("-c", "--filterCol",
                      action="store", dest="filterCol",
                      help="Header of column to use as filter")

    parser.add_option("-f", "--filterVal",
                      action="store", dest="filterVal",
                      help="Value to use in filter")

    (options, args) = parser.parse_args()

    #--------------------Init logger-----------------------
    logging.basicConfig(
            format='%(levelname)s:%(name)s: %(message)s',
            level=options.loglevel,
    )
    global log
    # get_prog_name() always yields a usable name; parser.prog may be None
    log = logging.getLogger(parser.get_prog_name())

    #-----------------Validating options-------------------

    if options.cf is not None:
        generateCf(path.abspath(options.cf), controlFileTemplate)
        log.info("Wrote default controlfile")
        return 0  # writing the template is a successful run

    if options.user is None:
        log.error("Need to specify -u <user>")
        return 1
    if options.edb is None:
        log.error("Need to specify -e <edb>")
        return 1
    # infile and outfile are dereferenced unconditionally below;
    # fail early with a clear message instead of a TypeError
    if options.infile is None:
        log.error("Need to specify -i <infile>")
        return 1
    if options.outfile is None:
        log.error("Need to specify -o <outfile>")
        return 1

    if len(args) != 1:
        log.error("Controlfile should be given as argument")
        return 1

    dmn = Domain()
    edb = Edb(dmn, options.user, options.edb)
    if not edb.exists():
        log.error("Edb %s does not exist" % options.edb)
        return 1

    log.info("Parsing controlfile")

    cf = ControlFile(args[0])
    cdbPars = re.compile(r"companydb\.par\.(\w*?):").findall(cf.content)
    fdbPars = re.compile(r"facilitydb\.par\.(\w*?):").findall(cf.content)
    sdbPars = re.compile(r"sourcedb\.par\.(\w*?):").findall(cf.content)
    substEmisNr = re.compile(r"sourcedb\.subst_emis\.([0-9]*)\.emis").findall(cf.content)
    subgrpEmisNr = re.compile(r"sourcedb\.subgrp_emis\.([0-9]*)\.emis").findall(cf.content)

    # Column mapping and optional default value for each listed parameter
    cdbCols = {}
    cdbDefaults = {}
    for par in cdbPars:
        cdbCols[par] = cf.findString("companydb.par.%s:" % par)
        cdbDefaults[par] = cf.findString("companydb.par.%s.default:" % par,
                                         optional=True, default=None)
    fdbCols = {}
    fdbDefaults = {}
    for par in fdbPars:
        fdbCols[par] = cf.findString("facilitydb.par.%s:" % par)
        fdbDefaults[par] = cf.findString("facilitydb.par.%s.default:" % par,
                                         optional=True, default=None)

    sdbCols = {}
    sdbDefaults = {}
    for par in sdbPars:
        sdbCols[par] = cf.findString("sourcedb.par.%s:" % par)
        sdbDefaults[par] = cf.findString("sourcedb.par.%s.default:" % par,
                                         optional=True, default=None)

    # Column mappings and defaults for each numbered substance emission.
    # re.findall always returns a list (possibly empty), so no None-checks.
    substEmisCols = {}
    substEmisDefaults = {}
    for emisNr in substEmisNr:
        cols = {}
        defaults = {}
        emisPars = re.compile(
            r"sourcedb\.subst_emis\.%s\.(\w*?):" % emisNr).findall(cf.content)
        emisDefaultPars = re.compile(
            r"sourcedb\.subst_emis\.%s\.(\w*?)\.default:" % emisNr).findall(cf.content)
        for par in emisPars:
            cols[par] = cf.findString("sourcedb.subst_emis.%s.%s:" % (emisNr, par))
        for par in emisDefaultPars:
            defaults[par] = cf.findString(
                "sourcedb.subst_emis.%s.%s.default:" % (emisNr, par),
                optional=True, default=None)
        substEmisCols[emisNr] = cols
        substEmisDefaults[emisNr] = defaults

    # Same structure for each numbered substance-group emission
    subgrpEmisCols = {}
    subgrpEmisDefaults = {}
    for emisNr in subgrpEmisNr:
        cols = {}
        defaults = {}
        emisPars = re.compile(
            r"sourcedb\.subgrp_emis\.%s\.(\w*?):" % emisNr).findall(cf.content)
        emisDefaultPars = re.compile(
            r"sourcedb\.subgrp_emis\.%s\.(\w*?)\.default:" % emisNr).findall(cf.content)
        for par in emisPars:
            cols[par] = cf.findString("sourcedb.subgrp_emis.%s.%s:" % (emisNr, par))
        for par in emisDefaultPars:
            defaults[par] = cf.findString(
                "sourcedb.subgrp_emis.%s.%s.default:" % (emisNr, par),
                optional=True, default=None)
        subgrpEmisCols[emisNr] = cols
        subgrpEmisDefaults[emisNr] = defaults

    log.info("Reading subdb...")
    subdb = Subdb(edb)
    subdb.read()

    log.info("Reading companydb...")
    companydb = Companydb(edb)
    companydb.read()

    log.info("Reading sourcedb...")
    source_stream = open(options.outfile, 'w')
    source_writer = ModelWriter(source_stream, encoding="HP Roman8")

    log.info("Reading facilitydb...")
    facilitydb = Facilitydb(edb)
    facilitydb.read()

    log.info("Reading subgrpdb")
    subgrpdb = Subgrpdb(edb)
    subgrpdb.read()

    log.info("Reading edb.rsrc")
    rsrc = Rsrc(edb.rsrcPath())

    acCodeTables = []
    for i in range(rsrc.numberOfCodeTrees("ac")):
        acCodeTables.append(CodeTable(rsrc.path, codeType="ac", codeIndex=i + 1))

    gcCodeTables = []
    for i in range(rsrc.numberOfCodeTrees("gc")):
        gcCodeTables.append(CodeTable(rsrc.path, codeType="gc", codeIndex=i + 1))

    # Rasters used to derive the geocode from the source coordinate
    geocodeRasters = []
    rast1 = Raster()
    rast1.read("/usr/airviro/data/geo/topdown/dynamicRasters/dynamic__GEOCODE__1.txt")
    rast2 = Raster()
    rast2.read("/usr/airviro/data/geo/topdown/dynamicRasters/dynamic__GEOCODE__2.txt")
    geocodeRasters.append(rast1)
    geocodeRasters.append(rast2)

    log.info("Reading csv-file")
    table = DataTable()
    table.read(options.infile, delimiter=options.delimiter, encoding="ISO-8859-15")

    if options.filterCol is not None:
        if options.filterCol not in table.colIndex:
            log.error("Filter column header not found in table")
            sys.exit(1)

    invalid = False
    nFiltered = 0
    nRows = 0

    log.info("Processing rows")
    for rowInd, row in enumerate(table.data):
        nRows += 1
        # Optionally skip rows whose filter column does not match filterVal
        if options.filterCol is not None:
            filterVal = row[table.colIndex[options.filterCol]]
            if options.filterVal != str(filterVal):
                nFiltered += 1
                continue

        comp = Company()
        for par in comp.parOrder:
            val = cdbDefaults.get(par, None)
            if par in cdbCols:
                colId = cdbCols[par]
                try:
                    tableVal = row[table.colIndex[colId]]
                except KeyError:
                    log.error(
                        "No column with header %s, columns: %s" % (
                            colId, str(table.listIds())))
                    # fall back to the default instead of reusing a stale
                    # tableVal from a previous iteration
                    invalid = True
                    tableVal = None
                if tableVal is not None:
                    val = tableVal
            if val is not None:
                #Too long names are truncated
                if par == "NAME" and len(val) > 45:
                    val = val[:45]
                comp[par] = val

        fac = Facility()
        for par in fac.parOrder:
            val = fdbDefaults.get(par, None)
            if par in fdbCols:
                colId = fdbCols[par]
                tableVal = row[table.colIndex[colId]]
                if tableVal is not None:
                    val = tableVal
            if val is not None:
                #Too long names are truncated
                if par == "NAME" and len(val) > 45:
                    val = val[:45]
                fac[par] = val

        src = Source()
        for par in ["X1", "Y1", "X2", "Y2",
                    "PX", "PY", "NAME", "INFO", "INFO2", "DATE", "CHANGED",
                    "CHIMNEY HEIGHT", "GASTEMPERATURE", "GAS FLOW",
                    "SEARCHKEY1", "SEARCHKEY2", "SEARCHKEY3",
                    "SEARCHKEY4", "SEARCHKEY5", "CHIMNEY OUT", "CHIMNEY IN",
                    "HOUSE WIDTH", "HOUSE HEIGHT", "NOSEGMENTS", "BUILD_WIDTHS",
                    "BUILD_HEIGHTS", "BUILD_LENGTHS", "BUILD_DISTFARWALL",
                    "BUILD_CENTER", "GEOCODE", "FORMULAMACRO", "ALOB"]:
            val = sdbDefaults.get(par, None)
            if par in sdbCols:
                colId = sdbCols[par]
                tableVal = row[table.colIndex[colId]]
                if tableVal is not None:
                    val = tableVal
            if val is not None:
                #validate code
                if par == "GEOCODE":
                    gcList = val.split()
                    for codeIndex, code in enumerate(gcList):
                        if not gcCodeTables[codeIndex].hasCode(code):
                            log.error("Invalid geo code %s on row %i" % (code, rowInd))
                            invalid = True
                #Too long names are truncated
                if par == "NAME" and len(val) > 45:
                    val = val[:45]

                #Store in src object and convert to correct type
                src._fieldvalues[par] = lazy_parse(src, par, val)

        # GEOCODE is overridden by the values looked up in the geocode rasters
        gc1 = geocodeRasters[0].getVal(src.get_coord()[0], src.get_coord()[1])
        gc2 = geocodeRasters[1].getVal(src.get_coord()[0], src.get_coord()[1])
        src.GEOCODE = [str(int(gc1)) + "." + str(int(gc2))]

        for emisNr, emis in substEmisCols.items():
            substEmis = {"unit": None, "ac": None, "substance": None, "emis": None}

            # Each field comes from a mapped column or from a default value
            for par in substEmis.keys():
                if par in emis:
                    substEmis[par] = row[table.colIndex[emis[par]]]
                else:
                    try:
                        substEmis[par] = substEmisDefaults[emisNr][par]
                    except KeyError:
                        # %s: emisNr is a string captured by the regex
                        log.error(
                            "Need to specify column or default value "
                            "for subst emis %s" % emisNr)

            substInd = subdb.substIndex(substEmis["substance"])
            if substInd is None:
                log.error("Invalid substance name %s on row %i" % (
                        substEmis["substance"], rowInd))
                sys.exit(1)

            try:
                rsrc.sub[substEmis["unit"]]  # validate unit name only
            except KeyError:
                # substEmis["unit"] is always set; emis["unit"] may not be
                log.error("Invalid unit name %s on row %i" % (
                        substEmis["unit"], rowInd))
                sys.exit(1)

            # Validate activity codes, replacing them by their reference form
            acList = substEmis["ac"].split('\\')[0].split()
            for codeIndex, code in enumerate(acList):
                refCode = acCodeTables[codeIndex].checkCode(code)
                if refCode == "-":
                    log.error("Invalid activity code %s on row %i" % (code, rowInd))
                    sys.exit(1)
                if refCode != code:
                    acList[codeIndex] = refCode
            substEmis["ac"] = acList

            if substEmis["emis"] is not None and substEmis["emis"] != "0":
                try:
                    emisRec = src.add_emission()
                    emisRec.UNIT = substEmis["unit"]
                    emisRec.ACTCODE = substEmis["ac"]  # needs re-formatting
                    emisRec.EMISSION = float(substEmis["emis"])
                    emisRec.SUBSTANCE = substInd
                    emisRec.auto_adjust_unit(edb)
                except Exception:
                    # drop the half-added emission and mark the row invalid
                    invalid = True
                    src.EMISSION = src.EMISSION[:-1]

        # items(): the emission number is needed for the defaults lookup
        # (previously a stale emisNr from the loop above was used)
        for emisNr, emis in subgrpEmisCols.items():
            subgrpEmis = {"unit": None, "ac": None, "name": None, "emis": None}
            for par in subgrpEmis.keys():
                if par in emis:
                    subgrpEmis[par] = row[table.colIndex[emis[par]]]
                else:
                    try:
                        subgrpEmis[par] = subgrpEmisDefaults[emisNr][par]
                    except KeyError:
                        log.error(
                            "Need to specify column or default value "
                            "for subgrp emis %s" % emisNr)

            #validating subgrp name
            try:
                subgrp = subgrpdb.getByName(subgrpEmis["name"])
            except KeyError:
                log.error("Invalid subgrp name %s on row %i" % (
                        subgrpEmis["name"], rowInd))
                invalid = True
                continue

            #validating subgrp emis unit
            try:
                rsrc.subGrpEm[subgrpEmis["unit"]]  # validate unit name only
            except KeyError:
                log.error("Invalid unit %s for subgrp emission on row %i" % (
                        subgrpEmis["unit"], rowInd))
                invalid = True
                continue

            #validating subgrp activity code
            acList = subgrpEmis["ac"].split()
            for codeIndex, code in enumerate(acList):
                refCode = acCodeTables[codeIndex].checkCode(code)
                if refCode == "-":
                    log.error("Invalid activity code %s on row %i" % (code, rowInd))
                    invalid = True
                    break
                if refCode != code:
                    acList[codeIndex] = refCode
            # store the validated codes on subgrpEmis (not substEmis),
            # so addSubgrpEmis below gets the reference-form code list
            subgrpEmis["ac"] = acList

            try:
                src.addSubgrpEmis(subgrp.index, emis=subgrpEmis["emis"],
                                  unit=subgrpEmis["unit"], ac=subgrpEmis["ac"])
            except Exception:
                log.error("Invalid subgrp emission on row %i" % rowInd)
                invalid = True

        companydb.append(comp, force=True)
        facilitydb.append(fac, force=True)
        source_writer.write(src)

    source_stream.close()

    if invalid:
        log.info("No output written due to validation errors")
        sys.exit(1)  # non-zero exit status on validation failure

    # outfile is guaranteed non-None by the option validation above
    if len(companydb.items) > 0:
        log.info("Writing company db to file")
        companydb.write(filename=options.outfile + ".companydb")

    if len(facilitydb.items) > 0:
        log.info("Writing facilitydb to file")
        facilitydb.write(filename=options.outfile + ".facilitydb")

    if options.filterCol is not None:
        log.info("Filtered out %i out of %i" % (nFiltered, nRows))
Example no. 2
0
def main():
    """Convert a NOx model result (.RES file) to NO2 using the OSPM scheme.

    Reads the input NOx result, background time-series defined by macros
    in the resource file (the single positional argument), converts each
    field to NO2 and writes a new .RES result.  With --load the result is
    also imported into the Airviro result db.
    """
    #setting up parser
    parser = OptionParser(usage=usage, version=version)

    parser.add_option("-l", "--loglevel",
                      action="store", dest="loglevel", default=2,
                      help="Sets the loglevel (0-3 where 3=full logging)")

    parser.add_option("-t", "--templateCf",
                      action="store", dest="cf",
                      help="Path to generate controlfile template")

    parser.add_option("-i", '--inres',
                      action="store", dest="inres",
                      help="Input NOx .RES file")

    parser.add_option("-e", '--ext',
                      action="store", dest="ext",
                      help="Field ext for input result")

    parser.add_option("-o", '--outres',
                      action="store", dest="outres",
                      help="Output NO2 .RES file")

    parser.add_option("-u", '--user',
                      action="store", dest="user",
                      help="Owner of output result (default is same as input result")

    parser.add_option("-p", '--project',
                      action="store", dest="project",
                      help="Project of output result (default is same as input result")

    parser.add_option("-r", '--resType',
                      action="store", dest="resType",
                      help="Type of output result ('A' or 'T', default is same as input result")

    parser.add_option("-m", '--model',
                      action="store", dest="model",
                      help="Model used for output result (default is same as input result")

    parser.add_option("--load",
                      action="store_true", dest="load",
                      help="Import result to Airviro result db as a new result")

    (options, args) = parser.parse_args()

    rootLogger = logger.RootLogger(options.loglevel)
    log = rootLogger.getLogger("NOx2NO2.py")

    #Parsing and validating of input
    if options.cf is not None:
        controlfile.generateCf(path.abspath(options.cf), controlFileTemplate)
        log.info("Wrote default controlfile")
        sys.exit()

    if options.inres is None:
        parser.error("No input result specified")
    if options.outres is None:
        parser.error("No output result specified")

    if path.splitext(options.outres)[1] != ".RES":
        parser.error("Output result should be a .RES file")

    if path.splitext(options.inres)[1] != ".RES":
        parser.error("Input result should be a .RES file")

    if len(args) != 1:
        parser.error("Invalid nr of arguments, %i arguments given, should be 1" % len(args))

    rsrc = controlfile.ControlFile(args[0])

    resPath = path.abspath(options.inres)
    # --outres is mandatory (checked above), so the former tempfile
    # fallback branch was unreachable and has been removed
    outresPath = path.abspath(options.outres)

    if not path.exists(resPath):
        parser.error("Input result does not exist")

    if options.ext is None:
        parser.error("No field ext key specified for input result")
    inExt = options.ext

    # Owner, model and project default to those of the input result,
    # which are encoded in the input result path/filename
    if options.user is not None:
        user = options.user
    else:
        user = resPath.split("/")[-2]

    if options.model is not None:
        model = options.model
    else:
        model = resPath.split("/")[-3]

    if options.project is not None:
        project = options.project
    else:
        project = resPath.split("/")[-1][2]  #Get project letter from .RES-filename

    inresNr = resPath[-8:-4]
    try:
        int(inresNr)  # validate that the filename ends in a result number
    except ValueError:
        log.error("Could not extract input result number from input filename")
        sys.exit(1)

    inresType = resPath.split("/")[-1][3]  #Get resType letter from .RES-filename
    # Output result type defaults to the input result type and is
    # overridden by -r (previously the -r value was parsed but ignored)
    outResType = inresType
    if options.resType is not None:
        outResType = options.resType

    area = resPath.split("/")[-1][:2]

    #Retrieve input parameters from resource file
    # exit on failure: tau/fd would otherwise be unbound further down
    try:
        tau = rsrc.findFloat("tau:")
    except Exception:
        log.error("Could not find parameter 'tau' in resource-file")
        sys.exit(1)

    try:
        fd = rsrc.findFloat("fd:")
    except Exception:
        log.error("Could not find parameter 'fd' in resource-file")
        sys.exit(1)

    #Read NOx model result into result object
    inres = res.ScalarResult(model, user, inExt, area, resType=inresType,
                             resNr=inresNr, project=project)

    #Read time-stamps
    inres.readTimestamps()
    log.debug("Found %i timestamps in result" % len(inres.ts))

    #Read res dimensions etc. from first field
    inres.readField(inres.ts[0])

    #Create output result object
    outExt = inExt
    outPar = "NO2"
    outDesc = "Total NO2 converted from NOx"
    outres = res.ScalarResult(model, user, outExt, area,
                              resType=outResType, valType=inres.valType,
                              fieldType=inres.fieldType,
                              par=outPar, project=inres.project,
                              Xmin=inres.xmin, Xmax=inres.xmax,
                              Ymin=inres.ymin, Ymax=inres.ymax,
                              desc=outDesc)

    #Read time-series macros from control-file
    macros = {}
    macros["NO2_b"] = rsrc.findString("ts.NO2_b.macro:")
    macros["O3_b"] = rsrc.findString("ts.O3_b.macro:")
    macros["NOx_b"] = rsrc.findString("ts.NOx_b.macro:")
    macros["temp"] = rsrc.findString("ts.temp.macro:")
    macros["glob"] = rsrc.findString("ts.glob.macro:")

    #Read time-series
    ts = {}
    for key, macro in macros.items():
        ts[key] = series.Ts(begin=inres.ts[0],
                            end=inres.ts[-1])
        ts[key].read(macro)
        log.debug("Read time-series data for %s using macro: %s" % (key, macro))

    # Convert time-series in place: temperature to Kelvin, background
    # concentrations to ppb (as required by the OSPM scheme)
    for dt in ts["temp"].data["datetime"]:
        try:
            temp = ts["temp"][dt] + 273.15  #conversion to Kelvin
            # probe the other series; a missing value raises ValueError
            NO2_microg = ts["NO2_b"][dt]
            O3_microg = ts["O3_b"][dt]
            NOx_microg = ts["NOx_b"][dt]
        except ValueError:
            #If one of the data is missing, no conversion is made
            continue

        ts["temp"][dt] = temp
        ts["NO2_b"][dt] *= microg2ppb(46.0, temp)
        ts["NOx_b"][dt] *= microg2ppb(46.0, temp)
        ts["O3_b"][dt] *= microg2ppb(48.0, temp)

    outres.open(outresPath, mode='w')

    fieldNr = 1
    nfields = inres.nfields()
    for timestamp in inres.ts:
        NOx_raster = inres.readField(timestamp)
        missingData = False
        missingParams = []
        for key, t in ts.items():
            if not t.exists(timestamp):
                missingData = True
                missingParams.append(key)
        if missingData:
            # %d (day of month); %D would expand to mm/dd/yy
            log.debug(
                "Incomplete input data for time %s (%i out of %i), "
                "missing parameters: %s" % (timestamp.strftime("%y%m%d %H:00"),
                                            fieldNr, nfields,
                                            str(missingParams)))
            fieldNr += 1
            continue

        log.debug("Converting field at time %s (%i out of %i)" % (
            timestamp.strftime("%y%m%d %H:00"), fieldNr, nfields))

        # Temperature (Kelvin, converted above) at THIS timestamp;
        # previously a stale value from the preprocessing loop was used
        temp = ts["temp"][timestamp]

        NOx_field = NOx_raster.data

        #Convert input NOx res to ppb
        NOx_field *= microg2ppb(46.0, temp)

        #convert to NO2
        NO2_field = OSPMScheme(ts["temp"][timestamp],
                               ts["glob"][timestamp],
                               ts["O3_b"][timestamp],
                               ts["NO2_b"][timestamp],
                               ts["NOx_b"][timestamp],
                               NOx_field,
                               fd,
                               tau)

        #Convert ppb back to microg/m3
        NO2_field *= 1 / microg2ppb(46.0, temp)
        outres.writeField(NO2_field, timestamp)

        fieldNr += 1

    if options.load:
        log.debug("Importing result to Airviro")
        outres.load()
    log.info("Finished NOx to NO2 conversion successfully")
Example no. 3
0
def main():
    """Prepare a road export from an Airviro edb.

    Validates the target edb, reads the controlfile (the single
    positional argument) and sets up shapefile input, optional
    geographic filtering and an optional default vehicle composition
    table.
    """
    #-----------Setting up and using option parser-----------------------
    parser = OptionParser(usage=usage, version=version)

    parser.add_option("-l", "--loglevel",
                      action="store", dest="loglevel", default=2,
                      help="Sets the loglevel (0-3 where 3=full logging)")

    parser.add_option("-u", "--user",
                      action="store", dest="user", default=None,
                      help="Specify user manually")

    parser.add_option("-e", "--edb",
                      action="store", dest="edb", default=None,
                      help="Name of target edb")

    parser.add_option("-t", "--template",
                      action="store", dest="cf", default=None,
                      help="Generate default controlfile")

    parser.add_option("-f", "--force",
                      action="store_true", dest="force", default=False,
                      help="To start the process without confirming the domain")

    parser.add_option("-a", "--attributeFilter",
                      action="store", dest="attributeFilter", default=None,
                      help="Set to filter out roads with the specified attribute value, attribute field name is set in controlfile")

    parser.add_option("-o", "--outfile",
                      action="store", dest="outfile", default=None,
                      help="Output road ascii file")

    parser.add_option("-g", "--geofilter",
                      action="store", dest="geoFilter", default=None,
                      help="Filter out roads within polygons with field value matching the geoFilter, specify shapefile in controlfile")

    parser.add_option("--scaleADT",
                      action="store", dest="ADTScaleFactor", default=None,
                      help="Scale ADT with a factor")

    (options, args) = parser.parse_args()

    #--------------------Init logger-----------------------
    rootLogger = logger.RootLogger(options.loglevel, format="%(message)s")
    global log
    log = rootLogger.getLogger(sys.argv[0])

    #-----------------Validating options-------------------

    if options.cf is not None:
        #Checks if the file already exists, prompt the user if overwrite
        #is wanted, create file
        controlfile.generateCf(path.abspath(options.cf), controlFileTemplate)
        log.info("Wrote default controlfile")
        sys.exit()

    if len(args) != 1:
        parser.error("Incorrect number of arguments")

    if options.edb is None:
        parser.error("Need to specify edb using flag -e")
    if options.user is None:
        parser.error("Need to specify user using flag -u")
    # default is already None, no if/else needed
    attributeFilter = options.attributeFilter

    domainName = os.environ["AVDBNAME"]
    dmn = domain.Domain(domainName)

    # Confirm the domain interactively unless --force is given
    if not options.force:
        answer = raw_input("Chosen dbase is: " + domainName + ",continue(y/n)?")
        if answer == "y":
            dmn = domain.Domain()
        else:
            sys.exit(1)

    if not dmn.edbExistForUser(options.edb, options.user):
        log.error("Edb " + options.edb + " does not exist for user " +
                  options.user + " in domain " + domainName)
        sys.exit(1)

    #Creating edb object
    edb = Edb(dmn.name, options.user, options.edb)

    #Create edb rsrc object
    rsrc = Rsrc(edb.rsrcPath())

    #Creating a roaddb object
    roaddb = Roaddb(dmn, options.user, edb.name)

    #Creating a control-file object (simple parser)
    cf = controlfile.ControlFile(fileName=path.abspath(args[0]), codec="latin6")

    #Retrieving data from control file
    shapeFilePath = cf.findExistingPath("shapeFile:")
    asciiCodeTable = cf.findString("asciiCodeTable:", optional=True, default="latin6")
    vehCompTablePath = cf.findExistingPath("vehicleCompositionTable:", optional=True)
    attributeFilterFieldName = cf.findString("attributeFilterFieldName:",
                                             optional=True, default=None)

    #Loads driver to read shape-files using ogr-library
    driver = ogr.GetDriverByName('ESRI Shapefile')

    #If option for geoFilter is used, the polygon defining the boundaries
    #of the area to be filtered is read from geoFilterShapeFile
    if options.geoFilter is not None:
        gfShapeFilePath = cf.findExistingPath("geoFilterShapeFile:")
        gfFieldName = cf.findString("geoFilterFieldName:")
        gfShapeFile = driver.Open(str(gfShapeFilePath), update=0)
        if gfShapeFile is None:
            log.error("Could not open data source: " + gfShapeFilePath)
            sys.exit(1)
        gfLayer = gfShapeFile.GetLayer()
        log.info("Found %i features in geocode shapefile" % gfLayer.GetFeatureCount())
        # Collect all polygons whose field value matches the geoFilter
        geoFilterPolys = []
        gfFeature = gfLayer.GetNextFeature()
        while gfFeature:
            geocode = gfFeature.GetFieldAsString(str(gfFieldName))
            if geocode == options.geoFilter:
                geoFilterPolys.append(gfFeature.GetGeometryRef())
            gfFeature = gfLayer.GetNextFeature()

        if len(geoFilterPolys) == 0:
            log.error("Could not find any polygon with field value matching the specified geoFilter in shapeFile: %s, field: %s" % (gfShapeFilePath, gfFieldName))
            sys.exit(1)

    #Creating table for default vehicle composition
    if vehCompTablePath is not None:
        vehCompTable = datatable.DataTable()
        vehCompTable.read(vehCompTablePath)
        try:
            #Set columns used as unique row identifiers,
            #raise error if they do not exist
            vehCompTable.setKeys(["Vehicle", "Tatort"])
            for fuel in ["bensin", "ethanol", "diesel", "CNG", "Totalt"]:
                vehCompTable.convertCol(fuel, float)

        except DataTableException as msg:
            # str(): concatenating str and an exception raises TypeError
            log.error("Could not find column header in vehCompTable:" + str(msg))
            sys.exit(1)

        except ValueError:
            log.error("Could not find fuel %s among column headers in vehCompTable:" % fuel)
            sys.exit(1)
Example no. 4
0
def main():
    """Run the gifmap searches defined in a controlfile, optionally
    resampling/reprojecting the result rasters to a target grid, and
    write a summary table with the total emission per search/substance.

    Expects exactly one positional argument: the controlfile path.
    Starts up to --nproc gifmap subprocesses in parallel and
    post-processes each result as it finishes.
    """
    #-----------Setting up and using option parser-----------------------
    parser=OptionParser(usage= usage, version=version)

    parser.add_option("-l", "--loglevel",
                      action="store",dest="loglevel",default=2,
                      help="Sets the loglevel (0-3 where 3=full logging)")

    parser.add_option("-t", "--template",
                      action="store",dest="cf",default=None,
                      help="Generate default controlfile")

    parser.add_option("-f", "--force",
                      action="store_true",dest="force",default=False,
                      help="To start the process without confirming the domain")

    parser.add_option("-n", "--nproc",
                      action="store",dest="nproc",default=1,
                      help="Number of gifmap processes to run at a time")

    (options, args) = parser.parse_args()

    #------------Setting up logging capabilities -----------
    rootLogger=logger.RootLogger(int(options.loglevel))
    log=rootLogger.getLogger(sys.argv[0])

    # -t: only emit a template controlfile and exit.
    if options.cf!=None:
        controlfile.generateCf(path.abspath(options.cf),controlFileTemplate)
        print("Wrote default controlfile")
        sys.exit()

    if len(args)!=1:
        parser.error("Incorrect number of arguments")

    domainName=os.environ["AVDBNAME"]
    dmn = domain.Domain(domainName)

    # Interactive confirmation of the target domain unless --force is given.
    if not options.force:
        answer=raw_input("Chosen dbase is: "+domainName+",continue(y/n)?")
        if answer=="y":
            dmn=domain.Domain()
        else:
            sys.exit("Interrupted by user")

    #---retrieving data from control file----
    cf=controlfile.ControlFile(fileName=path.abspath(args[0]))

    #Get search parameters from control file
    substances=cf.findStringList("substances:")
    outputDir=cf.findExistingPath("outputDir:")
    baseSearches=cf.findStringList("searches:")
    fromProj=cf.findString("fromProj:")
    toProj=cf.findString("toProj:")
    resample = cf.findBoolean("resample:")
    dmn=domain.Domain(os.environ.get('AVDBNAME'))
    substDict=dmn.listSubstanceIndices()

    if resample:
        #Resolve proj4 projection definitions from their aliases
        try:
            fromProj=transcoord.proj4Dict[fromProj]
        except KeyError:
            print("Projection %s not found in proj4Dictin transCoord.py" %fromProj)
        try:
            toProj=transcoord.proj4Dict[toProj]
        except KeyError:
            print("Projection %s not found in proj4Dictin transCoord.py" %toProj)

        #Get output grid parameters from controlfile
        out_xll=cf.findFloat("out_xll:")
        out_yll=cf.findFloat("out_yll:")
        out_ncols=cf.findInt("out_ncols:")
        out_nrows=cf.findInt("out_nrows:")
        out_cellsize=cf.findFloat("out_cellsize:")

    #Build list with search definitions, one entry per (search, substance)
    searches=[]
    for search_id in baseSearches:
        prefix=cf.findString("search."+search_id+".prefix:")
        macroPath=cf.findString("search."+search_id+".macro:")
        starttime=cf.findString("search."+search_id+".starttime:")
        endtime=cf.findString("search."+search_id+".endtime:")
        parVals = cf.findParam("search."+search_id+".par.",findAll=True)
        parDict={}
        for p in parVals:
            ind=p.index(":")
            # Keep the trailing ':' in the key: macro parameter names
            # include it (cf. "FROM       :" below).
            par = p[:ind+1]
            val=p[ind+1:].strip()
            parDict[par]=val

        alobVals = cf.findParam("search."+search_id+".alob.",findAll=True)
        alobDict={}
        for a in alobVals:
            ind=a.index(":")
            key = a[:ind]
            val=a[ind+1:].strip()
            # BUGFIX: was 'alobDict[par]=val' which reused the leftover
            # loop variable from the par-loop above, collapsing all alob
            # values into a single wrong entry.
            alobDict[key]=val

        macro = controlfile.ControlFile(fileName=macroPath,removeComments=False)
        unitIndex=macro.findInt("edb.unit:")
        edbName=macro.findString("edb.edb:")
        # NOTE(review): the next two statements were corrupted in the
        # original source by a redaction ("edb.user:"******...);
        # reconstructed as reading the edb user and stamping the search
        # interval into the macro -- confirm against the macro format.
        userName=macro.findString("edb.user:")
        macro.setParam("FROM       :",starttime)
        macro.setParam("TO         :",endtime)
        for key,val in parDict.iteritems():
            macro.setParam(key,val)
        for key,val in alobDict.iteritems():
            # Alob values are prefixed with "R" and written without an
            # extra space, as required by the macro format.
            macro.setParam(key,"R"+val,addSpace=False)

        edb=Edb(dmn.name,userName,edbName)
        rsrc=Rsrc(edb.rsrcPath())
        unitName=rsrc.search[unitIndex]

        for substance in substances:
            filename=path.join(outputDir,prefix+"_"+substance.replace(" ","_")+".asc")
            searches.append(
                {'id':search_id,
                 'macro':macro,
                 'prefix':prefix,
                 'substance':substance,
                 'unit':unitName,
                 'filename':filename
                 }
                )

    # Summary table gets an extra 'proj_sum' column when resampling.
    if resample:
        summaryTable=datatable.DataTable(desc=[{"id":'search_id',"type":unicode},
                                               {"id":'substance',"type":unicode},
                                               {"id":'macro',"type":unicode},
                                               {"id":'filename',"type":unicode},
                                               {"id":'sum',"type":float},
                                               {"id":'proj_sum',"type":float},
                                               {"id":'unit',"type":unicode}],
                                         keys=["search_id","substance"])
    else:
        summaryTable=datatable.DataTable(desc=[{"id":'search_id',"type":unicode},
                                               {"id":'substance',"type":unicode},
                                               {"id":'macro',"type":unicode},
                                               {"id":'filename',"type":unicode},
                                               {"id":'sum',"type":float},
                                               {"id":'unit',"type":unicode}],
                                         keys=["search_id","substance"])

    #Initialising parallel run
    nproc=int(options.nproc) #number of processes to run in parallel
    running_proc=0           #counter for number of running processes
    pids={}                  #dictionary to store process output and info
    todo=len(searches)       #number of processes still to be started
    searchInd=0              #index of the next search to start

    def countRunning(pids):
        """Poll all child processes and return how many are still running."""
        for pid, props in pids.iteritems():
            props['ret_val']=props['proc'].poll()
        return len([pid for pid in pids if pids[pid]['ret_val'] is None])

    while todo+running_proc>0:
        #loop until all processes have finished
        if running_proc==nproc or todo==0:
            print("Running gifmap...")

            #When nproc processes are started, the program waits here
            #until one is finished before adding a new one
            while countRunning(pids)==running_proc:
                time.sleep(5)

            #mark first found finished process as done
            for pid,props in pids.iteritems():
                if not props['ret_val'] is None and not props['done']:
                    props['done']=True
                    break

            command=props["cmd"]
            if props['ret_val']!=0:
                errMsg=pids[pid]["proc"].stderr.read()
                # BUGFIX: report the failing command line, not the whole
                # process-info dict (previously printed pids[pid]).
                print("Error while running command: %s\n%s" %(command,errMsg))
                sys.exit(1)

            print("Finished search %s" %props['search_id'])
            #Find the matching search definition in the search list
            running_proc-=1
            for f_search in searches:
                if f_search["id"]==props['search_id'] and f_search['substance']==props['substance']:
                    break

            f_search['done']=True
            try:
                f_search['res']=res2rast(f_search['filename'])
                #Store original result from search
                f_search['sum']=f_search['res'].sum()

            except Exception:
                # Best effort: if the result raster cannot be read, fall
                # back to the total printed in the gifmap log output.
                f_search['res']=None

                output=pids[pid]["proc"].stdout.read()
                f_search['sum']=re.compile(r"#EMIS (\S*).*").search(output).group(1)
                print("Could not extract result raster from %s" %f_search['filename'])
                print("Uses total from gifmap log")

            if resample:
                outRast = raster.Raster(Xll=out_xll,Yll=out_yll,Ncols=out_ncols,
                                        Nrows=out_nrows,Cellsize=out_cellsize,
                                        Nodata=-9999)

                #Run post-processing of result for finished processes
                if f_search['sum']>0:
                    print("Projecting result to target CRS")
                    f_search['res'] = transcoord.transformEmisRaster(f_search['res'],outRast,
                                                                     fromProj,toProj,
                                                                     tmpDir=dmn.tmpDir())
                    #Store projected result from search
                    f_search['proj_sum']=f_search['res'].sum()
                else:
                    f_search['proj_sum']=0

            #Write result to file
            if f_search['res'] is not None:
                f_search['res'].write(f_search['filename'])

            #Add total emission to summary table
            if resample:
                summaryTable.addRow(
                    [f_search['id'],
                     f_search['substance'],
                     f_search['macro'].name,
                     f_search['filename'],
                     f_search['sum'],
                     f_search['proj_sum'],
                     f_search['unit']])
            else:
                summaryTable.addRow(
                    [f_search['id'],
                     f_search['substance'],
                     f_search['macro'].name,
                     f_search['filename'],
                     f_search['sum'],
                     f_search['unit']])

        #Add another search process
        elif todo>0:
            c_search=searches[searchInd]
            substance=c_search["substance"]
            substanceIndex=substDict[substance]
            c_search['macro'].setParam("ELEMENT    :",substanceIndex)
            c_search['macro'].write()
            command="gifmap -T -i "+c_search['macro'].name+" -o "+c_search['filename']
            p=subprocess.Popen(command,stderr=subprocess.PIPE,stdout=subprocess.PIPE,shell=True)

            #Add info and handles to process dictionary
            pids[p.pid]={"proc":p,
                         "search_id":c_search['id'],
                         "substance":c_search['substance'],
                         "cmd":command,
                         'ret_val':None,
                         'done':False}
            print(
                "Started search %s, substance %s" %(
                    c_search['id'],c_search['substance']))

            running_proc+=1
            todo-=1
            searchInd+=1

            #Each process needs some time to read
            #the macro before it is changed
            time.sleep(10)

    #summaryTable.sortRows()
    tableFile=open(path.join(outputDir,"summaryTable.txt"),'w')
    summaryTable.write(tableFile)
    print("Finished")