Code example #1
0
def _computeCharacteristics(gage,workspace,workspaceID,parameters):
    '''main worker for compute basin charactersitics'''
    
    method = None

    WiMResults = Result.Result(gage.comid,"Characteristics computed for "+gage.name)
    
    #get the global basin values from NLDI (just a csv for now)
    with NLDIServiceAgent() as sa:

        globalValue = sa.getBasinCharacteristics(gage.comid)
        
    #end with
    startTime = time()
    
    #open up stream stats ops and compute the characteristics
    characteristics = []
    with StreamStatsNationalOps(workspace, workspaceID) as sOps: 
                    
        #for all chars in the parameters
        for p in parameters:
            method = None
            
            parameter = Characteristic(p)
            if(not parameter): 
               
                continue

            method = getattr(sOps, parameter.Procedure) 
            
            #if method exists get value
            if (method): 
                result = method(parameter) 
                #todo:merge with request from NLDI
                
                #get the value minus the global value and add the characteristic
                #to the final list
                if(parameter.Name in globalValue):
                    
                    char_value = float(globalValue[parameter.Name])-result[parameter.Name]
                    characteristics.append(
                        get_characteristic_dict(parameter, char_value)
                        )
                else:
                    WiMLogging.sm("%s characteristic not in global service") % p

                WiMResults.Values.update(result)
            else:
                
                continue 
                  
    print 'Finished.  Total time elapsed:', str(round((time()- startTime)/60, 2)), 'minutes'

    #return the characteristics
    return characteristics

    
    
        
Code example #2
0
    def __init__(self):
        '''Parse command-line arguments and initialize the workspace.

        Reads -workspaceID (split catchment workspace) and -parameters
        (';' separated characteristic list) from the command line, loads
        config.json, validates the working directory, and initializes
        logging.  On failure the traceback is logged; note a 'return'
        inside __init__ cannot signal failure to the caller -- the
        instance is still constructed.
        '''
        try:
            parser = argparse.ArgumentParser()
            # Move CSV to here
            # Workspace created below for CSV values
            parser.add_argument(
                "-workspaceID",
                help="specifies the split catchment workspace",
                type=str,
                default="FH20170313102909483000")  # Change default
            # Mask nldiWFS +
            # FedHighWay __delineate => Maskjson => mask for everything

            parser.add_argument(
                "-parameters",
                help=
                "specifies the ';' separated list of parameters to be computed",
                type=str,
                default="")
            args = parser.parse_args()

            config = Config(
                json.load(
                    open(os.path.join(os.path.dirname(__file__),
                                      'config.json'))))
            self.workspaceID = args.workspaceID
            self.workingDir = os.path.join(
                Config().getElement("workingdirectory"), self.workspaceID)

            if not os.path.exists(self.workingDir):
                raise Exception('workspaceID is invalid')
            if args.parameters:
                self.params = args.parameters.split(";")
            else:
                # no explicit list given: compute every characteristic in config
                self.params = config["characteristics"].keys()

            WiMLogging.init(os.path.join(self.workingDir, "Temp"),
                            "mergeCatchment.log")

        except Exception:
            # narrowed from a bare except so Ctrl-C / SystemExit still propagate
            tb = traceback.format_exc()
            self._sm(tb + "Failed to initialize", "Error")
            return None
Code example #3
0
def delineate(**kargs):
    '''intermediate for the delineation work'''

    # unpack the request payload
    request = kargs['data']
    x = float(request['xlocation'])
    y = float(request['ylocation'])
    spatial_ref = int(request['crs'])

    # optional NHD catchment id (currently unused below)
    comID = int(request['comID']) if 'comID' in request else None

    # set up config information
    config = get_config()
    workingDir = Shared.GetWorkspaceDirectory(config["workingdirectory"], 'FH')
    WiMLogging.init(os.path.join(workingDir, "Temp"), "gage3.log")

    # get appropriate guage via either x/y point or comID
    #     gauge = get_gauge(xpoint, ypoint, crs, comID)

    # perform delineation task and get back the workspace,
    # basin featureClass, and pourpoint featureClass
    workspaceID, basin, ppoint = _delineate(workingDir, x, y, spatial_ref)

    # convert data in to geojson
    featureCollection = get_delineation_features(ppoint, basin, spatial_ref)

    # arrange return data
    data = {
        'workspaceID': workspaceID,
        'featureCollection': featureCollection,
        'messages': [{'message': 'success'}],
    }

    # if include Parameters is not in args or not False
    # compute each charactersitic or all if True
    if 'includeParameters' not in request:
        request['includeParameters'] = True

    if request['includeParameters'] != False:
        request['workspaceID'] = workspaceID
        data['parameters'] = basin_chars(data=request)

    return data
Code example #4
0
 def _sm(self, msg, type="INFO", errorID=0):
     '''Forward a status message to the central WiM log.

     Bugfix: the type and errorID arguments were previously ignored --
     every message was hard-coded to type="INFO", errorID=0.  They are
     now passed through to WiMLogging.sm.
     '''
     WiMLogging.sm(msg, type=type, errorID=errorID)
Code example #5
0
def _run(projectID, in_file, outwkid, parameters, today_date, arr, start_idx,
         end_idx):
    '''Delineate and compute characteristics for a slice of the gage file.

    Processes gage rows in_file[start_idx:end_idx], writing one CSV output
    row per gage and queueing failed gages in a rerun file.

    projectID   -- study-area identifier (required)
    in_file     -- path of the input gage CSV
    outwkid     -- esri well-known id for the pour point
    parameters  -- ';' separated characteristic names ('' = all from config)
    today_date  -- date string used to name working/rerun files
    arr         -- passed through to FederalHighwayWrapper.Run
    start_idx, end_idx -- slice of the gage file to process
    '''
    config = Config(
        json.load(open(os.path.join(os.path.dirname(__file__),
                                    'config.json'))))

    if projectID == '#' or not projectID:
        raise Exception('Input Study Area required')

    workingDir = os.path.join(config["workingdirectory"], "temp",
                              projectID + "_" + today_date)
    if not os.path.exists(workingDir):
        os.makedirs(workingDir)

    WiMLogging.init(os.path.join(workingDir, "temp"), "gage.log")
    WiMLogging.sm("Starting routine")
    params = parameters.split(";") if (
        parameters) else config["characteristics"].keys()
    gage_file = Shared.readCSVFile(in_file)

    # NOTE(review): these indexes stay unbound when a column is missing and
    # the station lookups below would raise NameError -- presumably the
    # input CSV always carries all six columns; verify against the data.
    headers = gage_file[0]
    if "Gage_no" in headers: idindex = headers.index("Gage_no")
    if "Gage_name" in headers: nmindex = headers.index("Gage_name")
    if "COMID" in headers: comIDindex = headers.index("COMID")
    if "Lat_snap" in headers: latindex = headers.index("Lat_snap")
    if "Long_snap" in headers: longindex = headers.index("Long_snap")
    if "State" in headers: stateindex = headers.index("State")
    # strip the header line
    gage_file.pop(0)
    header = []
    header.append("-+-+-+-+-+-+-+-+-+ NEW RUN -+-+-+-+-+-+-+-+-+")
    header.append("Execute Date: " + str(datetime.date.today()))
    header.append("-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+")

    header.append(",".join([
        'GAGEID', 'COMID', 'WorkspaceID', 'Description', 'LAT', 'LONG', 'STATE'
    ] + _formatRow(params)))

    Shared.writeToFile(os.path.join(workingDir, config["outputFile"]), header)

    ##not working yet, need to refresh rerun file
    #if not projectID == 'FH-10':
    #    rerunFile = "D:\Applications\output\gage_iii\RerunGageFiles\%s.csv" % today_date
    #else:
    #    rerunFile = "D:\Applications\output\gage_iii\RerunGageFiles\%s_Final.csv" % today_date
    rerunFile = r"D:\Applications\output\gage_iii\RerunGageFiles\%s.csv" % today_date
    if not arcpy.Exists(rerunFile):
        rerunFileHeader = []
        rerunFileHeader.append(",".join(headers))
        Shared.writeToFile(rerunFile, rerunFileHeader)

    gagelist = gage_file[start_idx:end_idx]

    if not arcpy.Exists(r"D:\Applications\output\gage_iii\temp"):
        os.mkdir(r"D:\Applications\output\gage_iii\temp")

    # bugfix: initialize once OUTSIDE the loop. Previously idx was reset to
    # start_idx on every iteration, so the per-station suffix never changed
    # and two stations processed within the same strftime second produced
    # identical temp-dir names, making os.mkdir raise.
    idx = start_idx
    for station in gagelist:

        # Create temp directory so ARC does not run out of internal memory
        newTempDir = r"D:\Applications\output\gage_iii\temp\gptmpenvr_" + time.strftime(
            '%Y%m%d%H%M%S') + '2018' + str(idx)
        os.mkdir(newTempDir)
        os.environ["TEMP"] = newTempDir
        os.environ["TMP"] = newTempDir

        with FederalHighwayWrapper(workingDir, projectID) as fh:

            g = gage.gage(station[idindex], station[comIDindex],
                          station[latindex], station[longindex], outwkid,
                          station[nmindex].replace(",", " "), '',
                          station[stateindex])

            results = fh.Run(g, params, arr)

            # bugfix: the old fallback ({'Values': [{}]}) is a dict, so the
            # attribute accesses results.Description / results.Values raised
            # AttributeError whenever Run returned None; substitute the
            # empty attributes explicitly so the row is still written.
            if results is None:
                description, values = '', {}
            else:
                description, values = results.Description, results.Values

            # compute the formatted row once and reuse it (was computed twice)
            formatresults = _formatRow(values, params)
            Shared.appendLineToFile(
                os.path.join(workingDir, config["outputFile"]), ",".join(
                    str(v) for v in [
                        g.id, g.comid, fh.workspaceID, description,
                        g.lat, g.long, g.state
                    ] + formatresults))

            # queue the gage for a rerun when delineation/computation failed
            if (fh.workspaceID is None or "not reported. see log file"
                    in formatresults) and not g.comid == "-9999":
                f = open(rerunFile, "a")
                f.writelines('\n' + ",".join(station))
                f.close()
            gc.collect()
            idx += 1

        #The with statement should automatically take care of gc operations
        #but just in case
        fh = None
        gc.collect()
Code example #6
0
    def __init__(self, chardefname):
        '''Load the characteristic definition named chardefname from config.

        Optional keys fall back to the defaults below; "Procedure" is the
        only required key.  On any failure (unknown name, missing
        Procedure) the error is logged; note a 'return' inside __init__
        cannot stop construction -- the instance is simply left without
        attributes, which makes it falsy checks elsewhere rely on log
        output, not the return value.
        '''
        try:
            CharObj = Config()["characteristics"][chardefname]
            if not CharObj: return None

            self.ID = CharObj.get("ID", 0)
            self.Name = chardefname
            self.MapLayers = CharObj.get("MapLayers")
            self.Method = CharObj.get("Method")
            self.UnitID = CharObj.get("UnitID", "")
            self.Procedure = CharObj["Procedure"]  # required key
            self.Description = CharObj.get("Description", "")
            self.QueryField = CharObj.get("QueryField")
            self.ClassCodes = CharObj.get("ClassCodes")
            self.Count = CharObj.get("Count")
            # data path is resolved relative to the configured parent directory
            self.Data = os.path.join(Config()["parentdirectory"],
                                     CharObj["Data"]) if ("Data"
                                                          in CharObj) else None
            self.MethField = CharObj.get("methodField")
            self.Field = CharObj.get("selectorField")
            self.Operator = CharObj.get("Operator")
            self.Keyword = CharObj.get("Keyword")
            self.Variables = CharObj.get("Variables")
            self.Equation = CharObj.get("Equation")
            self.EquationVariables = CharObj.get("EquationVariables")
            self.SubProcedure = CharObj.get("SubProcedure")
            self.WhereClause = CharObj.get("WhereClause", "")
            self.MultiplicationFactor = CharObj.get("MultiplicationFactor",
                                                    1)  # Added by JWX
            self.TimeRange = CharObj.get("TimeRange", "")
            self.TimeMethod = CharObj.get("TimeMethod", "")
            self.AggregationMethod = CharObj.get(
                "AggregationMethod",
                "weighteddifference")  #seeWIMLib.ExpressionOps
            self.IDX = CharObj.get("IDX", "")
            # bugfix: the original assignments ended with stray trailing
            # commas, which silently made TOT_IDX and JoinTables
            # one-element tuples instead of their intended values
            self.TOT_IDX = CharObj.get("TOT_IDX", "")
            self.JoinTables = CharObj.get("JoinTables")
            self.JoinField = CharObj.get("JoinField")

        except Exception:
            WiMLogging.sm(
                chardefname +
                " not available to compute. Returning none value.", "ERROR")
            return None
Code example #7
0
    def __init__(self):
        '''Command-line entry point: delineate and compute characteristics
        for every gage listed in the input CSV.

        Parses -projectID, -file, -outwkid and -parameters, loads
        config.json, initializes the working directory and logging, writes
        a CSV header, then runs FederalHighwayWrapper once per station,
        appending one result row per gage to the configured output file.
        Per-gage failures are logged and skipped; any setup failure is
        logged by the outer handler.
        '''
        try:
            parser = argparse.ArgumentParser()
            parser.add_argument("-projectID",
                                help="specifies the projectID",
                                type=str,
                                default="FH_short")
            parser.add_argument(
                "-file",
                help=
                "specifies csv file location including gage lat/long and comid's to estimate",
                type=str,
                default=r'D:\Applications\input\CATCHMENT_gageloc_v1_short.csv'
            )
            parser.add_argument(
                "-outwkid",
                help="specifies the esri well known id of pourpoint ",
                type=int,
                default='4326')
            parser.add_argument("-parameters", help="specifies the ';' separated list of parameters to be computed", type=str,
                                      default = "TOT_BASIN_AREA;" \
                                        +"TOT_IMPV11;" \
                                        +"TOT_IMPV11_NODATA;"\
                                        +"TOT_MIRAD_2012;"\
                                        +"TOT_MIRAD_2012_NODATA;")

            args = parser.parse_args()
            projectID = args.projectID
            if projectID == '#' or not projectID:
                raise Exception('Input Study Area required')

            config = Config(
                json.load(
                    open(os.path.join(os.path.dirname(__file__),
                                      'config.json'))))
            workingDir = Shared.GetWorkspaceDirectory(
                config["workingdirectory"], projectID)

            WiMLogging.init(os.path.join(workingDir, "Temp"), "gage.log")
            WiMLogging.sm("Starting routine")

            # empty -parameters means: compute every characteristic in config
            params = args.parameters.split(";") if (
                args.parameters) else config["characteristics"].keys()

            file = Shared.readCSVFile(args.file)
            # NOTE(review): these indexes stay unbound if a column is
            # missing, and the station lookups below would then raise
            # NameError -- assumes the CSV always has all five columns.
            headers = file[0]
            if "Gage_no" in headers: idindex = headers.index("Gage_no")
            if "Gage_name" in headers: nmindex = headers.index("Gage_name")
            if "COMID" in headers: comIDindex = headers.index("COMID")
            if "Lat_snap" in headers: latindex = headers.index("Lat_snap")
            if "Long_snap" in headers: longindex = headers.index("Long_snap")
            #strip the header line
            file.pop(0)
            # banner + column header written once per run
            header = []
            header.append("-+-+-+-+-+-+-+-+-+ NEW RUN -+-+-+-+-+-+-+-+-+")
            header.append("Execute Date: " + str(datetime.date.today()))
            header.append("-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+")
            header.append(",".join(
                ['COMID', 'WorkspaceID', 'Description', 'LAT', 'LONG'] +
                self._formatRow(params)))

            Shared.writeToFile(os.path.join(workingDir, config["outputFile"]),
                               header)

            with FederalHighwayWrapper(workingDir, projectID) as fh:
                for station in file:
                    results = {'Values': [{}]}
                    try:
                        g = gage.gage(station[idindex], station[comIDindex],
                                      station[latindex], station[longindex],
                                      args.outwkid,
                                      station[nmindex].replace(",", " "))
                        results = fh.Run(g, params)

                        # NOTE(review): the dict fallback has no
                        # .Description/.Values attributes, so the access
                        # below raises AttributeError when Run returns
                        # None; the except clause then logs and skips.
                        if results is None: results = {'Values': [{}]}
                        Shared.appendLineToFile(
                            os.path.join(workingDir, config["outputFile"]),
                            ",".join(
                                str(v) for v in [
                                    g.comid, fh.workspaceID,
                                    results.Description, g.lat, g.long
                                ] + self._formatRow(results.Values, params)))
                    except:
                        # NOTE(review): g is unbound here if gage.gage(...)
                        # itself raised, turning this into a NameError.
                        tb = traceback.format_exc()
                        WiMLogging.sm("error computing gage " + g.id + " " +
                                      tb)
                        continue
                    finally:
                        #ensure gc has collected before next gage
                        gc.collect()
                #next station
            #endwith
        except:
            tb = traceback.format_exc()
            WiMLogging.sm("error running " + tb)