コード例 #1
0
 def test_getFeatureCollectionGeoType_all(self):
     """With value=None the feature collection should carry no query filters.

     Fixed: the original created two pyGDPwebProcessing clients; the first
     (bound to the never-used name `testpyGDP`) was dead code and was removed.
     """
     testPyGDP = pyGDP.pyGDPwebProcessing()

     shapefile = 'sample:CONUS_states'
     attribute = 'STATE'
     value = None

     testFeatureCollection = testPyGDP._getFeatureCollectionGeoType(
         shapefile, attribute, value)

     assert_equal(testFeatureCollection.query.filters, [])
コード例 #2
0
    def test_getFeatureCollectionGeoType_all(self):
        """With value=None the resulting query must have an empty filter list.

        Fixed: removed the dead first pyGDPwebProcessing() instantiation
        (bound to the never-used name `testpyGDP`); only one client is needed.
        """
        testPyGDP = pyGDP.pyGDPwebProcessing()

        shapefile = 'sample:CONUS_states'
        attribute = 'STATE'
        value = None

        testFeatureCollection = testPyGDP._getFeatureCollectionGeoType(
            shapefile, attribute, value)

        assert_equal(testFeatureCollection.query.filters, [])
コード例 #3
0
    def test_submit_FWGS_multi_stat_var_named(self):
        """Submit FWGS with two variables and two statistics via keyword args."""
        pyGDP.WPS_URL = 'http://cida.usgs.gov/gdp/process/WebProcessingService'
        pyGDP.WFS_URL = 'http://cida.usgs.gov/gdp/geoserver/wfs'
        client = pyGDP.pyGDPwebProcessing()

        # The http:// dataset URI is deliberate: it also exercises the
        # http-to-dods URL conversion (per the original comment).
        result = client.submitFeatureWeightedGridStatistics(
            geoType='sample:CONUS_states',
            dataSetURI='http://cida.usgs.gov/thredds/dodsC/prism',
            varID=['ppt', 'tmx'],
            startTime='1900-01-01T00:00:00.000Z',
            endTime='1900-03-01T00:00:00.000Z',
            attribute='STATE',
            value='Wisconsin',
            gmlIDs=None,
            verbose=False,
            coverage='true',
            delim='COMMA',
            stat=['MEAN', 'STD_DEV'],
            grpby='STATISTIC',
            timeStep='false',
            summAttr='false')

        assert_equal(os.path.getsize(result), 375)
コード例 #4
0
    def test_get_dataset_parameters(self):
        """The QA prism dataset should expose exactly three data types, incl. ppt."""
        client = pyGDP.pyGDPwebProcessing()
        found = client.getDataType("dods://cida.usgs.gov/qa/thredds/dodsC/prism", True)
        assert_equal(len(found), 3)
        assert "ppt" in found
コード例 #5
0
    def test_submit_FWGS_multi_stat_var_named(self):
        """FWGS submission naming every argument; two variables, two statistics."""
        pyGDP.WPS_URL = 'http://cida.usgs.gov/gdp/process/WebProcessingService'
        pyGDP.WFS_URL = 'http://cida.usgs.gov/gdp/geoserver/wfs'
        gdp = pyGDP.pyGDPwebProcessing()

        # Note that this test also tests the http to dods conversion for urls.
        params = dict(
            geoType='sample:CONUS_states',
            dataSetURI='http://cida.usgs.gov/thredds/dodsC/prism',
            varID=['ppt', 'tmx'],
            startTime='1900-01-01T00:00:00.000Z',
            endTime='1900-03-01T00:00:00.000Z',
            attribute='STATE',
            value='Wisconsin',
            gmlIDs=None,
            verbose=False,
            coverage='true',
            delim='COMMA',
            stat=['MEAN', 'STD_DEV'],
            grpby='STATISTIC',
            timeStep='false',
            summAttr='false',
        )
        out_path = gdp.submitFeatureWeightedGridStatistics(**params)

        assert_equal(os.path.getsize(out_path), 375)
        
    # def test_submit_FWGS_no_time(self):
    #     pyGDP.WPS_URL='http://cida.usgs.gov/gdp/process/WebProcessingService'
    #     pyGDP.WFS_URL = 'http://cida.usgs.gov/gdp/geoserver/wfs'
    #     testPyGDP = pyGDP.pyGDPwebProcessing()
    # 
    #     shapefile  = 'sample:simplified_HUC8s'
    #     shapefileAttribute  = 'HUC_8'
    #     attributeValue       = '08010211'
    #     datasetURI = 'http://raster.nationalmap.gov/ArcGIS/services/TNM_LandCover/MapServer/WCSServer' # Note that this test also tests the http to dods conversion for urls.
    #     dataType   = '6'
    #     Coverage='true'
    #     Delim='COMMA'
    #     stats      = ['MEAN','STD_DEV']
    #     timeStart  = None
    #     timeEnd    = None

    #     outputFile_handle = testPyGDP.submitFeatureWeightedGridStatistics(geoType=shapefile, dataSetURI=datasetURI, varID=dataType, startTime=timeStart, endTime=timeEnd, attribute=shapefileAttribute, value=attributeValue, gmlIDs=None, verbose=False, coverage=Coverage, delim=Delim, stat=stats, grpby='STATISTIC', timeStep='false', summAttr='false')
    # 
    #     assert_equal(os.path.getsize(outputFile_handle), 58)
    # 
    # def test_submit_FWGS_arc(self):
    #       pyGDP.WPS_URL='http://cida.usgs.gov/gdp/process/WebProcessingService'
    #       pyGDP.WFS_URL = 'http://www.sciencebase.gov/arcgis/services/GeospatialFabric/GeospatialFabric/MapServer/WFSServer'
    #       testPyGDP = pyGDP.pyGDPwebProcessing()
    # 
    #       shapefile  = 'GeospatialFabric_mows_mapping:NHDPlus_Catchment'
    #       attribute  = 'hru_id'
    #       value='99'
    #       datasetURI = 'dods://cida.usgs.gov/thredds/dodsC/prism'
    #       dataType   = 'ppt'
    #       timeStart  = '1900-01-01T00:00:00.000Z'
    #       timeEnd    = '1900-02-01T00:00:00.000Z'
    # 
    #       outputFile_handle = testPyGDP.submitFeatureWeightedGridStatistics(shapefile, datasetURI, dataType, timeStart, timeEnd, attribute, value, coverage=False)
    # 
    #       assert_equal(os.path.getsize(outputFile_handle), 95)
コード例 #6
0
    def test_get_shapefile_attributes(self):
        """sample:CONUS_states must expose 10 attributes, one of them 'STATE'."""
        gdp = pyGDP.pyGDPwebProcessing()
        attrs = gdp.getAttributes('sample:CONUS_states')
        assert_equal(len(attrs), 10)
        assert('STATE' in attrs)
コード例 #7
0
    def test_get_shapefile_attributes_arc(self):
        """Against the ScienceBase ArcGIS WFS, the catchment layer has 'hru_id'."""
        pyGDP.WFS_URL = 'http://www.sciencebase.gov/arcgis/services/GeospatialFabric/GeospatialFabric/MapServer/WFSServer'
        gdp = pyGDP.pyGDPwebProcessing()
        attrs = gdp.getAttributes('GeospatialFabric_mows_mapping:NHDPlus_Catchment')
        assert('hru_id' in attrs)
コード例 #8
0
    def test_get_uri(self):
        """Searching CSW for 'prism' yields 3 records including the dods URI."""
        gdp = pyGDP.pyGDPwebProcessing()
        records = gdp.getDataSetURI(anyText='prism')
        # Each record's third element is its list of access URIs; flatten them.
        flat = [uri for rec in records for uri in rec[2]]
        assert_equal(len(records), 3)
        assert_equal('dods://cida.usgs.gov/thredds/dodsC/prism' in flat, True)
コード例 #9
0
    def test_get_uri(self):
        """Searching for 'prism' returns three records; flatten their URI lists.

        Fixed: the original body was tab-indented beneath a space-indented
        def line, which raises TabError under Python 3.
        """
        testPyGDP = pyGDP.pyGDPwebProcessing()
        dataseturis = testPyGDP.getDataSetURI(anyText='prism')
        uris = []
        for dat in dataseturis:
            for uri in dat[2]:
                uris.append(uri)

        assert_equal(len(dataseturis), 3)
        assert_equal('dods://cida.usgs.gov/thredds/dodsC/prism' in uris, True)
コード例 #10
0
    def test_get_shapefile_attributes(self):
        """The CONUS states shapefile exposes ten attributes including 'STATE'."""
        client = pyGDP.pyGDPwebProcessing()
        names = client.getAttributes('sample:CONUS_states')
        assert_equal(len(names), 10)
        assert 'STATE' in names
コード例 #11
0
    def test_get_shapefile_attributes_arc(self):
        """ArcGIS WFS endpoint: the NHDPlus catchment layer must expose 'hru_id'."""
        pyGDP.WFS_URL = 'http://www.sciencebase.gov/arcgis/services/GeospatialFabric/GeospatialFabric/MapServer/WFSServer'
        client = pyGDP.pyGDPwebProcessing()
        found = client.getAttributes('GeospatialFabric_mows_mapping:NHDPlus_Catchment')
        assert 'hru_id' in found
コード例 #12
0
    def test_get_time_range(self):
        """Time range of 'ppt' on QA prism: two endpoints, starting 1895-01-01."""
        client = pyGDP.pyGDPwebProcessing()
        span = client.getTimeRange("dods://cida.usgs.gov/qa/thredds/dodsC/prism", "ppt")
        assert_equal(len(span), 2)
        assert_equal(span[0], "1895-01-01T00:00:00Z")
コード例 #13
0
    def test_submit_WCSIntersection(self):
        """Submit a WCS intersection for the Alafia subbasin; expect 1918261 bytes.

        Fixed: the original body mixed tab indentation under a space-indented
        def line (TabError under Python 3).
        """
        testPyGDP = pyGDP.pyGDPwebProcessing()

        shapefile = 'sample:simplified_HUC8s'
        attribute = 'SUBBASIN'
        value = 'Alafia'
        dataSetURI = 'http://raster.nationalmap.gov/ArcGIS/services/TNM_LandCover/MapServer/WCSServer'
        dataType = '6'

        outputFile_handle = testPyGDP.submitFeatureCoverageWCSIntersection(
            shapefile, dataSetURI, dataType, attribute, value, verbose=True)

        assert_equal(os.path.getsize(outputFile_handle), 1918261)
コード例 #14
0
    def test_get_dataset_parameters(self):
        """prism via the utility WPS exposes three data types including 'ppt'."""
        pyGDP.WPS_Service = 'http://cida.usgs.gov/gdp/utility/WebProcessingService'
        gdp = pyGDP.pyGDPwebProcessing()
        kinds = gdp.getDataType('dods://cida.usgs.gov/thredds/dodsC/prism', True)
        assert_equal(len(kinds), 3)
        assert 'ppt' in kinds
コード例 #15
0
    def test_get_shapefile_values(self):
        """'STATE' values of sample:CONUS_States: 49 entries incl. Wisconsin.

        Fixed: the original body was tab-indented under a space-indented def
        line (TabError under Python 3).
        """
        testPyGDP = pyGDP.pyGDPwebProcessing()

        shapefile = 'sample:CONUS_States'
        attribute = 'STATE'
        values = testPyGDP.getValues(shapefile, attribute)

        assert_equal(len(values), 49)
        assert 'Wisconsin' in values
コード例 #16
0
 def test_get_dataset_parameters(self):
     """QA utility WPS: prism exposes three data types including 'ppt'.

     Fixed: the original mixed space- and tab-indented lines, which is a
     syntax error in Python 3.
     """
     pyGDP.WPS_Service = 'http://cida.usgs.gov/qa/climate/gdp/utility/WebProcessingService'

     DATASET_URI = 'dods://cida.usgs.gov/thredds/dodsC/prism'
     testPyGDP = pyGDP.pyGDPwebProcessing()

     datatypes = testPyGDP.getDataType(DATASET_URI, True)

     assert_equal(len(datatypes), 3)
     assert 'ppt' in datatypes
コード例 #17
0
    def test_submit_WCSIntersection(self):
        """QA WPS: WCS intersection for HUC 08010211; expect 1574029 bytes.

        Fixed: tab-indented body under a space-indented def line (TabError
        under Python 3).
        """
        pyGDP.WPS_URL = 'http://cida.usgs.gov/qa/climate/gdp/process/WebProcessingService'
        testPyGDP = pyGDP.pyGDPwebProcessing()

        shapefile = 'sample:simplified_HUC8s'
        attribute = 'HUC_8'
        value = '08010211'
        dataSetURI = 'http://raster.nationalmap.gov/ArcGIS/services/TNM_LandCover/MapServer/WCSServer'
        dataType = '6'

        outputFile_handle = testPyGDP.submitFeatureCoverageWCSIntersection(
            shapefile, dataSetURI, dataType, attribute, value)

        assert_equal(os.path.getsize(outputFile_handle), 1574029)
コード例 #18
0
 def test_get_time_range(self):
     """QA utility WPS: prism 'ppt' time range starts 1895-01-01.

     Fixed: the original mixed tab and space indentation (syntax error
     under Python 3).
     """
     pyGDP.WPS_Service = 'http://cida.usgs.gov/qa/climate/gdp/utility/WebProcessingService'

     DATASET_URI = 'dods://cida.usgs.gov/thredds/dodsC/prism'
     testPyGDP = pyGDP.pyGDPwebProcessing()

     datatype = 'ppt'
     trange = testPyGDP.getTimeRange(DATASET_URI, datatype)

     assert_equal(len(trange), 2)
     assert_equal(trange[0], '1895-01-01T00:00:00Z')
コード例 #19
0
    def test_get_shapefile_values_arc(self):
        """ArcGIS WFS values of 'hru_id', capped at 100 features; '100' present."""
        pyGDP.WFS_URL = 'http://www.sciencebase.gov/arcgis/services/GeospatialFabric/GeospatialFabric/MapServer/WFSServer'
        gdp = pyGDP.pyGDPwebProcessing()
        hru_ids = gdp.getValues('GeospatialFabric_mows_mapping:NHDPlus_Catchment',
                                'hru_id', limitFeatures=100)
        assert_equal(len(hru_ids), 100)
        assert '100' in hru_ids
コード例 #20
0
    def test_get_shapefile_values(self):
        """'STATE' values from the GDP geoserver: 49 entries incl. Wisconsin."""
        pyGDP.WFS_URL = 'http://cida.usgs.gov/gdp/geoserver/wfs'
        gdp = pyGDP.pyGDPwebProcessing()
        states = gdp.getValues('sample:CONUS_states', 'STATE')
        assert_equal(len(states), 49)
        assert 'Wisconsin' in states
コード例 #21
0
 def test_getFeatureCollectionGeoType_single(self):
     """Filtering on STATE == 'Wisconsin' yields 36 query filters."""
     pyGDP.WFS_URL = 'http://cida.usgs.gov/gdp/geoserver/wfs'
     gdp = pyGDP.pyGDPwebProcessing()
     collection = gdp._getFeatureCollectionGeoType(
         'sample:CONUS_states', 'STATE', 'Wisconsin')
     assert_equal(len(collection.query.filters), 36)
コード例 #22
0
    def test_get_shapefile_values_arc(self):
        """With limitFeatures=100 exactly 100 hru_id values return; '100' is one."""
        pyGDP.WFS_URL = 'http://www.sciencebase.gov/arcgis/services/GeospatialFabric/GeospatialFabric/MapServer/WFSServer'
        client = pyGDP.pyGDPwebProcessing()
        layer = 'GeospatialFabric_mows_mapping:NHDPlus_Catchment'
        ids = client.getValues(layer, 'hru_id', limitFeatures=100)
        assert_equal(len(ids), 100)
        assert('100' in ids)
コード例 #23
0
    def test_get_shapefile_values(self):
        """Geoserver WFS: 49 distinct STATE values, Wisconsin among them."""
        pyGDP.WFS_URL = 'http://cida.usgs.gov/gdp/geoserver/wfs'
        client = pyGDP.pyGDPwebProcessing()
        state_names = client.getValues('sample:CONUS_states', 'STATE')
        assert_equal(len(state_names), 49)
        assert('Wisconsin' in state_names)
コード例 #24
0
    def test_get_time_range(self):
        """Utility WPS: prism 'ppt' has a 2-element range starting 1895-01-01."""
        pyGDP.WPS_Service = 'http://cida.usgs.gov/gdp/utility/WebProcessingService'
        gdp = pyGDP.pyGDPwebProcessing()
        bounds = gdp.getTimeRange('dods://cida.usgs.gov/thredds/dodsC/prism', 'ppt')
        assert_equal(len(bounds), 2)
        assert_equal(bounds[0], '1895-01-01T00:00:00Z')
コード例 #25
0
    def test_submit_FWGS(self):
        """FWGS for Wisconsin ppt on QA prism; expect an 18416-byte result file.

        Fixed: tab-indented body under a space-indented def line (TabError
        under Python 3); also dropped the unused `userPoly` local.
        """
        testPyGDP = pyGDP.pyGDPwebProcessing()

        shapefile = 'sample:CONUS_States'
        attribute = 'STATE'
        value = 'Wisconsin'
        datasetURI = 'dods://cida.usgs.gov/qa/thredds/dodsC/prism'
        dataType = 'ppt'
        timeStart = '1900-01-01T00:00:00.000Z'
        timeEnd = '1950-01-01T00:00:00.000Z'

        outputFile_handle = testPyGDP.submitFeatureWeightedGridStatistics(
            shapefile, datasetURI, dataType, timeStart, timeEnd, attribute,
            value)

        assert_equal(os.path.getsize(outputFile_handle), 18416)
コード例 #26
0
    def test_getFeatureCollectionGeoType_single(self):
        """A single STATE value ('Wisconsin') produces 36 query filters."""
        pyGDP.WFS_URL = 'http://cida.usgs.gov/gdp/geoserver/wfs'
        gdp = pyGDP.pyGDPwebProcessing()
        fc = gdp._getFeatureCollectionGeoType('sample:CONUS_states',
                                              'STATE', 'Wisconsin')
        assert_equal(len(fc.query.filters), 36)
コード例 #27
0
    def test_submit_FWGS_arc(self):
        """FWGS against the ScienceBase ArcGIS WFS for a single catchment."""
        pyGDP.WPS_URL = 'http://cida.usgs.gov/qa/climate/gdp/process/WebProcessingService'
        pyGDP.WFS_URL = 'http://www.sciencebase.gov/arcgis/services/GeospatialFabric/mows_mapping/MapServer/WFSServer'
        gdp = pyGDP.pyGDPwebProcessing()

        result = gdp.submitFeatureWeightedGridStatistics(
            'GeospatialFabric_mows_mapping:NHDPlus_Catchment',
            'dods://cida.usgs.gov/thredds/dodsC/prism',
            'ppt',
            '1900-01-01T00:00:00.000Z',
            '1900-02-01T00:00:00.000Z',
            'hru_id',
            '99',
            coverage=False)

        assert_equal(os.path.getsize(result), 95)
コード例 #28
0
    def test_submit_FCGC(self):
        """Categorical grid coverage for Rhode Island; expect a 650-byte file.

        Fixed: tab-indented body under a space-indented def line (TabError
        under Python 3).
        """
        testPyGDP = pyGDP.pyGDPwebProcessing()

        shapefile = 'sample:CONUS_States'
        attribute = 'STATE'
        value = 'Rhode Island'
        dataSetURI = 'http://cida.usgs.gov/ArcGIS/services/statsgo_muid/MapServer/WCSServer'
        dataType = '1'

        outputFile_handle = testPyGDP.submitFeatureCategoricalGridCoverage(
            shapefile, dataSetURI, dataType, attribute, value, verbose=True)

        # This test is not currently working because what comes from
        # testPyGDP.submitFeatureCategoricalGridCoverage() is a NoneType
        # even through I've verified that it consistently writes a file
        # of the size below. I expect a string to come back from this
        # function
        assert_equal(os.path.getsize(outputFile_handle), 650)
コード例 #29
0
File: pyGDP_client.py  Project: prog556/pyGDP
def main():
    gdp = pyGDP.pyGDPwebProcessing()
    sfiles = gdp.getShapefiles()
    for s in sfiles:
        print s

    shapefile = 'sample:CONUS_States'

    print
    print 'Get Attributes:'
    attributes = gdp.getAttributes(shapefile)
    for a in attributes:
        print a

    print
    print 'Get values:'
    values = gdp.getValues(shapefile, 'STATE')
    for v in values:
        print v

    print
    print 'Getting available Dataset URIS'
    datasetURIs = gdp.getDataSetURI()
    dataSetURI = getInput(datasetURIs)

    print ''
    print 'Getting available dataTypes'
    dataTypes = gdp.getDataType(dataSetURI)
    dataType = getInput(dataTypes)

    print
    print 'Getting time range from dataset'

    timeRange = gdp.getTimeRange(dataSetURI, dataType)
    for i in timeRange:
        print i

    print
    print 'Submitting request'
    output = gdp.submitFeatureWeightedGridStatistics(shapefile, dataSetURI,
                                                     dataType, timeRange[0],
                                                     timeRange[0], 'STATE',
                                                     'Wisconsin')
    print output
コード例 #30
0
    def test_submit_FCOD(self):
        """Feature coverage OPeNDAP for Alabama Prcp; expect 2067840 bytes.

        Fixed: tab-indented body under a space-indented def line (TabError
        under Python 3).
        """
        testPyGDP = pyGDP.pyGDPwebProcessing()

        shapefile = 'sample:CONUS_States'
        attribute = 'STATE'
        value = 'Alabama'
        dataSetURI = 'dods://cida.usgs.gov/thredds/dodsC/gmo/GMO_w_meta.ncml'
        dataType = 'Prcp'
        timeStart = '1950-01-01T00:00:00.000Z'
        timeEnd = '1951-01-31T00:00:00.000Z'

        outputFile_handle = testPyGDP.submitFeatureCoverageOPenDAP(
            shapefile, dataSetURI, dataType, timeStart, timeEnd, attribute,
            value, verbose=True)

        # This test is not currently working because what comes from
        # testPyGDP.submitFeatureCoverageOPenDAP() is a NoneType
        # even though I've verified that it constistently writes a
        # file of the size below. I expect a string to come back from
        # this function
        assert_equal(os.path.getsize(outputFile_handle), 2067840)
コード例 #31
0
    def test_submit_FWGS_no_time(self):
        """FWGS with no time bounds against a WCS land-cover dataset."""
        pyGDP.WPS_URL = 'http://cida.usgs.gov/qa/climate/gdp/process/WebProcessingService'
        pyGDP.WFS_URL = 'http://cida.usgs.gov/gdp/geoserver/wfs'
        gdp = pyGDP.pyGDPwebProcessing()

        # Note that this test also tests the http to dods conversion for urls.
        out_file = gdp.submitFeatureWeightedGridStatistics(
            geoType='sample:simplified_HUC8s',
            dataSetURI='http://raster.nationalmap.gov/ArcGIS/services/TNM_LandCover/MapServer/WCSServer',
            varID='6',
            startTime=None,
            endTime=None,
            attribute='HUC_8',
            value='08010211',
            gmlIDs=None,
            verbose=False,
            coverage='true',
            delim='COMMA',
            stat=['MEAN', 'STD_DEV'],
            grpby='STATISTIC',
            timeStep='false',
            summAttr='false')

        assert_equal(os.path.getsize(out_file), 57)
コード例 #32
0
    def test_submit_FWGS_multi_stat_var_named(self):
        """QA WPS: keyword-argument FWGS with two variables and two stats."""
        pyGDP.WPS_URL = 'http://cida.usgs.gov/qa/climate/gdp/process/WebProcessingService'
        pyGDP.WFS_URL = 'http://cida.usgs.gov/gdp/geoserver/wfs'
        gdp = pyGDP.pyGDPwebProcessing()

        # Note that this test also tests the http to dods conversion for urls.
        out_file = gdp.submitFeatureWeightedGridStatistics(
            geoType='sample:CONUS_States',
            dataSetURI='http://cida.usgs.gov/thredds/dodsC/prism',
            varID=['ppt', 'tmx'],
            startTime='1900-01-01T00:00:00.000Z',
            endTime='1900-03-01T00:00:00.000Z',
            attribute='STATE',
            value='Wisconsin',
            gmlIDs=None,
            verbose=False,
            coverage='true',
            delim='COMMA',
            stat=['MEAN', 'STD_DEV'],
            grpby='STATISTIC',
            timeStep='false',
            summAttr='false')

        assert_equal(os.path.getsize(out_file), 375)
コード例 #33
0
    def test_submit_FCGC(self):
        """Categorical grid coverage for Rhode Island; 650-byte result expected."""
        pyGDP.WPS_URL = 'http://cida.usgs.gov/gdp/process/WebProcessingService'
        gdp = pyGDP.pyGDPwebProcessing()

        result = gdp.submitFeatureCategoricalGridCoverage(
            'sample:CONUS_states',
            'http://cida.usgs.gov/ArcGIS/services/statsgo_muid/MapServer/WCSServer',
            '1', 'STATE', 'Rhode Island', verbose=True)

        # This test is not currently working because what comes from
        # testPyGDP.submitFeatureCategoricalGridCoverage() is a NoneType
        # even through I've verified that it consistently writes a file
        # of the size below. I expect a string to come back from this
        # function
        assert_equal(os.path.getsize(result), 650)
コード例 #34
0
File: pyGDP_client.py  Project: prog556/pyGDP
def main():
    gdp = pyGDP.pyGDPwebProcessing()
    sfiles = gdp.getShapefiles()
    for s in sfiles:
        print s
    
    shapefile = 'sample:CONUS_States'
    
    print
    print 'Get Attributes:'
    attributes = gdp.getAttributes(shapefile)
    for a in attributes:
        print a
    
    print
    print 'Get values:'
    values = gdp.getValues(shapefile, 'STATE')
    for v in values:
        print v
        
    print
    print 'Getting available Dataset URIS'
    datasetURIs = gdp.getDataSetURI()
    dataSetURI = getInput(datasetURIs)
    
    print ''
    print 'Getting available dataTypes'
    dataTypes = gdp.getDataType(dataSetURI)
    dataType = getInput(dataTypes)
    
    print 
    print 'Getting time range from dataset'
    
    timeRange = gdp.getTimeRange(dataSetURI, dataType)
    for i in timeRange:
        print i
    
    print
    print 'Submitting request'
    output = gdp.submitFeatureWeightedGridStatistics(shapefile, dataSetURI, dataType, timeRange[0], timeRange[0], 'STATE', 'Wisconsin')
    print output
コード例 #35
0
    def test_submit_FWGS(self):
        """Single-variable FWGS for Wisconsin ppt; expect a 133-byte result.

        Fixed: removed the unused `userPoly` local left over from an earlier
        variant of this test.
        """
        pyGDP.WPS_URL = 'http://cida.usgs.gov/gdp/process/WebProcessingService'
        pyGDP.WFS_URL = 'http://cida.usgs.gov/gdp/geoserver/wfs'
        testPyGDP = pyGDP.pyGDPwebProcessing()

        shapefile = 'sample:CONUS_states'
        attribute = 'STATE'
        value = 'Wisconsin'
        datasetURI = 'dods://cida.usgs.gov/thredds/dodsC/prism'
        dataType = 'ppt'
        timeStart = '1900-01-01T00:00:00.000Z'
        timeEnd = '1900-03-01T00:00:00.000Z'

        outputFile_handle = testPyGDP.submitFeatureWeightedGridStatistics(
            shapefile, datasetURI, dataType, timeStart, timeEnd, attribute,
            value)

        assert_equal(os.path.getsize(outputFile_handle), 133)
コード例 #36
0
    def test_submit_FWGS_multi_stat_var(self):
        """Positional-argument FWGS with two variables and two statistics."""
        pyGDP.WPS_URL = 'http://cida.usgs.gov/gdp/process/WebProcessingService'
        pyGDP.WFS_URL = 'http://cida.usgs.gov/gdp/geoserver/wfs'
        gdp = pyGDP.pyGDPwebProcessing()

        out_file = gdp.submitFeatureWeightedGridStatistics(
            'sample:CONUS_states',
            'dods://cida.usgs.gov/thredds/dodsC/prism',
            ['ppt', 'tmx'],
            '1900-01-01T00:00:00.000Z',
            '1900-03-01T00:00:00.000Z',
            'STATE',
            'Wisconsin',
            None,        # gmlIDs
            False,       # verbose
            'true',      # coverage
            'COMMA',     # delim
            ['MEAN', 'STD_DEV'])

        assert_equal(os.path.getsize(out_file), 375)
コード例 #37
0
    def test_submit_FCOD(self):
        """One day of GMO Prcp clipped to Alabama; expect 14312 bytes."""
        pyGDP.WPS_URL = 'http://cida.usgs.gov/gdp/process/WebProcessingService'
        gdp = pyGDP.pyGDPwebProcessing()

        result = gdp.submitFeatureCoverageOPenDAP(
            'sample:CONUS_states',
            'dods://cida.usgs.gov/thredds/dodsC/gmo/GMO_w_meta.ncml',
            'Prcp',
            '1950-01-01T00:00:00.000Z',
            '1950-01-02T00:00:00.000Z',
            'STATE',
            'Alabama')

        # This test is not currently working because what comes from
        # testPyGDP.submitFeatureCoverageOPenDAP() is a NoneType
        # even though I've verified that it constistently writes a
        # file of the size below. I expect a string to come back from
        # this function
        assert_equal(os.path.getsize(result), 14312)
コード例 #38
0
File: pyGDP_client.py  Project: cameronbracken/pyGDP
def main():
    gdp = pyGDP.pyGDPwebProcessing()
    sfiles = gdp.getShapefiles()
    for s in sfiles:
        print s
    
    shapefile = getInput(sfiles)
    
    print
    print 'Get Attributes:'
    attributes = gdp.getAttributes(shapefile)
    attribute = getInput(attributes)
	
    print
    print 'Get values:'
    values = gdp.getValues(shapefile, attribute)    
    value = getInput_2(values)

    print
    searchString = getInput_4()
    datasetURIs = gdp.getDataSetURI(anyText=searchString)
    dataSetURI = getInput_3(datasetURIs)
    
    print ''
    print 'Getting available dataTypes'
    dataTypes = gdp.getDataType(dataSetURI)
    dataType = getInput(dataTypes)
    
    print 
    print 'Getting time range from dataset'
    
    timeRange = gdp.getTimeRange(dataSetURI, dataType)
    for i in timeRange:
        print i
    
    print
    print 'Submitting request'
    output = gdp.submitFeatureWeightedGridStatistics(shapefile, dataSetURI, dataType, timeRange[0], timeRange[0], attribute, value, verbose=True)
    print output
コード例 #39
0
    def test_submit_FWGS_multi_stat_var(self):
        """Two variables x two statistics, passed positionally via a tuple."""
        pyGDP.WPS_URL = 'http://cida.usgs.gov/gdp/process/WebProcessingService'
        pyGDP.WFS_URL = 'http://cida.usgs.gov/gdp/geoserver/wfs'
        client = pyGDP.pyGDPwebProcessing()

        args = (
            'sample:CONUS_states',                        # shapefile
            'dods://cida.usgs.gov/thredds/dodsC/prism',   # dataset URI
            ['ppt', 'tmx'],                               # variables
            '1900-01-01T00:00:00.000Z',                   # start
            '1900-03-01T00:00:00.000Z',                   # end
            'STATE',                                      # attribute
            'Wisconsin',                                  # value
            None,                                         # gmlIDs
            False,                                        # verbose
            'true',                                       # coverage
            'COMMA',                                      # delim
            ['MEAN', 'STD_DEV'],                          # statistics
        )
        out_file = client.submitFeatureWeightedGridStatistics(*args)

        assert_equal(os.path.getsize(out_file), 375)
コード例 #40
0
def main_func(curdir, set, region):
    
    def Region_lookup(region):
        region_properties = []
        if region == 'nhru':
            region_properties.append(371)
            region_properties.append('https://www.sciencebase.gov/catalogMaps/mapping/ows/51d35da1e4b0ca184833940c')
            region_properties.append('false')
        if region == 'R01':
            region_properties.append(2462)
            region_properties.append('https://www.sciencebase.gov/catalogMaps/mapping/ows/5244735ae4b05b217bada04e')
            region_properties.append('false')
        if region == 'R02':
            region_properties.append(4827)
            region_properties.append('https://www.sciencebase.gov/catalogMaps/mapping/ows/52696784e4b0584cbe9168ee')
            region_properties.append('false')
        if region == 'R03':
            region_properties.append(9899)
            region_properties.append('https://www.sciencebase.gov/catalogMaps/mapping/ows/5283bd23e4b047efbbb57922')
            region_properties.append('false')
        if region == 'R04':
            region_properties.append(5936)
            region_properties.append('https://www.sciencebase.gov/catalogMaps/mapping/ows/5284ff57e4b063f258e61b9d')
            region_properties.append('false')
        if region == 'R05':
            region_properties.append(7182)
            region_properties.append('https://www.sciencebase.gov/catalogMaps/mapping/ows/528516bbe4b063f258e62161')
            region_properties.append('false')
        if region == 'R06':
            region_properties.append(2303)
            region_properties.append('https://www.sciencebase.gov/catalogMaps/mapping/ows/51d75296e4b055e0afd5be2c')
            region_properties.append('true')
        if region == 'R07':
            region_properties.append(8205)
            region_properties.append('https://www.sciencebase.gov/catalogMaps/mapping/ows/52851cd5e4b063f258e643dd')
            region_properties.append('true')
        if region == 'R08':
            region_properties.append(4449)
            region_properties.append('https://www.sciencebase.gov/catalogMaps/mapping/ows/52854695e4b063f258e6513c')
            region_properties.append('true')
        if region == 'R10L':
            region_properties.append(8603)
            region_properties.append('https://www.sciencebase.gov/catalogMaps/mapping/ows/520031dee4b0ad2d97189db2')
            region_properties.append('true')
        elif region =='R10U':
            region_properties.append(10299)
            region_properties.append('https://www.sciencebase.gov/catalogMaps/mapping/ows/5216849ee4b0b45d6ba61e2e')
            region_properties.append('false')
        elif region == 'R11':
            region_properties.append(7373)
            region_properties.append('https://www.sciencebase.gov/catalogMaps/mapping/ows/51d1f9ebe4b08b18a62d586b')
            region_properties.append('true')
        elif region == 'R12':
            region_properties.append(7815)
            region_properties.append('https://www.sciencebase.gov/catalogMaps/mapping/ows/5201328ae4b009d47a4c247a')
            region_properties.append('false')
        elif region == 'R13':
            region_properties.append(1958)
            region_properties.append('https://www.sciencebase.gov/catalogMaps/mapping/ows/51d752b9e4b055e0afd5be36')
            region_properties.append('false')
        elif region == 'R14':
            region_properties.append(3879)
            region_properties.append('https://www.sciencebase.gov/catalogMaps/mapping/ows/52029c68e4b0e21cafa4b40c')
            region_properties.append('false')
        elif region == 'R15':
            region_properties.append(3441)
            region_properties.append('https://www.sciencebase.gov/catalogMaps/mapping/ows/5285389ae4b063f258e64863')
            region_properties.append('false')
        elif region == 'R16':
            region_properties.append(2664)
            region_properties.append('https://www.sciencebase.gov/catalogMaps/mapping/ows/52853f97e4b063f258e64875')
            region_properties.append('false')

            return region_properties
    
    def list_define(set):

        if set == 'cccma_cgmc3_1':
            return ['cccma_cgmc3'+str(scenario), 'http://pcmdi8.llnl.gov/thredds/dodsC/bcca/BCCA_0.125deg-gregorian-sres-monthly', 'Monthly', timestart, timeend,\
                 'cccma_cgcm3_1-gregorian-sres'+str(scenario)+'-run1-pr-BCCA_0-125deg-monthly',
                 'cccma_cgcm3_1-gregorian-sres'+str(scenario)+'-run1-tasmax-BCCA_0-125deg-monthly',
                 'cccma_cgcm3_1-gregorian-sres'+str(scenario)+'-run1-tasmin-BCCA_0-125deg-monthly']
        elif set == 'cnrm_cm3':
            return ['cnrm_cm3'+str(scenario), 'http://pcmdi8.llnl.gov/thredds/dodsC/bcca/BCCA_0.125deg-gregorian-sres-monthly', 'Monthly', timestart, timeend,\
             'cnrm_cm3-gregorian-sres'+str(scenario)+'-run1-pr-BCCA_0-125deg-monthly',
             'cnrm_cm3-gregorian-sres'+str(scenario)+'-run1-tasmax-BCCA_0-125deg-monthly',
             'cnrm_cm3-gregorian-sres'+str(scenario)+'-run1-tasmin-BCCA_0-125deg-monthly']
        elif set == 'gfdl_cm2_1':
            return ['gfdl_cm2_1'+str(scenario), 'dods://pcmdi8.llnl.gov/thredds/dodsC/bcca/BCCA_0.125deg-gregorian-sres-monthly', 'Monthly', timestart, timeend,\
                 'gfdl_cm2_1-gregorian-sres'+str(scenario)+'-run1-pr-BCCA_0-125deg-monthly',
                 'gfdl_cm2_1-gregorian-sres'+str(scenario)+'-run1-tasmax-BCCA_0-125deg-monthly',
                 'gfdl_cm2_1-gregorian-sres'+str(scenario)+'-run1-tasmin-BCCA_0-125deg-monthly']
        elif set == 'ipsl_cm4':
            return ['ipsl_cm4'+str(scenario), 'dods://pcmdi8.llnl.gov/thredds/dodsC/bcca/BCCA_0.125deg-gregorian-sres-monthly', 'Monthly', timestart, timeend,\
                 'ipsl_cm4-gregorian-sres'+str(scenario)+'-run1-pr-BCCA_0-125deg-monthly',
                 'ipsl_cm4-gregorian-sres'+str(scenario)+'-run1-tasmax-BCCA_0-125deg-monthly',
                 'ipsl_cm4-gregorian-sres'+str(scenario)+'-run1-tasmin-BCCA_0-125deg-monthly']
        elif set == 'miroc3_2_medres':
            return ['miroc3_2_medres'+str(scenario), 'dods://pcmdi8.llnl.gov/thredds/dodsC/bcca/BCCA_0.125deg-gregorian-sres-monthly', 'Monthly', timestart, timeend,\
                 'miroc3_2_medres-gregorian-sres'+str(scenario)+'-run1-pr-BCCA_0-125deg-monthly',
                 'miroc3_2_medres-gregorian-sres'+str(scenario)+'-run1-tasmax-BCCA_0-125deg-monthly',
                 'miroc3_2_medres-gregorian-sres'+str(scenario)+'-run1-tasmin-BCCA_0-125deg-monthly']
        elif set == 'miub_echo_g':
            return ['miub_echo_g'+str(scenario), 'dods://pcmdi8.llnl.gov/thredds/dodsC/bcca/BCCA_0.125deg-gregorian-sres-monthly', 'Monthly', timestart, timeend,\
                 'miub_echo_g-gregorian-sres'+str(scenario)+'-run1-pr-BCCA_0-125deg-monthly',
                 'miub_echo_g-gregorian-sres'+str(scenario)+'-run1-tasmax-BCCA_0-125deg-monthly',
                 'miub_echo_g-gregorian-sres'+str(scenario)+'-run1-tasmin-BCCA_0-125deg-monthly']
        elif set == 'mri_cgcm2_3_2a':
            return ['mri_cgcm2_3_2a'+str(scenario), 'dods://pcmdi8.llnl.gov/thredds/dodsC/bcca/BCCA_0.125deg-gregorian-sres-monthly', 'Monthly', timestart, timeend,\
                 'mri_cgcm2_3_2a-gregorian-sres'+str(scenario)+'-run1-pr-BCCA_0-125deg-monthly',
                 'mri_cgcm2_3_2a-gregorian-sres'+str(scenario)+'-run1-tasmax-BCCA_0-125deg-monthly',
                 'mri_cgcm2_3_2a-gregorian-sres'+str(scenario)+'-run1-tasmin-BCCA_0-125deg-monthly']
        elif set == 'mpi_echam5':
            return ['mpi_echam5'+str(scenario), 'dods://pcmdi8.llnl.gov/thredds/dodsC/bcca/BCCA_0.125deg-gregorian-sres-monthly', 'Monthly', timestart, timeend,\
                 'mpi_echam5-gregorian-sres'+str(scenario)+'-run1-pr-BCCA_0-125deg-monthly',
                 'mpi_echam5-gregorian-sres'+str(scenario)+'-run1-tasmax-BCCA_0-125deg-monthly',
                 'mpi_echam5-gregorian-sres'+str(scenario)+'-run1-tasmin-BCCA_0-125deg-monthly']
     
    global csvfile, vt_dataset, nhru, length, vt_datatype, url
    import pyGDP
    
    Region_return=Region_lookup(region)
    hrus = Region_return[0]
    nhru = hrus
    ScienceBase_URL= Region_return[1]
     
    pyGDP.WFS_URL = ScienceBase_URL
    url = pyGDP.WFS_URL
    pyGDP = pyGDP.pyGDPwebProcessing()
     
    # change working directory so the GDP output will be written there
     
    # Datasets and their properties
    # run 1, 2 , and 3
     
    #**********************************
    # run 1 only
    #scenario = 'a2'
    # other scenarios are 'a1b' and 'b1'
    #scenarios = ['a2','a1b','b1']
    scenarios = ['a2', 'a1b']
     
    timestart = '2046-01-15T00:00:00.000Z'
    timeend='2100-12-15T00:00:00.000Z'
     
    #**********************************
    # Datasets for each scenario - note that mpi_echam5 is not run for scenario a1b
    #a1b
    #data=[cccma_cgmc3_1,cnrm_cm3,gfdl_cm2_1,ipsl_cm4,miroc3_2_medres,miub_echo_g,mri_cgcm2_3_2a]
    #a2
    #data=[cccma_cgmc3_1,cnrm_cm3,gfdl_cm2_1,ipsl_cm4,miroc3_2_medres,miub_echo_g,mpi_echam5,mri_cgcm2_3_2a]
    #b1
    #data=[cccma_cgmc3_1,cnrm_cm3,gfdl_cm2_1,ipsl_cm4,miroc3_2_medres,miub_echo_g,mpi_echam5,mri_cgcm2_3_2a]
     
    
    # get list of shapefiles uploaded to the GDP
    shapefiles = pyGDP.getShapefiles()
    for shp in shapefiles:
        print shp
     
    # feature loaded from sciencebase
    #should shapefile be sb:SP_hru instead?
    shapefile = 'sb:nhru'
    user_attribute = 'hru_id_loc'
     
    user_value = None
     
    os.chdir(curdir)
    dir = os.getcwd()
     
    vt_data = list_define(set)
    vt_datatype = vt_data[5:] 
    
    #for scenario in scenarios:  
         
#     cnrm_cm3 = ['cnrm_cm3'+str(scenario), 'http://pcmdi8.llnl.gov/thredds/dodsC/bcca/BCCA_0.125deg-gregorian-sres-monthly', 'Monthly', timestart, timeend,\
#          'cnrm_cm3-gregorian-sres'+str(scenario)+'-run1-pr-BCCA_0-125deg-monthly',
#          'cnrm_cm3-gregorian-sres'+str(scenario)+'-run1-tasmax-BCCA_0-125deg-monthly',
#          'cnrm_cm3-gregorian-sres'+str(scenario)+'-run1-tasmin-BCCA_0-125deg-monthly']
#      
#     gfdl_cm2_1 = ['gfdl_cm2_1'+str(scenario), 'dods://pcmdi8.llnl.gov/thredds/dodsC/bcca/BCCA_0.125deg-gregorian-sres-monthly', 'Monthly', timestart, timeend,\
#              'gfdl_cm2_1-gregorian-sres'+str(scenario)+'-run1-pr-BCCA_0-125deg-monthly',
#              'gfdl_cm2_1-gregorian-sres'+str(scenario)+'-run1-tasmax-BCCA_0-125deg-monthly',
#              'gfdl_cm2_1-gregorian-sres'+str(scenario)+'-run1-tasmin-BCCA_0-125deg-monthly']
#      
#     ipsl_cm4 = ['ipsl_cm4'+str(scenario), 'dods://pcmdi8.llnl.gov/thredds/dodsC/bcca/BCCA_0.125deg-gregorian-sres-monthly', 'Monthly', timestart, timeend,\
#              'ipsl_cm4-gregorian-sres'+str(scenario)+'-run1-pr-BCCA_0-125deg-monthly',
#              'ipsl_cm4-gregorian-sres'+str(scenario)+'-run1-tasmax-BCCA_0-125deg-monthly',
#              'ipsl_cm4-gregorian-sres'+str(scenario)+'-run1-tasmin-BCCA_0-125deg-monthly']
#      
#     mpi_echam5 = ['mpi_echam5'+str(scenario), 'dods://pcmdi8.llnl.gov/thredds/dodsC/bcca/BCCA_0.125deg-gregorian-sres-monthly', 'Monthly', timestart, timeend,\
#              'mpi_echam5-gregorian-sres'+str(scenario)+'-run1-pr-BCCA_0-125deg-monthly',
#              'mpi_echam5-gregorian-sres'+str(scenario)+'-run1-tasmax-BCCA_0-125deg-monthly',
#              'mpi_echam5-gregorian-sres'+str(scenario)+'-run1-tasmin-BCCA_0-125deg-monthly']
#      
#     miroc3_2_medres = ['miroc3_2_medres'+str(scenario), 'dods://pcmdi8.llnl.gov/thredds/dodsC/bcca/BCCA_0.125deg-gregorian-sres-monthly', 'Monthly', timestart, timeend,\
#              'miroc3_2_medres-gregorian-sres'+str(scenario)+'-run1-pr-BCCA_0-125deg-monthly',
#              'miroc3_2_medres-gregorian-sres'+str(scenario)+'-run1-tasmax-BCCA_0-125deg-monthly',
#              'miroc3_2_medres-gregorian-sres'+str(scenario)+'-run1-tasmin-BCCA_0-125deg-monthly']
#      
#     cccma_cgmc3_1 = ['cccma_cgmc3'+str(scenario), 'http://pcmdi8.llnl.gov/thredds/dodsC/bcca/BCCA_0.125deg-gregorian-sres-monthly', 'Monthly', timestart, timeend,\
#              'cccma_cgcm3_1-gregorian-sres'+str(scenario)+'-run1-pr-BCCA_0-125deg-monthly',
#              'cccma_cgcm3_1-gregorian-sres'+str(scenario)+'-run1-tasmax-BCCA_0-125deg-monthly',
#              'cccma_cgcm3_1-gregorian-sres'+str(scenario)+'-run1-tasmin-BCCA_0-125deg-monthly']
#      
#     miub_echo_g = ['miub_echo_g'+str(scenario), 'dods://pcmdi8.llnl.gov/thredds/dodsC/bcca/BCCA_0.125deg-gregorian-sres-monthly', 'Monthly', timestart, timeend,\
#              'miub_echo_g-gregorian-sres'+str(scenario)+'-run1-pr-BCCA_0-125deg-monthly',
#              'miub_echo_g-gregorian-sres'+str(scenario)+'-run1-tasmax-BCCA_0-125deg-monthly',
#              'miub_echo_g-gregorian-sres'+str(scenario)+'-run1-tasmin-BCCA_0-125deg-monthly']
#      
#     mri_cgcm2_3_2a = ['mri_cgcm2_3_2a'+str(scenario), 'dods://pcmdi8.llnl.gov/thredds/dodsC/bcca/BCCA_0.125deg-gregorian-sres-monthly', 'Monthly', timestart, timeend,\
#              'mri_cgcm2_3_2a-gregorian-sres'+str(scenario)+'-run1-pr-BCCA_0-125deg-monthly',
#              'mri_cgcm2_3_2a-gregorian-sres'+str(scenario)+'-run1-tasmax-BCCA_0-125deg-monthly',
#              'mri_cgcm2_3_2a-gregorian-sres'+str(scenario)+'-run1-tasmin-BCCA_0-125deg-monthly']
     
     
#         if scenario == 'a1b':
#             data=[cccma_cgmc3_1,cnrm_cm3,gfdl_cm2_1,ipsl_cm4,miroc3_2_medres,miub_echo_g,mri_cgcm2_3_2a]
#         elif scenario == 'a2':
#                 data=[cccma_cgmc3_1,cnrm_cm3,gfdl_cm2_1,ipsl_cm4,miroc3_2_medres,miub_echo_g,mpi_echam5,mri_cgcm2_3_2a]
    
    timestart = time.time()
    
    
    #################
    #scenarios = ['a2', 'a1b']
    scenario = 'a2'
    
    
    
    
    #for dataset in data:
    #file_loc = dir+'\\'+scenario+'\\'+dataset[0]
    file_loc = dir+'\\'+set
    if not os.path.exists(file_loc):
        os.mkdir(file_loc)
    os.chdir(file_loc)
    #print "The current scenario is: " +scenario + "dataset being worked on is: " + dataset[0]
    print "The current scenario is: " +scenario + "dataset being worked on is: " + set

    #The url of each dataset
    dataSet = vt_data[1]
    #dataSet = dataset[1]

    #The precipitation and temperatures of each dataset
    #Start at position(not index) 5 until the end of the
    #dictionary's key(which is a list)
      
    dataType=vt_data[5:]  
    #dataType = dataset[5:]
        
    # daily or monthly for additional aggregation/formatting (not appended yet)
    timeStep = vt_data[2]
    length = timeStep
    #timeStep = dataset[2]

    #Start date
    timeBegin = vt_data[3]
    #timeBegin = dataset[3]
    #End date
    timeEnd = vt_data[4]
    #timeEnd = dataset[4]

    # data processing arguments
    gmlIDs=None
    verbose=True
    #coverage = 'false' check if on US border/ocean
    coverage = Region_return[2]
    delim='COMMA'
    stats='MEAN'

    # run the pyGDP
    start = time.time()
    outputPath = pyGDP.submitFeatureWeightedGridStatistics(shapefile, dataSet, dataType, timeBegin, timeEnd, user_attribute, user_value, gmlIDs, verbose, coverage, delim, stats)
    end = time.time()
    print 'Start time is: ' + str(start)
    print 'End time is: ' + str(end)
    print 'Total time was: ' + str(end-start)
    print outputPath
    # copy the output and rename it
    
#     ind = 5
#     for var in range(5, len(dataset)):
#         line = dataset[var].split('-')
#         dataset[ind] = line[4]
#         ind += 1
#         
#     dataType = dataset[5:]
    
    #shutil.copy2(outputPath, region+'_'+dataset[0]+'.csv')
    shutil.copy2(outputPath, region+'_'+vt_data[0]+'.csv')
    
#     csvfile = os.getcwd()+region+'_'+dataset[0]+'.csv'       
#     #Parse the csv file 
#     index = 0            
#     
#     csvread = csv.reader(open(region+'_'+dataset[0] + '.csv', "rb")) 
#     
#     csvwrite = csv.writer(open(dataType[0] + '.csv', "wb"))
#         
#     temp = csvread
#     var = temp.next()
#     var[0] = '#'+dataType[0]
#     #Gets gage ids
#     gage = temp.next()
#     
#     #Writes current variable to csv file
#     csvwrite.writerow(var)
#     #Writes all gage ids to csv file
#     csvwrite.writerow(gage)
#     
#     for variable in dataType:                
#             
#         for row in csvread:
#             #if on last variable     
#             if variable == dataType[len(dataType) - 1]: 
#                 csvwrite.writerow(row)               
#             else:  
#                 if (row[0] in '#'+dataType[index+1]) or (row[0] in '# '+dataType[index+1]):
#                     #Line 33 is used for titling the csv file the name of the variable (like tmin, ppt, or tmax)
#                     var = '#'+dataType[index+1]
#                     parsedFiles.append(os.getcwd()+'\\'+variable+'.csv')
#                     csvwrite = csv.writer(open(dataType[index+1] + '.csv', "wb"))
#                     row[1:] = ""
#                     row[0] = var
#                     csvwrite.writerow(row)
#                     csvwrite.writerow(gage)
#                     if len(dataType) == 2:
#                         csvread.next()
#                     else:
#                         csvread.next()
#                         csvwrite.writerow(csvread.next())
#                         csvwrite.writerow(csvread.next())
#                     break
#                 else:
#                     if dataType[index+1] not in row[0] and row[0] not in dataType[index+1]:
#                         csvwrite.writerow(row)
#         print "Finished parsing " + variable + ".csv"
#         parsedFiles.append(os.getcwd()+'\\'+variable+'.csv')
#         # use index to keep track of next variable
#         if (index + 1) < len(dataType):
#             index += 1
        
    timeend = time.time()
    print 'Start time of pyGDP: ' + str(timestart)
    print 'End time of pyGDP: ' + str(timeend)
    print 'Total time of pyGDP: ' + str(timeend-timestart)
    

    os.chdir(dir)
コード例 #41
0
def create_web_processing_object():
    """Build and return a fresh pyGDP web-processing client."""
    return pyGDP.pyGDPwebProcessing()
コード例 #42
0
    def test_get_shapefile_list(self):
        """The GDP server must advertise at least one shapefile."""
        client = pyGDP.pyGDPwebProcessing()
        server_shapefiles = client.getShapefiles()
        assert_not_equal(len(server_shapefiles), 0)
コード例 #43
0
ファイル: pyGDP_client.py プロジェクト: timcera/pygdp
def main():
    """Guided pyGDP session.

    Walks the user through choosing a shapefile, one of its attributes, a
    value, and a gridded dods dataset, then runs Feature Weighted Grid
    Statistics on the first available timestep and reports where the
    downloaded result (usually a CSV) can be found.
    """
    # Every remote operation goes through one web-processing client.
    client = pyGDP.pyGDPwebProcessing()

    # Shapefiles already hosted on the GDP server; uploadShapefile can
    # add your own.
    hosted = client.getShapefiles()
    for entry in hosted:
        print(entry)
    shapefile = get_input(hosted)

    print()
    print('Get Attributes:')
    # dbf attributes of the chosen shapefile — the first level of detail
    # processed on the GDP.
    attribute = get_input_1(client.getAttributes(shapefile))

    print()
    print('Get values:')
    # One level further down: the values of the chosen attribute.
    value = get_input_2(client.getValues(shapefile, attribute))

    print()
    # getDataSetURI returns a lot of metadata, so a free-text filter
    # keeps the dods dataset search manageable.
    dataset_uri = get_input_3(client.getDataSetURI(anyText=getinput_4()))

    print()
    print('Getting available data_types... \n')
    # Data types available within the chosen dods dataset.
    data_type = get_input_5(client.getDataType(dataset_uri))

    print()
    print('Getting time range from dataset...')
    # Only the first timestamp is used (as both start and end) below.
    available_times = client.getTimeRange(dataset_uri, data_type)
    for stamp in available_times:
        print(stamp)

    print()
    print('Submitting request...')
    # The heavy lifting: all the choices made above are shipped to the
    # remote GDP processing algorithm; a result file is downloaded next
    # to the executing script and its URL is returned.
    output = client.submitFeatureWeightedGridStatistics(
        shapefile, dataset_uri, data_type,
        available_times[0], available_times[0],
        attribute, value, verbose=True)
    print()
    print(output)
    print()

    print("The resulting ouput file (which should now exist in the folder where "
          "this example was executed) holds the Feature Weighted Grid Statistics "
          "(just the mean value if you did the default options) of the area chose "
          "from the 'value' shapefile (from GDP). \nFor more details refer to comments "
          "in the main method of this example script.")
コード例 #44
0
from __future__ import print_function
import pyGDP

# Example: pass multiple dataTypes and multiple statistics to a single
# Feature Weighted Grid Statistics request.

pyGDP = pyGDP.pyGDPwebProcessing()

# Feature selection: the Baraboo sub-basin of the simplified HUC8 layer.
shapefile = 'sample:simplified_huc8'
user_attribute = 'SUBBASIN'
user_value = 'Baraboo'

# Gridded data: four GMO variables over a two-day window.
dataSet = 'dods://cida.usgs.gov/thredds/dodsC/gmo/GMO_w_meta.ncml'
dataType = ['Prcp', 'Tavg', 'Tmax', 'Tmin']
timeBegin = '1970-01-24T00:00:00.000Z'
timeEnd = '1970-01-25T00:00:00.000Z'

# Processing options: two statistics per variable, comma-delimited output.
gmlIDs = None
verbose = True
coverage = 'true'
delim = 'COMMA'
stats = ['MEAN', 'STD_DEV']

print('Processing request.')
outputPath = pyGDP.submitFeatureWeightedGridStatistics(
    shapefile, dataSet, dataType, timeBegin, timeEnd,
    user_attribute, user_value, gmlIDs, verbose, coverage, delim, stats)
コード例 #45
0
def arc_FWGS(step):
    """BDD step: run FWGS with the parameters stashed on `world` and
    store the resulting output file path back on `world`."""
    gdp_client = pyGDP.pyGDPwebProcessing(wfs_url=world.WFS_URL)
    world.output_file = gdp_client.submitFeatureWeightedGridStatistics(
        world.shapefile, world.dataset_uri, world.data_type,
        world.time_start, world.time_end, world.attribute, world.value,
        coverage=False, verbose=False)
コード例 #46
0
def create_web_processing_object():
    """Instantiate the pyGDP web-processing client used by the steps."""
    return pyGDP.pyGDPwebProcessing()
コード例 #47
0
def main_func(region, currdir, timestart, timeend):#, scenarios):
    
    def Region_lookup(region):
        region_properties = []
        if region == 'nhru':
            region_properties.append(371)
            region_properties.append('https://www.sciencebase.gov/catalogMaps/mapping/ows/51d35da1e4b0ca184833940c')
            region_properties.append('false')
        elif region == 'R01':
            region_properties.append(2462)
            region_properties.append('https://www.sciencebase.gov/catalogMaps/mapping/ows/5244735ae4b05b217bada04e')
            region_properties.append('false')
        elif region == 'R02':
            region_properties.append(4827)
            region_properties.append('https://www.sciencebase.gov/catalogMaps/mapping/ows/52696784e4b0584cbe9168ee')
            region_properties.append('false')
        elif region == 'R03':
            region_properties.append(9899)
            region_properties.append('https://www.sciencebase.gov/catalogMaps/mapping/ows/5283bd23e4b047efbbb57922')
            region_properties.append('false')
        elif region == 'R04':
            region_properties.append(5936)
            region_properties.append('https://www.sciencebase.gov/catalogMaps/mapping/ows/5284ff57e4b063f258e61b9d')
            region_properties.append('false')
        elif region == 'R05':
            region_properties.append(7182)
            region_properties.append('https://www.sciencebase.gov/catalogMaps/mapping/ows/528516bbe4b063f258e62161')
            region_properties.append('false')
        elif region == 'R06':
            region_properties.append(2303)
            region_properties.append('https://www.sciencebase.gov/catalogMaps/mapping/ows/51d75296e4b055e0afd5be2c')
            region_properties.append('true')
        elif region == 'R07':
            region_properties.append(8205)
            region_properties.append('https://www.sciencebase.gov/catalogMaps/mapping/ows/52851cd5e4b063f258e643dd')
            region_properties.append('true')
        elif region == 'R08':
            region_properties.append(4449)
            region_properties.append('https://www.sciencebase.gov/catalogMaps/mapping/ows/52854695e4b063f258e6513c')
            region_properties.append('true')
        elif region == 'R10L':
            region_properties.append(8603)
            region_properties.append('https://www.sciencebase.gov/catalogMaps/mapping/ows/520031dee4b0ad2d97189db2')
            region_properties.append('true')
        elif region =='R10U':
            region_properties.append(10299)
            region_properties.append('https://www.sciencebase.gov/catalogMaps/mapping/ows/5216849ee4b0b45d6ba61e2e')
            region_properties.append('false')
        elif region == 'R11':
            region_properties.append(7373)
            region_properties.append('https://www.sciencebase.gov/catalogMaps/mapping/ows/51d1f9ebe4b08b18a62d586b')
            region_properties.append('true')
        elif region == 'R12':
            region_properties.append(7815)
            region_properties.append('https://www.sciencebase.gov/catalogMaps/mapping/ows/5201328ae4b009d47a4c247a')
            region_properties.append('false')
        elif region == 'R13':
            region_properties.append(1958)
            region_properties.append('https://www.sciencebase.gov/catalogMaps/mapping/ows/51d752b9e4b055e0afd5be36')
            region_properties.append('false')
        elif region == 'R14':
            region_properties.append(3879)
            region_properties.append('https://www.sciencebase.gov/catalogMaps/mapping/ows/52029c68e4b0e21cafa4b40c')
            region_properties.append('false')
        elif region == 'R15':
            region_properties.append(3441)
            region_properties.append('https://www.sciencebase.gov/catalogMaps/mapping/ows/5285389ae4b063f258e64863')
            region_properties.append('false')
        elif region == 'R16':
            region_properties.append(2664)
            region_properties.append('https://www.sciencebase.gov/catalogMaps/mapping/ows/52853f97e4b063f258e64875')
            region_properties.append('false')

        return region_properties
            
    
    global parsedFiles, csvfile, vistrails_data_set, nhru, url
    import pyGDP
    #region = 'R13'
    parsedFiles = []
    vistrails_data_set = []
    
    Region_return=Region_lookup(region)
    hrus = Region_return[0]
    nhru = hrus
    ScienceBase_URL= Region_return[1]
    
    pyGDP.WFS_URL = ScienceBase_URL
    
    # call web processing module
    
    pyGDP = pyGDP.pyGDPwebProcessing()
    
    # change working directory so the GDP output will be written there
    
    # Datasets and their properties
    # run 1, 2 , and 3
    
    #**********************************
    # run 1 only
    #scenario = 'a2'
    # other scenarios are 'a1b' and 'b1'
    scenarios = ['a2','a1b'] # not running b1 or a1fi
    
    #ends 2012
    timestart = '1950-01-15T00:00:00.000Z'
    timeend='1960-12-15T00:00:00.000Z'
    
    shapefiles = pyGDP.getShapefiles()
    for shp in shapefiles:
        print shp
    
    shapefile = 'sb:nhru'
    user_attribute = 'hru_id_loc'
    
    user_value = None
    
    #os.chdir('C:\\Users\\reimandy\\workspace\\userpackages\\WaterBalanceModel\\Step1_CLIMATE_DATA\\'+region+'\\MAURERBREKE')
    dir = currdir
    
    gcmRun = '1'
    for scenario in scenarios:
        cccma_cgcm3_1_1 = ['sres'+scenario+'_cccma-cgcm3-1_'+gcmRun,'dods://cida.usgs.gov/thredds/dodsC/maurer/maurer_brekke_w_meta.ncml','Monthly',timestart,timeend,\
                           'sres'+scenario+'_cccma-cgcm3-1_1_Prcp',
                           'sres'+scenario+'_cccma-cgcm3-1_1_Tavg']
        gfdl_cm2_1_1_1 = ['sres'+scenario+'_gfdl-cm2-1_'+gcmRun,'dods://cida.usgs.gov/thredds/dodsC/maurer/maurer_brekke_w_meta.ncml','Monthly',timestart,timeend,\
                          'sres'+scenario+'_gfdl-cm2-1_1_Prcp',
                          'sres'+scenario+'_gfdl-cm2-1_1_Tavg']
        miroc3_2_medres_1 = ['sres'+scenario+'_miroc3-2-medres_'+gcmRun,'dods://cida.usgs.gov/thredds/dodsC/maurer/maurer_brekke_w_meta.ncml','Monthly',timestart,timeend,\
                             'sres'+scenario+'_miroc3-2-medres_1_Prcp',
                             'sres'+scenario+'_miroc3-2-medres_1_Tavg']
        miub_echo_g_1_1 = ['sres'+scenario+'_miub-echo-g_'+gcmRun,'dods://cida.usgs.gov/thredds/dodsC/maurer/maurer_brekke_w_meta.ncml','Monthly',timestart,timeend,\
                           'sres'+scenario+'_miub-echo-g_1_Prcp',
                           'sres'+scenario+'_miub-echo-g_1_Tavg']
        mpi_echam5_1 = ['sres'+scenario+'_mpi-echam5_'+gcmRun,'dods://cida.usgs.gov/thredds/dodsC/maurer/maurer_brekke_w_meta.ncml','Monthly',timestart,timeend,\
                        'sres'+scenario+'_mpi-echam5_1_Prcp',
                        'sres'+scenario+'_mpi-echam5_1_Tavg']
        mri_cgcm2_3_2a_1 = ['sres'+scenario+'_mri-cgcm2-3-2a_'+gcmRun,'dods://cida.usgs.gov/thredds/dodsC/maurer/maurer_brekke_w_meta.ncml','Monthly',timestart,timeend,\
                            'sres'+scenario+'_mri-cgcm2-3-2a_1_Prcp',
                            'sres'+scenario+'_mri-cgcm2-3-2a_1_Tavg']
        # New MaurerBreke Statistically downscaled datasets (put with other MB datasets)
        bccr_bcm2_0 = ['sres'+scenario+'_bccr-bcm2-0_'+gcmRun,'dods://cida.usgs.gov/thredds/dodsC/maurer/maurer_brekke_w_meta.ncml','Monthly',timestart,timeend,\
                           'sres'+scenario+'_bccr-bcm2-0_'+gcmRun+'_Prcp',
                           'sres'+scenario+'_bccr-bcm2-0_'+gcmRun+'_Tavg']
        cnrm_cm3 = ['sres'+scenario+'_cnrm-cm3_'+gcmRun,'dods://cida.usgs.gov/thredds/dodsC/maurer/maurer_brekke_w_meta.ncml','Monthly',timestart,timeend,\
                            'sres'+scenario+'_cnrm-cm3_'+gcmRun+'_Prcp',
                            'sres'+scenario+'_cnrm-cm3_'+gcmRun+'_Tavg']
        csiro_mk3_0 = ['sres'+scenario+'_csiro-mk3-0_'+gcmRun,'dods://cida.usgs.gov/thredds/dodsC/maurer/maurer_brekke_w_meta.ncml','Monthly',timestart,timeend,\
                            'sres'+scenario+'_csiro-mk3-0_'+gcmRun+'_Prcp',
                            'sres'+scenario+'_csiro-mk3-0_'+gcmRun+'_Tavg']
        giss_model_e_r = ['sres'+scenario+'_giss-model-e-r_2','dods://cida.usgs.gov/thredds/dodsC/maurer/maurer_brekke_w_meta.ncml','Monthly',timestart,timeend,\
                           'sres'+scenario+'_giss-model-e-r_2_Prcp',
                           'sres'+scenario+'_giss-model-e-r_2_Tavg']
        inmcm3_0 = ['sres'+scenario+'_inmcm3-0_'+gcmRun,'dods://cida.usgs.gov/thredds/dodsC/maurer/maurer_brekke_w_meta.ncml','Monthly',timestart,timeend,\
                          'sres'+scenario+'_inmcm3-0_'+gcmRun+'_Prcp',
                          'sres'+scenario+'_inmcm3-0_'+gcmRun+'_Tavg']
        ipsl_cm4 = ['sres'+scenario+'_ipsl-cm4_'+gcmRun,'dods://cida.usgs.gov/thredds/dodsC/maurer/maurer_brekke_w_meta.ncml','Monthly',timestart,timeend,\
                          'sres'+scenario+'_ipsl-cm4_'+gcmRun+'_Prcp',
                          'sres'+scenario+'_ipsl-cm4_'+gcmRun+'_Tavg']
        ncar_ccsm3_0 = ['sres'+scenario+'_ncar-ccsm3-0_'+gcmRun,'dods://cida.usgs.gov/thredds/dodsC/maurer/maurer_brekke_w_meta.ncml','Monthly',timestart,timeend,\
                            'sres'+scenario+'_ncar-ccsm3-0_'+gcmRun+'_Prcp',
                            'sres'+scenario+'_ncar-ccsm3-0_'+gcmRun+'_Tavg']
        ncar_pcm1 = ['sres'+scenario+'_ncar-pcm1_'+gcmRun,'dods://cida.usgs.gov/thredds/dodsC/maurer/maurer_brekke_w_meta.ncml','Monthly',timestart,timeend,\
                            'sres'+scenario+'_ncar-pcm1_'+gcmRun+'_Prcp',
                            'sres'+scenario+'_ncar-pcm1_'+gcmRun+'_Tavg']
        ukmo_hadcm3 = ['sres'+scenario+'_ukmo-hadcm3_'+gcmRun,'dods://cida.usgs.gov/thredds/dodsC/maurer/maurer_brekke_w_meta.ncml','Monthly',timestart,timeend,\
                            'sres'+scenario+'_ukmo-hadcm3_'+gcmRun+'_Prcp',
                            'sres'+scenario+'_ukmo-hadcm3_'+gcmRun+'_Tavg']
    
        data = [cccma_cgcm3_1_1,gfdl_cm2_1_1_1,miroc3_2_medres_1,miub_echo_g_1_1,mpi_echam5_1,mri_cgcm2_3_2a_1]
    
        for dataset in data:
            if len(scenario) == 2:
                name = dataset[0]
                name = name[7:]
            else:
                name = dataset[0]
                name = name[8:]
            file_loc = str(dir.name)+'\\Step1_CLIMATE_DATA\\'+region+'\\'+scenario+'\\'+name
            if not os.path.exists(file_loc):
                os.mkdir(file_loc)
            os.chdir(file_loc)
            print "The current dataset being worked on is: " + name
    
            dataSet = dataset[1]
    
            dataType = dataset[5:]
    
            timestep = dataset[2]
    
            timeBegin = dataset[3]
            timeEnd = dataset[4]
    
            gmlIDs = None
            verbose = True
    
            coverage = Region_return[2]
            delim = 'COMMA'
            stats = 'MEAN'
    
            start = time.time()
            outputPath = pyGDP.submitFeatureWeightedGridStatistics(shapefile, dataSet, dataType, timeBegin, timeEnd, user_attribute, user_value, gmlIDs, verbose, coverage, delim, stats)
            end = time.time()
            
            print "Start time is: " + str(start)
            print 'End time is: ' + str(end)
            print 'Total time was: ' + str(end-start)
            print outputPath
    
            shutil.copy2(outputPath, region+'_'+name+'.csv')
            
            csvfile = os.getcwd()+region+'_'+name+'.csv'

#             dataType = ['Prcp', 'Tavg']
#             vistrails_data_set = ['Prcp', 'Tavg']
            
            #csvread = csv.reader(open(region+'_'+name+'.csv', 'rb'))
            
            #csvwrite = csv.writer(open(dataType[0]+'.csv', 'wb'))
            #parsedFiles.append(dataType[0]+'.csv')
            #index = 0
    
            #temp = csvread
            #var = temp.next()
            #var[0] = '#'+dataType[0]
    
            #gage = temp.next()
    
            #csvwrite.writerow(var)
            #csvwrite.writerow(gage)
    
#             for variable in dataType:
#     
#                 for row in csvread:
#                     if variable == dataType[len(dataType) - 1]:
#                         csvwrite.writerow(row)
#                     else:
#                         if (row[0] in '#'+dataType[index+1]) or (row[0] in '# ' + dataType[index+1]):
#                             var = '#'+dataType[index+1]
#                             csvwrite = csv.writer(open(dataType[index+1] + '.csv', 'wb'))
#                             parsedFiles.append(dataType[index+1]+'.csv')
#                             row[1:] = ''
#                             row[0] = var
#                             csvwrite.writerow(row)
#                             csvwrite.writerow(gage)
#     
#                             if len(dataType) == 2:
#                                 csvwrite.writerow(csvread.next())
#                             else:
#                                 csvread.next()
#                                 csvwrite.writerow(csvread.next())
#                                 csvwrite.writerow(csvread.next())
#                             break
#                         else:
#                             if dataType[index+1] not in row[0] and row[0] not in dataType[index+1]:
#                                 csvwrite.writerow(row)
#                 print 'Finished parsing ' + variable + '.csv'
#     
#                 if (index+1)<len(dataType):
#                     index += 1
#     
#                 os.chdir(dir)
    
    
#main_func('nhru', os.getcwd(), '', '')
コード例 #48
0
import pyGDP

# Example: clip the SSURGO soils WCS coverage to the Alabama polygon from
# the sample CONUS states shapefile, via the GDP FeatureCoverageWCSIntersection
# algorithm.
#
# NOTE: the original example rebound the module name itself
# (``pyGDP = pyGDP.pyGDPwebProcessing()``), which makes the pyGDP module
# unreachable afterwards. Use a distinct name for the client instance instead.
gdp_client = pyGDP.pyGDPwebProcessing()

# Feature collection identifying the clip geometry.
shapefile = 'sample:CONUS_states'
attribute = 'STATE'
value = 'Alabama'

# WCS endpoint serving the SSURGO soils coverage.
dataSetURI = 'https://cida.usgs.gov/ArcGIS/services/SSURGO_Products/MapServer/WCSServer'

# Coverage identifier within the WCS service.
dataType = '1'

gdp_client.submitFeatureCoverageWCSIntersection(shapefile,
                                                dataSetURI,
                                                dataType,
                                                attribute,
                                                value,
                                                verbose=True)
コード例 #49
0
 def test_get_shapefile_list(self):
     """The GDP web-processing service should advertise at least one shapefile."""
     client = pyGDP.pyGDPwebProcessing()
     available = client.getShapefiles()
     assert_not_equal(len(available), 0)
コード例 #50
0
def point_to_bogus_geoserver(step):
    """Lettuce step: build a pyGDP client against a nonexistent WFS endpoint.

    Records the client's resulting WFS URL on the shared ``world`` object so
    later steps can assert against it.
    """
    import pyGDP
    client = pyGDP.pyGDPwebProcessing(WFS_URL='https://bogus/bogy/geoserver/wfs')
    world.bogus_wfs = client.wfsUrl
コード例 #51
0
ファイル: Test_Change_URLs.py プロジェクト: timcera/pygdp
def point_to_bogus_geoserver(step):
    """Lettuce step: construct a pyGDP client pointed at a bogus WFS server.

    The URL actually adopted by the client is stashed on ``world.bogus_wfs``
    for verification by subsequent steps. This variant uses the lowercase
    ``wfs_url`` keyword accepted by this fork of pyGDP.
    """
    import pyGDP
    client = pyGDP.pyGDPwebProcessing(
        wfs_url='https://bogus/bogy/geoserver/wfs')
    world.bogus_wfs = client.wfsUrl