Example #1
class WCSHelper(object):
	"""docstring for WCSHelper"""
	def __init__(self, url, dates, variable, bbox, single=False):
		super(WCSHelper, self).__init__()
		self.url = url
		self.single = single
		self.dates = dates
		self.variable = variable
		self.bbox = bbox
		self.owslib_log = logging.getLogger('owslib')
		self.owslib_log.setLevel(logging.DEBUG)
		self.wcs = WebCoverageService(url, version="1.0.0")

	def __repr__(self):
		return str(self.wcs)

	def getData(self):
		#print '-'*20
		#print self.bbox
		#print self.dates
		if self.single :
			output = self.wcs.getCoverage(identifier=self.variable, time=[self.dates], bbox=self.bbox, format="NetCDF3")
		else:	
			output = self.wcs.getCoverage(identifier=self.variable, time=self.dates, bbox=self.bbox, format="NetCDF3")
		return output
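# A minimal usage sketch (not from the original source). It adds the imports the
# class above relies on and uses placeholder values for the endpoint, coverage name,
# timestamp and bounding box.
import logging
from owslib.wcs import WebCoverageService

helper = WCSHelper(url="https://example.org/thredds/wcs/dataset",  # hypothetical endpoint
                   dates="2010-01-01T00:00:00Z",                   # placeholder timestamp
                   variable="Temperature",                         # hypothetical coverage id
                   bbox=(-90.0, 40.0, -89.0, 41.0),
                   single=True)
coverage = helper.getData()
with open("temperature.nc", "wb") as f:
    f.write(coverage.read())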
Example #2
class AhnWebCoverageDatasource(object):
    def __init__(self):
        try:
            self.wcs_ahn3 = WebCoverageService(
                'https://geodata.nationaalgeoregister.nl/ahn3/wcs',
                version='1.0.0')
            self.wcs_ahn2 = WebCoverageService(
                'https://geodata.nationaalgeoregister.nl/ahn2/wcs',
                version='1.0.0')
        except:
            print("AHN WCS host unavailable")

    def retrieve_tile_ahn3(self, srid, x_min, y_min, x_max, y_max):
        output_ahn3 = self.wcs_ahn3.getCoverage(
            identifier='ahn3_05m_dtm',
            bbox=[x_min, y_min, x_max, y_max],
            format='GEOTIFF_FLOAT32',
            crs=srid,
            resx=0.5,
            resy=0.5)

        return output_ahn3

    def retrieve_tile_ahn2(self, srid, x_min, y_min, x_max, y_max):
        output_ahn2 = self.wcs_ahn2.getCoverage(
            identifier='ahn2_05m_int',
            bbox=[x_min, y_min, x_max, y_max],
            format='GEOTIFF_FLOAT32',
            crs=srid,
            resx=0.5,
            resy=0.5)

        return output_ahn2
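# A minimal usage sketch (not from the original source): assumes
# `from owslib.wcs import WebCoverageService` is in scope for the class above and that
# the AHN WCS endpoints are reachable; the bounding box is an arbitrary RD New
# (EPSG:28992) extent.
ahn = AhnWebCoverageDatasource()
tile = ahn.retrieve_tile_ahn3('EPSG:28992', 120000, 485000, 121000, 486000)
with open('ahn3_tile.tif', 'wb') as f:  # the response's read() yields GeoTIFF bytes
    f.write(tile.read())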
Example #3
def test_wcs1_getcoverage_netcdf(ows_server):
    # Use owslib to confirm that we have a somewhat compliant WCS service
    wcs = WebCoverageService(url=ows_server.url + "/wcs", version="1.0.0")

    # Ensure that we have at least some layers available
    contents = list(wcs.contents)
    test_layer_name = contents[0]
    test_layer = wcs.contents[test_layer_name]

    bbox = test_layer.boundingBoxWGS84

    output = wcs.getCoverage(identifier=contents[0],
                             format='netCDF',
                             bbox=pytest.helpers.enclosed_bbox(bbox),
                             crs='EPSG:4326',
                             width=400,
                             height=300)

    assert output
    assert output.info()['Content-Type'] == 'application/x-netcdf'

    output = wcs.getCoverage(identifier=contents[0],
                             format='netCDF',
                             bbox=pytest.helpers.enclosed_bbox(bbox),
                             crs='I-CANT-BELIEVE-ITS-NOT-EPSG:4326',
                             width=400,
                             height=300)

    assert output
    assert output.info()['Content-Type'] == 'application/x-netcdf'
Example #4
def getGeotiffFromWebCoverageService(url, version, bbox, identifier, frmt,
                                     directory, filename):
    """ Obtain data from WebCoverageService.
    
    :url: url from which to obtain the data
    :version: version of the data to obtain
    :bbox: bounding box
    :identifier: user-specified filename
    :format: data format to obtain
    :directory: folder in which file is to be written
    :filename: filename of file to write
    """

    # create path
    direc = "./" + directory + "/"
    dirFile = direc + filename + ".tif"

    # check if directory and file exist
    if not os.path.exists(direc):
        os.makedirs(direc)

    if not os.path.isfile(dirFile):
        wcs = WebCoverageService(url, version=version)
        response = wcs.getCoverage(identifier=identifier,
                                   bbox=bbox,
                                   format=frmt,
                                   crs='urn:ogc:def:crs:EPSG::28992',
                                   resx=0.5,
                                   resy=0.5)
        with open(dirFile, 'wb') as file:
            file.write(response.read())
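# A minimal usage sketch (not from the original source). It adds the imports the
# function above relies on; the endpoint, identifier and format are placeholders
# borrowed from the AHN examples elsewhere in this listing.
import os
from owslib.wcs import WebCoverageService

getGeotiffFromWebCoverageService(
    url='https://geodata.nationaalgeoregister.nl/ahn3/wcs',  # hypothetical endpoint for this call
    version='1.0.0',
    bbox=(120000, 485000, 121000, 486000),                   # RD New (EPSG:28992) extent
    identifier='ahn3_05m_dtm',                                # hypothetical coverage id
    frmt='GEOTIFF_FLOAT32',
    directory='downloads',
    filename='ahn3_tile')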
Example #5
def loadWCS(folder, out_name, WCS_URL, Version, layerName, bbox, srid):
    urls = WCS_URL
    version = Version
    input_value_raster = layerName
    crs_num = 'urn:ogc:def:crs:EPSG::' + str(srid)

    if urls == 'None':
        return ("Please provide WCS parameters")

    wcs = WebCoverageService(urls, version)
    #print(list(wcs.contents))

    #
    # print([op.name for op in wcs.operations])

    cvg = wcs.contents[input_value_raster]
    if bbox == 'None':
        bbox = cvg.boundingBoxWGS84

    response = wcs.getCoverage(identifier=input_value_raster,
                               bbox=bbox,
                               format='GEOTIFF_FLOAT32',
                               crs=crs_num,
                               resx=0.5,
                               resy=0.5)
    temp_raster = folder + '//' + out_name + '.tif'
    with open(temp_raster, 'wb') as file:
        file.write(response.read())
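# A minimal usage sketch (not from the original source); the endpoint, layer name and
# bounding box are placeholders, and owslib's WebCoverageService import is assumed.
loadWCS(folder='/tmp',
        out_name='clip',
        WCS_URL='https://example.org/geoserver/wcs',  # hypothetical endpoint
        Version='1.0.0',
        layerName='workspace:dem',                    # hypothetical coverage id
        bbox=(120000, 485000, 121000, 486000),
        srid=28992)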
Example #6
    def wcsRequest(self, layer='AGRICULTURE'):
        
        
        self.layer = layer
        ID = 'your ID'
        wcs_url = 'https://services.sentinel-hub.com/ogc/wcs/'+ID
        wcs = WebCoverageService(wcs_url, version='1.0.0')
        
        self.x, self.y = self.deg2num(self.lat_center, self.lon_center, self.zoom)
        self.lat_max, self.lon_min = self.num2deg(self.x, self.y, self.zoom)
        self.lat_min, self.lon_max = self.num2deg(self.x+1, self.y+1, self.zoom)
        

        inProj = Proj(init='epsg:4326')
        outProj = Proj(init='epsg:3857')
        x1,y1 = transform(inProj,outProj,self.lon_min,self.lat_min)
        x2,y2 = transform(inProj,outProj,self.lon_max,self.lat_max)
        
        bb=(x1, y1, x2, y2)
        
        self.wcsOut = wcs.getCoverage(identifier=self.layer,
                                      time=None,
                                      width=800,
                                      height=800,
                                      bbox = bb,
                                      format = 'GeoTIFF')
        
        self.imgTiff = Image.open(self.wcsOut)
        self.imgArr = np.array(self.imgTiff)
        
        imgurl = image_to_url(image=self.imgArr)
        self.map.add_layer(ImageOverlay(url=imgurl,
                                        bounds=[[self.lat_min, self.lon_min],
                                                [self.lat_max, self.lon_max]]))
Example #7
    def getCube(self, maptype, depths, datatypes, bounds, overwrite=False):
        #check if all files are already downloaded
        fpaths = [
            '../tmp/Hadocha_' + maptype + '_' + depth + 'cm_' + datatype +
            '.tif' for depth in depths for datatype in datatypes
        ]
        if (min([os.path.isfile(fpath) for fpath in fpaths])
                and not overwrite):
            print('all files already downloaded for: ' + maptype)
            return fpaths

        #set up the Web Coverage Service. This sometimes raises errors, so retry
        print('setting up WCS for maptype: ' + maptype)
        for attempt in range(5):
            try:
                wcs = WebCoverageService(
                    'http://maps.isric.org/mapserv?map=/map/' + maptype +
                    '.map',
                    version='1.0.0')
            except:
                print('failed to connect to WCS ' + str(attempt + 1) +
                      ' times to: ' + maptype)
            else:
                break
        else:
            print('failed to connect to WCS, try again later')
            return []

        #start retrieving map data
        for depth in depths:
            for datatype in datatypes:
                fpath = '../tmp/Hadocha_' + maptype + '_' + depth + 'cm_' + datatype + '.tif'
                #also handle exceptions for retrieving files
                if (not os.path.isfile(fpath) or overwrite):
                    for attempt in range(5):
                        try:
                            response = wcs.getCoverage(
                                identifier=maptype + '_' + depth + 'cm_' +
                                datatype,
                                crs='urn:ogc:def:crs:EPSG::152160',
                                bbox=(bounds.minx, bounds.miny, bounds.maxx,
                                      bounds.maxy),
                                resx=20,
                                resy=20,
                                format='GEOTIFF_INT16')
                        except:
                            print('coverage failed ' + str(attempt + 1) +
                                  ' times for: ' + fpath)
                        else:
                            with open(fpath, 'wb') as file:
                                file.write(response.read())
                                # print('file: '+fpath+'downloaded')
                            break
                    else:
                        print('file not retrieved: ' + fpath)
                        fpaths.remove(fpath)
                    # else:
                    #     print('file already downloaded: '+fpath)
        # print('map at location: '+fpath)
        return fpaths
Example #8
def get_products(products, dt, bbox=None, fcast='000', outpath='.'):
    ''' Get a list of products for a given date and time 
        Example:
        >> get_products(['Temperature'], datetime(2010,10,10,12,0))
    ''' 
    saved_files = []
    url_params = (dt.strftime('%Y%m'), dt.strftime('%Y%m%d'), dt.strftime('%Y%m%d'), dt.strftime('%H%M'), fcast)
    server_url = 'http://nomads.ncdc.noaa.gov/thredds/wcs/ruc13/%s/%s/ruc2_130_%s_%s_%s.grb2' % url_params
    try:
        wcs = WebCoverageService(server_url,version='1.0.0')
    except:
        print >> sys.stderr, "Bad URL!"
    else:
        if set(products).issubset(wcs.contents.keys()):
            print >> sys.stderr, 'Requested products are a subset of available products'
        else:
            print >> sys.stderr, 'Some product(s) selected do not exist'
        if bbox is None:
            bbox = (-139, -57.995, 16, 55) # CONUS
        date_time = dt.isoformat() + 'Z'
        for product in products:
            filename = os.path.join(outpath, model + '_' + date_time.replace(':','') + '_' + product + '.tif')
            if os.path.exists(filename): # Check if file exists so we don't download duplicates
               pass
            else: 
                try:
                    output = wcs.getCoverage(identifier = product, time = [date_time], bbox= bbox, format='GeoTiff_float')
                except urllib2.HTTPError:
                    print "Bad URL"
                f = open(filename, 'wb')
                f.write(output.read())
                f.close() 
                saved_files.append(filename)
                print >> sys.stderr, 'Saved: ' + filename
    return saved_files
Example #9
def get_raster_wcs(
    coordinates: Union[Iterable, Sequence[Union[float, str]]],
    geographic: bool = True,
    layer: str = None,
    geoserver: str = GEO_URL,
) -> bytes:
    """Return a subset of a raster image from the local GeoServer via WCS 2.0.1 protocol.

    For geographic rasters, subsetting is based on WGS84 (Long, Lat) boundaries. If not geographic, subsetting is based
    on projected coordinate system (Easting, Northing) boundaries.

    Parameters
    ----------
    coordinates : Sequence[Union[int, float, str]]
      Geographic coordinates of the bounding box (left, down, right, up)
    geographic : bool
      If True, uses "Long" and "Lat" in WCS call. Otherwise uses "E" and "N".
    layer : str
      Layer name of raster exposed on GeoServer instance, e.g. 'public:CEC_NALCMS_LandUse_2010'
    geoserver: str
      The address of the geoserver housing the layer to be queried. Default: http://pavics.ouranos.ca/geoserver/.

    Returns
    -------
    bytes
      A GeoTIFF array.

    """
    (left, down, right, up) = coordinates

    if geographic:
        x, y = "Long", "Lat"
    else:
        x, y = "E", "N"

    wcs = WebCoverageService(url=urljoin(geoserver, "ows"), version="2.0.1")

    try:
        resp = wcs.getCoverage(
            identifier=[layer],
            format="image/tiff",
            subsets=[(x, left, right), (y, down, up)],
            timeout=120,
        )

    except Exception as e:
        raise Exception(e)

    data = resp.read()

    try:
        etree.fromstring(data)
        # The response is an XML file describing the server error.
        raise ChildProcessError(data)

    except etree.XMLSyntaxError:
        # The response is the DEM array.
        return data
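# A minimal usage sketch (not from the original source): the bounding box is an
# arbitrary (lon, lat) extent, the layer name comes from the docstring example, and
# GEO_URL is assumed to be defined alongside the function.
tif_bytes = get_raster_wcs((-80.0, 44.0, -79.0, 45.0),
                           geographic=True,
                           layer='public:CEC_NALCMS_LandUse_2010')
with open('landuse_subset.tif', 'wb') as f:
    f.write(tif_bytes)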
Example #10
def GetWCSLayer(u, p):
  start = time.time()
  # Manage the WCS URL & the layer name
  split_url = u.split('?')
  server_url = split_url[0]
  ows = server_url[-3:]
  print 'The OGC standard is: '+ ows
  
  if ows == 'ows' or ows == 'wcs':
    server_url = server_url[:-3]+ 'wcs' 
    spacename_wcs = split_url[1]
    chemin = p + spacename_wcs +'.tif'
    
    if not os.path.exists(chemin):
      
      # Get the raster layer using OGC WCS standard
      wcs = WebCoverageService(server_url ,version='1.0.0')
      image = wcs[spacename_wcs]
      
      # Download the GeoTIFF image file
      info = (image.boundingboxes)[0]
          
      epsg = info['nativeSrs']
      bboxx = info['bbox']
      
      offset = image.grid.offsetvectors
      cellsize_x= offset[0]
      x = cellsize_x[0]
      X = str(abs(float(x)))
      
      cellsize_y= offset[1]
      y = cellsize_y[1]
      Y = str(abs(float(y)))
      
  #    img_formats = image.supportedFormats
  #    img_format = img_formats[0]
      img_format = 'GeoTIFF'        
      
      print "Downloading the GeoTIFF file... : "+spacename_wcs
      print "From: "+server_url
      output = wcs.getCoverage(identifier=spacename_wcs,
                               bbox=bboxx,
                               crs=epsg,
                               format=img_format,
                               resx=X,
                               resy=Y)
                               
      data = output.read()
      f = open(chemin,'wb')
      f.write(data)
      f.close()
    
    # Calculate elapsed time
    temps = time.time() - start
    tps = round(temps, 2)
    temps_s = str(tps)
    print "GetWCSLayer download time : " + temps_s + " s"
Example #11
def wcs_links(wcs_url,
              identifier,
              bbox=None,
              crs=None,
              height=None,
              width=None,
              exclude_formats=True,
              quiet=True,
              version='1.0.0'):
    # FIXME(Ariel): This would only work for layers marked for public view,
    # what about the ones with permissions enabled?

    try:
        wcs = WebCoverageService(wcs_url, version=version)
    except ServiceException as err:
        err_msg = 'WCS server returned exception: %s' % err
        if not quiet:
            logger.warn(err_msg)
        raise GeoNodeException(err_msg)

    msg = ('Could not create WCS links for layer "%s",'
           ' it was not in the WCS catalog,'
           ' the available layers were: "%s"' %
           (identifier, wcs.contents.keys()))

    output = []
    formats = []

    if identifier not in wcs.contents:
        if not quiet:
            raise RuntimeError(msg)
        else:
            logger.warn(msg)
    else:
        coverage = wcs.contents[identifier]
        formats = coverage.supportedFormats
        for f in formats:
            if exclude_formats and f in DEFAULT_EXCLUDE_FORMATS:
                continue
            # roundabout, hacky way to accomplish getting a getCoverage url.
            # nonetheless, it's better than having to load an entire large
            # coverage just to generate a URL
            fakeUrl = wcs.getCoverage(identifier=coverage.id,
                                      format=f,
                                      bbox=bbox,
                                      crs=crs,
                                      height=20,
                                      width=20).geturl()
            url = sub(r'(height=)20(\&width=)20', r'\g<1>{0}\g<2>{1}',
                      fakeUrl).format(height, width)
            # The outputs are: (ext, name, mime, url)
            # FIXME(Ariel): Find a way to get proper ext, name and mime
            # using format as a default for all is not good enough
            output.append((f, f, f, url))
    return output
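# A minimal usage sketch (not from the original source): assumes the GeoNode-side names
# used above (logger, GeoNodeException, DEFAULT_EXCLUDE_FORMATS) are in scope; the
# endpoint and layer identifier are placeholders.
links = wcs_links('https://example.org/geoserver/wcs',  # hypothetical endpoint
                  'geonode:elevation',                   # hypothetical coverage id
                  bbox=(-180, -90, 180, 90),
                  crs='EPSG:4326',
                  height=256,
                  width=256)
for ext, name, mime, url in links:
    print(url)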
Example #12
def get_raster_wcs(coordinates, geographic=True, layer=None):
    """Return a subset of a raster image from the local GeoServer via WCS 2.0.1 protocol.

    For geographic rasters, subsetting is based on WGS84 (Long, Lat) boundaries. If not geographic, subsetting is based
    on projected coordinate system (Easting, Northing) boundaries.

    Parameters
    ----------
    coordinates : sequence
      Geographic coordinates of the bounding box (left, down, right, up)
    geographic : bool
      If True, uses "Long" and "Lat" in WCS call. Otherwise uses "E" and "N".
    layer : str
      Layer name of raster exposed on GeoServer instance. E.g. 'public:CEC_NALCMS_LandUse_2010'

    Returns
    -------
    bytes
      A GeoTIFF array.

    """
    from owslib.wcs import WebCoverageService
    from lxml import etree

    (left, down, right, up) = coordinates

    if geographic:
        x, y = 'Long', 'Lat'
    else:
        x, y = 'E', 'N'

    wcs = WebCoverageService('http://boreas.ouranos.ca/geoserver/ows',
                             version='2.0.1')

    try:
        resp = wcs.getCoverage(identifier=[
            layer,
        ],
                               format='image/tiff',
                               subsets=[(x, left, right), (y, down, up)])

    except Exception as e:
        raise Exception(e)

    data = resp.read()

    try:
        etree.fromstring(data)
        # The response is an XML file describing the server error.
        raise ChildProcessError(data)

    except etree.XMLSyntaxError:
        # The response is the DEM array.
        return data
Example #13
def wcs_links(
        wcs_url,
        identifier,
        bbox=None,
        crs=None,
        height=None,
        width=None,
        exclude_formats=True,
        quiet=True,
        version='1.0.0'):
    # FIXME(Ariel): This would only work for layers marked for public view,
    # what about the ones with permissions enabled?

    try:
        wcs = WebCoverageService(wcs_url, version=version)
    except ServiceException as err:
        err_msg = 'WCS server returned exception: %s' % err
        if not quiet:
            logger.warn(err_msg)
        raise GeoNodeException(err_msg)

    msg = ('Could not create WCS links for layer "%s",'
           ' it was not in the WCS catalog,'
           ' the available layers were: "%s"' % (
               identifier, wcs.contents.keys()))

    output = []
    formats = []

    if identifier not in wcs.contents:
        if not quiet:
            raise RuntimeError(msg)
        else:
            logger.warn(msg)
    else:
        coverage = wcs.contents[identifier]
        formats = coverage.supportedFormats
        for f in formats:
            if exclude_formats and f in DEFAULT_EXCLUDE_FORMATS:
                continue
            # roundabout, hacky way to accomplish getting a getCoverage url.
            # nonetheless, it's better than having to load an entire large
            # coverage just to generate a URL
            fakeUrl = wcs.getCoverage(identifier=coverage.id, format=f,
                                      bbox=bbox, crs=crs, height=20,
                                      width=20).geturl()
            url = sub(r'(height=)20(\&width=)20', r'\g<1>{0}\g<2>{1}',
                      fakeUrl).format(height, width)
            # The outputs are: (ext, name, mime, url)
            # FIXME(Ariel): Find a way to get proper ext, name and mime
            # using format as a default for all is not good enough
            output.append((f, f, f, url))
    return output
Example #14
def get_dtm(path_out, minlong, maxlong, minlat, maxlat):

    bbox = (minlong, minlat, maxlong, maxlat)

    url="http://services.ga.gov.au/gis/services/DEM_SRTM_1Second_over_Bathymetry_Topography/MapServer/WCSServer?"
    wcs = WebCoverageService(url,version='1.0.0')

    cvg=wcs.getCoverage(identifier='1',  bbox=bbox, format='GeoTIFF', crs=4326, width=200, height=200)

    f = open(path_out, 'wb')
    bytes_written = f.write(cvg.read())
    f.close()
    print("dtm geotif saved as",path_out)
Example #15
def test_wcs_200():
    """
    Web Coverage Service
    WCS Version 2.0.x

    rewritten doctest/wcs_200.txt
    """
    wcs = WebCoverageService(SERVICE_URL, version="2.0.1")
    assert wcs.version == '2.0.1'
    assert wcs.url == SERVICE_URL
    assert wcs.identification.title == 'rasdaman'
    assert wcs.identification.service == 'OGC WCS'
    assert wcs.provider.name == 'Jacobs University Bremen'
    assert 'AvgLandTemp' in wcs.contents.keys()
    assert len(wcs.contents.keys()) >= 20
    cvg = wcs.contents['AvgLandTemp']
    assert cvg.boundingboxes[0]['bbox'] == (-90, -180,
                                            90, 180)
    assert cvg.timelimits == [datetime.datetime(2000, 2, 1, 0, 0), datetime.datetime(2015, 6, 1, 0, 0)]
    assert cvg.timepositions[0:5] == [datetime.datetime(2000, 2, 1, 0, 0), datetime.datetime(2000, 3, 1, 0, 0),
                                      datetime.datetime(2000, 4, 1, 0, 0), datetime.datetime(2000, 5, 1, 0, 0),
                                      datetime.datetime(2000, 6, 1, 0, 0)]
    assert cvg.supportedFormats == ['application/gml+xml', 'image/jpeg', 'image/png', 'image/tiff', 'image/bmp',
                                    'image/jp2', 'application/netcdf', 'text/csv', 'application/json',
                                    'application/dem', 'application/x-ogc-dted', 'application/x-ogc-ehdr',
                                    'application/x-ogc-elas', 'application/x-ogc-envi', 'application/x-ogc-ers',
                                    'application/x-ogc-fit', 'application/x-ogc-fits', 'image/gif',
                                    'application/x-netcdf-gmt', 'application/x-ogc-gs7bg', 'application/x-ogc-gsag',
                                    'application/x-ogc-gsbg', 'application/x-ogc-gta', 'application/x-ogc-hf2',
                                    'application/x-erdas-hfa', 'application/x-ogc-ida', 'application/x-ogc-ingr',
                                    'application/x-ogc-isis2', 'application/x-erdas-lan', 'application/x-ogc-mff2',
                                    'application/x-ogc-nitf', 'application/x-ogc-paux', 'application/x-ogc-pcidsk',
                                    'application/x-ogc-pcraster', 'application/x-ogc-pdf', 'application/x-ogc-pnm',
                                    'text/x-r', 'application/x-ogc-rmf', 'image/x-sgi', 'application/x-ogc-vrt',
                                    'image/xpm', 'application/x-ogc-zmap']
    assert cvg.grid.axislabels == ['Lat', 'Long', 'ansi']
    assert cvg.grid.dimension == 3
    assert cvg.grid.lowlimits == ['0', '0', '0']
    assert cvg.grid.highlimits == ['1799', '3599', '184']
    covID = 'AvgLandTemp'
    time_subset = ("ansi", "2000-02-01T00:00:00Z")
    lat_subset = ('Lat', 40, 50)
    long_subset = ('Long', -10, 0)
    formatType = 'application/netcdf'
    output = wcs.getCoverage(identifier=[covID], format=formatType, subsets=[long_subset, lat_subset, time_subset])
    f = open(scratch_file('test_wcs_200.nc'), 'wb')
    bytes_written = f.write(output.read())
    f.close()
Example #16
def test_wcs20_getcoverage_netcdf(ows_server):
    # Use owslib to confirm that we have a somewhat compliant WCS service
    wcs = WebCoverageService(url=ows_server.url + "/wcs", version="2.0.0")

    # Ensure that we have at least some layers available
    contents = list(wcs.contents)
    output = wcs.getCoverage(
        identifier=[contents[0]],
        format='netCDF',
        subsets=[('x', 144, 144.3), ('y', -42.4, -42), ('time', '2019-11-05')],
        subsettingcrs="EPSG:4326",
        scalesize="x(400),y(300)",
    )

    assert output
    assert output.info()['Content-Type'] == 'application/x-netcdf'
Example #17
def test_wcs_idee():
    """
    COWS Web Coverage Service
    WCS Version 1.0.0

    rewritten doctest/wcs_idee.txt
    """
    wcs = WebCoverageService(SERVICE_URL)
    assert wcs.version == '1.0.0'
    assert wcs.url == SERVICE_URL
    assert wcs.identification.title == 'WCS UTM30N - MDT Peninsula y Baleares'
    assert wcs.identification.service == 'IDEE-WCS-UTM30N'
    assert wcs.provider.name == u'Instituto Geogr\xe1fico Nacional'
    assert sorted(wcs.contents.keys()) == [
        'MDT1000_peninsula_baleares',
        'MDT1000_peninsula_baleares_aspecto',
        'MDT1000_peninsula_baleares_pendientes',
        'MDT25_peninsula_ZIP',
        'MDT25_peninsula_aspecto',
        'MDT25_peninsula_pendientes',
        'MDT500_peninsula_baleares',
        'MDT500_peninsula_baleares_aspecto',
        'MDT500_peninsula_baleares_pendientes',
        'MDT_peninsula_baleares',
        'MDT_peninsula_baleares_aspecto',
        'MDT_peninsula_baleares_pendientes']
    cvg = wcs['MDT25_peninsula_pendientes']
    assert cvg.title == 'MDT25 Pendientes Peninsula'
    assert cast_tuple_int_list(cvg.boundingBoxWGS84) == [-8, 35, 3, 43]
    assert cvg.timelimits == []
    assert sorted(cvg.supportedFormats) == ['AsciiGrid', 'FloatGrid_Zip', 'GeoTIFF']
    assert sorted(map(lambda x: x.getcode(), cvg.supportedCRS)) == [
        'EPSG:23028',
        'EPSG:23029',
        'EPSG:23030',
        'EPSG:23030',
        'EPSG:23031',
        'EPSG:4230',
        'EPSG:4326']
    output = wcs.getCoverage(
        identifier='MDT25_peninsula_pendientes',
        bbox=(600000, 4200000, 601000, 4201000),
        crs='EPSG:23030', format='AsciiGrid', resX=25, resY=25)
    f = open(scratch_file('test_idee.grd'), 'wb')
    bytes_written = f.write(output.read())
    f.close()
Example #18
def main(input_zone, input_value_raster, earsource, **kwargs):
    wcs = kwargs.get("WCS", 'no')
    if wcs == 'yes':
        folder = kwargs.get("WCS_temp", '/home/')
        urls = kwargs.get("WCS_url", 'None')
        version = kwargs.get("WCS_version", '1.0.0')
        bbox = kwargs.get("bbox", 'None')

        if urls == 'None':
            return ("Please provide WCS parameters")

        wcs = WebCoverageService(urls, version)
        ##print(list(wcs.contents))

        #print([op.name for op in wcs.operations])

        cvg = wcs.contents[input_value_raster]
        if bbox == 'None':
            bbox = cvg.boundingBoxWGS84
        response = wcs.getCoverage(identifier=input_value_raster,
                                   bbox=bbox,
                                   format='GEOTIFF_FLOAT32',
                                   crs='urn:ogc:def:crs:EPSG::4326',
                                   resx=0.5,
                                   resy=0.5)
        temp_raster = folder + '//' + input_value_raster + '.tif'
        with open(temp_raster, 'wb') as file:
            file.write(response.read())
        input_value_raster = temp_raster

    if earsource == "shp":
        return loop_zonal_stats(input_zone, input_value_raster)

    elif earsource == "pgtable":
        connString = kwargs.get('connString', "None")
        if (connString == "None"):
            print("Please Supply valid connection string")
        #print("please supply valid data and their source")
        return loop_zonal_statsPG(input_zone, input_value_raster, connString)
    elif earsource == "wfs":
        wfsURL = kwargs.get('wfsURL', "None")
        if (wfsURL == "None"):
            return ("Please provide valid WFS URL")
        return loop_zonal_statsWFS(input_zone, input_value_raster, wfsURL)
    else:
        print("please supply valid data and their source")
Example #19
def test_wcs21_getcoverage(ows_server):
    # Use owslib to confirm that we have a somewhat compliant WCS service
    wcs = WebCoverageService(url=ows_server.url + "/wcs", version="2.0.1")

    # Ensure that we have at least some layers available
    contents = list(wcs.contents)
    output = wcs.getCoverage(
        identifier=[contents[0]],
        format='image/geotiff',
        subsets=[('x', 144, 144.3), ('y', -42.4, -42)],
        # timeSequence=['2019-11-05'],
        subsettingcrs="EPSG:4326",
        subset='time("2019-11-05")',
        scalesize="x(400),y(300)")

    assert output
    assert output.info()['Content-Type'] == 'image/geotiff'
Example #20
def test_wcs20_getcoverage_multidate(ows_server):
    # Use owslib to confirm that we have a somewhat compliant WCS service
    wcs = WebCoverageService(url=ows_server.url + "/wcs", version="2.0.0")

    # Ensure that we have at least some layers available
    contents = list(wcs.contents)
    try:
        resp = wcs.getCoverage(
            identifier=[contents[0]],
            format='image/geotiff',
            subsets=[('x', 144, 144.3), ('y', -42.4, -42),
                     ('time', '2019-11-05', "2019-12-05")],
            subsettingcrs="EPSG:4326",
            scalesize="x(400),y(300)",
        )
    except ServiceException as e:
        assert 'Format does not support multi-time datasets' in str(e)
Example #21
def get_wcs(server_url, spacename_wcs):

    chemin = '/home/tmp/'+spacename_wcs+'.tif'
    
    if not os.path.exists(chemin):
        
        wcs = WebCoverageService(server_url +"/wcs/",version='1.0.0')
    
        image = wcs[spacename_wcs]
        
        info = (image.boundingboxes)[0]
        
        epsg = info['nativeSrs']
        bboxx = info['bbox']
        
        offset = image.grid.offsetvectors
        cellsize_x= offset[0]
        x = cellsize_x[0]
        X = str(abs(float(x)))
        
        cellsize_y= offset[1]
        y = cellsize_y[1]
        Y = str(abs(float(y)))
        
#        img_formats = image.supportedFormats
#        img_format = img_formats[0]
        img_format = 'GeoTIFF'        
        
        print "Downloading the WCS: "+spacename_wcs
        print "From: "+server_url
        output = wcs.getCoverage(identifier = spacename_wcs,
                                 bbox = bboxx,
                                 crs = epsg,
                                 format = img_format,
                                 resx = X,
                                 resy = Y)                            
                                 
        data = output.read()
        f = open(chemin,'wb')
        f.write(data)
        f.close()
        print "Done"
    else: print "Done"
        
    return chemin
Example #22
def AHN2_5m_forStudyArea(bbox, outputfilename):
    
    # load necessary modules
    from owslib.wcs import WebCoverageService
    import rasterio
    
    # specify the AHN2 wcs-url
    wcs = WebCoverageService('http://geodata.nationaalgeoregister.nl/ahn2/wcs?service=WCS', version='1.0.0')

    # Download and save DTM
    response = wcs.getCoverage(identifier='ahn2_5m', bbox=bbox, format='GEOTIFF_FLOAT32',
                               crs='urn:ogc:def:crs:EPSG::28992', resx=5, resy=5)
    with open(outputfilename, 'wb') as file:
        file.write(response.read())

    # Load DTM 
    DTM = rasterio.open(outputfilename, driver="GTiff")
    return DTM
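# A minimal usage sketch (not from the original source): the bounding box is an
# arbitrary RD New (EPSG:28992) extent, matching the hard-coded CRS above.
dtm = AHN2_5m_forStudyArea(bbox=(120000, 485000, 122000, 487000),
                           outputfilename='ahn2_5m_studyarea.tif')
print(dtm.bounds, dtm.res)  # rasterio dataset returned by the function
dtm.close()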
Example #23
 def provide(self,name,options={}):
     """
     
     Return a numpy array of the requested data
     
     todo:
     - split into a download() or get_from_cache()
     
     """
     logger.debug("Looking for layer '%s' at timestamp '%s'"%(name,options['timestamp']))        
     if name not in self.available_layers:
         logger.error("Layer '%s' is not available in this provider."%(name))
     if options['timestamp'].isoformat() not in self.gfs_wcs_access_urls:
         logger.error("Timestamp %s was not found in the list of available GFS timesteps."%(options['timestamp'].isoformat()))
     
     logger.debug("Downloading coverage from url:")
     logger.debug(self.gfs_wcs_access_urls.get(options['timestamp'].isoformat()))
     
     extent=box(*self._grid["bounds"]).buffer(1.0)
     logger.debug("request bounds: %s"%(str(extent.bounds)))      
     
     
     req_url=self.gfs_wcs_access_urls[options['timestamp'].isoformat()]['url']
     cache_key=self.gfs_wcs_access_urls[options['timestamp'].isoformat()]['cache_key']+name
     logger.debug("req_url=%s"%(req_url))
     
     wcs=WebCoverageService(req_url, version='1.0.0')
     meta=wcs.contents[name]
         
     #cov = wcs.getCoverage(identifier=name,bbox=extent.bounds, format="GeoTIFF_Float")
     try:
         cov = wcs.getCoverage(identifier=name, bbox=extent.bounds, format="GeoTIFF_Float")
         filename=os.path.join(self._cache,"%s.tif"%(cache_key))
         logger.debug("WCS: saving file to: %s"%(filename))
          with open(filename,'wb') as f:
             f.write(cov.read())
         dataset = gdal.Open(filename, gdalconst.GA_ReadOnly)
     except Exception as e:
         logger.error("WCS: failure: %s"%(e))
     
     return self.warp_to_grid(dataset)
Example #24
def test_wcs_idee():
    """
    COWS Web Coverage Service
    WCS Version 1.0.0

    rewritten doctest/wcs_idee.txt
    """
    wcs = WebCoverageService(SERVICE_URL)
    assert wcs.version == '1.0.0'
    assert wcs.url == SERVICE_URL
    assert wcs.identification.title == 'WCS UTM30N - MDT Peninsula y Baleares'
    assert wcs.identification.service == 'IDEE-WCS-UTM30N'
    assert wcs.provider.name == u'Instituto Geogr\xe1fico Nacional'
    assert sorted(wcs.contents.keys()) == [
        'MDT1000_peninsula_baleares', 'MDT1000_peninsula_baleares_aspecto',
        'MDT1000_peninsula_baleares_pendientes', 'MDT25_peninsula_ZIP',
        'MDT25_peninsula_aspecto', 'MDT25_peninsula_pendientes',
        'MDT500_peninsula_baleares', 'MDT500_peninsula_baleares_aspecto',
        'MDT500_peninsula_baleares_pendientes', 'MDT_peninsula_baleares',
        'MDT_peninsula_baleares_aspecto', 'MDT_peninsula_baleares_pendientes'
    ]
    cvg = wcs['MDT25_peninsula_pendientes']
    assert cvg.title == 'MDT25 Pendientes Peninsula'
    assert cast_tuple_int_list(cvg.boundingBoxWGS84) == [-8, 35, 3, 43]
    assert cvg.timelimits == []
    assert sorted(
        cvg.supportedFormats) == ['AsciiGrid', 'FloatGrid_Zip', 'GeoTIFF']
    assert sorted(map(lambda x: x.getcode(), cvg.supportedCRS)) == [
        'EPSG:23028', 'EPSG:23029', 'EPSG:23030', 'EPSG:23030', 'EPSG:23031',
        'EPSG:4230', 'EPSG:4326'
    ]
    output = wcs.getCoverage(identifier='MDT25_peninsula_pendientes',
                             bbox=(600000, 4200000, 601000, 4201000),
                             crs='EPSG:23030',
                             format='AsciiGrid',
                             resX=25,
                             resY=25)
    f = open(scratch_file('test_idee.grd'), 'wb')
    bytes_written = f.write(output.read())
    f.close()
Example #25
def get_coverage(wcs_url, layer, verbose=False):
    """Get coverage from Web Coverage Service (WCS) in GeoTIFF format
    
    Input:
       wcs_url: URL for web coverage service. E.g. http://www.aifdr.org:8080/geoserver/ows?
       layer: Coverage layer name as <workspace>:<layer>
       verbose [optional]: Flag controlling the verbosity level. Default is False.
       
    Output:
       GeoTIFF data or None.    
    """
        
    if verbose:
        print('Retrieving %s from %s' % (layer, wcs_url))
            
    # wcs = WebCoverageService(wcs_url, version='1.1.1')
    wcs = WebCoverageService(wcs_url, version='1.0.0')
    interrogate(wcs)
    if layer not in wcs.contents.keys():
        return None

    response = wcs.getCoverage(identifier=layer, format='GeoTIFF')
    return response
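# A minimal usage sketch (not from the original source): assumes `interrogate` is
# defined next to get_coverage; the endpoint comes from the docstring and the layer
# name is hypothetical.
resp = get_coverage('http://www.aifdr.org:8080/geoserver/ows?',
                    'topp:tasmania_dem',  # hypothetical <workspace>:<layer>
                    verbose=True)
if resp is not None:
    with open('coverage.tif', 'wb') as f:
        f.write(resp.read())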
Example #26
def test_wcs1_getcoverage_geotiff(ows_server):
    # Use owslib to confirm that we have a somewhat compliant WCS service
    wcs = WebCoverageService(url=ows_server.url + "/wcs", version="1.0.0")

    # Ensure that we have at least some layers available
    contents = list(wcs.contents)
    test_layer_name = contents[0]
    test_layer = wcs.contents[test_layer_name]

    bbox = test_layer.boundingBoxWGS84

    output = wcs.getCoverage(
        identifier=contents[0],
        format='GeoTIFF',
        bbox=pytest.helpers.disjoint_bbox(bbox),
        crs='EPSG:4326',
        width=400,
        height=300,
        timeSequence=test_layer.timepositions[len(test_layer.timepositions) //
                                              2].strip(),
    )

    assert output
    assert output.info()['Content-Type'] == 'image/geotiff'
Example #27
# Very simple script demonstrating how to interact with a THREDDS based WCS.
# ---
#
# The GetCapabilities and DescribeCoverage requests for this dataset are: 
# http://cida.usgs.gov/thredds/wcs/prism?service=WCS&version=1.0.0&request=GetCapabilities
# http://cida.usgs.gov/thredds/wcs/prism?service=WCS&version=1.0.0&request=DescribeCoverage
#
# The GetCoverage request equivalent to the example below is:
# http://cida.usgs.gov/thredds/wcs/prism?request=GetCoverage&version=1.0.0&service=WCS&format=GeoTIFF&coverage=tmx&time=1895-01-01T00:00:00Z&bbox=-90,40,-89,41
# ---
# 
# Example to find the equivalent information using OWSLib:
# 
from owslib.wcs import WebCoverageService
wcs=WebCoverageService('http://cida.usgs.gov/thredds/wcs/prism',version='1.0.0')
# Take a look at the contents (coverages) of the wcs.
print wcs.contents
tmax=wcs['tmx']
# Take a look at the attributes of the coverage
dir(tmax)
print tmax.boundingBoxWGS84
print tmax.timepositions
print tmax.supportedFormats
# mock up a simple GetCoverage request.
output=wcs.getCoverage(identifier='tmx',time=['1895-01-01T00:00:00Z'],bbox=(-90,40,-89,41),format='GeoTIFF')
# Write the file out to disk.
f=open('foo.tif','wb')
f.write(output.read())
f.close()
Example #28
def test_wcs1_getcoverage_exceptions(ows_server):
    # Use owslib to confirm that we have a somewhat compliant WCS service
    wcs = WebCoverageService(url=ows_server.url + "/wcs", version="1.0.0")
    contents = list(wcs.contents)
    test_layer_name = contents[0]
    test_layer = wcs.contents[test_layer_name]

    bbox = test_layer.boundingBoxWGS84

    try:
        # test where product name is not available
        wcs.getCoverage(identifier='nonexistentproduct',
                        format='GeoTIFF',
                        bbox=pytest.helpers.disjoint_bbox(bbox),
                        crs='EPSG:4326',
                        width=400,
                        height=300)
    except ServiceException as e:
        assert 'Invalid coverage:' in str(e)

    try:
        # test where  format is not supported
        wcs.getCoverage(
            identifier=contents[0],
            # format='GeoTIFF',
            bbox=pytest.helpers.disjoint_bbox(bbox),
            crs='EPSG:4326',
            width=400,
            height=300)
    except ServiceException as e:
        assert 'Unsupported format:' in str(e)

    try:
        # test where crs is not provided
        wcs.getCoverage(
            identifier=contents[0],
            format='GeoTIFF',
            bbox=pytest.helpers.disjoint_bbox(bbox),
            # crs='EPSG:4326',
            width=400,
            height=300)
    except ServiceException as e:
        assert 'No request CRS specified' in str(e)

    try:
        # test where crs is not supported
        wcs.getCoverage(identifier=contents[0],
                        format='GeoTIFF',
                        bbox=pytest.helpers.disjoint_bbox(bbox),
                        crs='EPSG:432676',
                        width=400,
                        height=300)
    except ServiceException as e:
        assert 'is not a supported CRS' in str(e)

    try:
        # test where bbox is not correctly provided
        wcs.getCoverage(
            identifier=contents[0],
            format='GeoTIFF',
            # bbox=(10,40,18,45),
            crs='EPSG:4326',
            width=400,
            height=300)
    except ServiceException as e:
        assert 'Invalid BBOX parameter' in str(e)
Example #29
def test_wcs_200():
    """
    Web Coverage Service
    WCS Version 2.0.x

    rewritten doctest/wcs_200.txt
    """
    wcs = WebCoverageService(SERVICE_URL, version="2.0.1")
    assert wcs.version == '2.0.1'
    assert wcs.url == SERVICE_URL
    assert wcs.identification.title == 'Marine Science Data Service'
    assert wcs.identification.service == 'OGC WCS'
    assert wcs.provider.name == 'Plymouth Marine Laboratory'
    assert 'OCCCI_V3_1_chlor_a_monthly' in wcs.contents.keys()
    assert len(wcs.contents.keys()) >= 29
    cvg = wcs.contents['OCCCI_V3_1_chlor_a_monthly']
    assert cvg.boundingboxes[0]['bbox'] == (-89.9999973327159,
                                            -180.00000333371918,
                                            89.9999973327159,
                                            180.00000333371918)
    assert cvg.timelimits == [
        datetime.datetime(1997, 9, 4, 0, 0),
        datetime.datetime(2016, 12, 1, 0, 0)
    ]
    assert cvg.timepositions[0:5] == [
        datetime.datetime(1997, 9, 4, 0, 0),
        datetime.datetime(1997, 10, 1, 0, 0),
        datetime.datetime(1997, 11, 1, 0, 0),
        datetime.datetime(1997, 12, 1, 0, 0),
        datetime.datetime(1998, 1, 1, 0, 0)
    ]
    assert cvg.supportedFormats == [
        'application/gml+xml', 'image/jpeg', 'image/png', 'image/tiff',
        'image/bmp', 'image/jp2', 'application/netcdf', 'text/csv',
        'application/json', 'application/dem', 'application/x-ogc-dted',
        'application/x-ogc-ehdr', 'application/x-ogc-elas',
        'application/x-ogc-envi', 'application/x-ogc-ers',
        'application/x-ogc-fit', 'application/x-ogc-fits', 'image/gif',
        'application/x-netcdf-gmt', 'application/x-ogc-gs7bg',
        'application/x-ogc-gsag', 'application/x-ogc-gsbg',
        'application/x-ogc-gta', 'application/x-ogc-hf2',
        'application/x-erdas-hfa', 'application/x-ogc-ida',
        'application/x-ogc-ingr', 'application/x-ogc-isis2',
        'application/x-erdas-lan', 'application/x-ogc-mff2',
        'application/x-ogc-nitf', 'application/x-ogc-paux',
        'application/x-ogc-pcidsk', 'application/x-ogc-pcraster',
        'application/x-ogc-pdf', 'application/x-ogc-pnm', 'text/x-r',
        'application/x-ogc-rmf', 'image/x-sgi', 'application/x-ogc-vrt',
        'image/xpm', 'application/x-ogc-zmap'
    ]
    assert cvg.grid.axislabels == ['Lat', 'Long', 'ansi']
    assert cvg.grid.dimension == 3
    assert cvg.grid.lowlimits == ['0', '0', '0']
    assert cvg.grid.highlimits == ['4319', '8639', '231']
    covID = 'OCCCI_V3_1_chlor_a_monthly'
    time_subset = ("ansi", "2004-06-01T00:00:00Z")
    lat_subset = ('Lat', 40, 50)
    long_subset = ('Long', -10, 0)
    formatType = 'application/netcdf'
    output = wcs.getCoverage(identifier=[covID],
                             format=formatType,
                             subsets=[long_subset, lat_subset, time_subset])
    f = open(scratch_file('test_wcs_200.nc'), 'wb')
    bytes_written = f.write(output.read())
    f.close()
Example #30
from owslib.wcs import WebCoverageService
wcs = WebCoverageService('http://cida.usgs.gov/thredds/wcs/prism',
                         version='1.0.0')
# Take a look at the contents (coverages) of the wcs.
print('contents: ', wcs.contents)
tmin = wcs['tmn']
# Take a look at the attributes of the coverage
dir(tmin)
print('bounding box: ', tmin.boundingBoxWGS84)
#print('time positions: ', tmin.timepositions)
print('supported formats: ', tmin.supportedFormats)
# mock up a simple GetCoverage request.
output = wcs.getCoverage(identifier='tmn',
                         time=['1895-01-01T00:00:00Z'],
                         bbox=(-125, 25, -67, 50),
                         format='GeoTIFF')
# Write the file out to disk.
f = open('data.tif', 'wb')
f.write(output.read())
f.close()
print('done')
Example #31
def download_scenario(user, scenario_id):
    """
    Generate a working directory for running ANUGA
    Create/Copy all of the files to that directory that are necessary
    for running the simulation. Generate a json run file and call run_tsudat.
    Notes here: https://github.com/AIFDR/tsudat2/wiki/Create-anuga-run-script
    """
    create_dir()
    # Get the scenario object from the Database
    scenario = Scenario.objects.get(id=scenario_id)
    
    # the base of the TsuDAT user directory structures from settings.py 
    TsuDATBase = settings.TSUDAT_BASE_DIR
    TsuDATMux = settings.TSUDAT_MUX_DIR

    # change setup value to one of expected strings
    print('original scenario.model_setup=%s' % scenario.model_setup)
    trial_edit = {'t': 'trial', 'T': 'trial', 'trial': 'trial', 'TRIAL': 'trial',
                  'f': 'final', 'F': 'final', 'final': 'final', 'FINAL': 'final'}
    actual_setup = trial_edit.get(scenario.model_setup, 'trial')
    print('actual_setup=%s' % actual_setup)

    # fake a project name                                 ##?
    if not scenario.project.name:                         ##?
        scenario.project.name = _slugify(scenario.name)   ##?
               
    # create the user working directory
    (work_dir, raw_elevations, boundaries, meshes, polygons, gauges,
     topographies, user_dir) = run_tsudat.make_tsudat_dir(TsuDATBase, user.username,
                                                          _slugify(scenario.project.name),
                                                          _slugify(scenario.name),
##?                                                          scenario.model_setup,
                                                          actual_setup,
                                                          scenario.event.tsudat_id)

    project_geom = scenario.project.geom
    project_extent = scenario.project.geom.extent
    centroid = project_geom.centroid

    # This somewhat naively assumes that the whole bounding polygon is in the same zone
    (UTMZone, UTMEasting, UTMNorthing) = LLtoUTM(23, centroid.coords[1], centroid.coords[0])
    if(len(UTMZone) == 3):
        utm_zone = int(UTMZone[0:2])
    else:
        utm_zone = int(UTMZone[0:1])
    if(centroid.coords[1] > 0):
        srid_base = 32600
    else:
        srid_base = 32700
    srid = srid_base + utm_zone
    scenario.project.srid = srid
    scenario.project.save()

    project_geom.transform(srid) 

    # Polygons
    print(polygons)
    bounding_polygon_file = open(os.path.join(polygons, 'bounding_polygon.csv'), 'w')
    for coord in project_geom.coords[0][:-1]:
        bounding_polygon_file.write('%f,%f\n' % (coord[0], coord[1]))
    bounding_polygon_file.close()
 
    # Internal Polygons 
    internal_polygons = InternalPolygon.objects.filter(project=scenario.project).order_by('value')
    count = 0
    InteriorRegions = []
    for ip in internal_polygons:
        ipfile = open(os.path.join(polygons, 'ip%s.csv' % count), 'w')
        geom = ip.geom
        geom.transform(srid)
        for coord in geom.coords[0][:-1]:
            ipfile.write('%f,%f\n' % (coord[0], coord[1]))
        if(ip.type == 1):
            type = "resolution"
        elif(ip.type == 2):
            type = "friction"
        elif(ip.type == 3):
            type = "aoi"
        InteriorRegions.append([type, ipfile.name, ip.value])
        ipfile.close()
        geom = ipfile = None
        count += 1

    # Raw Elevation Files
    RawElevationFiles = []
    elevation_files = []

    wcs_url = settings.GEOSERVER_BASE_URL + 'wcs'
    wcs = WebCoverageService(wcs_url, version='1.0.0')
    pds = ProjectDataSet.objects.filter(project=scenario.project).order_by('ranking')
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(srid)
    dst_wkt = srs.ExportToPrettyWkt()
    eResampleAlg = None
    create_options = None
    
    output_format = "AAIGrid"
    driver = gdal.GetDriverByName(output_format)
    
    for ds in pds:
        layer = Layer.objects.get(typename=ds.dataset.typename)
        elevation_files.append(layer.typename)
        logger.info(wcs.contents)
        metadata = wcs.contents[layer.typename]
        print(metadata.grid)
        resx = metadata.grid.offsetvectors[0][0]
        resy = abs(float(metadata.grid.offsetvectors[1][1]))
        formats = metadata.supportedFormats
        print(formats)
        cvg = wcs.getCoverage(identifier=layer.typename, 
                format='GeoTIFF', 
                crs="EPSG:4326", 
                bbox=(project_extent[0], 
                    project_extent[1], 
                    project_extent[2], 
                    project_extent[3]), 
                resx=resx, 
                resy=resy)
        # Need to make sure the ranking numbers are unique for each project (enforced with DB constraint?)
        tif_file_name = '%s.tif' % ds.ranking
        tif_file_path = os.path.join(raw_elevations, tif_file_name)
        asc_file_name = '%s.asc' % ds.ranking
        asc_file_path = os.path.join(raw_elevations, asc_file_name)
        out = open(tif_file_path, 'wb')
        out.write(cvg.read())
        out.close()
       
        # Warp to UTM
        cmd = "/usr/bin/gdalwarp -srcnodata -9999 -dstnodata -9999 -t_srs EPSG:%d %s %s.tmp" % (srid, tif_file_path, tif_file_path)
        os.system(cmd)
        # Convert to AAIGrid
        cmd = "/usr/bin/gdal_translate -a_nodata -9999 -of %s %s.tmp %s" % (output_format, tif_file_path, asc_file_path)
        os.system(cmd)
        # Remove Intermediate files
        #os.remove(tif_file_path)
        #os.remove(tif_file_path + ".tmp")
      
        # Rename the .prj file to .prj.wkt
        shutil.move(asc_file_path.replace('.asc', '.prj'), asc_file_path.replace('.asc', '.prj.wkt'))
         
        # Generate a prj.adf style prj file
        # NOTE: Not sure if this will work in all cases?
        prj_file_name = '%s.prj' % ds.ranking
        prj_file = open(os.path.join(raw_elevations, prj_file_name), 'w')
        prj_file.write('Projection    UTM\n')
        prj_file.write('Zone          %d\n' % utm_zone)
        prj_file.write('Datum         WGS1984\n')
        prj_file.write('Zunits        NO\n')
        prj_file.write('Units         METERS\n')
        prj_file.write('Spheroid      WGS_1984\n')
        prj_file.write('Xshift        500000\n')
        prj_file.write('Yshift        10000000\n')
        prj_file.write('Parameters\n')
        prj_file.write('NODATA_value  -9999')
        prj_file.close()        

        RawElevationFiles.append(asc_file_path)
         
        '''
        src_ds = gdal.Open( str(tif_file_path), GA_ReadOnly )
        dst_ds_tmp = driver.CreateCopy( str(asc_file_name + '.tmp'), src_ds, 0)
        dst_ds = driver.Create( str(asc_file_path), dst_ds_tmp.RasterXSize, dst_ds_tmp.RasterYSize)
        gdal.ReprojectImage(src_ds, dst_ds, None, dst_wkt)
        dst_ds = None
        dst_ds_tmp = None
        src_ds = None
        '''

    # Landward Boundary
    
    # Iterate over the in the project geometry and add a l or s flag and call landward.landward with them
    points_list = []
    for coord in project_geom.coords[0][:-1]:
        pnt_wkt = 'SRID=%s;POINT(%f %f)' % (srid, coord[0], coord[1])
        land = Land.objects.filter(the_geom__intersects=pnt_wkt)
        if(land.count() > 0):
            points_list.append((coord[0], coord[1], "l")) 
        else:
            points_list.append((coord[0], coord[1], "s")) 
    print('points_list=%s' % str(points_list))
    landward_points = landward.landward(points_list)
    print('landward_points=%s' % str(landward_points))
    
    # Write out the landward points to a file
    landward_boundary_file = open(os.path.join(boundaries, 'landward_boundary.csv'), 'w')
    for pt in landward_points:
        landward_boundary_file.write('%f,%f\n' % (pt[0], pt[1]))
    landward_boundary_file.close()

    # Interior Hazard Points File
    interior_hazard_points_file = open(os.path.join(boundaries, 'interior_hazard_points.csv'), 'w')
    hps = HazardPoint.objects.filter(geom__intersects=project_geom).order_by('tsudat_id')
    for hp in hps:
        the_geom = hp.geom
        latitude=the_geom.coords[1]
        longitude=the_geom.coords[0]
        the_geom.transform(srid)
        interior_hazard_points_file.write('%d,%f,%f,%f,%f\n' % (hp.tsudat_id,longitude,latitude,the_geom.coords[0], the_geom.coords[1]))
    interior_hazard_points_file.close()
    
    # Gauges
    gauge_file = open(os.path.join(gauges, 'gauges.csv'), 'w')
    gauge_file.write('easting,northing,name,elevation\n')
    gauge_points = GaugePoint.objects.filter(project=scenario.project)
    for gauge in gauge_points:
        gauge_geom = gauge.geom
        gauge_geom.transform(srid)
        gauge_file.write('%f,%f,%s,%f\n' % (gauge_geom.coords[0], gauge_geom.coords[1], gauge.name, 0.0))
    gauge_file.close()
   
    # Layers 
    scenario_layers = scenario.output_layers.all()
    layers = []
    for layer in scenario_layers:
        layers.append(layer.name)

    # build the scenario json data file
    date_time = strftime("%Y%m%d%H%M%S", gmtime()) 
    json_file = os.path.join(work_dir, '%s.%s.json' % (_slugify(scenario.name), date_time))

    json_dict = {
                    'user': user.username,
                    'user_directory': user_dir,
                    'project': _slugify(scenario.project.name),
                    'project_id': scenario.project.id,
                    'scenario': _slugify(scenario.name),
                    'scenario_id': scenario.id,
##?                    'setup': scenario.model_setup,
                    'setup': actual_setup,
                    'event_number': scenario.event.tsudat_id,
                    'working_directory': TsuDATBase,
                    'mux_directory': TsuDATMux,
                    'initial_tide': scenario.initial_tidal_stage,
                    'start_time': scenario.start_time,
                    'end_time': scenario.end_time,
                    'smoothing': scenario.smoothing_param,
                    'bounding_polygon_file': bounding_polygon_file.name,
                    'raw_elevation_directory': raw_elevations,
                    'elevation_data_list': RawElevationFiles,
                    'mesh_friction': scenario.default_friction_value,
                    'raster_resolution': scenario.raster_resolution,
                    'export_area': "AOI" if scenario.use_aoi == True else "ALL",
                    'gauge_file': gauge_file.name,
                    'bounding_polygon_maxarea': scenario.project.max_area,
                    'interior_regions_list': InteriorRegions,
                    'interior_hazard_points_file': interior_hazard_points_file.name, 
                    'landward_boundary_file': landward_boundary_file.name,
                    'zone_number': utm_zone,
                    'layers_list': layers, 
                    'get_results_max': True,
                    'get_timeseries': True 
                }

    with open(json_file, 'w') as fd:
        json.dump(json_dict, fd, indent=2, separators=(',', ':'))

    scenario.tsudat_payload = json.dumps(json_dict) 
    scenario.save()
    
    # now run the simulation
    run_tsudat.run_tsudat(json_file)
    scenario.anuga_status = "QUEUE"
    scenario.save()
    return True
Пример #33
0
def to_tif(mask_poly,tif_file,layer='ahn3_05m_dtm',cell_size=0.5,src_nodata=None,overviews=None):
    '''
    Download an AHN layer clipped by a shapely polygon
    
    Parameters
    ----------    
    mask_poly: shapely polygon geometry
        Polygon used as clipping mask
    tif_file: str, file object or pathlib.Path object
        Location of the tif-file to be stored
    layer: str, optional
        NGR WCS layer to be downloaded. By default 'ahn3_05m_dtm'
    cell_size: int or float, optional
        Cell size in which the layer will be downloaded and stored
    src_nodata: int or float, optional
        Overwrite the nodata value returned by the WCS. Useful for ahn2, as
        nodata is not provided in the GeoTIFF profile
    overviews: list, optional
        List of raster overviews in metres. E.g. overviews=[5, 25] with
        cell_size=0.5 creates two overviews with cell sizes of 5 m and 25 m.
        With the same overviews and cell_size=5 only the 25 m overview will
        be included
    '''
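    # This snippet assumes several module-level names defined elsewhere in the
    # original source (numpy as np, rasterio, rasterio.features, Affine,
    # MemoryFile, Window, Resampling, Polygon and time), plus the constants
    # below; the values shown here are representative assumptions only.
    # dtype = rasterio.int16   # dtype used when rasterizing the clipping mask
    # nodata = -32768          # nodata value written to the output raster
    # max_size = 2000          # max request width/height (pixels) per WCS call
    # attempts = 10            # number of download attempts per window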

    bounds = list(mask_poly.bounds)
    bounds[0], bounds[1] = [round(bounds[idx] / cell_size - cell_size) * cell_size for idx in [0, 1]] # xmin, ymin rounded down to cell_size
    bounds[2], bounds[3] = [round(bounds[idx] / cell_size + cell_size) * cell_size for idx in [2, 3]] # xmax, ymax rounded up to cell_size
    
    profile = {'driver': 'GTiff', 
               'dtype': dtype, 
               'nodata': -32768, 
               'width': int((bounds[2] - bounds[0]) / cell_size), 
               'height': int((bounds[3] - bounds[1]) / cell_size), 
               'count': 1, 
               'crs': 'epsg:28992',
               'BIGTIFF': "IF_SAFER",
               'transform': Affine(cell_size, 0.0, bounds[0], 0.0, -cell_size, bounds[3]), 
               'tiled': True, 
               'interleave': 'band', 
               'compress': 'deflate',
               'predictor': 2,
               'blockxsize': 256, 
               'blockysize': 256}
    
    url = 'https://geodata.nationaalgeoregister.nl/{}/wcs?'.format(layer[:layer.find('_')])
    wcs = WebCoverageService(url,version='1.0.0')
    
    cols = int(np.ceil((bounds[2]-bounds[0])/cell_size/max_size))
    rows = int(np.ceil((bounds[3]-bounds[1])/cell_size/max_size))
    
    window_width = int((bounds[2] - bounds[0])/cols / cell_size)
    window_height = int((bounds[3] - bounds[1])/rows / cell_size)

    with rasterio.open(tif_file,'w',**profile) as dst:
        dst.scales = [0.01]
        for row in range(rows):
            for col in range(cols):
                xmin = bounds[0] + (col * window_width * cell_size)
                ymax = bounds[3] - (row * window_height * cell_size)
                xmax = xmin + (window_width * cell_size)
                ymin = ymax - (window_height * cell_size)
                
                bound_poly = Polygon([(xmin,ymin),(xmin,ymax),(xmax,ymax),(xmax,ymin),(xmin,ymin)])
                if bound_poly.intersects(mask_poly):
                    print('NGR download: (row: {}/{}, col: {}/{})'.format(row+1,rows,col+1,cols))
                    
                    attempt = 1
                    succeed = False
                    
                    while not succeed and attempt <= attempts:
                        try:
                            requestbbox=(xmin,ymin,xmax,ymax)
                            requestwidth = window_width
                            requestheight = window_height   
                            gc = wcs.getCoverage(identifier=layer,
                                                  bbox=requestbbox,
                                                  format='GEOTIFF_FLOAT32',width=requestwidth,
                                                  height=requestheight,
                                                  crs='EPSG:28992')
                            with MemoryFile(gc) as memfile:
                                 with memfile.open() as src:
                                     data = src.read(1)
                                     if src_nodata == None: src_nodata = src.profile['nodata']
                                     data = np.where(data == src_nodata, nodata, (data * 100).astype(rasterio.int16))
                                     if not bound_poly.within(mask_poly):
                                         geometry = bound_poly.intersection(mask_poly)
                                         mask = rasterio.features.rasterize(
                                             [(geometry, 1)],
                                             out_shape=data.shape,
                                             transform=src.profile['transform'],
                                             fill=0,
                                             all_touched=True,
                                             dtype=dtype)
                                         data = np.where(mask == 1, data, nodata)
                            succeed = True
                        except Exception as e:
                            print('FAILED ATTEMPT ({}/{}): {} RETRYING 5 SECS'.format(attempt, attempts, e))
                            attempt += 1
                            time.sleep(5)
                            pass
                                           
                    dst.write(data.astype(rasterio.int16), window=Window(col * window_width, row * window_height, 
                                                  window_width, 
                                                  window_height), indexes=1)
        if overviews is not None:
            print('creating overviews')
            factors = [int(size / cell_size) for size in overviews if size > cell_size]
            dst.build_overviews(factors, Resampling.average)
            dst.update_tags(ns='rio_overview', resampling='average')         
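A minimal usage sketch for to_tif (the area and output path are hypothetical; it relies on shapely and on the module-level constants noted above):

from shapely.geometry import box

# hypothetical 1 km x 1 km area in RD New (EPSG:28992) coordinates
aoi = box(155000, 463000, 156000, 464000)
to_tif(aoi, 'ahn3_dtm_clip.tif', layer='ahn3_05m_dtm', cell_size=0.5, overviews=[5, 25])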
Пример #34
0
def getSoilsRasterDataForBoundingBox(config,
                                     outputDir,
                                     bbox,
                                     srs='EPSG:4326',
                                     resx=0.000277777777778,
                                     resy=0.000277777777778,
                                     interpolation='bilinear',
                                     fmt=FORMAT_GEOTIFF,
                                     overwrite=False,
                                     verbose=False,
                                     outfp=sys.stdout):
    """
        Download soil property rasters from http://www.clw.csiro.au/aclep/soilandlandscapegrid/
        For each property, rasters for the first 1 m of the soil profile will be downloaded,
        from which the depth-weighted mean of the property will be calculated and stored in outputDir.
    
        @param config A Python ConfigParser (not currently used)
        @param outputDir String representing the absolute/relative path of the directory into which output raster should be written
        @param bbox Dict representing the lat/long coordinates and spatial reference of the bounding box area
            for which the raster is to be extracted.  The following keys must be specified: minX, minY, maxX, maxY, srs.
        @param srs String representing the spatial reference of the raster to be returned.
        @param resx Float representing the X resolution of the raster(s) to be returned
        @param resy Float representing the Y resolution of the raster(s) to be returned
        @param interpolation String representing resampling method to use. Must be one of spatialdatalib.utils.RASTER_RESAMPLE_METHOD.
        @param fmt String representing format of raster file.  Must be one of FORMATS.
        @param overwrite Boolean True if existing data should be overwritten
        @param verbose Boolean True if detailed output information should be printed to outfp
        @param outfp File-like object to which verbose output should be printed
    
        @return A dictionary mapping soil property names to soil property file path and WCS URL, i.e.
            dict[soilPropertyName] = (soilPropertyFilePath, WCS URL)
    
        @exception Exception if interpolation method is not known
        @exception Exception if fmt is not a known format
        @exception Exception if output already exists but overwrite is False
        @exception Exception if a gdal_calc.py command fails
    """
    if interpolation not in RASTER_RESAMPLE_METHOD:
        raise Exception(
            "Interpolation method {0} is not of a known method {1}".format(
                interpolation, RASTER_RESAMPLE_METHOD))
    if fmt not in FORMATS:
        raise Exception("Format {0} is not of a known format {1}".format(
            fmt, str(FORMATS)))
    if verbose:
        outfp.write("Acquiring soils data from {0}\n".format(DC_PUBLISHER))

    soilPropertyRasters = {}

    #import logging
    #logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
    #owslib_log = logging.getLogger('owslib')
    # Add formatting and handlers as needed
    #owslib_log.setLevel(logging.DEBUG)

    # Set-up gdal_calc.py command
    gdalBase = None
    try:
        gdalBase = config.get('GDAL/OGR', 'GDAL_BASE')
    except configparser.NoOptionError:
        gdalBase = os.path.dirname(config.get('GDAL/OGR', 'PATH_OF_GDAL_WARP'))

    gdalCmdPath = os.path.join(gdalBase, 'gdal_calc.py')
    if not os.access(gdalCmdPath, os.X_OK):
        raise IOError(
            errno.EACCES,
            "The gdal_calc.py binary at %s is not executable" % gdalCmdPath)
    gdalCmdPath = os.path.abspath(gdalCmdPath)

    tmpdir = tempfile.mkdtemp()
    #print(tmpdir)

    bbox = [bbox['minX'], bbox['minY'], bbox['maxX'], bbox['maxY']]

    # For each soil variable, download desired depth layers
    for v in list(VARIABLE.keys()):
        variable = VARIABLE[v]

        soilPropertyName = "soil_raster_pct{var}".format(var=v)
        soilPropertyFilename = "{name}.tif".format(name=soilPropertyName)
        soilPropertyFilepathTmp = os.path.join(tmpdir, soilPropertyFilename)
        soilPropertyFilepath = os.path.join(outputDir, soilPropertyFilename)

        if verbose:
            outfp.write("Getting attribute {0} ...\n".format(soilPropertyName))

        delete = False
        if os.path.exists(soilPropertyFilepath):
            if not overwrite:
                raise Exception(
                    "File {0} already exists, and overwrite is false".format(
                        soilPropertyFilepath))
            else:
                delete = True

        url = URL_BASE.format(variable=variable)

        wcs = WebCoverageService(url, version='1.0.0')
        (coverages, weights_abs) = _getCoverageIDsAndWeightsForCoverageTitle(
            wcs, variable)

        outfiles = []
        weights = []
        for c in list(coverages.keys()):
            coverage = coverages[c]
            weights.append(weights_abs[c])
            #coverage = c.format(variable=variable)
            wcsfp = wcs.getCoverage(
                identifier=coverage,
                bbox=bbox,
                crs='EPSG:4326',
                resx=resx,  # their WCS seems to accept resx, resy in meters
                resy=resy,
                format=fmt)
            filename = os.path.join(tmpdir,
                                    "{coverage}.tif".format(coverage=c))
            outfiles.append(filename)
            f = open(filename, 'wb')
            f.write(wcsfp.read())
            f.close()

        # Compute depth-length weighted-average for each coverage using gdal_calc.py
        assert (len(outfiles) == len(COVERAGES))
        gdalCommand = gdalCmdPath

        calcStr = '0'  # Identity element for addition
        for (i, outfile) in enumerate(outfiles):
            ordinal = i + 1  # avoid shadowing the built-in ord()
            var_label = ordinalToAlpha(ordinal)
            gdalCommand += " -{var} {outfile}".format(var=var_label,
                                                      outfile=outfile)
            calcStr += "+({weight}*{var})".format(weight=weights[i],
                                                  var=var_label)

        gdalCommand += " --calc='{calc}' --outfile={outfile} --type='Float32' --format=GTiff --co='COMPRESS=LZW'".format(
            calc=calcStr, outfile=soilPropertyFilepathTmp)
        #print("GDAL command:\n{0}".format(gdalCommand))
        process = Popen(gdalCommand,
                        cwd=outputDir,
                        shell=True,
                        stdout=PIPE,
                        stderr=PIPE)
        (process_stdout, process_stderr) = process.communicate()
        if process.returncode != 0:
            raise Exception(
                "GDAL command {0} failed, returning {1}\nstdout:\n{2}\nstderr:\n{3}\n."
                .format(gdalCommand, process.returncode, process_stdout,
                        process_stderr))
        if verbose:
            outfp.write(process_stdout)
            outfp.write(process_stderr)

        # Resample raster
        if delete:
            os.unlink(soilPropertyFilepath)
        resampleRaster(config,
                       outputDir,
                       soilPropertyFilepathTmp,
                       soilPropertyFilename,
                       'EPSG:4326',
                       srs,
                       resx,
                       resy,
                       resampleMethod=interpolation)

        soilPropertyRasters[soilPropertyName] = (soilPropertyFilepath, wcs.url)

    # Clean-up
    shutil.rmtree(tmpdir)

    return soilPropertyRasters
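A hedged usage sketch for getSoilsRasterDataForBoundingBox; the config file name is hypothetical, the bbox keys follow the docstring, and VARIABLE, URL_BASE and the format constants come from the surrounding module:

import configparser

config = configparser.ConfigParser()
config.read('ecohydrolib.cfg')  # hypothetical config providing the 'GDAL/OGR' section

bbox = {'minX': 147.5, 'minY': -37.7, 'maxX': 147.7, 'maxY': -37.5, 'srs': 'EPSG:4326'}
rasters = getSoilsRasterDataForBoundingBox(config, '/tmp/soils', bbox, overwrite=True, verbose=True)
for name, (path, wcs_url) in rasters.items():
    print(name, path, wcs_url)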
Пример #35
0
def getSoilsRasterDataForBoundingBox(config, outputDir, bbox, 
                                     srs='EPSG:4326',
                                     resx=0.000277777777778,
                                     resy=0.000277777777778,
                                     interpolation='bilinear',
                                     fmt=FORMAT_GEOTIFF, 
                                     overwrite=False,
                                     verbose=False,
                                     outfp=sys.stdout):
    """
        Download soil property rasters from http://www.clw.csiro.au/aclep/soilandlandscapegrid/
        For each property, rasters for the first 1 m of the soil profile will be downloaded,
        from which the depth-weighted mean of the property will be calculated and stored in outputDir.
    
        @param config A Python ConfigParser (not currently used)
        @param outputDir String representing the absolute/relative path of the directory into which output raster should be written
        @param bbox Dict representing the lat/long coordinates and spatial reference of the bounding box area
            for which the raster is to be extracted.  The following keys must be specified: minX, minY, maxX, maxY, srs.
        @param srs String representing the spatial reference of the raster to be returned.
        @param resx Float representing the X resolution of the raster(s) to be returned
        @param resy Float representing the Y resolution of the raster(s) to be returned
        @param interpolation String representing resampling method to use. Must be one of spatialdatalib.utils.RASTER_RESAMPLE_METHOD.
        @param fmt String representing format of raster file.  Must be one of FORMATS.
        @param overwrite Boolean True if existing data should be overwritten
        @param verbose Boolean True if detailed output information should be printed to outfp
        @param outfp File-like object to which verbose output should be printed
    
        @return A dictionary mapping soil property names to soil property file path and WCS URL, i.e.
            dict[soilPropertyName] = (soilPropertyFilePath, WCS URL)
    
        @exception Exception if interpolation method is not known
        @exception Exception if fmt is not a known format
        @exception Exception if output already exists but overwrite is False
        @exception Exception if a gdal_calc.py command fails
    """
    if interpolation not in RASTER_RESAMPLE_METHOD:
        raise Exception("Interpolation method {0} is not of a known method {1}".format(interpolation,
                                                                                       RASTER_RESAMPLE_METHOD))
    if fmt not in FORMATS:
        raise Exception("Format {0} is not of a known format {1}".format(fmt, str(FORMATS)))
    if verbose:
        outfp.write("Acquiring soils data from {0}\n".format(DC_PUBLISHER))
    
    soilPropertyRasters = {}
    
    #import logging
    #logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
    #owslib_log = logging.getLogger('owslib')
    # Add formatting and handlers as needed
    #owslib_log.setLevel(logging.DEBUG)
    
    # Set-up gdal_calc.py command
    gdalBase = None
    try:
        gdalBase = config.get('GDAL/OGR', 'GDAL_BASE')
    except ConfigParser.NoOptionError:
        gdalBase = os.path.dirname(config.get('GDAL/OGR', 'PATH_OF_GDAL_WARP'))
    
    gdalCmdPath = os.path.join(gdalBase, 'gdal_calc.py')
    if not os.access(gdalCmdPath, os.X_OK):
        raise IOError(errno.EACCES, "The gdal_calc.py binary at %s is not executable" %
                      gdalCmdPath)
    gdalCmdPath = os.path.abspath(gdalCmdPath)
    
    tmpdir = tempfile.mkdtemp()
    #print(tmpdir)
    
    bbox = [bbox['minX'], bbox['minY'], bbox['maxX'], bbox['maxY']]
    
    # For each soil variable, download desired depth layers
    for v in VARIABLE.keys():
        variable = VARIABLE[v]
        
        soilPropertyName = "soil_raster_pct{var}".format(var=v)
        soilPropertyFilename = "{name}.tif".format(name=soilPropertyName)
        soilPropertyFilepathTmp = os.path.join(tmpdir, soilPropertyFilename)
        soilPropertyFilepath = os.path.join(outputDir, soilPropertyFilename)
        
        if verbose:
            outfp.write("Getting attribute {0} ...\n".format(soilPropertyName))
        
        delete = False
        if os.path.exists(soilPropertyFilepath):
            if not overwrite:
                raise Exception("File {0} already exists, and overwrite is false".format(soilPropertyFilepath))
            else:
                delete = True
        
        url = URL_BASE.format(variable=variable)

        wcs = WebCoverageService(url, version='1.0.0')
        (coverages, weights_abs) = _getCoverageIDsAndWeightsForCoverageTitle(wcs, variable)
        
        outfiles = []
        weights = []
        for c in coverages.keys():
            coverage = coverages[c]
            weights.append(weights_abs[c])
            #coverage = c.format(variable=variable)
            wcsfp = wcs.getCoverage(identifier=coverage, bbox=bbox,
                                    crs='EPSG:4326',
                                    resx=resx, # their WCS seems to accept resx, resy in meters
                                    resy=resy,
                                    format=fmt)
            filename = os.path.join(tmpdir, "{coverage}.tif".format(coverage=c))
            outfiles.append(filename)
            f = open(filename, 'wb')
            f.write(wcsfp.read())
            f.close()
        
        # Compute depth-length weighted-average for each coverage using gdal_calc.py
        assert(len(outfiles) == len(COVERAGES))
        gdalCommand = gdalCmdPath
        
        calcStr = '0' # Identity element for addition
        for (i, outfile) in enumerate(outfiles):
            ordinal = i + 1  # avoid shadowing the built-in ord()
            var_label = ordinalToAlpha(ordinal)
            gdalCommand += " -{var} {outfile}".format(var=var_label, outfile=outfile)
            calcStr += "+({weight}*{var})".format(weight=weights[i],
                                                  var=var_label)
            
        gdalCommand += " --calc='{calc}' --outfile={outfile} --type='Float32' --format=GTiff --co='COMPRESS=LZW'".format(calc=calcStr,
                                                                                                                         outfile=soilPropertyFilepathTmp)     
        #print("GDAL command:\n{0}".format(gdalCommand))
        process = Popen(gdalCommand, cwd=outputDir, shell=True,
                        stdout=PIPE, stderr=PIPE)
        (process_stdout, process_stderr) = process.communicate()
        if process.returncode != 0:
            raise Exception("GDAL command {0} failed, returning {1}\nstdout:\n{2}\nstderr:\n{3}\n.".format(gdalCommand, 
                                                                                                           process.returncode,
                                                                                                           process_stdout,
                                                                                                           process_stderr))
        if verbose:
            outfp.write(process_stdout)
            outfp.write(process_stderr)
    
        # Resample raster
        if delete:
            os.unlink(soilPropertyFilepath)
        resampleRaster(config, outputDir, soilPropertyFilepathTmp, soilPropertyFilename,
                       'EPSG:4326', srs, resx, resy, resampleMethod=interpolation)
    
        soilPropertyRasters[soilPropertyName] = (soilPropertyFilepath, wcs.url)
    
    # Clean-up
    shutil.rmtree(tmpdir)
    
    return soilPropertyRasters
        
        
Пример #36
0
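This notebook fragment relies on a wcs object, a layer_name and its coverage metadata created in an earlier cell; a minimal sketch of that setup (the endpoint URL is a placeholder):

from owslib.wcs import WebCoverageService

wcs = WebCoverageService('https://example.org/wcs', version='1.0.0')  # placeholder endpoint
layer_name = list(wcs.contents)[0]   # pick the first advertised coverage
coverage = wcs.contents[layer_name]  # coverage metadata (bounding boxes, CRS, formats)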
#width, height = 500, 500
width, height = None, None
## resx, resy are resolution in CRS units, in this case m
#resx, resy = None, None
resx, resy = 200, 200
# Note: bbox (llx, lly, urx, ury)
bbox = coverage.boundingboxes[0]['bbox']
print(bbox)
crs = coverage.supportedCRS[0]
print(crs)
format = "GeoTIFF"
response = wcs.getCoverage(identifier=layer_name,
                           width=width,
                           height=height,
                           resx=resx,
                           resy=resy,
                           bbox=bbox,
                           format=format,
                           crs=crs)
from urllib.parse import unquote

print(unquote(response.geturl()))

#driver = gdal.GetDriverByName("Gtiff")
#ds = driver.CreateCopy('', response.read())
ds = gdal.Open(response.geturl())
print(ds.RasterXSize, ds.RasterYSize, ds.RasterCount)

band = ds.GetRasterBand(1)
nodata = band.GetNoDataValue()
ar = band.ReadAsArray()
Пример #37
0
    def test_raster_wcs_reprojection(self):
        """UTM Raster can be reprojected by Geoserver and downloaded correctly
        """
        # FIXME (Ole): Still need to do this with assertions

        filename = 'tsunami_max_inundation_depth_BB_utm.asc'
        projected_tif_file = os.path.join(TESTDATA, filename)

        #projected_tif = file_upload(projected_tif_file, overwrite=True)
        projected_tif = save_to_geonode(projected_tif_file,
                                        user=self.user,
                                        overwrite=True)
        check_layer(projected_tif)

        wcs_url = settings.GEOSERVER_BASE_URL + 'wcs'
        wcs = WebCoverageService(wcs_url, version='1.0.0')
        #logger.info(wcs.contents)
        metadata = wcs.contents[projected_tif.typename]
        #logger.info(metadata.grid)
        bboxWGS84 = metadata.boundingBoxWGS84
        #logger.info(bboxWGS84)
        resx = metadata.grid.offsetvectors[0][0]
        resy = abs(float(metadata.grid.offsetvectors[1][1]))
        #logger.info("resx=%s resy=%s" % (str(resx), str(resy)))
        formats = metadata.supportedFormats
        #logger.info(formats)
        supportedCRS = metadata.supportedCRS
        #logger.info(supportedCRS)
        width = metadata.grid.highlimits[0]
        height = metadata.grid.highlimits[1]
        #logger.info("width=%s height=%s" % (width, height))
        gs_cat = Layer.objects.gs_catalog
        cvg_store = gs_cat.get_store(projected_tif.name)
        cvg_layer = gs_cat.get_resource(projected_tif.name, store=cvg_store)
        #logger.info(cvg_layer.request_srs_list)
        #logger.info(cvg_layer.response_srs_list)

        # FIXME: A patch was submitted OWSlib 20110808
        # Can delete the following once patch appears
        # In the future get bboxNative and nativeSRS from get_metadata
        descCov = metadata._service.getDescribeCoverage(projected_tif.typename)
        envelope = (descCov.find(ns('CoverageOffering/') + ns('domainSet/') +
                                 ns('spatialDomain/') +
                                 '{http://www.opengis.net/gml}Envelope'))
        nativeSrs = envelope.attrib['srsName']
        #logger.info(nativeSrs)
        gmlpositions = envelope.findall('{http://www.opengis.net/gml}pos')
        lc = gmlpositions[0].text
        uc = gmlpositions[1].text
        bboxNative = (float(lc.split()[0]), float(lc.split()[1]),
                      float(uc.split()[0]), float(uc.split()[1]))
        #logger.info(bboxNative)
        # ---- END PATCH

        # Make a temp dir to store the saved files
        tempdir = '/tmp/%s' % str(time.time())
        os.mkdir(tempdir)

        # Check that the layer can be downloaded in its native projection
        cvg = wcs.getCoverage(identifier=projected_tif.typename,
                format='GeoTIFF',
                crs=nativeSrs,
                bbox=bboxNative,
                resx=resx,
                resy=resy)

        t = tempfile.NamedTemporaryFile(delete=False,
                                        dir=tempdir)

        out = open(t.name, 'wb')
        out.write(cvg.read())
        out.close()
        #logger.info("GeoTIFF in %s = %s" % (nativeSrs, t.name))
        # TODO: Verify that the file is a valid GeoTiff and that it is
        # _exactly_ the same size and bbox of the original

        # Test that the layer can be downloaded in ARCGRID format
        cvg_layer.supported_formats = cvg_layer.supported_formats + ['ARCGRID']
        gs_cat.save(cvg_layer)
        cvg = wcs.getCoverage(identifier=projected_tif.typename,
                format='ARCGRID',
                crs=nativeSrs,
                bbox=bboxNative,
                resx=resx,
                resy=resy)

        t = tempfile.NamedTemporaryFile(delete=False,
                                    dir=tempdir)

        out = open(t.name, 'wb')
        out.write(cvg.read())
        out.close()
        #logger.info("ARCGRID in %s = %s" % (nativeSrs, t.name))
        # Check that the downloaded file is a valid ARCGRID file and that it
        # the required projection information
        # (FIXME: There is no prj file here. GS bug)

        # Check that the layer can be downloaded in WGS84
        cvg_layer.request_srs_list += ['EPSG:4326']
        cvg_layer.response_srs_list += ['EPSG:4326']
        gs_cat.save(cvg_layer)
        #logger.info(cvg_layer.request_srs_list)
        #logger.info(cvg_layer.response_srs_list)
        cvg = wcs.getCoverage(identifier=projected_tif.typename,
                format='GeoTIFF',
                crs='EPSG:4326',
                bbox=bboxWGS84,
                #resx=0.000202220898116, # Should NOT be hard-coded!
                                         # How do we convert
                #resy=0.000202220898116) # See comments in riab issue #103
                width=width,
                height=height)

        t = tempfile.NamedTemporaryFile(delete=False,
                                    dir=tempdir)

        out = open(t.name, 'wb')
        out.write(cvg.read())
        out.close()
        #logger.info("GeoTIFF in %s = %s" % ("EPSG:4326", t.name))
        # TODO: Verify that the file is a valid GeoTiff and that it is
        # the correct size and bbox based on the resx and resy or width
        # and height specified

        # Check that we can download the layer in another projection
        cvg_layer.request_srs_list += ['EPSG:32356']
        cvg_layer.response_srs_list += ['EPSG:32356']
        cvg_layer.request_srs_list += ['EPSG:900913']
        cvg_layer.response_srs_list += ['EPSG:900913']
        gs_cat.save(cvg_layer)
        #logger.info(cvg_layer.request_srs_list)
        #logger.info(cvg_layer.response_srs_list)
        # How do we get the bboxes for the newly assigned
        # request/response SRS??

        cvg = wcs.getCoverage(identifier=projected_tif.typename,
                format='GeoTIFF',
                crs='EPSG:32356',  # Should not be hardcoded for a test,
                                   # or should use 900913 (need bbox)
                bbox=bboxNative,
                #resx=0.000202220898116, # Should NOT be hard-coded!
                                         # How do we convert
                #resy=0.000202220898116) # See comments in riab issue #103
                width=width,
                height=height)

        t = tempfile.NamedTemporaryFile(delete=False,
                                        dir=tempdir)

        out = open(t.name, 'wb')
        out.write(cvg.read())
        out.close()
Пример #38
0
from owslib.wcs import WebCoverageService
wcs = WebCoverageService(
    'https://oceanwatch.pfeg.noaa.gov/thredds/wcs/satellite/QA/wekm/14day',
    version='1.0.0')
# Take a look at the contents (coverages) of the wcs.
print('contents: ', wcs.contents)
qawekm = wcs['QAwekm']
# Take a look at the attributes of the coverage
dir(qawekm)
print('bounding box: ', qawekm.boundingBoxWGS84)
#print('time positions: ', qawekm.timepositions)
print('supported formats: ', qawekm.supportedFormats)
# mock up a simple GetCoverage request.
output = wcs.getCoverage(identifier='QAwekm',
                         time=['2019-07-02T03:11:59.000Z'],
                         bbox=(0, -74, 350, 60),
                         format='GeoTIFF')
# Write the file out to disk.
f = open('oceans.tif', 'wb')
f.write(output.read())
f.close()
print('done')
Пример #39
0
def getNLCDRasterDataForBoundingBox(config, outputDir, bbox, 
                                    coverage=DEFAULT_COVERAGE,
                                    filename='NLCD',
                                    srs='EPSG:4326',
                                    resx=0.000277777777778,
                                    resy=0.000277777777778,
                                    interpolation='near',
                                    fmt=FORMAT_GEOTIFF, 
                                    overwrite=False,
                                    verbose=False,
                                    outfp=sys.stdout):
    """
        Download NLCD rasters from 
        http://raster.nationalmap.gov/arcgis/rest/services/LandCover/USGS_EROS_LandCover_NLCD/MapServer
        
        @param config A Python ConfigParser (not currently used)
        @param outputDir String representing the absolute/relative path of the directory into which output raster should be written
        @param bbox Dict representing the lat/long coordinates and spatial reference of the bounding box area
            for which the raster is to be extracted.  The following keys must be specified: minX, minY, maxX, maxY, srs.
        @param srs String representing the spatial reference of the raster to be returned.
        @param resx Float representing the X resolution of the raster(s) to be returned
        @param resy Float representing the Y resolution of the raster(s) to be returned
        @param interpolation String representing resampling method to use. Must be one of spatialdatalib.utils.RASTER_RESAMPLE_METHOD.
        @param fmt String representing format of raster file.  Must be one of FORMATS.
        @param overwrite Boolean True if existing data should be overwritten
        @param verbose Boolean True if detailed output information should be printed to outfp
        @param outfp File-like object to which verbose output should be printed
    
        @return A tuple (True, WCS URL, output filename) on success
    
        @exception Exception if coverage is not known
        @exception Exception if interpolation method is not known
        @exception Exception if fmt is not a known format
        @exception Exception if output already exists but overwrite is False
    """
    if coverage not in COVERAGES:
        raise Exception("Coverage {0} is not known".format(coverage))
    if interpolation not in INTERPOLATION_METHODS:
        raise Exception("Interpolation method {0} is not of a known method {1}".format(interpolation,
                                                                                       INTERPOLATION_METHODS.keys()))
    if fmt not in FORMATS:
        raise Exception("Format {0} is not of a known format {1}".format(fmt, str(FORMATS)))
    if verbose:
        outfp.write("Acquiring NLCD coverage {lctype} from {pub}\n".format(lctype=coverage,
                                                                           pub=DC_PUBLISHER))
    
    outFilename = os.path.extsep.join([filename, FORMAT_EXT[fmt]])
    outFilepath = os.path.join(outputDir, outFilename)
        
    delete = False
    if os.path.exists(outFilepath):
        if not overwrite:
            raise Exception("File {0} already exists, and overwrite is false".format(outFilepath))
        else:
            delete = True
    
    try:
        if delete:
            os.unlink(outFilepath)
        
        wcs = WebCoverageService(URL_BASE, version='1.0.0')
        bbox = [bbox['minX'], bbox['minY'], bbox['maxX'], bbox['maxY']]
        wcsfp = wcs.getCoverage(identifier=COVERAGES[coverage], bbox=bbox,
                                crs=srs,
                                response_crs=srs,
                                resx=resx, # their WCS seems to accept resx, resy in meters
                                resy=resy,
                                format=fmt,
                                interpolation=INTERPOLATION_METHODS[interpolation],
                                **{'band': '1'})
        url = urllib.unquote(wcsfp.geturl())
        f = open(outFilepath, 'wb')
        f.write(wcsfp.read())
        f.close()
        
        return (True, url, outFilename)
    except Exception as e:
        traceback.print_exc(file=outfp)
        raise(e)
    finally:
        # Clean-up
        pass
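A hedged usage sketch for getNLCDRasterDataForBoundingBox; config is unused per the docstring so None is passed, the bbox keys follow the docstring, and DEFAULT_COVERAGE and the format constants come from the surrounding module:

bbox = {'minX': -76.8, 'minY': 39.2, 'maxX': -76.6, 'maxY': 39.4, 'srs': 'EPSG:4326'}
ok, wcs_url, out_file = getNLCDRasterDataForBoundingBox(None, '/tmp/nlcd', bbox,
                                                        filename='NLCD', overwrite=True)
print(ok, wcs_url, out_file)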
Пример #40
0
def get_records(post_code_for_bounding_box):
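    # Assumed module-level imports/constants not shown in this snippet:
    # os, httplib, xml.etree.ElementTree as ET, utils (provides utils.post_codes),
    # and HOST_NAME, HOST_PORT, HOST_PATH describing the CSW endpoint.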

    pc_info = utils.post_codes[int(post_code_for_bounding_box)]

    print "Getting images for: %s - %s, %s" % (post_code_for_bounding_box, pc_info['suburb'], pc_info['state'])

    lower_corner = '%s %s' % (float(pc_info['lon']) - 1.0, float(pc_info['lat']) - 0.5)
    lower_corner_sml = '%s %s' % (float(pc_info['lon']) - 0.2, float(pc_info['lat']) - 0.1)
    upper_corner = '%f %f' % (float(pc_info['lon']) + 1.0, float(pc_info['lat']) + 0.5)
    upper_corner_sml = '%f %f' % (float(pc_info['lon']) + 0.2, float(pc_info['lat']) + 0.1)

    body = """<?xml version="1.0" encoding="UTF-8"?>
<csw:GetRecords xmlns:gml="http://www.opengis.net/gml"
xmlns:ogc="http://www.opengis.net/ogc"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:csw="http://www.opengis.net/cat/csw/2.0.2"
outputSchema="http://www.opengis.net/cat/csw/2.0.2"
outputFormat="application/xml" version="2.0.2" service="CSW"
resultType="results" maxRecords="10" nextRecord="0"
xsi:schemaLocation="http://www.opengis.net/cat/csw/2.0.2
http://schemas.opengis.net/csw/2.0.2/CSW-discovery.xsd">
<csw:Query typeNames="csw:Record">
<csw:ElementSetName>full</csw:ElementSetName>
<csw:Constraint version="1.1.0">
<ogc:Filter>
<ogc:And>
<ogc:PropertyIsLike escape="\" singleChar="_" wildCard="%">
<ogc:PropertyName>Title</ogc:PropertyName>
<ogc:Literal>%Landsat%</ogc:Literal>
</ogc:PropertyIsLike>
<ogc:BBOX>
<ogc:PropertyName>ows:BoundingBox</ogc:PropertyName>
<gml:Envelope>
<gml:lowerCorner>""" + lower_corner_sml + """</gml:lowerCorner> 
<gml:upperCorner>""" + upper_corner_sml + """</gml:upperCorner>
</gml:Envelope>
</ogc:BBOX>
</ogc:And>
</ogc:Filter>
</csw:Constraint>
<ogc:SortBy>
<ogc:SortProperty>
<ogc:PropertyName>apiso:TempExtent_begin</ogc:PropertyName>
<ogc:SortOrder>ASC</ogc:SortOrder>
</ogc:SortProperty>
</ogc:SortBy>
</csw:Query>
</csw:GetRecords>"""

    headers = {
        'Accept-Encoding': 'gzip,deflate',
        'Content-Type': 'text/xml;charset=UTF-8',
        'Content-Length': len(body),
        'Host': HOST_NAME,
        'Connection': 'Keep-Alive',
        'User-Agent': 'GovHack - Team A Kicking Wheel'
        }

    #request_path = '%s?%s&format=json' % (HOST_PATH, query_params)
    request_path = '%s?request=GetRecords' % (HOST_PATH)

    conn = httplib.HTTPConnection(HOST_NAME, HOST_PORT)
    conn.request('POST', request_path, body, headers)
    response = conn.getresponse()

    print 'GetRecords: %d %s' % (response.status, response.reason)

    if response.status == 200:
        data = response.read()
        conn.close()
        
        result_tree = ET.ElementTree(ET.fromstring(data))

        namespaces = {
            'csw': 'http://www.opengis.net/cat/csw/2.0.2',
            'dc': 'http://purl.org/dc/elements/1.1/',
            'ows': 'http://www.opengis.net/ows',
            }
        results = result_tree.getroot().findall(
            'csw:SearchResults/csw:Record', namespaces=namespaces
            )

        for result in results:
            get_capabilities_response = result.find(
                "dc:URI[@protocol='OGC:WCS']",
                namespaces=namespaces
                )
            wcs_url = get_capabilities_response.text
            print 'Record URL: %s' % wcs_url

            service = WebCoverageService(wcs_url, version='1.0.0')
            
            for content in service.contents:
                bounding_box_strs = lower_corner_sml.split(' ') + upper_corner_sml.split(' ')
                bounding_box = tuple([float(i) for i in bounding_box_strs])

                print 'Bounding Box: ' + str(bounding_box)

                img = service.getCoverage(identifier=content, bbox=bounding_box, format='GeoTIFF')

                identifier = result.find('dc:identifier', namespaces=namespaces).text
                directory = 'landsat_images/%s' % pc_info['suburb']

                if not os.path.exists(directory):
                    os.makedirs(directory)

                file_name = '%s/%s_%s.png' % (directory, identifier, content)

                print 'Writing file: %s' % file_name

                out = open(file_name, 'wb')
                out.write(img.read())
                out.close()
Пример #41
0
wcs = WebCoverageService(endpoint,version='1.0.0',timeout=60)

# <codecell>

wcs.contents

# <codecell>

for k,v in wcs.contents.iteritems():
    print v.title

# <codecell>

# try Boston Harbor
bbox = (-71.05592748611777, 42.256890708126605, -70.81446033774644, 42.43833963977496)
output = wcs.getCoverage(identifier="topo",bbox=bbox,format='GeoTIFF',
                         resx=0.0003, resy=0.0003)

# <codecell>

f=open('test.tif','wb')
f.write(output.read())
f.close()

# <codecell>

from osgeo import gdal
gdal.UseExceptions()
ds = gdal.Open('test.tif')

# <codecell>
Пример #42
0
cvg = wcs['1']
print cvg.title
print cvg.boundingBoxWGS84

# <codecell>

print cvg.supportedFormats

# <codecell>

print cvg.supportedCRS

# <codecell>

output = wcs.getCoverage(identifier=['1'],bbox=(-75.4,37.8,-75.2,38.0),crs='EPSG:4326',format='GeoTIFF')

# <codecell>

f=open('test.tif','wb')
f.write(output.read())
f.close()

# <codecell>

!more test.tif

# <codecell>


Пример #43
0
def get_ahn_within_extent(extent=None,
                          url='ahn3',
                          identifier='ahn3_5m_dtm',
                          res=5.,
                          version='1.0.0',
                          format='GEOTIFF_FLOAT32',
                          crs='EPSG:28992',
                          cache=True,
                          cache_dir=None,
                          return_fname=False,
                          maxsize=4000,
                          verbose=True,
                          fname=None):
    """

    Parameters
    ----------
    extent : list, tuple or np.array, optional
        extent. The default is None.
    url : str, optional
        possible values 'ahn3' and 'ahn2'. The default is 'ahn3'.
    identifier : str, optional
        Possible values for identifier are:
            'ahn2_05m_int'
            'ahn2_05m_non'
            'ahn2_05m_ruw'
            'ahn2_5m'
            'ahn3_05m_dsm'
            'ahn3_05m_dtm'
            'ahn3_5m_dsm'
            'ahn3_5m_dtm'

        The default is 'ahn3_5m_dtm'.
    res : float, optional
        resolution of the requested ahn raster. The default is 5.0.
    version : str, optional
        version of the wcs service, options are '1.0.0' and '2.0.1'.
        The default is '1.0.0'.
    format : str, optional
        GeoTIFF format. The default is 'GEOTIFF_FLOAT32'.
    crs : str, optional
        coordinate reference system. The default is 'EPSG:28992'.
    cache : boolean, optional
        use cached data if available. The default is True.
    return_fname : boolean, optional
        return path instead of gdal dataset. The default is False.
    maxsize : float, optional
        max width and height of the result of the wcs service. The default is
        4000.
    verbose : boolean, optional
        additional information is printed to the terminal. The default is True.

    Returns
    -------
    osgeo.gdal.dataset or str
        gdal dataset or filename if return_fname is True

    """

    if extent is None:
        extent = [253000, 265000, 481000, 488000]
    if url == 'ahn3':
        url = ('https://geodata.nationaalgeoregister.nl/ahn3/wcs?'
               'request=GetCapabilities&service=WCS')
    elif url == 'ahn2':
        url = ('https://geodata.nationaalgeoregister.nl/ahn2/wcs?'
               'request=GetCapabilities&service=WCS')

    # check if ahn is within limits
    dx = extent[1] - extent[0]
    dy = extent[3] - extent[2]

    if dx > maxsize:
        x_segments = int(np.ceil((dx / res) / maxsize))
    else:
        x_segments = 1

    if dy > maxsize:
        y_segments = int(np.ceil((dy / res) / maxsize))
    else:
        y_segments = 1

    if (x_segments * y_segments) > 1:
        if verbose:
            st = f'''requested ahn raster width or height bigger than {maxsize}
            -> splitting extent into {x_segments * y_segments} tiles'''
            print(st)
        return split_ahn_extent(extent,
                                res,
                                x_segments,
                                y_segments,
                                maxsize,
                                url=url,
                                identifier=identifier,
                                version=version,
                                format=format,
                                crs=crs,
                                cache=cache,
                                cache_dir=cache_dir,
                                return_fname=return_fname,
                                fname=fname)
    if fname is None:
        fname = 'ahn_{:.0f}_{:.0f}_{:.0f}_{:.0f}_{:.0f}.tiff'
        fname = fname.format(*extent, res)
        if cache_dir is None:
            cache_dir = os.path.join(tempfile.gettempdir(), 'ahn', identifier)
        if not os.path.isdir(cache_dir):
            os.makedirs(cache_dir)
        fname = os.path.join(cache_dir, fname)
    else:
        cache = False
    if not cache or not os.path.exists(fname):
        # url='https://geodata.nationaalgeoregister.nl/ahn3/wcs?request=GetCapabilities'
        # identifier='ahn3:ahn3_5m_dsm'

        wcs = WebCoverageService(url, version=version)
        # wcs.contents
        # cvg = wcs.contents[identifier]
        if version == '1.0.0':
            bbox = (extent[0], extent[2], extent[1], extent[3])
            output = wcs.getCoverage(identifier=identifier,
                                     bbox=bbox,
                                     format=format,
                                     crs=crs,
                                     resx=res,
                                     resy=res)
        elif version == '2.0.1':
            # bbox, resx and resy do nothing in version 2.0.1
            subsets = [('x', extent[0], extent[1]),
                       ('y', extent[2], extent[3])]
            output = wcs.getCoverage(identifier=[identifier],
                                     subsets=subsets,
                                     format=format,
                                     crs=crs)
        else:
            raise Exception('Version {} not yet supported'.format(version))
        f = open(fname, 'wb')
        f.write(output.read())
        f.close()
        if verbose:
            print(f"- downloaded {fname}")
    else:
        if verbose:
            print(f"- from cache {fname}")

    if return_fname:
        return fname
    else:
        # load ahn
        ds = load_ahn_tif(fname)

        return ds
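A brief usage sketch for get_ahn_within_extent; the extent order is xmin, xmax, ymin, ymax in EPSG:28992, matching the default in the function, and the coordinates below are hypothetical:

extent = [118000, 120000, 438000, 440000]  # xmin, xmax, ymin, ymax (RD New)
ds = get_ahn_within_extent(extent=extent, identifier='ahn3_5m_dtm', res=5.)
print(ds.RasterXSize, ds.RasterYSize)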
Пример #44
0
    def __init__(self, mask, ESPG="", s="", i=1, j=1):
        self.mask = mask
        self.varname = s
        self.layer = i
        self.outlayer = j
        self.debug = Debug_
        self.ESPG = ESPG
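        # Assumed module-level globals not shown in this snippet:
        # Debug_                        debug flag copied to self.debug
        # llx, lly, urx, ury, dx, dy    mask bounding box corners and cell sizes
        # maskbox                       (llx, lly, urx, ury) bbox passed to getCoverage
        # nrCols, nrRows                raster dimensions of the mask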

        if self.layer == 1: ID = '_0-5cm_mean'
        if self.layer == 2: ID = '_5-15cm_mean'
        if self.layer == 3: ID = '_15-30cm_mean'
        if self.layer == 4: ID = '_30-60cm_mean'
        if self.layer == 5: ID = '_60-100cm_mean'
        if self.layer == 6: ID = '_100-200cm_mean'

        #if self.debug == 1:
        print("===> Processing layer " + str(self.outlayer) + ": " +
              self.varname + ID,
              flush=True)

        # raster=gdal.Open(self.mask)
        ESPG = 'urn:ogc:def:crs:EPSG::{0}'.format(self.ESPG)

        if self.debug == 1:
            print("Mask ESPG and bounding box:" + ESPG,
                  llx,
                  lly,
                  urx,
                  ury,
                  dx,
                  dy,
                  flush=True)

        if self.debug == 1:
            print("Open SOILGRIDS WCS", flush=True)

        url = "http://maps.isric.org/mapserv?map=/map/{}.map".format(
            self.varname)
        wcs = WebCoverageService(url, version='1.0.0')
        # show some info:
        # cov_list = list(wcs.contents)
        # mean_covs = [k for k in wcs.contents.keys() if k.find("mean") != -1]
        # print(mean_covs)

        variable = self.varname + ID
        varout = self.varname + str(self.outlayer)
        outputnametif = "{0}.tif".format(varout)
        outputnamemap = "{0}.map".format(varout)
        #outputnametmp = '_temp_.tif'

        if self.debug == 1:
            print("Downloading " + variable, flush=True)

        # get data as temp geotif and save to disk
        response = wcs.getCoverage(identifier=variable,
                                   crs=ESPG,
                                   bbox=maskbox,
                                   resx=dx,
                                   resy=dy,
                                   format='GEOTIFF_INT16')
        with open(outputnametif, 'wb') as file:
            file.write(response.read())

        # warp to some interpolation
        src = gdal.Open(outputnametif, gdalconst.GA_ReadOnly)
        src_proj = src.GetProjection()
        src_geotrans = src.GetGeoTransform()
        dst = gdal.GetDriverByName('PCRaster').Create(
            outputnamemap, nrCols, nrRows, 1, gdalconst.GDT_Float32,
            ["PCRASTER_VALUESCALE=VS_SCALAR"])
        dst.SetGeoTransform(src_geotrans)
        dst.SetProjection(src_proj)
        gdal.ReprojectImage(src, dst, src_proj, src_proj,
                            gdalconst.GRA_Bilinear)
        #gdalconst.GRA_Cubic)

        # brute force convert tif to map by calling pcrcalc !!!!
        # CMD = "pcrcalc.exe"
        # arg = outputnamemap+"="+outputnametif
        # arg = '{0}{1}.map={0}{1}.tif'.format(self.varname,str(self.outlayer))
        # subprocess.run([CMD,arg])

        dst = None
        src = None
Пример #45
0
    def fetch(self,name):
        #print " * Fetching layer '%s'"%(name)
        if name in self._layers:
            target_file=os.path.join(self._cache,"%s"%(name))
            url=urlparse(self._layers[name])
            qs=parse_qs(url.query)
            query_params={}
            for p in list(qs):
                query_params.update({p:qs[p][0]})

            print "Creating a coverage url:"
            print url.geturl()
            wcs=WebCoverageService(url.geturl(), version='1.0.0')
            meta=wcs.contents[name]

            mapformat=meta.supportedFormats[0]
            
            supported_crses=[crs.code for crs in meta.supportedCRS]            
            
            try:
                crs=meta.supportedCRS[supported_crses.index(self._grid['srid'])]
            except ValueError:
                raise Exception("WCS provider could not retrieve layer '%s' because it is not available in the UTM grid that the model needs to run at. We need a raster in epsg %d, available are only epsg %s."%(name,self._grid['srid']," ".join(map(str,supported_crses))))

            resx=self._grid['cellsize']
            resy=self._grid['cellsize']
            bbox=self._grid['bbox']
            cov=wcs.getCoverage(identifier=name,crs=crs,bbox=bbox,format=mapformat,resx=resx,resy=resy,**query_params)
            
            #Dict used for converting geotiffs to pcraster format. Any data
            #types not set explicitly will become Float32/VS_SCALARS
            pcraster_valuescale = defaultdict(lambda: ('Float32','VS_SCALAR'))
            pcraster_valuescale.update({
                'Float32':  ('Float32','VS_SCALAR'),
                'Int32':    ('Int32','VS_NOMINAL'),
                'Int16':    ('Int32','VS_NOMINAL'), 
                'Byte':     ('Int32','VS_NOMINAL')
            })
            print " * Downloading %s"%(cov.url)
            with open(target_file+".tif",'wb') as f:
                f.write(cov.read())
            print " * Converting to PCRaster format..."


            dataset=gdal.Open(target_file+".tif",GA_ReadOnly)
            print 'Driver: ', dataset.GetDriver().ShortName,'/', dataset.GetDriver().LongName
            print 'Size is ',dataset.RasterXSize,'x',dataset.RasterYSize, 'x',dataset.RasterCount
            print 'Projection is ',dataset.GetProjection()         
            print 'Geotransform is ',dataset.GetGeoTransform()       
            band = dataset.GetRasterBand(1)
            gdal_type=gdal.GetDataTypeName(band.DataType)
            print 'Band Type=',gdal_type
            pcraster_type=pcraster_valuescale.get(gdal_type)
            print "Pcraster type=",pcraster_type
            
            try:
                #-of PCRaster -co "PCRASTER_VALUESCALE=VS_SCALAR" -ot Float32 -a_nodata 500.1
                c=[
                    '/usr/bin/gdal_translate','-q',
                    '-ot',pcraster_type[0],
                    '-of','PCRaster',
                    '-co','PCRASTER_VALUESCALE=%s'%(pcraster_type[1]),
                    target_file+".tif",target_file+".map"
                ]
                print "Conversion command: "
                print " ".join(c)
                rc=subprocess.call(c)
            except Exception as e:
                print " * Conversion to pcraster format failed!!! Hint: %s"%(e)
            print " * Completed!"
        else:
            print " * Layer not found"
            return None
Пример #46
0
# <codecell>

lidar = wcs['1']
print lidar.title
print lidar.boundingBoxWGS84
print lidar.timelimits
print lidar.supportedFormats

# <codecell>

# try Plum Island Sound Region
bbox = (-70.825, 42.701, -70.7526, 42.762)
output = wcs.getCoverage(identifier="1",
                         bbox=bbox,
                         crs='EPSG:4326',
                         format='GeoTIFF',
                         resx=0.0001,
                         resy=0.0001)

# <codecell>

f = open('test.tif', 'wb')
f.write(output.read())
f.close()

# <codecell>

from osgeo import gdal
gdal.UseExceptions()

# <codecell>
Пример #47
0
for k,v in wcs.contents.iteritems():
    print v.title

# <codecell>

lidar = wcs['3']
print lidar.title
print lidar.boundingBoxWGS84
print lidar.timelimits
print lidar.supportedFormats

# <codecell>

# try Boston Harbor
bbox = (-71.05592748611777, 42.256890708126605, -70.81446033774644, 42.43833963977496)
output = wcs.getCoverage(identifier="3",bbox=bbox,crs='EPSG:4326',format='GeoTIFF',
                         resx=0.0003, resy=0.0003)

# <codecell>

f=open('test.tif','wb')
f.write(output.read())
f.close()

# <codecell>

from osgeo import gdal
gdal.UseExceptions()

# <codecell>

ds = gdal.Open('test.tif')
Пример #48
0
    def provide(self, name, options={}):
        """
        The provider's provide() method returns a numpy array with the
        correct data type and proportions given the layer name and
        possibly some extra options. The readmap() in the model
        will convert the numpy array to a pcraster map when it is
        requested.
        """
        logging.debug("WCS: provide request for layer '%s'" % (name))
        target_file = os.path.join(self._cache, "%s" % (name))
        crs = None
        srid = None
        dataset = None
        logging.debug("WCS: geometry: %s" % (self._geom.wkt))
        url = urlparse(self._layers[name])
        qs = parse_qs(url.query)
        query_params = {}
        for p in list(qs):
            query_params.update({p: qs[p][0]})

        wcs = WebCoverageService(url.geturl(), version="1.0.0")
        meta = wcs.contents[name]
        mapformat = meta.supportedFormats[0]
        supported_crses = [crs_.code for crs_ in meta.supportedCRS]
        logging.debug("WCS: Service supports the following crs: %s" % (", ".join(map(str, supported_crses))))

        if self._grid["srid"] in supported_crses:
            crs = meta.supportedCRS[supported_crses.index(self._grid["srid"])]
            srid = self._grid["srid"]
        elif 4326 in supported_crses:
            crs = meta.supportedCRS[supported_crses.index(4326)]
            srid = 4326
        else:
            crs = meta.supportedCRS[supported_crses.index(supported_crses[0])]
            srid = supported_crses[0]

        if crs is not None and srid is not None:
            logging.debug("WCS: using %s (epsg:%d) to fetch the file from the wcs server" % (crs, srid))
        else:
            logging.debug("WCS: could not agree upon a projection format to fetch data with")
            raise Exception("WCS: no valid projections found")

        logging.debug("WCS: reprojecting the chunk mask to the required projection")

        project = partial(
            pyproj.transform, pyproj.Proj(init="epsg:%d" % (self._grid["srid"])), pyproj.Proj(init="epsg:%d" % (srid))
        )

        # Add a small buffer to the request so we fetch an area slightly larger
        # than what we really need. This will prevent some edge effects due
        # to the reprojection.
        projected_geom = transform(project, self._geom.buffer(200))

        logging.debug("WCS: original geom: %s" % (self._geom.wkt))
        logging.debug("WCS: reprojected geom: %s" % (projected_geom.wkt))

        try:
            logging.debug("WCS: fetching wcs data in %s" % (crs))
            logging.debug("WCS: saving to: %s" % (target_file + ".tif"))
            cov = wcs.getCoverage(
                identifier=name,
                crs=crs,
                bbox=projected_geom.bounds,
                format=mapformat,
                width=self._grid["cols"],
                height=self._grid["rows"],
                **query_params
            )
            with open(target_file + ".tif", "wb") as f:
                f.write(cov.read())
            dataset = gdal.Open(target_file + ".tif", gdalconst.GA_ReadOnly)
        except Exception as e:
            logger.error("WCS: failure: %s" % (e))

        utm_data = self.warp_to_grid(dataset)
        dataset = None
        return utm_data