Example #1
class AhnWebCoverageDatasource(object):
    def __init__(self):
        try:
            self.wcs_ahn3 = WebCoverageService(
                'https://geodata.nationaalgeoregister.nl/ahn3/wcs',
                version='1.0.0')
            self.wcs_ahn2 = WebCoverageService(
                'https://geodata.nationaalgeoregister.nl/ahn2/wcs',
                version='1.0.0')
        except:
            print("AHN WCS host unavailable")

    def retrieve_tile_ahn3(self, srid, x_min, y_min, x_max, y_max):
        output_ahn3 = self.wcs_ahn3.getCoverage(
            identifier='ahn3_05m_dtm',
            bbox=[x_min, y_min, x_max, y_max],
            format='GEOTIFF_FLOAT32',
            crs=srid,
            resx=0.5,
            resy=0.5)

        return output_ahn3

    def retrieve_tile_ahn2(self, srid, x_min, y_min, x_max, y_max):
        output_ahn2 = self.wcs_ahn2.getCoverage(
            identifier='ahn2_05m_int',
            bbox=[x_min, y_min, x_max, y_max],
            format='GEOTIFF_FLOAT32',
            crs=srid,
            resx=0.5,
            resy=0.5)

        return output_ahn2
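A minimal usage sketch for the class above; the bounding box, CRS string and output path are illustrative assumptions (EPSG:28992 is the Dutch RD projection used by the AHN services):

datasource = AhnWebCoverageDatasource()
# hypothetical 500 m x 500 m tile in RD New (EPSG:28992) coordinates
tile = datasource.retrieve_tile_ahn3('EPSG:28992', 120000, 485000, 120500, 485500)
with open('ahn3_tile.tif', 'wb') as f:
    f.write(tile.read())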
Example #2
class WCSHelper(object):
	"""docstring for WCSHelper"""
	def __init__(self, url, dates, variable, bbox, single=False):
		super(WCSHelper, self).__init__()
		self.url = url
		self.single = single
		self.dates = dates
		self.variable = variable
		self.bbox = bbox
		self.owslib_log = logging.getLogger('owslib')
		self.owslib_log.setLevel(logging.DEBUG)
		self.wcs = WebCoverageService(url, version="1.0.0")

	def __repr__(self):
		return str(self.wcs)

	def getData(self):
		#print '-'*20
		#print self.bbox
		#print self.dates
		if self.single :
			output = self.wcs.getCoverage(identifier=self.variable, time=[self.dates], bbox=self.bbox, format="NetCDF3")
		else:	
			output = self.wcs.getCoverage(identifier=self.variable, time=self.dates, bbox=self.bbox, format="NetCDF3")
		return output
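A hypothetical usage sketch for WCSHelper (the endpoint, coverage name, dates and bbox are made up; the snippet assumes import logging and from owslib.wcs import WebCoverageService at module level):

helper = WCSHelper(url='http://example.com/thredds/wcs/data.nc',
                   dates=['2010-10-10T12:00:00Z', '2010-10-11T12:00:00Z'],
                   variable='Temperature',
                   bbox=(-10, 40, 0, 50),
                   single=False)
coverage = helper.getData()
with open('output.nc', 'wb') as f:
    f.write(coverage.read())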
Example #3
    def wcsRequest(self, layer='AGRICULTURE'):
        
        
        self.layer = layer
        ID = 'your ID'
        wcs_url = 'https://services.sentinel-hub.com/ogc/wcs/'+ID
        wcs = WebCoverageService(wcs_url, version='1.0.0')
        
        self.x, self.y = self.deg2num(self.lat_center, self.lon_center, self.zoom)
        self.lat_max, self.lon_min = self.num2deg(self.x, self.y, self.zoom)
        self.lat_min, self.lon_max = self.num2deg(self.x+1, self.y+1, self.zoom)
        

        inProj = Proj(init='epsg:4326')
        outProj = Proj(init='epsg:3857')
        x1,y1 = transform(inProj,outProj,self.lon_min,self.lat_min)
        x2,y2 = transform(inProj,outProj,self.lon_max,self.lat_max)
        
        bb=(x1, y1, x2, y2)
        
        self.wcsOut = wcs.getCoverage(identifier=self.layer,
                                      time=None,
                                      width=800,
                                      height=800,
                                      bbox = bb,
                                      format = 'GeoTIFF')
        
        self.imgTiff = Image.open(self.wcsOut)
        self.imgArr = np.array(self.imgTiff)
        
        imgurl = image_to_url(image=self.imgArr)
        self.map.add_layer(ImageOverlay(url=imgurl,
                                        bounds=[[self.lat_min, self.lon_min],
                                                [self.lat_max, self.lon_max]]))
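The deg2num and num2deg helpers called above are not included in this excerpt; conventionally they are the OSM slippy-map tile formulas, sketched here as standalone functions under that assumption:

import math

def deg2num(lat_deg, lon_deg, zoom):
    # degrees -> tile indices (standard slippy-map formula)
    lat_rad = math.radians(lat_deg)
    n = 2.0 ** zoom
    xtile = int((lon_deg + 180.0) / 360.0 * n)
    ytile = int((1.0 - math.asinh(math.tan(lat_rad)) / math.pi) / 2.0 * n)
    return xtile, ytile

def num2deg(xtile, ytile, zoom):
    # tile indices -> degrees of the tile's north-west corner
    n = 2.0 ** zoom
    lon_deg = xtile / n * 360.0 - 180.0
    lat_rad = math.atan(math.sinh(math.pi * (1 - 2 * ytile / n)))
    return math.degrees(lat_rad), lon_deg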
Example #4
def test_ows_interfaces_wcs():
    service = WebCoverageService(WCS_SERVICE_URL, version='1.0.0')
    # Check each service instance conforms to OWSLib interface
    service.alias = 'WCS'
    assert isinstance(service, owslib.coverage.wcs100.WebCoverageService_1_0_0)
    # URL attribute
    assert service.url == WCS_SERVICE_URL
    # version attribute
    assert service.version == '1.0.0'
    # Identification object
    assert hasattr(service, 'identification')
    # Check all ServiceIdentification attributes
    assert service.identification.type == 'OGC:WCS'
    for attribute in ['type', 'version', 'title', 'abstract', 'keywords', 'fees']:
        assert hasattr(service.identification, attribute)
    # Check all ServiceProvider attributes
    for attribute in ['name', 'url', 'contact']:
        assert hasattr(service.provider, attribute)
    # Check all operations implement IOperationMetadata
    for op in service.operations:
        for attribute in ['name', 'methods']:
            assert hasattr(op, attribute)
    # Check all contents implement IContentMetadata as a dictionary
    assert isinstance(service.contents, dict)
    # Check any item (WCS coverage, WMS layer etc) from the contents of each service
    # Check it conforms to IContentMetadata interface
    # get random item from contents dictionary -has to be a nicer way to do this!
    content = service.contents[list(service.contents.keys())[0]]
    for attribute in ['id', 'title', 'boundingBox', 'boundingBoxWGS84', 'crsOptions', 'styles', 'timepositions']:
        assert hasattr(content, attribute)
Example #5
def test_ows_interfaces_wcs():
    service = WebCoverageService(WCS_SERVICE_URL, version='1.0.0')
    # Check each service instance conforms to OWSLib interface
    service.alias = 'WCS'
    assert isinstance(service, owslib.coverage.wcs100.WebCoverageService_1_0_0)
    # URL attribute
    assert service.url == WCS_SERVICE_URL
    # version attribute
    assert service.version == '1.0.0'
    # Identification object
    assert hasattr(service, 'identification')
    # Check all ServiceIdentification attributes
    assert service.identification.type == 'OGC:WCS'
    for attribute in [
            'type', 'version', 'title', 'abstract', 'keywords', 'fees'
    ]:
        assert hasattr(service.identification, attribute)
    # Check all ServiceProvider attributes
    for attribute in ['name', 'url', 'contact']:
        assert hasattr(service.provider, attribute)
    # Check all operations implement IOperationMetadata
    for op in service.operations:
        for attribute in ['name', 'methods']:
            assert hasattr(op, attribute)
    # Check all contents implement IContentMetadata as a dictionary
    assert isinstance(service.contents, dict)
    # Check any item (WCS coverage, WMS layer etc) from the contents of each service
    # Check it conforms to IContentMetadata interface
    # get random item from contents dictionary -has to be a nicer way to do this!
    content = service.contents[list(service.contents.keys())[0]]
    for attribute in [
            'id', 'title', 'boundingBox', 'boundingBoxWGS84', 'crsOptions',
            'styles', 'timepositions'
    ]:
        assert hasattr(content, attribute)
Example #6
    def test_metadata_available_after_upload(self):
        """Test metadata is available after upload
        """
        # Upload hazard data for this test
        name = 'jakarta_flood_design.tif'
        exposure_filename = os.path.join(UNITDATA, 'hazard', name)
        exposure_layer = save_to_geonode(exposure_filename,
                                         user=self.user,
                                         overwrite=True)
        layer_name = exposure_layer.typename
        server_url = settings.GEOSERVER_BASE_URL + '/ows'
        wcs = WebCoverageService(server_url, version='1.0.0')
        layer_appears_immediately = layer_name in wcs.contents

        wait_time = 0.5
        time.sleep(wait_time)

        wcs2 = WebCoverageService(server_url, version='1.0.0')
        layer_appears_afterwards = layer_name in wcs2.contents

        msg = ('Layer %s was not found after %s seconds in WxS contents '
               'on server %s.\n'
               'WCS contents: %s\n' %
               (layer_name, wait_time, server_url, wcs.contents))

        assert layer_appears_afterwards, msg

        msg = ('Layer %s was not found in WxS contents on server %s.\n'
               'WCS contents: %s\n' % (layer_name, server_url, wcs.contents))

        assert layer_appears_immediately, msg
Example #7
def get_products(products, dt, bbox=None, fcast='000', outpath='.'):
    ''' Get a list of products for a given date and time 
        Example:
        >> get_products(['Temperature'], datetime(2010,10,10,12,0))
    ''' 
    saved_files = []
    url_params = (dt.strftime('%Y%m'), dt.strftime('%Y%m%d'), dt.strftime('%Y%m%d'), dt.strftime('%H%M'), fcast)
    server_url = 'http://nomads.ncdc.noaa.gov/thredds/wcs/ruc13/%s/%s/ruc2_130_%s_%s_%s.grb2' % url_params
    try:
        wcs = WebCoverageService(server_url,version='1.0.0')
    except:
        print >> sys.stderr, "Bad URL!"
    else:
        if set(products).issubset(wcs.contents.keys()):
            print >> sys.stderr, 'Requested products are a subset of available products'
        else:
            print >> sys.stderr, 'Some product(s) selected do not exist'
        if bbox is None:
            bbox = (-139, -57.995, 16, 55) # CONUS
        date_time = dt.isoformat() + 'Z'
        for product in products:
            filename = os.path.join(outpath, model + '_' + date_time.replace(':','') + '_' + product + '.tif')
            if os.path.exists(filename): # Check if file exists so we don't download duplicates
               pass
            else: 
                try:
                    output = wcs.getCoverage(identifier = product, time = [date_time], bbox= bbox, format='GeoTiff_float')
                except urllib2.HTTPError:
                    print "Bad URL"
                f = open(filename, 'wb')
                f.write(output.read())
                f.close() 
                saved_files.append(filename)
                print >> sys.stderr, 'Saved: ' + filename
    return saved_files
Example #8
def getGeotiffFromWebCoverageService(url, version, bbox, identifier, frmt,
                                     directory, filename):
    """ Obtain data from WebCoverageService.
    
    :url: url from which to obtain the data
    :version: version of the data to obtain
    :bbox: bounding box
    :identifier: user-specified filename
    :format: data format to obtain
    :directory: folder in which file is to be written
    :filename: filename of file to write
    """

    # create path
    direc = "./" + directory + "/"
    dirFile = direc + filename + ".tif"

    # check if directory and file exist
    if not os.path.exists(direc):
        os.makedirs(direc)

    if not os.path.isfile(dirFile):
        wcs = WebCoverageService(url, version=version)
        response = wcs.getCoverage(identifier=identifier,
                                   bbox=bbox,
                                   format=frmt,
                                   crs='urn:ogc:def:crs:EPSG::28992',
                                   resx=0.5,
                                   resy=0.5)
        with open(dirFile, 'wb') as file:
            file.write(response.read())
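A hypothetical call of the function above, reusing the AHN3 endpoint and coverage identifier shown in Example #1 (the bounding box values are illustrative RD New coordinates):

getGeotiffFromWebCoverageService(
    url='https://geodata.nationaalgeoregister.nl/ahn3/wcs',
    version='1.0.0',
    bbox=(120000, 485000, 120500, 485500),
    identifier='ahn3_05m_dtm',
    frmt='GEOTIFF_FLOAT32',
    directory='downloads',
    filename='ahn3_tile')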
Example #9
def test_wcs1_getcoverage_netcdf(ows_server):
    # Use owslib to confirm that we have a somewhat compliant WCS service
    wcs = WebCoverageService(url=ows_server.url + "/wcs", version="1.0.0")

    # Ensure that we have at least some layers available
    contents = list(wcs.contents)
    test_layer_name = contents[0]
    test_layer = wcs.contents[test_layer_name]

    bbox = test_layer.boundingBoxWGS84

    output = wcs.getCoverage(identifier=contents[0],
                             format='netCDF',
                             bbox=pytest.helpers.enclosed_bbox(bbox),
                             crs='EPSG:4326',
                             width=400,
                             height=300)

    assert output
    assert output.info()['Content-Type'] == 'application/x-netcdf'

    output = wcs.getCoverage(identifier=contents[0],
                             format='netCDF',
                             bbox=pytest.helpers.enclosed_bbox(bbox),
                             crs='I-CANT-BELIEVE-ITS-NOT-EPSG:4326',
                             width=400,
                             height=300)

    assert output
    assert output.info()['Content-Type'] == 'application/x-netcdf'
Example #10
    def test_metadata_available_after_upload(self):
        """Test metadata is available after upload
        """
        # Upload exposure data for this test
        name = 'Population_2010'
        exposure_filename = '%s/%s.asc' % (TESTDATA, name)
        exposure_layer = save_to_geonode(exposure_filename,
                                         user=self.user, overwrite=True)
        layer_name = exposure_layer.typename
        server_url = settings.GEOSERVER_BASE_URL + '/ows'
        wcs = WebCoverageService(server_url, version='1.0.0')
        layer_appears_immediately = layer_name in wcs.contents

        wait_time = 0.5
        import time
        time.sleep(wait_time)

        wcs2 = WebCoverageService(server_url, version='1.0.0')
        layer_appears_afterwards = layer_name in wcs2.contents

        msg = ('Layer %s was not found after %s seconds in WxS contents '
               'on server %s.\n'
               'WCS contents: %s\n' % (layer_name,
                                       wait_time,
                                       server_url,
                                       wcs.contents))

        assert layer_appears_afterwards, msg

        msg = ('Layer %s was not found in WxS contents on server %s.\n'
               'WCS contents: %s\n' % (layer_name, server_url, wcs.contents))

        assert layer_appears_immediately, msg
Example #11
def loadWCS(folder, out_name, WCS_URL, Version, layerName, bbox, srid):
    folder = folder
    urls = WCS_URL
    version = Version
    bbox = bbox
    input_value_raster = layerName
    crs_num = 'urn:ogc:def:crs:EPSG::' + str(srid)

    if urls == 'None':
        return ("Please provide WCS parameters")

    wcs = WebCoverageService(urls, version)
    #print(list(wcs.contents))

    #
    # print([op.name for op in wcs.operations])

    cvg = wcs.contents[input_value_raster]
    if bbox == 'None':
        bbox = cvg.boundingBoxWGS84

    response = wcs.getCoverage(identifier=input_value_raster,
                               bbox=bbox,
                               format='GEOTIFF_FLOAT32',
                               crs=crs_num,
                               resx=0.5,
                               resy=0.5)
    temp_raster = folder + '//' + out_name + '.tif'
    with open(temp_raster, 'wb') as file:
        file.write(response.read())
Example #12
    def getCube(self, maptype, depths, datatypes, bounds, overwrite=False):
        #check if all files are already downloaded
        fpaths = [
            '../tmp/Hadocha_' + maptype + '_' + depth + 'cm_' + datatype +
            '.tif' for depth in depths for datatype in datatypes
        ]
        if (min([os.path.isfile(fpath) for fpath in fpaths])
                and not overwrite):
            print('all files already downloaded for: ' + maptype)
            return fpaths

        #set up the Web Coverage Service. This sometimes raises errors, so retry
        print('setting up WCS for maptype: ' + maptype)
        for attempt in range(5):
            try:
                wcs = WebCoverageService(
                    'http://maps.isric.org/mapserv?map=/map/' + maptype +
                    '.map',
                    version='1.0.0')
            except:
                print('failed to connect to WCS ' + str(attempt + 1) +
                      ' times to: ' + maptype)
            else:
                break
        else:
            print('failed to connect to WCS, try again later')
            return []

        #start retrieving map data
        for depth in depths:
            for datatype in datatypes:
                fpath = '../tmp/Hadocha_' + maptype + '_' + depth + 'cm_' + datatype + '.tif'
                #also handle exceptions for retrieving files
                if (not os.path.isfile(fpath) or overwrite):
                    for attempt in range(5):
                        try:
                            response = wcs.getCoverage(
                                identifier=maptype + '_' + depth + 'cm_' +
                                datatype,
                                crs='urn:ogc:def:crs:EPSG::152160',
                                bbox=(bounds.minx, bounds.miny, bounds.maxx,
                                      bounds.maxy),
                                resx=20,
                                resy=20,
                                format='GEOTIFF_INT16')
                        except:
                            print('coverage failed ' + str(attempt + 1) +
                                  ' times for: ' + fpath)
                        else:
                            with open(fpath, 'wb') as file:
                                file.write(response.read())
                                # print('file: '+fpath+'downloaded')
                            break
                    else:
                        print('file not retrieved: ' + fpath)
                        fpaths.remove(fpath)
                    # else:
                    #     print('file already downloaded: '+fpath)
        # print('map at location: '+fpath)
        return fpaths
Example #13
def get_raster_wcs(
    coordinates: Union[Iterable, Sequence[Union[float, str]]],
    geographic: bool = True,
    layer: str = None,
    geoserver: str = GEO_URL,
) -> bytes:
    """Return a subset of a raster image from the local GeoServer via WCS 2.0.1 protocol.

    For geographic rasters, subsetting is based on WGS84 (Long, Lat) boundaries. If not geographic, subsetting based
    on projected coordinate system (Easting, Northing) boundaries.

    Parameters
    ----------
    coordinates : Sequence[Union[int, float, str]]
      Geographic coordinates of the bounding box (left, down, right, up)
    geographic : bool
      If True, uses "Long" and "Lat" in WCS call. Otherwise uses "E" and "N".
    layer : str
      Layer name of raster exposed on GeoServer instance, e.g. 'public:CEC_NALCMS_LandUse_2010'
    geoserver: str
      The address of the geoserver housing the layer to be queried. Default: http://pavics.ouranos.ca/geoserver/.

    Returns
    -------
    bytes
      A GeoTIFF array.

    """
    (left, down, right, up) = coordinates

    if geographic:
        x, y = "Long", "Lat"
    else:
        x, y = "E", "N"

    wcs = WebCoverageService(url=urljoin(geoserver, "ows"), version="2.0.1")

    try:
        resp = wcs.getCoverage(
            identifier=[layer],
            format="image/tiff",
            subsets=[(x, left, right), (y, down, up)],
            timeout=120,
        )

    except Exception as e:
        raise Exception(e)

    data = resp.read()

    try:
        etree.fromstring(data)
        # The response is an XML file describing the server error.
        raise ChildProcessError(data)

    except etree.XMLSyntaxError:
        # The response is the DEM array.
        return data
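A hypothetical call of get_raster_wcs, using the layer name given in the docstring as an example (the WGS84 coordinates are illustrative):

geotiff_bytes = get_raster_wcs(
    coordinates=(-80.0, 44.0, -79.5, 44.5),
    geographic=True,
    layer='public:CEC_NALCMS_LandUse_2010')
with open('landuse_subset.tif', 'wb') as f:
    f.write(geotiff_bytes)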
Example #14
def GetWCSLayer(u, p):
  start = time.time()
  # Manage the WCS URL & the layer name
  split_url = u.split('?')
  server_url = split_url[0]
  ows = server_url[-3:]
  print 'The OGC standard is: '+ ows
  
  if ows == 'ows' or ows == 'wcs':
    server_url = server_url[:-3]+ 'wcs' 
    spacename_wcs = split_url[1]
    chemin = p + spacename_wcs +'.tif'
    
    if not os.path.exists(chemin):
      
      # Get the raster layer using OGC WCS standard
      wcs = WebCoverageService(server_url ,version='1.0.0')
      image = wcs[spacename_wcs]
      
      # Download the GeoTIFF image file
      info = (image.boundingboxes)[0]
          
      epsg = info['nativeSrs']
      bboxx = info['bbox']
      
      offset = image.grid.offsetvectors
      cellsize_x= offset[0]
      x = cellsize_x[0]
      X = str(abs(float(x)))
      
      cellsize_y= offset[1]
      y = cellsize_y[1]
      Y = str(abs(float(y)))
      
  #    img_formats = image.supportedFormats
  #    img_format = img_formats[0]
      img_format = 'GeoTIFF'        
      
      print "Downloading the GeoTIFF file... : "+spacename_wcs
      print "From: "+server_url
      output = wcs.getCoverage(identifier = spacename_wcs,
                                 bbox = bboxx,
                                   crs = epsg,
                                     format = img_format,
                                       resx = X,
                                         resy = Y)                            
                               
      data = output.read()
      f = open(chemin,'wb')
      f.write(data)
      f.close()
    
    # Calculate elapsed time
    temps = time.time() - start
    tps = round(temps, 2)
    temps_s = str(tps)
    print "GetWCSLayer download time : " + temps_s + " s"
Example #15
	def __init__(self, url, dates, variable, bbox, single=False):
		super(WCSHelper, self).__init__()
		self.url = url
		self.single = single
		self.dates = dates
		self.variable = variable
		self.bbox = bbox
		self.owslib_log = logging.getLogger('owslib')
		self.owslib_log.setLevel(logging.DEBUG)
		self.wcs = WebCoverageService(url, version="1.0.0")
Example #16
 def __init__(self):
     try:
         self.wcs_ahn3 = WebCoverageService(
             'https://geodata.nationaalgeoregister.nl/ahn3/wcs',
             version='1.0.0')
         self.wcs_ahn2 = WebCoverageService(
             'https://geodata.nationaalgeoregister.nl/ahn2/wcs',
             version='1.0.0')
     except:
         print("AHN WCS host unavailable")
Example #17
def wcs_links(wcs_url,
              identifier,
              bbox=None,
              crs=None,
              height=None,
              width=None,
              exclude_formats=True,
              quiet=True,
              version='1.0.0'):
    # FIXME(Ariel): This would only work for layers marked for public view,
    # what about the ones with permissions enabled?

    try:
        wcs = WebCoverageService(wcs_url, version=version)
    except ServiceException as err:
        err_msg = 'WCS server returned exception: %s' % err
        if not quiet:
            logger.warn(err_msg)
        raise GeoNodeException(err_msg)

    msg = ('Could not create WCS links for layer "%s",'
           ' it was not in the WCS catalog,'
           ' the available layers were: "%s"' %
           (identifier, wcs.contents.keys()))

    output = []
    formats = []

    if identifier not in wcs.contents:
        if not quiet:
            raise RuntimeError(msg)
        else:
            logger.warn(msg)
    else:
        coverage = wcs.contents[identifier]
        formats = coverage.supportedFormats
        for f in formats:
            if exclude_formats and f in DEFAULT_EXCLUDE_FORMATS:
                continue
            # roundabout, hacky way to accomplish getting a getCoverage url.
            # nonetheless, it's better than having to load an entire large
            # coverage just to generate a URL
            fakeUrl = wcs.getCoverage(identifier=coverage.id,
                                      format=f,
                                      bbox=bbox,
                                      crs=crs,
                                      height=20,
                                      width=20).geturl()
            url = sub(r'(height=)20(\&width=)20', r'\g<1>{0}\g<2>{1}',
                      fakeUrl).format(height, width)
            # The outputs are: (ext, name, mime, url)
            # FIXME(Ariel): Find a way to get proper ext, name and mime
            # using format as a default for all is not good enough
            output.append((f, f, f, url))
    return output
Example #18
File: gis.py Project: bird-house/raven
def get_raster_wcs(coordinates, geographic=True, layer=None):
    """Return a subset of a raster image from the local GeoServer via WCS 2.0.1 protocol.

    For geographic rasters, subsetting is based on WGS84 (Long, Lat) boundaries. If not geographic, subsetting is based
    on projected coordinate system (Easting, Northing) boundaries.

    Parameters
    ----------
    coordinates : sequence
      Geographic coordinates of the bounding box (left, down, right, up)
    geographic : bool
      If True, uses "Long" and "Lat" in WCS call. Otherwise uses "E" and "N".
    layer : str
      Layer name of raster exposed on GeoServer instance. E.g. 'public:CEC_NALCMS_LandUse_2010'

    Returns
    -------
    bytes
      A GeoTIFF array.

    """
    from owslib.wcs import WebCoverageService
    from lxml import etree

    (left, down, right, up) = coordinates

    if geographic:
        x, y = 'Long', 'Lat'
    else:
        x, y = 'E', 'N'

    wcs = WebCoverageService('http://boreas.ouranos.ca/geoserver/ows',
                             version='2.0.1')

    try:
        resp = wcs.getCoverage(identifier=[
            layer,
        ],
                               format='image/tiff',
                               subsets=[(x, left, right), (y, down, up)])

    except Exception as e:
        raise Exception(e)

    data = resp.read()

    try:
        etree.fromstring(data)
        # The response is an XML file describing the server error.
        raise ChildProcessError(data)

    except etree.XMLSyntaxError:
        # The response is the DEM array.
        return data
Example #19
def wcs_links(
        wcs_url,
        identifier,
        bbox=None,
        crs=None,
        height=None,
        width=None,
        exclude_formats=True,
        quiet=True,
        version='1.0.0'):
    # FIXME(Ariel): This would only work for layers marked for public view,
    # what about the ones with permissions enabled?

    try:
        wcs = WebCoverageService(wcs_url, version=version)
    except ServiceException as err:
        err_msg = 'WCS server returned exception: %s' % err
        if not quiet:
            logger.warn(err_msg)
        raise GeoNodeException(err_msg)

    msg = ('Could not create WCS links for layer "%s",'
           ' it was not in the WCS catalog,'
           ' the available layers were: "%s"' % (
               identifier, wcs.contents.keys()))

    output = []
    formats = []

    if identifier not in wcs.contents:
        if not quiet:
            raise RuntimeError(msg)
        else:
            logger.warn(msg)
    else:
        coverage = wcs.contents[identifier]
        formats = coverage.supportedFormats
        for f in formats:
            if exclude_formats and f in DEFAULT_EXCLUDE_FORMATS:
                continue
            # roundabout, hacky way to accomplish getting a getCoverage url.
            # nonetheless, it's better than having to load an entire large
            # coverage just to generate a URL
            fakeUrl = wcs.getCoverage(identifier=coverage.id, format=f,
                                      bbox=bbox, crs=crs, height=20,
                                      width=20).geturl()
            url = sub(r'(height=)20(\&width=)20', r'\g<1>{0}\g<2>{1}',
                      fakeUrl).format(height, width)
            # The outputs are: (ext, name, mime, url)
            # FIXME(Ariel): Find a way to get proper ext, name and mime
            # using format as a default for all is not good enough
            output.append((f, f, f, url))
    return output
Example #20
def test_wcs1_describecoverage(ows_server):
    # Use owslib to confirm that we have a somewhat compliant WCS service
    wcs = WebCoverageService(url=ows_server.url + "/wcs", version="1.0.0")

    # Ensure that we have at least some layers available
    contents = list(wcs.contents)
    test_layer_name = contents[0]

    resp = wcs.getDescribeCoverage(test_layer_name)

    gc_xds = get_xsd("1.0.0/describeCoverage.xsd")
    assert gc_xds.validate(resp)
Example #21
def get_dtm(path_out, minlong, maxlong, minlat, maxlat):

    bbox = (minlong, minlat, maxlong, maxlat)

    url="http://services.ga.gov.au/gis/services/DEM_SRTM_1Second_over_Bathymetry_Topography/MapServer/WCSServer?"
    wcs = WebCoverageService(url,version='1.0.0')

    cvg=wcs.getCoverage(identifier='1',  bbox=bbox, format='GeoTIFF', crs=4326, width=200, height=200)

    f = open(path_out, 'wb')
    bytes_written = f.write(cvg.read())
    f.close()
    print("dtm geotif saved as",path_out)
Example #22
def test_wcs_200():
    """
    Web Coverage Service
    WCS Version 2.0.x

    rewritten doctest/wcs_200.txt
    """
    wcs = WebCoverageService(SERVICE_URL, version="2.0.1")
    assert wcs.version == '2.0.1'
    assert wcs.url == SERVICE_URL
    assert wcs.identification.title == 'rasdaman'
    assert wcs.identification.service == 'OGC WCS'
    assert wcs.provider.name == 'Jacobs University Bremen'
    assert 'AvgLandTemp' in wcs.contents.keys()
    assert len(wcs.contents.keys()) >= 20
    cvg = wcs.contents['AvgLandTemp']
    assert cvg.boundingboxes[0]['bbox'] == (-90, -180,
                                            90, 180)
    assert cvg.timelimits == [datetime.datetime(2000, 2, 1, 0, 0), datetime.datetime(2015, 6, 1, 0, 0)]
    assert cvg.timepositions[0:5] == [datetime.datetime(2000, 2, 1, 0, 0), datetime.datetime(2000, 3, 1, 0, 0),
                                      datetime.datetime(2000, 4, 1, 0, 0), datetime.datetime(2000, 5, 1, 0, 0),
                                      datetime.datetime(2000, 6, 1, 0, 0)]
    assert cvg.supportedFormats == ['application/gml+xml', 'image/jpeg', 'image/png', 'image/tiff', 'image/bmp',
                                    'image/jp2', 'application/netcdf', 'text/csv', 'application/json',
                                    'application/dem', 'application/x-ogc-dted', 'application/x-ogc-ehdr',
                                    'application/x-ogc-elas', 'application/x-ogc-envi', 'application/x-ogc-ers',
                                    'application/x-ogc-fit', 'application/x-ogc-fits', 'image/gif',
                                    'application/x-netcdf-gmt', 'application/x-ogc-gs7bg', 'application/x-ogc-gsag',
                                    'application/x-ogc-gsbg', 'application/x-ogc-gta', 'application/x-ogc-hf2',
                                    'application/x-erdas-hfa', 'application/x-ogc-ida', 'application/x-ogc-ingr',
                                    'application/x-ogc-isis2', 'application/x-erdas-lan', 'application/x-ogc-mff2',
                                    'application/x-ogc-nitf', 'application/x-ogc-paux', 'application/x-ogc-pcidsk',
                                    'application/x-ogc-pcraster', 'application/x-ogc-pdf', 'application/x-ogc-pnm',
                                    'text/x-r', 'application/x-ogc-rmf', 'image/x-sgi', 'application/x-ogc-vrt',
                                    'image/xpm', 'application/x-ogc-zmap']
    assert cvg.grid.axislabels == ['Lat', 'Long', 'ansi']
    assert cvg.grid.dimension == 3
    assert cvg.grid.lowlimits == ['0', '0', '0']
    assert cvg.grid.highlimits == ['1799', '3599', '184']
    covID = 'AvgLandTemp'
    time_subset = ("ansi", "2000-02-01T00:00:00Z")
    lat_subset = ('Lat', 40, 50)
    long_subset = ('Long', -10, 0)
    formatType = 'application/netcdf'
    output = wcs.getCoverage(identifier=[covID], format=formatType, subsets=[long_subset, lat_subset, time_subset])
    f = open(scratch_file('test_wcs_200.nc'), 'wb')
    bytes_written = f.write(output.read())
    f.close()
Example #23
def test_wcs20_getcoverage_netcdf(ows_server):
    # Use owslib to confirm that we have a somewhat compliant WCS service
    wcs = WebCoverageService(url=ows_server.url + "/wcs", version="2.0.0")

    # Ensure that we have at least some layers available
    contents = list(wcs.contents)
    output = wcs.getCoverage(
        identifier=[contents[0]],
        format='netCDF',
        subsets=[('x', 144, 144.3), ('y', -42.4, -42), ('time', '2019-11-05')],
        subsettingcrs="EPSG:4326",
        scalesize="x(400),y(300)",
    )

    assert output
    assert output.info()['Content-Type'] == 'application/x-netcdf'
Example #24
 def __init__(self, config, grid):
     """
     Current Issues:
     - only 'now' works as starttime in the model. any other model gives an 
     error at line 132 (req_url).
     Somethingsomething dictionary not fed the correct keys.
     """
     #
     # Initialize the provider. This sets up a temporary directory and some
     # general stuff that all providers need.
     #
     provider.Provider.__init__(self, config, grid)
     
     #
     # First figure out which GFS forecast has been most recently uploaded.
     #
     gfs_layers=[]
     self.gfs_wcs_access_urls={}
     gfs_dt=[]
     start = round_datetime(dt=datetime.datetime.utcnow(), seconds=-21600)
     
     logger.debug("Looking for most recent GFS dataset")
     for offset in xrange(-21600*1, -21600*12, -21600):
         logger.debug("trying date")
         gfs_latest = start + datetime.timedelta(seconds=offset)
         gfs_dt.append(gfs_latest)      
         logger.debug("create date")
         wcs_url=gfs_latest.strftime("http://nomads.ncdc.noaa.gov/thredds/wcs/gfs-004/%Y%m/%Y%m%d/gfs_4_%Y%m%d_%H%M_000.grb2")
         try:
             logger.debug("Trying WCS at %s"%(wcs_url))
             wcs = WebCoverageService(wcs_url, version='1.0.0')
             logger.debug("Fine...")
         except:
             logger.debug("Exception! Continue to next loop!!")
             continue
         logger.debug("moving on...")
         logger.debug("Connected to GFS WCS at %s"%(wcs_url))
         contents = list(wcs.contents)
         if len(contents) > 0:
             for layer in contents:
                 gfs_layers.append(layer)
                 
                 self.available_layers.append(layer)
                 
             for hours in xrange(0,300,3):
                 timestamp = gfs_latest + datetime.timedelta(hours=hours)
                 timestamp = timestamp.replace(tzinfo=pytz.utc)
                 hr = "%.3d"%(hours)
                 wcs_access_url = gfs_latest.strftime("http://nomads.ncdc.noaa.gov/thredds/wcs/gfs-004/%Y%m/%Y%m%d/gfs_4_%Y%m%d_%H%M_"+hr+".grb2")
                 cache_key = gfs_latest.strftime("gfsrun-%Y%m%d%H%M-"+hr+"-")
                 self.gfs_wcs_access_urls.update({timestamp.isoformat():{'url':wcs_access_url,'cache_key':cache_key}})
                 
             self.gfs_run_timestamp = gfs_latest
             break
         else:
             continue
         
     logger.debug("The following GFS WCS urls will be used for timesteps:")
     for k in sorted(self.gfs_wcs_access_urls):
         logger.debug("Timestamp: %s WCS URL: %s"%(k,self.gfs_wcs_access_urls[k]))
Example #25
File: wcs.py Project: pcraster/gems
    def __init__(self, config, grid):
        """
        Initialization code for this specific provider. First initialize the
        base provider and then to some custom setup stuff specific to the
        wcs provider.

        The provider needs to update the self.available_layers attribute and
        append all the layers that it can provide to this attribute. Since
        a provider can literally provide any sort of layer and any number of
        them, this cannot be done automatically, and therefore must be done
        explicitly in the provider's __init__.
        """
        provider.Provider.__init__(self, config, grid)

        try:
            self._layers = {}
            for wcs_url in config:
                wcs = WebCoverageService(wcs_url, version='1.0.0')
                contents = list(wcs.contents)
                for layer in contents:
                    self.available_layers.append(layer)
                    #make a mapping in the _layers variable about which wcs url
                    #we need to query to fetch a particular layer
                    self._layers.update({layer: wcs_url})
        except:
            logger.debug(
                " - %s provider couldn't find any layers to make available.")
Example #26
def getGeoTransformImStack(mSet, verbose_query=True, remove=True):

    from osgeo import gdal
    import os

    wcs = WebCoverageService(
        'http://saocompute.eurac.edu/sincohmap/rasdaman/ows?', version='2.0.1')
    mContents = wcs.contents[mSet]

    indxDate = getIndexLabelFromWCScontents('date', mContents)

    subset = 'date(' + mContents.grid.origin[indxDate] + ')'
    query = 'for c in ( ' + mSet + ' ) return encode (c[' + subset + '], "tiff", "nodata=-999")'

    if verbose_query: print(query)

    raster_path = wcps_rasdaman(query,
                                ip='saocompute.eurac.edu/sincohmap',
                                verbose=verbose_query)

    raster = gdal.Open(raster_path)

    if remove:
        os.remove(raster_path)
        if verbose_query: print('Temporary file has been deleted')

    return (raster.GetGeoTransform())
Example #27
File: example.py Project: pcraster/gems
    def __init__(self, config, grid):
        self.name = "example"

        self._config = config
        self._layers = {}

        print "Initializing wxs provider:"
        print grid
        print config

        ##
        #
        # todo: move all this grid/caching stuff to the base class? Providers
        # shouldn't have to worry about this, and really not about reprojecting
        # to an equidistant projection either. All that a provider needs to do
        # is provide data for a layer name.
        #
        self._grid = grid
        self._cache = os.path.join(
            os.environ.get('GEM_WORKING_DIR', '/tmp/gem'), "wcs_cache",
            self._grid['uuid'])

        if not os.path.isdir(self._cache):
            os.makedirs(self._cache)  #create the cache directory

        for wcs_url in config:
            print "* init url="
            print wcs_url
            wcs = WebCoverageService(wcs_url, version='1.0.0')
            contents = list(wcs.contents)
            for layer in contents:
                #make a mapping in the _layers variable about which wcs url
                #we need to query to fetch a particular layer
                self._layers.update({layer: wcs_url})
Example #28
def test_wcs1_server(ows_server):
    # Use owslib to confirm that we have a somewhat compliant WCS service
    wcs = WebCoverageService(url=ows_server.url + "/wcs", version="1.0.0")

    # Ensure that we have at least some layers available
    contents = list(wcs.contents)
    assert contents
Example #29
 def init_config(self, config_file):
     _wcs = WebCoverageService(self.url, version="2.0.1")
     config = {}
     _covs = _wcs.contents.keys()
     config['coverages'] = {}
     for _cov in _covs:
         #print "writting coverage info for "+_cov
         #if _cov not in self.banned:
         try:
             t_cov = {}
             try:
                 t_cov['time_axis_name'] = [
                     item for item in _wcs.contents[_cov].grid.axislabels
                     if item not in self.axis_removes
                 ][0]
             except Exception, e:
                 t_cov['time_axis_name'] = ''
             spatial_axis = [
                 item for item in _wcs.contents[_cov].grid.axislabels
                 if item in self.axis_removes
             ]
             del _wcs.contents[_cov]
             for p in spatial_axis:
                 t_cov[get_lat_long(p) + '_axis_name'] = p
             # gather X and Y coords definition
             t_cov['name'] = _cov
             config['coverages'][_cov] = t_cov
             self.coverages[_cov] = t_cov
         except Exception, e:
             print e
             print "coverage {} failed you should check it".format(_cov)
Example #30
def getReferenceImage(mSet,
                      verbose_query=True,
                      rotate=False,
                      x0=0,
                      xN=0,
                      y0=0,
                      yN=0):

    if x0 != 0 and xN != 0 and y0 != 0 and yN != 0:
        DO_CROP = 1
    else:
        DO_CROP = 0

    print(DO_CROP)

    # Select WCS server and service version
    wcs = WebCoverageService(
        'http://saocompute.eurac.edu/sincohmap/rasdaman/ows?', version='2.0.1')
    mContents = wcs.contents[mSet]

    indxMaster = getIndexLabelFromWCScontents('master_date', mContents)
    indxSlave = getIndexLabelFromWCScontents('slave_date', mContents)
    indxN = getIndexLabelFromWCScontents('N', mContents)
    indxE = getIndexLabelFromWCScontents('E', mContents)

    masterDates = getMasterTimeAxis(mSet, mContents, indxSlave, indxN, indxE)
    slaveDates = getSlaveTimeAxis(mSet, mContents, indxMaster, indxN, indxE)

    date0M = masterDates[0].strftime('%Y-%m-%d')
    date0S = slaveDates[0].strftime('%Y-%m-%d')

    print(date0M)
    print(date0S)

    #subset = 'master_date(\"' + date0M +'\"), slave_date(\"' + date0S  +'\")'
    if DO_CROP:
        subset = 'E(' + str(x0) + ':' + str(xN) + '), N(' + str(y0) + ':' + str(yN) + \
                    '), master_date(\"' + date0M +'\"), slave_date(\"' + date0S  +'\")'
    else:
        subset = 'master_date(\"' + date0M + '\"), slave_date(\"' + date0S + '\")'

    query = 'for c in ( ' + mSet + ' ) return encode (c[' + subset + '], "tiff")'
    subset_coherence = wcps_rasdaman(query,
                                     ip='saocompute.eurac.edu/sincohmap',
                                     verbose=verbose_query)

    if rotate:
        refPath = mSet + '_reference_image_rotated.tiff'

        print(subset_coherence)
        print(refPath)

        rotate_tiff_file(subset_coherence, refPath)
        os.remove(subset_coherence)
    else:
        refPath = mSet + '_reference_image.tiff'
        shutil.move(subset_coherence, refPath)

    return (refPath)
Example #31
def test_wcs_idee():
    """
    COWS Web Coverage Service
    WCS Version 1.0.0

    rewritten doctest/wcs_idee.txt
    """
    wcs = WebCoverageService(SERVICE_URL)
    assert wcs.version == '1.0.0'
    assert wcs.url == SERVICE_URL
    assert wcs.identification.title == 'WCS UTM30N - MDT Peninsula y Baleares'
    assert wcs.identification.service == 'IDEE-WCS-UTM30N'
    assert wcs.provider.name == u'Instituto Geogr\xe1fico Nacional'
    assert sorted(wcs.contents.keys()) == [
        'MDT1000_peninsula_baleares',
        'MDT1000_peninsula_baleares_aspecto',
        'MDT1000_peninsula_baleares_pendientes',
        'MDT25_peninsula_ZIP',
        'MDT25_peninsula_aspecto',
        'MDT25_peninsula_pendientes',
        'MDT500_peninsula_baleares',
        'MDT500_peninsula_baleares_aspecto',
        'MDT500_peninsula_baleares_pendientes',
        'MDT_peninsula_baleares',
        'MDT_peninsula_baleares_aspecto',
        'MDT_peninsula_baleares_pendientes']
    cvg = wcs['MDT25_peninsula_pendientes']
    assert cvg.title == 'MDT25 Pendientes Peninsula'
    assert cast_tuple_int_list(cvg.boundingBoxWGS84) == [-8, 35, 3, 43]
    assert cvg.timelimits == []
    assert sorted(cvg.supportedFormats) == ['AsciiGrid', 'FloatGrid_Zip', 'GeoTIFF']
    assert sorted(map(lambda x: x.getcode(), cvg.supportedCRS)) == [
        'EPSG:23028',
        'EPSG:23029',
        'EPSG:23030',
        'EPSG:23030',
        'EPSG:23031',
        'EPSG:4230',
        'EPSG:4326']
    output = wcs.getCoverage(
        identifier='MDT25_peninsula_pendientes',
        bbox=(600000, 4200000, 601000, 4201000),
        crs='EPSG:23030', format='AsciiGrid', resX=25, resY=25)
    f = open(scratch_file('test_idee.grd'), 'wb')
    bytes_written = f.write(output.read())
    f.close()
Example #32
def main(input_zone, input_value_raster, earsource, **kwargs):
    wcs = kwargs.get("WCS", 'no')
    if wcs == 'yes':
        folder = kwargs.get("WCS_temp", '/home/')
        urls = kwargs.get("WCS_url", 'None')
        version = kwargs.get("WCS_version", '1.0.0')
        bbox = kwargs.get("bbox", 'None')

        if urls == 'None':
            return ("Please provide WCS parameters")

        wcs = WebCoverageService(urls, version)
        ##print(list(wcs.contents))

        #print([op.name for op in wcs.operations])

        cvg = wcs.contents[input_value_raster]
        if bbox == 'None':
            bbox = cvg.boundingBoxWGS84
        response = wcs.getCoverage(identifier=input_value_raster,
                                   bbox=bbox,
                                   format='GEOTIFF_FLOAT32',
                                   crs='urn:ogc:def:crs:EPSG::4326',
                                   resx=0.5,
                                   resy=0.5)
        temp_raster = folder + '//' + input_value_raster + '.tif'
        with open(temp_raster, 'wb') as file:
            file.write(response.read())
        input_value_raster = temp_raster

    if earsource == "shp":
        return loop_zonal_stats(input_zone, input_value_raster)

    elif earsource == "pgtable":
        connString = kwargs.get('connString', "None")
        if (connString == "None"):
            print("Please Supply valid connection string")
        #print("please supply valid data and their source")
        return loop_zonal_statsPG(input_zone, input_value_raster, connString)
    elif earsource == "wfs":
        wfsURL = kwargs.get('wfsURL', "None")
        if (wfsURL == "None"):
            return ("Please provide valid WFS URL")
        return loop_zonal_statsWFS(input_zone, input_value_raster, wfsURL)
    else:
        print("please supply valid data and their source")
Example #33
def test_wcs21_getcoverage(ows_server):
    # Use owslib to confirm that we have a somewhat compliant WCS service
    wcs = WebCoverageService(url=ows_server.url + "/wcs", version="2.0.1")

    # Ensure that we have at least some layers available
    contents = list(wcs.contents)
    output = wcs.getCoverage(
        identifier=[contents[0]],
        format='image/geotiff',
        subsets=[('x', 144, 144.3), ('y', -42.4, -42)],
        # timeSequence=['2019-11-05'],
        subsettingcrs="EPSG:4326",
        subset='time("2019-11-05")',
        scalesize="x(400),y(300)")

    assert output
    assert output.info()['Content-Type'] == 'image/geotiff'
Example #34
def test_wcs20_getcoverage_multidate(ows_server):
    # Use owslib to confirm that we have a somewhat compliant WCS service
    wcs = WebCoverageService(url=ows_server.url + "/wcs", version="2.0.0")

    # Ensure that we have at least some layers available
    contents = list(wcs.contents)
    try:
        resp = wcs.getCoverage(
            identifier=[contents[0]],
            format='image/geotiff',
            subsets=[('x', 144, 144.3), ('y', -42.4, -42),
                     ('time', '2019-11-05', "2019-12-05")],
            subsettingcrs="EPSG:4326",
            scalesize="x(400),y(300)",
        )
    except ServiceException as e:
        assert 'Format does not support multi-time datasets' in str(e)
Example #35
def get_wcs(server_url, spacename_wcs):

    chemin = '/home/tmp/'+spacename_wcs+'.tif'
    
    if not os.path.exists(chemin):
        
        wcs = WebCoverageService(server_url +"/wcs/",version='1.0.0')
    
        image = wcs[spacename_wcs]
        
        info = (image.boundingboxes)[0]
        
        epsg = info['nativeSrs']
        bboxx = info['bbox']
        
        offset = image.grid.offsetvectors
        cellsize_x= offset[0]
        x = cellsize_x[0]
        X = str(abs(float(x)))
        
        cellsize_y= offset[1]
        y = cellsize_y[1]
        Y = str(abs(float(y)))
        
#        img_formats = image.supportedFormats
#        img_format = img_formats[0]
        img_format = 'GeoTIFF'        
        
        print "Downloading the WCS: "+spacename_wcs
        print "From: "+server_url
        output = wcs.getCoverage(identifier = spacename_wcs,
                                 bbox = bboxx,
                                 crs = epsg,
                                 format = img_format,
                                 resx = X,
                                 resy = Y)                            
                                 
        data = output.read()
        f = open(chemin,'wb')
        f.write(data)
        f.close()
        print "Done"
    else: print "Done"
        
    return chemin
Example #36
def ws_init(access_type, endpoint):
    '''Initialise a Web Service object'''
    if access_type == 'wfs': 
        ws = WebFeatureService(endpoint)
    elif access_type == 'wms':
        ws = WebMapService(endpoint)
    else:
        ws = WebCoverageService(endpoint)
    return ws
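A hypothetical use of the dispatcher above (the endpoint is made up; the snippet assumes WebFeatureService, WebMapService and WebCoverageService are imported from owslib):

ws = ws_init('wcs', 'http://example.com/geoserver/ows?service=WCS&request=GetCapabilities')
print(list(ws.contents))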
Example #37
 def _getWCSObj(self, endpoint):
     oldProxy = proxyFix(endpoint)
     try:
         
         log.debug("wcs endpoint = %s" % (endpoint,))
         
         getCapabilitiesEndpoint = parseEndpointString(endpoint, 
                     {'Service':'WCS', 'Request':'GetCapabilities'})
         
         log.debug("wcs endpoint = %s" % (getCapabilitiesEndpoint,))
         #requires OWSLib with cookie support
         wcs=WebCoverageService(getCapabilitiesEndpoint, version='1.0.0',cookies= request.headers.get('Cookie', ''))
         
         layers = [x[0] for x in wcs.items()]
     finally:
         resetProxy(oldProxy)
     
     return wcs, layers
Example #38
def run_test_resource(resource_type, url):
    """tests a CSW service and provides run metrics"""

    if resource_type not in RESOURCE_TYPES.keys():
        msg = gettext('Invalid resource type')
        msg2 = '%s: %s' % (msg, resource_type)
        LOGGER.error(msg2)
        raise RuntimeError(msg2)

    title = None
    start_time = datetime.datetime.utcnow()
    message = None

    try:
        if resource_type == 'OGC:WMS':
            ows = WebMapService(url)
        elif resource_type == 'OGC:WFS':
            ows = WebFeatureService(url)
        elif resource_type == 'OGC:WCS':
            ows = WebCoverageService(url)
        elif resource_type == 'OGC:WPS':
            ows = WebProcessingService(url)
        elif resource_type == 'OGC:CSW':
            ows = CatalogueServiceWeb(url)
        elif resource_type == 'OGC:SOS':
            ows = SensorObservationService(url)
        elif resource_type in ['WWW:LINK', 'urn:geoss:waf']:
            ows = urlopen(url)
            if resource_type == 'WWW:LINK':
                import re
                try:
                    title_re = re.compile("<title>(.+?)</title>")
                    title = title_re.search(ows.read()).group(1)
                except:
                    title = url
            elif resource_type == 'urn:geoss:waf':
                title = 'WAF %s %s' % (gettext('for'), urlparse(url).hostname)
        elif resource_type == 'FTP':
            ows = urlopen(url)
            title = urlparse(url).hostname
        success = True
        if resource_type.startswith('OGC:'):
            title = ows.identification.title
        if title is None:
            title = '%s %s %s' % (resource_type, gettext('for'), url)
    except Exception as err:
        msg = str(err)
        LOGGER.exception(msg)
        message = msg
        success = False

    end_time = datetime.datetime.utcnow()

    delta = end_time - start_time
    response_time = '%s.%s' % (delta.seconds, delta.microseconds)

    return [title, success, response_time, message, start_time]
Example #39
    def _getWCSObj(self, endpoint):
        oldProxy = proxyFix(endpoint)
        try:

            log.debug("wcs endpoint = %s" % (endpoint,))

            getCapabilitiesEndpoint = parseEndpointString(endpoint, {"Service": "WCS", "Request": "GetCapabilities"})

            log.debug("wcs endpoint = %s" % (getCapabilitiesEndpoint,))
            # requires OWSLib with cookie support
            wcs = WebCoverageService(
                getCapabilitiesEndpoint, version="1.0.0", cookies=request.headers.get("Cookie", "")
            )

            layers = [x[0] for x in wcs.items()]
        finally:
            resetProxy(oldProxy)

        return wcs, layers
Example #40
File: gfs.py Project: pcraster/gems
 def provide(self,name,options={}):
     """
     
     Return a numpy array of the requested data
     
     todo:
     - split into a download() or get_from_cache()
     
     """
     logger.debug("Looking for layer '%s' at timestamp '%s'"%(name,options['timestamp']))        
     if name not in self.available_layers:
         logger.error("Layer '%s' is not available in this provider."%(name))
     if options['timestamp'].isoformat() not in self.gfs_wcs_access_urls:
         logger.error("Timestamp %s was not found in the list of available GFS timesteps."%(options['timestamp'].isoformat()))
     
     logger.debug("Downloading coverage from url:")
     logger.debug(self.gfs_wcs_access_urls.get(options['timestamp'].isoformat()))
     
     extent=box(*self._grid["bounds"]).buffer(1.0)
     logger.debug("request bounds: %s"%(str(extent.bounds)))      
     
     
     req_url=self.gfs_wcs_access_urls[options['timestamp'].isoformat()]['url']
     cache_key=self.gfs_wcs_access_urls[options['timestamp'].isoformat()]['cache_key']+name
     logger.debug("req_url=%s"%(req_url))
     
     wcs=WebCoverageService(req_url, version='1.0.0')
     meta=wcs.contents[name]
         
     #cov = wcs.getCoverage(identifier=name,bbox=extent.bounds, format="GeoTIFF_Float")
     try:
         cov = wcs.getCoverage(identifier=name, bbox=extent.bounds, format="GeoTIFF_Float")
         filename=os.path.join(self._cache,"%s.tif"%(cache_key))
         logger.debug("WCS: saving file to: %s"%(filename))
         with open(filename, 'wb') as f:
             f.write(cov.read())
         dataset = gdal.Open(filename, gdalconst.GA_ReadOnly)
     except Exception as e:
         logger.error("WCS: failure: %s"%(e))
     
     return self.warp_to_grid(dataset)
Example #41
def get_coverage(wcs_url, layer, verbose=False):
    """Get coverage from Web Coverage Service (WCS) in GeoTIFF format
    
    Input:
       wcs_url: URL for web coverage service. E.g. http://www.aifdr.org:8080/geoserver/ows?
       layer: Coverage layer name as <workspace>:<layer>
       verbose [optional]: Flag controlling the verbosity level. Default is False.
       
    Output:
       GeoTIFF data or None.    
    """
        
    if verbose:
        print('Retrieving %s from %s' % (layer, wcs_url))
            
    wcs = WebCoverageService(wcs_url, version='1.1.1')
    if layer not in wcs.contents.keys():
        return None

    response = wcs.getCoverage(identifier=layer, format='GeoTIFF')
    return response
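A hypothetical call of the helper above, using the wcs_url example from its docstring (the layer name is illustrative):

data = get_coverage('http://www.aifdr.org:8080/geoserver/ows?',
                    'geonode:jakarta_flood_design', verbose=True)
if data is not None:
    with open('coverage.tif', 'wb') as f:
        f.write(data.read())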
Example #42
File: thredds.py Project: bruso/thredds
from owslib.wcs import WebCoverageService
from scipy.io import netcdf
from pyproj import Proj
import io
import datetime
import json


def to_celsius(kelvin):
    return kelvin - 273.15


wcs = WebCoverageService('http://thredds.met.no/thredds/wcs/arome25/arome_metcoop_test2_5km_latest.nc?service=WCS&version=1.0.0&request=GetCapabilities')


def geojson_feature(lon, lat):
    return {
        'type': 'Feature',
        'properties': {},
        'geometry': {'type': 'Point', 'coordinates': [lon, lat]}
    }


def lambert_to_latlon(x, y):
    proj_str = '+proj=lcc +lat_0=63 +lon_0=15 +lat_1=63 +lat_2=63 +no_defs +R=6.371e+06 +units=m'
    p1 = Proj(proj_str)
    lon, lat = p1(x, y, inverse=True)
    return lat, lon


def get_coverage(identifier, bbox):
Example #43
File: wcs.py Project: pcraster/gems
    def provide(self, name, options={}):
        """
        The provider's provide() method returns a numpy array with the
        correct data type and proportions given the layer name and
        possibly some extra options. The readmap() in the model
        will convert the numpy array to a pcraster map when it is 
        requested.
        """
        logging.debug("WCS: provide request for layer '%s'" % (name))
        target_file = os.path.join(self._cache, "%s" % (name))
        crs = None
        srid = None
        dataset = None
        logging.debug("WCS: geometry: %s" % (self._geom.wkt))
        url = urlparse(self._layers[name])
        qs = parse_qs(url.query)
        query_params = {}
        for p in list(qs):
            query_params.update({p: qs[p][0]})

        wcs = WebCoverageService(url.geturl(), version="1.0.0")
        meta = wcs.contents[name]
        mapformat = meta.supportedFormats[0]
        supported_crses = [crs_.code for crs_ in meta.supportedCRS]
        logging.debug("WCS: Service supports the following crs: %s" % (", ".join(map(str, supported_crses))))

        if self._grid["srid"] in supported_crses:
            crs = meta.supportedCRS[supported_crses.index(self._grid["srid"])]
            srid = self._grid["srid"]
        elif 4326 in supported_crses:
            crs = meta.supportedCRS[supported_crses.index(4326)]
            srid = 4326
        else:
            crs = meta.supportedCRS[supported_crses.index(supported_crses[0])]
            srid = supported_crses[0]

        if crs is not None and srid is not None:
            logging.debug("WCS: using %s (epsg:%d) to fetch the file from the wcs server" % (crs, srid))
        else:
            logging.debug("WCS: could not agree upon a projection format to fetch data with")
            raise Exception("WCS: no valid projections found")

        logging.debug("WCS: reprojecting the chunk mask to the required projection")

        project = partial(
            pyproj.transform, pyproj.Proj(init="epsg:%d" % (self._grid["srid"])), pyproj.Proj(init="epsg:%d" % (srid))
        )

        # Add a small buffer to the request so we fetch an area slightly larger
        # than what we really need. This will prevent some edge effects due
        # to the reprojection.
        projected_geom = transform(project, self._geom.buffer(200))

        logging.debug("WCS: original geom: %s" % (self._geom.wkt))
        logging.debug("WCS: reprojected geom: %s" % (projected_geom.wkt))

        try:
            logging.debug("WCS: fetching wcs data in %s" % (crs))
            logging.debug("WCS: saving to: %s" % (target_file + ".tif"))
            cov = wcs.getCoverage(
                identifier=name,
                crs=crs,
                bbox=projected_geom.bounds,
                format=mapformat,
                width=self._grid["cols"],
                height=self._grid["rows"],
                **query_params
            )
            with open(target_file + ".tif", "wb") as f:
                f.write(cov.read())
            dataset = gdal.Open(target_file + ".tif", gdalconst.GA_ReadOnly)
        except Exception as e:
            logger.error("WCS: failure: %s" % (e))

        utm_data = self.warp_to_grid(dataset)
        dataset = None
        return utm_data
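The partial(pyproj.transform, ...) pattern used above is deprecated in pyproj 2 and later. A minimal sketch of the same geometry reprojection with the modern Transformer API, using stand-in values for the grid SRID, the negotiated SRID and the chunk geometry:

from pyproj import Transformer
from shapely.geometry import Point
from shapely.ops import transform

# Stand-ins for self._grid['srid'], the negotiated srid and self._geom above.
src_srid, dst_srid = 28992, 4326
geom = Point(155000, 463000)

# always_xy=True keeps the lon/lat (x, y) axis order of the legacy init= strings.
transformer = Transformer.from_crs("EPSG:%d" % src_srid, "EPSG:%d" % dst_srid, always_xy=True)
projected_geom = transform(transformer.transform, geom.buffer(200))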
Example #44
0
# <nbformat>3.0</nbformat>

# <headingcell level=1>

# Extract data from USGS CMG WCS Service

# <codecell>

from owslib.wcs import WebCoverageService
import numpy as np
import numpy.ma as ma
endpoint='http://coastalmap.marine.usgs.gov/cmgp/services/EastCoast/Mass_Seafloor/MapServer/WCSServer?request=GetCapabilities&service=WCS'

# <codecell>

wcs = WebCoverageService(endpoint,version='1.0.0',timeout=60)

# <codecell>

for k,v in wcs.contents.iteritems():
    print v.title

# <codecell>

lidar = wcs['3']
print lidar.title
print lidar.boundingBoxWGS84
print lidar.timelimits
print lidar.supportedFormats

# <codecell>
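# The notebook excerpt stops after inspecting the coverage metadata. A hedged
# continuation that actually requests a subset might look like this; the bounding
# box is a placeholder that should fall inside lidar.boundingBoxWGS84.
bbox = (-70.9, 41.4, -70.7, 41.6)
output = wcs.getCoverage(identifier='3', bbox=bbox, crs='EPSG:4326',
                         width=400, height=400, format='GeoTIFF')
with open('lidar_subset.tif', 'wb') as f:
    f.write(output.read())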
Example #45
0
def get_records(post_code_for_bounding_box):

    pc_info = utils.post_codes[int(post_code_for_bounding_box)]

    print "Getting images for: %s - %s, %s" % (post_code_for_bounding_box, pc_info['suburb'], pc_info['state'])

    lower_corner = '%s %s' % (float(pc_info['lon']) - 1.0, float(pc_info['lat']) - 0.5)
    lower_corner_sml = '%s %s' % (float(pc_info['lon']) - 0.2, float(pc_info['lat']) - 0.1)
    upper_corner = '%f %f' % (float(pc_info['lon']) + 1.0, float(pc_info['lat']) + 0.5)
    upper_corner_sml = '%f %f' % (float(pc_info['lon']) + 0.2, float(pc_info['lat']) + 0.1)

    body = """<?xml version="1.0" encoding="UTF-8"?>
<csw:GetRecords xmlns:gml="http://www.opengis.net/gml"
xmlns:ogc="http://www.opengis.net/ogc"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:csw="http://www.opengis.net/cat/csw/2.0.2"
outputSchema="http://www.opengis.net/cat/csw/2.0.2"
outputFormat="application/xml" version="2.0.2" service="CSW"
resultType="results" maxRecords="10" nextRecord="0"
xsi:schemaLocation="http://www.opengis.net/cat/csw/2.0.2
http://schemas.opengis.net/csw/2.0.2/CSW-discovery.xsd">
<csw:Query typeNames="csw:Record">
<csw:ElementSetName>full</csw:ElementSetName>
<csw:Constraint version="1.1.0">
<ogc:Filter>
<ogc:And>
<ogc:PropertyIsLike escape="\" singleChar="_" wildCard="%">
<ogc:PropertyName>Title</ogc:PropertyName>
<ogc:Literal>%Landsat%</ogc:Literal>
</ogc:PropertyIsLike>
<ogc:BBOX>
<ogc:PropertyName>ows:BoundingBox</ogc:PropertyName>
<gml:Envelope>
<gml:lowerCorner>""" + lower_corner_sml + """</gml:lowerCorner> 
<gml:upperCorner>""" + upper_corner_sml + """</gml:upperCorner>
</gml:Envelope>
</ogc:BBOX>
</ogc:And>
</ogc:Filter>
</csw:Constraint>
<ogc:SortBy>
<ogc:SortProperty>
<ogc:PropertyName>apiso:TempExtent_begin</ogc:PropertyName>
<ogc:SortOrder>ASC</ogc:SortOrder>
</ogc:SortProperty>
</ogc:SortBy>
</csw:Query>
</csw:GetRecords>"""

    headers = {
        'Accept-Encoding': 'gzip,deflate',
        'Content-Type': 'text/xml;charset=UTF-8',
        'Content-Length': len(body),
        'Host': HOST_NAME,
        'Connection': 'Keep-Alive',
        'User-Agent': 'GovHack - Team A Kicking Wheel'
        }

    #request_path = '%s?%s&format=json' % (HOST_PATH, query_params)
    request_path = '%s?request=GetRecords' % (HOST_PATH)

    conn = httplib.HTTPConnection(HOST_NAME, HOST_PORT)
    conn.request('POST', request_path, body, headers)
    response = conn.getresponse()

    print 'GetRecords: %d %s' % (response.status, response.reason)

    if response.status == 200:
        data = response.read()
        conn.close()
        
        result_tree = ET.ElementTree(ET.fromstring(data))

        namespaces = {
            'csw': 'http://www.opengis.net/cat/csw/2.0.2',
            'dc': 'http://purl.org/dc/elements/1.1/',
            'ows': 'http://www.opengis.net/ows',
            }
        results = result_tree.getroot().findall(
            'csw:SearchResults/csw:Record', namespaces=namespaces
            )

        for result in results:
            get_capabilities_response = result.find(
                "dc:URI[@protocol='OGC:WCS']",
                namespaces=namespaces
                )
            wcs_url = get_capabilities_response.text
            print 'Record URL: %s' % wcs_url

            service = WebCoverageService(wcs_url, version='1.0.0')
            
            for content in service.contents:
                bounding_box_strs = lower_corner_sml.split(' ') + upper_corner_sml.split(' ')
                bounding_box = tuple([float(i) for i in bounding_box_strs])

                print 'Bounding Box: ' + str(bounding_box)

                img = service.getCoverage(identifier=content, bbox=bounding_box, format='GeoTIFF')

                identifier = result.find('dc:identifier', namespaces=namespaces).text
                directory = 'landsat_images/%s' % pc_info['suburb']

                if not os.path.exists(directory):
                    os.makedirs(directory)

                file_name = '%s/%s_%s.png' % (directory, identifier, content)

                print 'Writing file: %s' % file_name

                out = open(file_name, 'wb')
                out.write(img.read())
                out.close()
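For comparison, the same catalogue search can be written with OWSLib's CSW client instead of a hand-built GetRecords payload. A hedged sketch follows; the endpoint and bounding box are placeholders standing in for HOST_NAME/HOST_PATH and the suburb-sized box computed above:

from owslib.csw import CatalogueServiceWeb
from owslib.fes import And, BBox, PropertyIsLike

# Placeholder CSW endpoint and bounding box (minx, miny, maxx, maxy).
csw = CatalogueServiceWeb('http://example.gov.au/geonetwork/srv/eng/csw')
constraint = And([
    PropertyIsLike('dc:title', '%Landsat%'),
    BBox([150.9, -33.9, 151.3, -33.7]),
])
csw.getrecords2(constraints=[constraint], esn='full', maxrecords=10)
for record in csw.records.values():
    print(record.title)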
Example #46
0
File: example.py Project: pcraster/gems
    def fetch(self,name):
        #print " * Fetching layer '%s'"%(name)
        if name in self._layers:
            target_file=os.path.join(self._cache,"%s"%(name))
            url=urlparse(self._layers[name])
            qs=parse_qs(url.query)
            query_params={}
            for p in list(qs):
                query_params.update({p:qs[p][0]})

            print "Creating a coverage url:"
            print url.geturl()
            wcs=WebCoverageService(url.geturl(), version='1.0.0')
            meta=wcs.contents[name]

            mapformat=meta.supportedFormats[0]
            
            supported_crses=[crs.code for crs in meta.supportedCRS]            
            
            try:
                crs=meta.supportedCRS[supported_crses.index(self._grid['srid'])]
            except ValueError:
                raise Exception("WCS provider could not retrieve layer '%s' because it is not available in the UTM grid that the model needs to run at. We need a raster in epsg %d, available are only epsg %s."%(name,self._grid['srid']," ".join(map(str,supported_crses))))

            resx=self._grid['cellsize']
            resy=self._grid['cellsize']
            bbox=self._grid['bbox']
            cov=wcs.getCoverage(identifier=name,crs=crs,bbox=bbox,format=mapformat,resx=resx,resy=resy,**query_params)
            
            #Dict used for converting geotiffs to pcraster format. Any data
            #types not set explicitly will become Float32/VS_SCALARS
            pcraster_valuescale = defaultdict(lambda: ('Float32','VS_SCALAR'))
            pcraster_valuescale.update({
                'Float32':  ('Float32','VS_SCALAR'),
                'Int32':    ('Int32','VS_NOMINAL'),
                'Int16':    ('Int32','VS_NOMINAL'), 
                'Byte':     ('Int32','VS_NOMINAL')
            })
            print " * Downloading %s"%(cov.url)
            with open(target_file+".tif",'wb') as f:
                f.write(cov.read())
            print " * Converting to PCRaster format..."


            dataset=gdal.Open(target_file+".tif",GA_ReadOnly)
            print 'Driver: ', dataset.GetDriver().ShortName,'/', dataset.GetDriver().LongName
            print 'Size is ',dataset.RasterXSize,'x',dataset.RasterYSize, 'x',dataset.RasterCount
            print 'Projection is ',dataset.GetProjection()         
            print 'Geotransform is ',dataset.GetGeoTransform()       
            band = dataset.GetRasterBand(1)
            gdal_type=gdal.GetDataTypeName(band.DataType)
            print 'Band Type=',gdal_type
            pcraster_type=pcraster_valuescale.get(gdal_type)
            print "Pcraster type=",pcraster_type
            
            try:
                #-of PCRaster -co "PCRASTER_VALUESCALE=VS_SCALAR" -ot Float32 -a_nodata 500.1
                c=[
                    '/usr/bin/gdal_translate','-q',
                    '-ot',pcraster_type[0],
                    '-of','PCRaster',
                    '-co','PCRASTER_VALUESCALE=%s'%(pcraster_type[1]),
                    target_file+".tif",target_file+".map"
                ]
                print "Conversion command: "
                print " ".join(c)
                rc=subprocess.call(c)
            except Exception as e:
                print " * Conversion to pcraster format failed!!! Hint: %s"%(e)
            print " * Completed!"
        else:
            print " * Layer not found"
            return None
Example #47
0
# Very simple script demonstrating how to interact with a THREDDS based WCS.
# ---
#
# The GetCapabilities and DescribeCoverage requests for this dataset are: 
# http://cida.usgs.gov/thredds/wcs/prism?service=WCS&version=1.0.0&request=GetCapabilities
# http://cida.usgs.gov/thredds/wcs/prism?service=WCS&version=1.0.0&request=DescribeCoverage
#
# The GetCoverage request equivalent to the example below is:
# http://cida.usgs.gov/thredds/wcs/prism?request=GetCoverage&version=1.0.0&service=WCS&format=GeoTIFF&coverage=tmx&time=1895-01-01T00:00:00Z&bbox=-90,40,-89,41
# ---
# 
# Example to find the equivalent information using OWSLib:
# 
from owslib.wcs import WebCoverageService
wcs=WebCoverageService('http://cida.usgs.gov/thredds/wcs/prism',version='1.0.0')
# Take a look at the contents (coverages) of the wcs.
print wcs.contents
tmax=wcs['tmx']
# Take a look at the attributes of the coverage
dir(tmax)
print tmax.boundingBoxWGS84
print tmax.timepositions
print tmax.supportedFormats
# mock up a simple GetCoverage request.
output=wcs.getCoverage(identifier='tmx',time=['1895-01-01T00:00:00Z'],bbox=(-90,40,-89,41),format='GeoTIFF')
# Write the file out to disk.
f=open('foo.tif','wb')
f.write(output.read())
f.close()
# Extract data from USGS ScienceBase

# <codecell>

%matplotlib inline

# <codecell>

from owslib.wcs import WebCoverageService
import numpy as np
import numpy.ma as ma
endpoint='https://www.sciencebase.gov/catalogMaps/mapping/ows/5638cf1fe4b0d6133fe73040?service=wcs&request=getcapabilities&version=1.1.1'

# <codecell>

wcs = WebCoverageService(endpoint,version='1.1',timeout=60)

# <codecell>

wcs.contents

# <codecell>

for k,v in wcs.contents.iteritems():
    print v.title

# <codecell>

g = wcs['bh_30mbathy']
print g.title
print g.boundingBoxWGS84
Example #49
0
# -*- coding: utf-8 -*-
# <nbformat>3.0</nbformat>

# <headingcell level=1>

# Test ESRI WCS with OWSLib

# <codecell>

from owslib.wcs import WebCoverageService
endpoint='http://olga.er.usgs.gov/stpgis/services/lidar/Bare_Earth_Lidar/MapServer/WCSServer?request=GetCapabilities&service=WCS'

# <codecell>

wcs = WebCoverageService(endpoint,version='1.0.0',timeout=60)

# <codecell>

for k,v in wcs.contents.iteritems():
    print v.title

# <codecell>

wcs['1'].title

# <codecell>

cvg = wcs['1']
print cvg.title
print cvg.boundingBoxWGS84
Example #50
0
    def test_raster_wcs_reprojection(self):
        """UTM Raster can be reprojected by Geoserver and downloaded correctly
        """
        # FIXME (Ole): Still need to do this with assertions

        filename = 'tsunami_max_inundation_depth_BB_utm.asc'
        projected_tif_file = os.path.join(TESTDATA, filename)

        #projected_tif = file_upload(projected_tif_file, overwrite=True)
        projected_tif = save_to_geonode(projected_tif_file,
                                        user=self.user,
                                        overwrite=True)
        check_layer(projected_tif)

        wcs_url = settings.GEOSERVER_BASE_URL + 'wcs'
        wcs = WebCoverageService(wcs_url, version='1.0.0')
        #logger.info(wcs.contents)
        metadata = wcs.contents[projected_tif.typename]
        #logger.info(metadata.grid)
        bboxWGS84 = metadata.boundingBoxWGS84
        #logger.info(bboxWGS84)
        resx = metadata.grid.offsetvectors[0][0]
        resy = abs(float(metadata.grid.offsetvectors[1][1]))
        #logger.info("resx=%s resy=%s" % (str(resx), str(resy)))
        formats = metadata.supportedFormats
        #logger.info(formats)
        supportedCRS = metadata.supportedCRS
        #logger.info(supportedCRS)
        width = metadata.grid.highlimits[0]
        height = metadata.grid.highlimits[1]
        #logger.info("width=%s height=%s" % (width, height))
        gs_cat = Layer.objects.gs_catalog
        cvg_store = gs_cat.get_store(projected_tif.name)
        cvg_layer = gs_cat.get_resource(projected_tif.name, store=cvg_store)
        #logger.info(cvg_layer.request_srs_list)
        #logger.info(cvg_layer.response_srs_list)

        # FIXME: A patch was submitted OWSlib 20110808
        # Can delete the following once patch appears
        # In the future get bboxNative and nativeSRS from get_metadata
        descCov = metadata._service.getDescribeCoverage(projected_tif.typename)
        envelope = (descCov.find(ns('CoverageOffering/') + ns('domainSet/') +
                                 ns('spatialDomain/') +
                                 '{http://www.opengis.net/gml}Envelope'))
        nativeSrs = envelope.attrib['srsName']
        #logger.info(nativeSrs)
        gmlpositions = envelope.findall('{http://www.opengis.net/gml}pos')
        lc = gmlpositions[0].text
        uc = gmlpositions[1].text
        bboxNative = (float(lc.split()[0]), float(lc.split()[1]),
                      float(uc.split()[0]), float(uc.split()[1]))
        #logger.info(bboxNative)
        # ---- END PATCH

        # Make a temp dir to store the saved files
        tempdir = '/tmp/%s' % str(time.time())
        os.mkdir(tempdir)

        # Check that the layer can be downloaded in its native projection
        cvg = wcs.getCoverage(identifier=projected_tif.typename,
                format='GeoTIFF',
                crs=nativeSrs,
                bbox=bboxNative,
                resx=resx,
                resy=resy)

        t = tempfile.NamedTemporaryFile(delete=False,
                                        dir=tempdir)

        out = open(t.name, 'wb')
        out.write(cvg.read())
        out.close()
        #logger.info("GeoTIFF in %s = %s" % (nativeSrs, t.name))
        # TODO: Verify that the file is a valid GeoTiff and that it is
        # _exactly_ the same size and bbox of the original

        # Test that the layer can be downloaded in ARCGRID format
        cvg_layer.supported_formats = cvg_layer.supported_formats + ['ARCGRID']
        gs_cat.save(cvg_layer)
        cvg = wcs.getCoverage(identifier=projected_tif.typename,
                format='ARCGRID',
                crs=nativeSrs,
                bbox=bboxNative,
                resx=resx,
                resy=resy)

        t = tempfile.NamedTemporaryFile(delete=False,
                                    dir=tempdir)

        out = open(t.name, 'wb')
        out.write(cvg.read())
        out.close()
        #logger.info("ARCGRID in %s = %s" % (nativeSrs, t.name))
        # Check that the downloaded file is a valid ARCGRID file and that it
        # the required projection information
        # (FIXME: There is no prj file here. GS bug)

        # Check that the layer can be downloaded in WGS84
        cvg_layer.request_srs_list += ['EPSG:4326']
        cvg_layer.response_srs_list += ['EPSG:4326']
        gs_cat.save(cvg_layer)
        #logger.info(cvg_layer.request_srs_list)
        #logger.info(cvg_layer.response_srs_list)
        cvg = wcs.getCoverage(identifier=projected_tif.typename,
                format='GeoTIFF',
                crs='EPSG:4326',
                bbox=bboxWGS84,
                #resx=0.000202220898116, # Should NOT be hard-coded!
                                         # How do we convert
                #resy=0.000202220898116) # See comments in riab issue #103
                width=width,
                height=height)

        t = tempfile.NamedTemporaryFile(delete=False,
                                    dir=tempdir)

        out = open(t.name, 'wb')
        out.write(cvg.read())
        out.close()
        #logger.info("GeoTIFF in %s = %s" % ("EPSG:4326", t.name))
        # TODO: Verify that the file is a valid GeoTiff and that it is
        # the correct size and bbox based on the resx and resy or width
        # and height specified

        # Check that we can download the layer in another projection
        cvg_layer.request_srs_list += ['EPSG:32356']
        cvg_layer.response_srs_list += ['EPSG:32356']
        cvg_layer.request_srs_list += ['EPSG:900913']
        cvg_layer.response_srs_list += ['EPSG:900913']
        gs_cat.save(cvg_layer)
        #logger.info(cvg_layer.request_srs_list)
        #logger.info(cvg_layer.response_srs_list)
        # How do we get the bboxes for the newly assigned
        # request/response SRS??

        cvg = wcs.getCoverage(identifier=projected_tif.typename,
                format='GeoTIFF',
                crs='EPSG:32356',  # Should not be hardcoded for a test,
                                   # or should use 900913 (need bbox)
                bbox=bboxNative,
                #resx=0.000202220898116, # Should NOT be hard-coded!
                                         # How do we convert
                #resy=0.000202220898116) # See comments in riab issue #103
                width=width,
                height=height)

        t = tempfile.NamedTemporaryFile(delete=False,
                                        dir=tempdir)

        out = open(t.name, 'wb')
        out.write(cvg.read())
        out.close()
Example #51
0
File: tasks.py Project: AIFDR/tsudat2
def download_scenario(user, scenario_id):
    """
    Generate a working directory for running ANUGA
    Create/Copy all of the files to that directory that are necessary
    for running the simulation. Generate a json run file and call run_tsudat.
    Notes here: https://github.com/AIFDR/tsudat2/wiki/Create-anuga-run-script
    """
    create_dir()
    # Get the scenario object from the Database
    scenario = Scenario.objects.get(id=scenario_id)
    
    # the base of the TsuDAT user directory structures from settings.py 
    TsuDATBase = settings.TSUDAT_BASE_DIR
    TsuDATMux = settings.TSUDAT_MUX_DIR

    # change setup value to one of expected strings
    print('original scenario.model_setup=%s' % scenario.model_setup)
    trial_edit = {'t': 'trial', 'T': 'trial', 'trial': 'trial', 'TRIAL': 'trial',
                  'f': 'final', 'F': 'final', 'final': 'final', 'FINAL': 'final'}
    actual_setup = trial_edit.get(scenario.model_setup, 'trial')
    print('actual_setup=%s' % actual_setup)

    # fake a project name                                 ##?
    if not scenario.project.name:                         ##?
        scenario.project.name = _slugify(scenario.name)   ##?
               
    # create the user working directory
    (work_dir, raw_elevations, boundaries, meshes, polygons, gauges,
     topographies, user_dir) = run_tsudat.make_tsudat_dir(TsuDATBase, user.username,
                                                          _slugify(scenario.project.name),
                                                          _slugify(scenario.name),
##?                                                          scenario.model_setup,
                                                          actual_setup,
                                                          scenario.event.tsudat_id)

    project_geom = scenario.project.geom
    project_extent = scenario.project.geom.extent
    centroid = project_geom.centroid

    # This somewhat naively assumes that the whole bounding polygon is in the same zone
    (UTMZone, UTMEasting, UTMNorthing) = LLtoUTM(23, centroid.coords[1], centroid.coords[0])
    if(len(UTMZone) == 3):
        utm_zone = int(UTMZone[0:2])
    else:
        utm_zone = int(UTMZone[0:1])
    if(centroid.coords[1] > 0):
        srid_base = 32600
    else:
        srid_base = 32700
    srid = srid_base + utm_zone
    scenario.project.srid = srid
    scenario.project.save()

    project_geom.transform(srid) 

    # Polygons
    print polygons
    bounding_polygon_file = open(os.path.join(polygons, 'bounding_polygon.csv'), 'w')
    for coord in project_geom.coords[0][:-1]:
        bounding_polygon_file.write('%f,%f\n' % (coord[0], coord[1]))
    bounding_polygon_file.close()
 
    # Internal Polygons 
    internal_polygons = InternalPolygon.objects.filter(project=scenario.project).order_by('value')
    count = 0
    InteriorRegions = []
    for ip in internal_polygons:
        ipfile = open(os.path.join(polygons, 'ip%s.csv' % count), 'w')
        geom = ip.geom
        geom.transform(srid)
        for coord in geom.coords[0][:-1]:
            ipfile.write('%f,%f\n' % (coord[0], coord[1]))
        if(ip.type == 1):
            type = "resolution"
        elif(ip.type == 2):
            type = "friction"
        elif(ip.type == 3):
            type = "aoi"
        InteriorRegions.append([type, ipfile.name, ip.value])
        ipfile.close()
        geom = ipfile = None
        count += 1

    # Raw Elevation Files
    RawElevationFiles = []
    elevation_files = []

    wcs_url = settings.GEOSERVER_BASE_URL + 'wcs'
    wcs = WebCoverageService(wcs_url, version='1.0.0')
    pds = ProjectDataSet.objects.filter(project=scenario.project).order_by('ranking')
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(srid)
    dst_wkt = srs.ExportToPrettyWkt()
    eResampleAlg = None
    create_options = None
    
    output_format = "AAIGrid"
    driver = gdal.GetDriverByName(output_format)
    
    for ds in pds:
        layer = Layer.objects.get(typename=ds.dataset.typename)
        elevation_files.append(layer.typename)
        logger.info(wcs.contents)
        metadata = wcs.contents[layer.typename]
        print metadata.grid
        resx = metadata.grid.offsetvectors[0][0]
        resy = abs(float(metadata.grid.offsetvectors[1][1]))
        formats = metadata.supportedFormats
        print formats
        cvg = wcs.getCoverage(identifier=layer.typename, 
                format='GeoTIFF', 
                crs="EPSG:4326", 
                bbox=(project_extent[0], 
                    project_extent[1], 
                    project_extent[2], 
                    project_extent[3]), 
                resx=resx, 
                resy=resy)
        # Need to make sure the ranking numbers are unique for each project (enforced with DB constraint?)
        tif_file_name = '%s.tif' % ds.ranking
        tif_file_path = os.path.join(raw_elevations, tif_file_name)
        asc_file_name = '%s.asc' % ds.ranking
        asc_file_path = os.path.join(raw_elevations, asc_file_name)
        out = open(tif_file_path, 'wb')
        out.write(cvg.read())
        out.close()
       
        # Warp to UTM
        cmd = "/usr/bin/gdalwarp -srcnodata -9999 -dstnodata -9999 -t_srs EPSG:%d %s %s.tmp" % (srid, tif_file_path, tif_file_path)
        os.system(cmd)
        # Convert to AAIGrid
        cmd = "/usr/bin/gdal_translate -a_nodata -9999 -of %s %s.tmp %s" % (output_format, tif_file_path, asc_file_path)
        os.system(cmd)
        # Remove Intermediate files
        #os.remove(tif_file_path)
        #os.remove(tif_file_path + ".tmp")
      
        # Rename the .prj file to .prj.wkt
        shutil.move(asc_file_path.replace('.asc', '.prj'), asc_file_path.replace('.asc', '.prj.wkt'))
         
        # Generate a prj.adf style prj file
        # NOTE: Not sure if this will work in all cases?
        prj_file_name = '%s.prj' % ds.ranking
        prj_file = open(os.path.join(raw_elevations, prj_file_name), 'w')
        prj_file.write('Projection    UTM\n')
        prj_file.write('Zone          %d\n' % utm_zone)
        prj_file.write('Datum         WGS1984\n')
        prj_file.write('Zunits        NO\n')
        prj_file.write('Units         METERS\n')
        prj_file.write('Spheroid      WGS_1984\n')
        prj_file.write('Xshift        500000\n')
        prj_file.write('Yshift        10000000\n')
        prj_file.write('Parameters\n')
        prj_file.write('NODATA_value  -9999')
        prj_file.close()        

        RawElevationFiles.append(asc_file_path)
         
        '''
        src_ds = gdal.Open( str(tif_file_path), GA_ReadOnly )
        dst_ds_tmp = driver.CreateCopy( str(asc_file_name + '.tmp'), src_ds, 0)
        dst_ds = driver.Create( str(asc_file_path), dst_ds_tmp.RasterXSize, dst_ds_tmp.RasterYSize)
        gdal.ReprojectImage(src_ds, dst_ds, None, dst_wkt)
        dst_ds = None
        dst_ds_tmp = None
        src_ds = None
        '''

    # Landward Boundary
    
    # Iterate over the in the project geometry and add a l or s flag and call landward.landward with them
    points_list = []
    for coord in project_geom.coords[0][:-1]:
        pnt_wkt = 'SRID=%s;POINT(%f %f)' % (srid, coord[0], coord[1])
        land = Land.objects.filter(the_geom__intersects=pnt_wkt)
        if(land.count() > 0):
            points_list.append((coord[0], coord[1], "l")) 
        else:
            points_list.append((coord[0], coord[1], "s")) 
    print('points_list=%s' % str(points_list))
    landward_points = landward.landward(points_list)
    print('landward_points=%s' % str(landward_points))
    
    # Write out the landward points to a file
    landward_boundary_file = open(os.path.join(boundaries, 'landward_boundary.csv'), 'w')
    for pt in landward_points:
        landward_boundary_file.write('%f,%f\n' % (pt[0], pt[1]))
    landward_boundary_file.close()

    # Interior Hazard Points File
    interior_hazard_points_file = open(os.path.join(boundaries, 'interior_hazard_points.csv'), 'w')
    hps = HazardPoint.objects.filter(geom__intersects=project_geom).order_by('tsudat_id')
    for hp in hps:
        the_geom = hp.geom
        latitude=the_geom.coords[1]
        longitude=the_geom.coords[0]
        the_geom.transform(srid)
        interior_hazard_points_file.write('%d,%f,%f,%f,%f\n' % (hp.tsudat_id,longitude,latitude,the_geom.coords[0], the_geom.coords[1]))
    interior_hazard_points_file.close()
    
    # Gauges
    gauge_file = open(os.path.join(gauges, 'gauges.csv'), 'w')
    gauge_file.write('easting,northing,name,elevation\n')
    gauge_points = GaugePoint.objects.filter(project=scenario.project)
    for gauge in gauge_points:
        gauge_geom = gauge.geom
        gauge_geom.transform(srid)
        gauge_file.write('%f,%f,%s,%f\n' % (gauge_geom.coords[0], gauge_geom.coords[1], gauge.name, 0.0))
    gauge_file.close()
   
    # Layers 
    scenario_layers = scenario.output_layers.all()
    layers = []
    for layer in scenario_layers:
        layers.append(layer.name)

    # build the scenario json data file
    date_time = strftime("%Y%m%d%H%M%S", gmtime()) 
    json_file = os.path.join(work_dir, '%s.%s.json' % (_slugify(scenario.name), date_time))

    json_dict = {
                    'user': user.username,
                    'user_directory': user_dir,
                    'project': _slugify(scenario.project.name),
                    'project_id': scenario.project.id,
                    'scenario': _slugify(scenario.name),
                    'scenario_id': scenario.id,
##?                    'setup': scenario.model_setup,
                    'setup': actual_setup,
                    'event_number': scenario.event.tsudat_id,
                    'working_directory': TsuDATBase,
                    'mux_directory': TsuDATMux,
                    'initial_tide': scenario.initial_tidal_stage,
                    'start_time': scenario.start_time,
                    'end_time': scenario.end_time,
                    'smoothing': scenario.smoothing_param,
                    'bounding_polygon_file': bounding_polygon_file.name,
                    'raw_elevation_directory': raw_elevations,
                    'elevation_data_list': RawElevationFiles,
                    'mesh_friction': scenario.default_friction_value,
                    'raster_resolution': scenario.raster_resolution,
                    'export_area': "AOI" if scenario.use_aoi == True else "ALL",
                    'gauge_file': gauge_file.name,
                    'bounding_polygon_maxarea': scenario.project.max_area,
                    'interior_regions_list': InteriorRegions,
                    'interior_hazard_points_file': interior_hazard_points_file.name, 
                    'landward_boundary_file': landward_boundary_file.name,
                    'zone_number': utm_zone,
                    'layers_list': layers, 
                    'get_results_max': True,
                    'get_timeseries': True 
                }

    with open(json_file, 'w') as fd:
        json.dump(json_dict, fd, indent=2, separators=(',', ':'))

    scenario.tsudat_payload = json.dumps(json_dict) 
    scenario.save()
    
    # now run the simulation
    run_tsudat.run_tsudat(json_file)
    scenario.anuga_status = "QUEUE"
    scenario.save()
    return True
Example #52
0
def getNLCDRasterDataForBoundingBox(config, outputDir, bbox, 
                                    coverage=DEFAULT_COVERAGE,
                                    filename='NLCD',
                                    srs='EPSG:4326',
                                    resx=0.000277777777778,
                                    resy=0.000277777777778,
                                    interpolation='near',
                                    fmt=FORMAT_GEOTIFF, 
                                    overwrite=False,
                                    verbose=False,
                                    outfp=sys.stdout):
    """
        Download NLCD rasters from 
        http://raster.nationalmap.gov/arcgis/rest/services/LandCover/USGS_EROS_LandCover_NLCD/MapServer
        
        @param config A Python ConfigParser (not currently used)
        @param outputDir String representing the absolute/relative path of the directory into which output raster should be written
        @param bbox Dict representing the lat/long coordinates and spatial reference of the bounding box area
            for which the raster is to be extracted.  The following keys must be specified: minX, minY, maxX, maxY, srs.
        @param srs String representing the spatial reference of the raster to be returned.
        @param resx Float representing the X resolution of the raster(s) to be returned
        @param resy Float representing the Y resolution of the raster(s) to be returned
        @param interpolation String representing resampling method to use. Must be one of spatialdatalib.utils.RASTER_RESAMPLE_METHOD.
        @param fmt String representing format of raster file.  Must be one of FORMATS.
        @param overwrite Boolean True if existing data should be overwritten
        @param verbose Boolean True if detailed output information should be printed to outfp
        @param outfp File-like object to which verbose output should be printed
    
        @return A tuple (True, url, outFilename) where url is the WCS GetCoverage URL used and
            outFilename is the name of the raster file written to outputDir
    
        @exception Exception if coverage is not known
        @exception Exception if interpolation method is not known
        @exception Exception if fmt is not a known format
        @exception Exception if output already exists but overwrite is False
    """
    if coverage not in COVERAGES:
        raise Exception("Coverage {0} is not known".format(coverage))
    if interpolation not in INTERPOLATION_METHODS:
        raise Exception("Interpolation method {0} is not of a known method {1}".format(interpolation,
                                                                                       INTERPOLATION_METHODS.keys()))
    if fmt not in FORMATS:
        raise Exception("Format {0} is not of a known format {1}".format(fmt, str(FORMATS)))
    if verbose:
        outfp.write("Acquiring NLCD coverage {lctype} from {pub}\n".format(lctype=coverage,
                                                                           pub=DC_PUBLISHER))
    
    outFilename = os.path.extsep.join([filename, FORMAT_EXT[fmt]])
    outFilepath = os.path.join(outputDir, outFilename)
        
    delete = False
    if os.path.exists(outFilepath):
        if not overwrite:
            raise Exception("File {0} already exists, and overwrite is false".format(outFilepath))
        else:
            delete = True
    
    try:
        if delete:
            os.unlink(outFilepath)
        
        wcs = WebCoverageService(URL_BASE, version='1.0.0')
        bbox = [bbox['minX'], bbox['minY'], bbox['maxX'], bbox['maxY']]
        wcsfp = wcs.getCoverage(identifier=COVERAGES[coverage], bbox=bbox,
                                crs=srs,
                                response_crs=srs,
                                resx=resx, # their WCS seems to accept resx, resy in meters
                                resy=resy,
                                format=fmt,
                                interpolation=INTERPOLATION_METHODS[interpolation],
                                **{'band': '1'})
        url = urllib.unquote(wcsfp.geturl())
        f = open(outFilepath, 'wb')
        f.write(wcsfp.read())
        f.close()
        
        return (True, url, outFilename)
    except Exception as e:
        traceback.print_exc(file=outfp)
        raise(e)
    finally:
        # Clean-up
        pass
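A hedged usage sketch for the function above; the bounding box, output directory and raster name below are placeholders, and config may be None because it is not currently used:

# Hypothetical call: fetch the default NLCD coverage for a small lat/long box.
bbox = {'minX': -76.8, 'minY': 39.2, 'maxX': -76.5, 'maxY': 39.4, 'srs': 'EPSG:4326'}
ok, wcs_url, raster_name = getNLCDRasterDataForBoundingBox(config=None, outputDir='/tmp',
                                                           bbox=bbox, verbose=True)
print("%s %s" % (wcs_url, raster_name))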
Example #53
0
def getSoilsRasterDataForBoundingBox(config, outputDir, bbox, 
                                     srs='EPSG:4326',
                                     resx=0.000277777777778,
                                     resy=0.000277777777778,
                                     interpolation='bilinear',
                                     fmt=FORMAT_GEOTIFF, 
                                     overwrite=False,
                                     verbose=False,
                                     outfp=sys.stdout):
    """
        Download soil property rasters from http://www.clw.csiro.au/aclep/soilandlandscapegrid/
        For each property, rasters for the first 1-m of the soil profile will be downloaded
        from which the depth-weighted mean of the property will be calculated and stored in outputDir
    
        @param config A Python ConfigParser (not currently used)
        @param outputDir String representing the absolute/relative path of the directory into which output raster should be written
        @param bbox Dict representing the lat/long coordinates and spatial reference of the bounding box area
            for which the raster is to be extracted.  The following keys must be specified: minX, minY, maxX, maxY, srs.
        @param srs String representing the spatial reference of the raster to be returned.
        @param resx Float representing the X resolution of the raster(s) to be returned
        @param resy Float representing the Y resolution of the raster(s) to be returned
        @param interpolation String representing resampling method to use. Must be one of spatialdatalib.utils.RASTER_RESAMPLE_METHOD.
        @param fmt String representing format of raster file.  Must be one of FORMATS.
        @param overwrite Boolean True if existing data should be overwritten
        @param verbose Boolean True if detailed output information should be printed to outfp
        @param outfp File-like object to which verbose output should be printed
    
        @return A dictionary mapping soil property names to soil property file path and WCS URL, i.e.
            dict[soilPropertyName] = (soilPropertyFilePath, WCS URL)
    
        @exception Exception if interpolation method is not known
        @exception Exception if fmt is not a known format
        @exception Exception if output already exists but overwrite is False
        @exception Exception if a gdal_calc.py command fails
    """
    if interpolation not in RASTER_RESAMPLE_METHOD:
        raise Exception("Interpolation method {0} is not of a known method {1}".format(interpolation,
                                                                                       RASTER_RESAMPLE_METHOD))
    if fmt not in FORMATS:
        raise Exception("Format {0} is not of a known format {1}".format(fmt, str(FORMATS)))
    if verbose:
        outfp.write("Acquiring soils data from {0}\n".format(DC_PUBLISHER))
    
    soilPropertyRasters = {}
    
    #import logging
    #logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
    #owslib_log = logging.getLogger('owslib')
    # Add formatting and handlers as needed
    #owslib_log.setLevel(logging.DEBUG)
    
    # Set-up gdal_calc.py command
    gdalBase = None
    try:
        gdalBase = config.get('GDAL/OGR', 'GDAL_BASE')
    except ConfigParser.NoOptionError:
        gdalBase = os.path.dirname(config.get('GDAL/OGR', 'PATH_OF_GDAL_WARP'))
    
    gdalCmdPath = os.path.join(gdalBase, 'gdal_calc.py')
    if not os.access(gdalCmdPath, os.X_OK):
        raise IOError(errno.EACCES, "The gdal_calc.py binary at %s is not executable" %
                      gdalCmdPath)
    gdalCmdPath = os.path.abspath(gdalCmdPath)
    
    tmpdir = tempfile.mkdtemp()
    #print(tmpdir)
    
    bbox = [bbox['minX'], bbox['minY'], bbox['maxX'], bbox['maxY']]
    
    # For each soil variable, download desired depth layers
    for v in VARIABLE.keys():
        variable = VARIABLE[v]
        
        soilPropertyName = "soil_raster_pct{var}".format(var=v)
        soilPropertyFilename = "{name}.tif".format(name=soilPropertyName)
        soilPropertyFilepathTmp = os.path.join(tmpdir, soilPropertyFilename)
        soilPropertyFilepath = os.path.join(outputDir, soilPropertyFilename)
        
        if verbose:
            outfp.write("Getting attribute {0} ...\n".format(soilPropertyName))
        
        delete = False
        if os.path.exists(soilPropertyFilepath):
            if not overwrite:
                raise Exception("File {0} already exists, and overwrite is false".format(soilPropertyFilepath))
            else:
                delete = True
        
        url = URL_BASE.format(variable=variable)

        wcs = WebCoverageService(url, version='1.0.0')
        (coverages, weights_abs) = _getCoverageIDsAndWeightsForCoverageTitle(wcs, variable)
        
        outfiles = []
        weights = []
        for c in coverages.keys():
            coverage = coverages[c]
            weights.append(weights_abs[c])
            #coverage = c.format(variable=variable)
            wcsfp = wcs.getCoverage(identifier=coverage, bbox=bbox,
                                    crs='EPSG:4326',
                                    resx=resx, # their WCS seems to accept resx, resy in meters
                                    resy=resy,
                                    format=fmt)
            filename = os.path.join(tmpdir, "{coverage}.tif".format(coverage=c))
            outfiles.append(filename)
            f = open(filename, 'wb')
            f.write(wcsfp.read())
            f.close()
        
        # Compute depth-length weighted-average for each coverage using gdal_calc.py
        assert(len(outfiles) == len(COVERAGES))
        gdalCommand = gdalCmdPath
        
        calcStr = '0' # Identity element for addition
        for (i, outfile) in enumerate(outfiles):
            ord = i + 1
            var_label = ordinalToAlpha(ord)
            gdalCommand += " -{var} {outfile}".format(var=var_label, outfile=outfile)
            calcStr += "+({weight}*{var})".format(weight=weights[i],
                                                  var=var_label)
            
        gdalCommand += " --calc='{calc}' --outfile={outfile} --type='Float32' --format=GTiff --co='COMPRESS=LZW'".format(calc=calcStr,
                                                                                                                         outfile=soilPropertyFilepathTmp)     
        #print("GDAL command:\n{0}".format(gdalCommand))
        process = Popen(gdalCommand, cwd=outputDir, shell=True,
                        stdout=PIPE, stderr=PIPE)
        (process_stdout, process_stderr) = process.communicate()
        if process.returncode != 0:
            raise Exception("GDAL command {0} failed, returning {1}\nstdout:\n{2}\nstderr:\n{3}\n.".format(gdalCommand, 
                                                                                                           process.returncode,
                                                                                                           process_stdout,
                                                                                                           process_stderr))
        if verbose:
            outfp.write(process_stdout)
            outfp.write(process_stderr)
    
        # Resample raster
        if delete:
            os.unlink(soilPropertyFilepath)
        resampleRaster(config, outputDir, soilPropertyFilepathTmp, soilPropertyFilename,
                       'EPSG:4326', srs, resx, resy, resampleMethod=interpolation)
    
        soilPropertyRasters[soilPropertyName] = (soilPropertyFilepath, wcs.url)
    
    # Clean-up
    shutil.rmtree(tmpdir)
    
    return soilPropertyRasters
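A hedged usage sketch for the soils function; the bounding box is a placeholder inside the Soil and Landscape Grid of Australia extent, and config must be a ConfigParser carrying the GDAL/OGR section the function reads:

# Hypothetical call: depth-weighted soil property rasters for a small box.
bbox = {'minX': 148.9, 'minY': -35.5, 'maxX': 149.3, 'maxY': -35.1, 'srs': 'EPSG:4326'}
rasters = getSoilsRasterDataForBoundingBox(config, '/tmp/soils', bbox, verbose=True)
for name, (path, wcs_url) in rasters.items():
    print("%s -> %s (%s)" % (name, path, wcs_url))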