Example #1
    def __init__(self, netcdffile, logging):
        """
        First try to setup a class read netcdf files
        (converted with pcr2netcdf.py)

        netcdffile: file to read the forcing data from
        logging: python logging object
        vars: list of variables to get from file
        """

        if os.path.exists(netcdffile):
            self.dataset = netCDF4.Dataset(netcdffile, mode='r')
        else:
            logging.error(os.path.abspath(netcdffile) + " not found!")
            exit(ValueError)


        try:
            self.x = self.dataset.variables['x'][:]
        except KeyError:
            self.x = self.dataset.variables['lon'][:]
        # Now check Y values to see if we must flip the data
        try:
            self.y = self.dataset.variables['y'][:]
        except KeyError:
            self.y = self.dataset.variables['lat'][:]

        x = _pcrut.pcr2numpy(_pcrut.xcoordinate(_pcrut.boolean(_pcrut.cover(1.0))), NaN)[0, :]
        y = _pcrut.pcr2numpy(_pcrut.ycoordinate(_pcrut.boolean(_pcrut.cover(1.0))), NaN)[:, 0]

        (self.latidx,) = logical_and(self.y >= y.min(), self.y < y.max()).nonzero()
        (self.lonidx,) = logical_and(self.x >= x.min(), self.x < x.max()).nonzero()


        logging.info("Reading static input from netCDF file: " + netcdffile + ": " + str(self.dataset).replace('\n', ' '))
Example #2
def readMap(fileName, fileFormat, logger, unzipcmd='pigz -d -k'):
    """ 
    Read PCRaster geographical file into memory
    """
    unzipped = 0
    if not os.path.exists(fileName):
        # try and unzip
        if os.path.exists(fileName + ".gz"):
            os.system(unzipcmd + ' ' + fileName + ".gz")
            logger.info("unzipping: " + fileName + ".gz")
            unzipped = 1

    pcrdata = _pcrut.readmap(fileName)
    x = _pcrut.pcr2numpy(_pcrut.xcoordinate(_pcrut.boolean(_pcrut.cover(1.0))), NaN)[0, :]
    y = _pcrut.pcr2numpy(_pcrut.ycoordinate(_pcrut.boolean(_pcrut.cover(1.0))), NaN)[:, 0]

    FillVal = float(1E31)
    data = _pcrut.pcr2numpy(pcrdata, FillVal)
    if unzipped:
        # Delete the uncompressed file if the compressed one exists
        if os.path.exists(fileName + ".gz"):
            logger.info("Removing: " + fileName)
            os.remove(fileName)


    return x, y, data, FillVal
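
A minimal usage sketch for the readMap function above, assuming the surrounding module has already imported _pcrut and numpy's NaN and that a PCRaster clone map has been set with _pcrut.setclone(); the map path and logger name are hypothetical.

import logging

logger = logging.getLogger("pcr2netcdf")
# Hypothetical PCRaster map; note that fileFormat is not used by this readMap variant
x, y, data, FillVal = readMap("staticmaps/wflow_dem.map", "PCRaster", logger)
print(data.shape, x[0], y[0], FillVal)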
Example #3
    def __init__(self, netcdffile, logging):
        """
        First try to setup a class read netcdf files
        (converted with pcr2netcdf.py)

        netcdffile: file to read the forcing data from
        logging: python logging object
        vars: list of variables to get from file
        """

        if os.path.exists(netcdffile):
            self.dataset = netCDF4.Dataset(netcdffile, mode='r')
        else:
            logging.error(os.path.abspath(netcdffile) + " not found!")
            exit(ValueError)


        try:
            self.x = self.dataset.variables['x'][:]
        except KeyError:
            self.x = self.dataset.variables['lon'][:]
        # Now check Y values to see if we must flip the data
        try:
            self.y = self.dataset.variables['y'][:]
        except KeyError:
            self.y = self.dataset.variables['lat'][:]

        x = _pcrut.pcr2numpy(_pcrut.xcoordinate(_pcrut.boolean(_pcrut.cover(1.0))), NaN)[0, :]
        y = _pcrut.pcr2numpy(_pcrut.ycoordinate(_pcrut.boolean(_pcrut.cover(1.0))), NaN)[:, 0]

        (self.latidx,) = logical_and(self.y >= y.min(), self.y < y.max()).nonzero()
        (self.lonidx,) = logical_and(self.x >= x.min(), self.x < x.max()).nonzero()


        logging.info("Reading static input from netCDF file: " + netcdffile + ": " + str(self.dataset).replace('\n', ' '))
Example #4
def readMap(fileName, fileFormat, logger, unzipcmd='pigz -d -k'):
    """ 
    Read PCRaster geographical file into memory
    """
    unzipped = 0
    if not os.path.exists(fileName):
        # try and unzip
        if os.path.exists(fileName + ".gz"):
            os.system(unzipcmd + ' ' + fileName + ".gz")
            logger.info("unzipping: " + fileName + ".gz")
            unzipped = 1

    pcrdata = _pcrut.readmap(fileName)
    x = _pcrut.pcr2numpy(_pcrut.xcoordinate(_pcrut.boolean(_pcrut.cover(1.0))),
                         NaN)[0, :]
    y = _pcrut.pcr2numpy(_pcrut.ycoordinate(_pcrut.boolean(_pcrut.cover(1.0))),
                         NaN)[:, 0]

    FillVal = float(1E31)
    data = _pcrut.pcr2numpy(pcrdata, FillVal)
    if unzipped:
        # Delete the uncompressed file if the compressed one exists
        if os.path.exists(fileName + ".gz"):
            logger.info("Removing: " + fileName)
            os.remove(fileName)

    return x, y, data, FillVal
Example #5
    def __init__(self,
                 netcdffile,
                 logger,
                 starttime,
                 timesteps,
                 EPSG="EPSG:4326",
                 timestepsecs=86400,
                 metadata={},
                 maxbuf=25,
                 least_significant_digit=None):
        """
        Under construction
        """
        def date_range(start, end, tdelta="days"):
            if tdelta == "days":
                r = (end + dt.timedelta(days=1) - start).days
                return [start + dt.timedelta(days=i) for i in range(r)]
            else:
                r = (end + dt.timedelta(days=1) - start).days * 24
                return [start + dt.timedelta(hours=i) for i in range(r)]

        self.least_significant_digit = least_significant_digit
        self.logger = logger
        # Do not allow a max buffer larger than the number of timesteps
        self.maxbuf = maxbuf if timesteps >= maxbuf else timesteps
        self.ncfile = netcdffile
        self.timesteps = timesteps
        rows = pcraster._pcraster.clone().nrRows()
        cols = pcraster._pcraster.clone().nrCols()
        cellsize = pcraster._pcraster.clone().cellSize()
        yupper = pcraster._pcraster.clone().north()
        xupper = pcraster._pcraster.clone().west()
        x = _pcrut.pcr2numpy(
            _pcrut.xcoordinate(_pcrut.boolean(_pcrut.cover(1.0))), NaN)[0, :]
        y = _pcrut.pcr2numpy(
            _pcrut.ycoordinate(_pcrut.boolean(_pcrut.cover(1.0))), NaN)[:, 0]

        end = starttime + dt.timedelta(seconds=timestepsecs * self.timesteps -
                                       1)
        if timestepsecs == 86400:
            timeList = date_range(starttime, end, tdelta="days")
        else:
            timeList = date_range(starttime, end, tdelta="hours")

        self.timestepbuffer = zeros((self.maxbuf, len(y), len(x)))
        self.bufflst = {}

        globmetadata.update(metadata)

        prepare_nc(self.ncfile,
                   timeList,
                   x,
                   y,
                   globmetadata,
                   logger,
                   Format=netcdfformat,
                   EPSG=EPSG)
Example #6
    def __init__(self, netcdffile, logging, vars=[]):
        """
        First attempt at a class to read netCDF files
        (converted with pcr2netcdf.py)

        netcdffile: file to read the forcing data from
        logging: python logging object
        vars: list of variables to get from file
        """

        if os.path.exists(netcdffile):
            self.dataset = netCDF4.Dataset(netcdffile, mode='r')
        else:
            msg = os.path.abspath(netcdffile) + " not found!"
            logging.error(msg)
            raise ValueError(msg)

        logging.info("Reading state input from netCDF file: " + netcdffile +
                     ": " + str(self.dataset).replace('\n', ' '))
        self.alldat = {}
        a = pcr2numpy(cover(0.0), 0.0).flatten()
        # Determine steps to load in mem based on estimated memory usage
        floatspermb = 1048576 // 4
        maxmb = 40
        self.maxsteps = maxmb * len(a) // floatspermb + 1
        self.fstep = 0
        self.lstep = self.fstep + self.maxsteps

        try:
            self.x = self.dataset.variables['x'][:]
        except KeyError:
            self.x = self.dataset.variables['lon'][:]
        # Now check Y values to see if we must flip the data
        try:
            self.y = self.dataset.variables['y'][:]
        except KeyError:
            self.y = self.dataset.variables['lat'][:]

        x = _pcrut.pcr2numpy(
            _pcrut.xcoordinate(_pcrut.boolean(_pcrut.cover(1.0))), NaN)[0, :]
        y = _pcrut.pcr2numpy(
            _pcrut.ycoordinate(_pcrut.boolean(_pcrut.cover(1.0))), NaN)[:, 0]

        (self.latidx, ) = logical_and(self.y >= y.min(),
                                      self.y < y.max()).nonzero()
        (self.lonidx, ) = logical_and(self.x >= x.min(),
                                      self.x < x.max()).nonzero()

        for var in vars:
            try:
                self.alldat[var] = self.dataset.variables[var][self.fstep:self.
                                                               maxsteps]
            except KeyError:
                self.alldat.pop(var, None)
                logging.warning("Variable " + var +
                                " not found in netcdf file: " + netcdffile)
Example #7
    def __init__(self, netcdffile, logging, vars=[]):
        """
        First attempt at a class to read netCDF files
        (converted with pcr2netcdf.py)

        netcdffile: file to read the forcing data from
        logging: python logging object
        vars: list of variables to get from file
        """

        if os.path.exists(netcdffile):
            self.dataset = netCDF4.Dataset(netcdffile, mode='r')
        else:
            msg = os.path.abspath(netcdffile) + " not found!"
            logging.error(msg)
            raise ValueError(msg)

        logging.info("Reading state input from netCDF file: " + netcdffile + ": " + str(self.dataset).replace('\n', ' '))
        self.alldat = {}
        a = pcr2numpy(cover(0.0), 0.0).flatten()
        # Determine steps to load in mem based on estimated memory usage
        floatspermb = 1048576 // 4
        maxmb = 40
        self.maxsteps = maxmb * len(a) // floatspermb + 1
        self.fstep = 0
        self.lstep = self.fstep + self.maxsteps

        try:
            self.x = self.dataset.variables['x'][:]
        except KeyError:
            self.x = self.dataset.variables['lon'][:]
        # Now check Y values to see if we must flip the data
        try:
            self.y = self.dataset.variables['y'][:]
        except KeyError:
            self.y = self.dataset.variables['lat'][:]


        x = _pcrut.pcr2numpy(_pcrut.xcoordinate(_pcrut.boolean(_pcrut.cover(1.0))), NaN)[0, :]
        y = _pcrut.pcr2numpy(_pcrut.ycoordinate(_pcrut.boolean(_pcrut.cover(1.0))), NaN)[:, 0]

        (self.latidx,) = logical_and(self.y >= y.min(), self.y < y.max()).nonzero()
        (self.lonidx,) = logical_and(self.x >= x.min(), self.x < x.max()).nonzero()

        for var in vars:
            try:
                self.alldat[var] = self.dataset.variables[var][self.fstep:self.maxsteps]
            except KeyError:
                self.alldat.pop(var, None)
                logging.warning("Variable " + var + " not found in netcdf file: " + netcdffile)
Example #8
    def __init__(self, netcdffile, logger, starttime, timesteps, EPSG="EPSG:4326", timestepsecs=86400,
                 metadata={}, zlib=True, Format="NETCDF4",
                 maxbuf=25, least_significant_digit=None):
        """
        Under construction
        """

        self.EPSG = EPSG
        self.zlib = zlib
        self.Format = Format
        self.least_significant_digit = least_significant_digit

        def date_range(start, end, timestepsecs):
            r = int((end + dt.timedelta(seconds=timestepsecs) - start).total_seconds() / timestepsecs)
            return [start + dt.timedelta(seconds=(timestepsecs * i)) for i in range(r)]

        self.logger = logger
        # Do not allow a max buffer larger than the number of timesteps
        self.maxbuf = maxbuf if timesteps >= maxbuf else timesteps
        self.ncfile = netcdffile
        self.timesteps = timesteps
        rows = pcraster._pcraster.clone().nrRows()
        cols = pcraster._pcraster.clone().nrCols()
        cellsize = pcraster._pcraster.clone().cellSize()
        yupper = pcraster._pcraster.clone().north()
        xupper = pcraster._pcraster.clone().west()
        x = _pcrut.pcr2numpy(_pcrut.xcoordinate(_pcrut.boolean(_pcrut.cover(1.0))), NaN)[0, :]
        y = _pcrut.pcr2numpy(_pcrut.ycoordinate(_pcrut.boolean(_pcrut.cover(1.0))), NaN)[:, 0]

        # Shift one timestep as we output at the end
        #starttime = starttime + dt.timedelta(seconds=timestepsecs)
        end = starttime + dt.timedelta(seconds=timestepsecs * (self.timesteps - 1))

        timeList = date_range(starttime, end, timestepsecs)
        self.timestepbuffer = zeros((self.maxbuf, len(y), len(x)))
        self.bufferdirty = True
        self.bufflst = {}

        globmetadata.update(metadata)

        prepare_nc(self.ncfile, timeList, x, y, globmetadata, logger, Format=self.Format, EPSG=EPSG, zlib=self.zlib,
                   least_significant_digit=self.least_significant_digit)

        self.nc_trg = None
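
A minimal construction sketch for the writer __init__ in Example #8. The class name netcdfoutput is hypothetical (the snippet shows only __init__), and it assumes a PCRaster clone map has already been set so x/y can be derived and that prepare_nc and globmetadata exist in the module; the file name and metadata are placeholders.

import datetime as dt
import logging

logger = logging.getLogger("wflow_netcdf")
# Hypothetical class name wrapping the __init__ above
ncout = netcdfoutput("outmaps.nc",
                     logger,
                     starttime=dt.datetime(2000, 1, 1),
                     timesteps=365,
                     timestepsecs=86400,
                     metadata={"title": "model run output"},
                     zlib=True,
                     maxbuf=25)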
Example #9
    def __init__(self, netcdffile, logger, starttime, timesteps, EPSG="EPSG:4326", timestepsecs=86400,
                 metadata={}, zlib=True, Format="NETCDF4",
                 maxbuf=25, least_significant_digit=None):
        """
        Under construction
        """

        self.EPSG = EPSG
        self.zlib = zlib
        self.Format = Format
        self.least_significant_digit = least_significant_digit

        def date_range(start, end, timestepsecs):
            r = int((end + dt.timedelta(seconds=timestepsecs) - start).total_seconds() / timestepsecs)
            return [start + dt.timedelta(seconds=(timestepsecs * i)) for i in range(r)]

        self.logger = logger
        # Do not allow a max buffer larger than the number of timesteps
        self.maxbuf = maxbuf if timesteps >= maxbuf else timesteps
        self.ncfile = netcdffile
        self.timesteps = timesteps
        rows = pcraster._pcraster.clone().nrRows()
        cols = pcraster._pcraster.clone().nrCols()
        cellsize = pcraster._pcraster.clone().cellSize()
        yupper = pcraster._pcraster.clone().north()
        xupper = pcraster._pcraster.clone().west()
        x = _pcrut.pcr2numpy(_pcrut.xcoordinate(_pcrut.boolean(_pcrut.cover(1.0))), NaN)[0, :]
        y = _pcrut.pcr2numpy(_pcrut.ycoordinate(_pcrut.boolean(_pcrut.cover(1.0))), NaN)[:, 0]

        # Shift one timestep as we output at the end
        #starttime = starttime + dt.timedelta(seconds=timestepsecs)
        end = starttime + dt.timedelta(seconds=timestepsecs * (self.timesteps - 1))

        timeList = date_range(starttime, end, timestepsecs)
        self.timestepbuffer = zeros((self.maxbuf, len(y), len(x)))
        self.bufflst = {}
        self.buffdirty = False

        globmetadata.update(metadata)

        prepare_nc(self.ncfile, timeList, x, y, globmetadata, logger, Format=self.Format, EPSG=EPSG, zlib=self.zlib,
                   least_significant_digit=self.least_significant_digit)
Example #10
def readMap(fileName, fileFormat, logger):
    """ 
    Read geographical file into memory
    """

    # Open file for binary-reading
    #pcrdata = _pcrut.readmap(fileName)
    # mapFormat = gdal.GetDriverByName(fileFormat)
    # mapFormat.Register()
    # ds = gdal.Open(fileName)
    # if ds is None:
    #     logger.error('Could not open ' + fileName + '. Something went wrong!! Shutting down')
    #     sys.exit(1)
    #     # Retrieve geoTransform info
    # geotrans = ds.GetGeoTransform()
    # originX = geotrans[0]
    # originY = geotrans[3]
    # resX    = geotrans[1]
    # resY    = geotrans[5]
    # cols = ds.RasterXSize
    # rows = ds.RasterYSize
    # x = linspace(originX+resX/2,originX+resX/2+resX*(cols-1),cols)
    # y = linspace(originY+resY/2,originY+resY/2+resY*(rows-1),rows)
    # # Retrieve raster
    # RasterBand = ds.GetRasterBand(1) # there's only 1 band, starting from 1
    # data = RasterBand.ReadAsArray(0,0,cols,rows)
    # FillVal = RasterBand.GetNoDataValue()
    # RasterBand = None
    # del ds
    # #ds = None

    pcrdata = _pcrut.readmap(fileName)
    x = _pcrut.pcr2numpy(_pcrut.xcoordinate(_pcrut.boolean(_pcrut.cover(1.0))), NaN)[0, :]
    y = _pcrut.pcr2numpy(_pcrut.ycoordinate(_pcrut.boolean(_pcrut.cover(1.0))), NaN)[:, 0]

    FillVal = float(1E31)
    data = _pcrut.pcr2numpy(pcrdata, FillVal)


    return x, y, data, FillVal
Example #11
    def __init__(self, netcdffile, logger, starttime, timesteps, timestepsecs=86400,
                 metadata={}, maxbuf=25, least_significant_digit=None):
        """
        Under construction
        """

        def date_range(start, end, tdelta="days"):
            if tdelta == "days":
                r = (end+dt.timedelta(days=1)-start).days
                return [start+dt.timedelta(days=i) for i in range(r)]
            else:
                r = (end+dt.timedelta(days=1)-start).days * 24
                return [start+dt.timedelta(hours=i) for i in range(r)]

        self.least_significant_digit = least_significant_digit
        self.logger = logger
        # Do not allow a max buffer larger than the number of timesteps
        self.maxbuf = maxbuf if timesteps >= maxbuf else timesteps
        self.ncfile = netcdffile
        self.timesteps = timesteps
        rows = pcraster._pcraster.clone().nrRows()
        cols = pcraster._pcraster.clone().nrCols()
        cellsize = pcraster._pcraster.clone().cellSize()
        yupper = pcraster._pcraster.clone().north()
        xupper = pcraster._pcraster.clone().west()
        x = _pcrut.pcr2numpy(_pcrut.xcoordinate(_pcrut.boolean(_pcrut.cover(1.0))), NaN)[0, :]
        y = _pcrut.pcr2numpy(_pcrut.ycoordinate(_pcrut.boolean(_pcrut.cover(1.0))), NaN)[:, 0]

        end = starttime + dt.timedelta(seconds=timestepsecs * self.timesteps - 1)
        if timestepsecs == 86400:
            timeList = date_range(starttime, end, tdelta="days")
        else:
            timeList = date_range(starttime, end, tdelta="hours")

        self.timestepbuffer = zeros((self.maxbuf, len(y), len(x)))
        self.bufflst = {}

        globmetadata.update(metadata)


        prepare_nc(self.ncfile, timeList, x, y, globmetadata, logger, Format=netcdfformat)
Example #12
    def __init__(self, netcdffile, logging, vars=[]):
        """
        First attempt at a class to read netCDF files
        (converted with pcr2netcdf.py)

        netcdffile: file to read the forcing data from
        logging: python logging object
        vars: list of variables to get from file
        """

        if os.path.exists(netcdffile):
            self.dataset = netCDF4.Dataset(netcdffile, mode='r')
        else:
            msg = os.path.abspath(netcdffile) + " not found!"
            logging.error(msg)
            raise ValueError(msg)

        logging.info("Reading input from netCDF file: " + netcdffile + ": " + str(self.dataset).replace('\n', ' '))
        self.alldat = {}
        a = pcr2numpy(cover(0.0), 0.0).flatten()
        # Determine steps to load in mem based on estimated memory usage
        floatspermb = 1048576 // 4
        maxmb = 40

        maxlentime = len(self.dataset.variables['time'])
        self.maxsteps = minimum(maxmb * len(a) // floatspermb + 1, maxlentime - 1)
        self.fstep = 0
        self.lstep = self.fstep + self.maxsteps

        self.datetime = self.dataset.variables['time'][:]
        if hasattr(self.dataset.variables['time'], 'units'):
            self.timeunits = self.dataset.variables['time'].units
        else:
            self.timeunits = 'Seconds since 1970-01-01 00:00:00'
        if hasattr(self.dataset.variables['time'], 'calendar'):
            self.calendar = self.dataset.variables['time'].calendar
        else:
            self.calendar = 'gregorian'
        self.datetimelist = netCDF4.num2date(self.datetime, self.timeunits, calendar=self.calendar)

        try:
            self.x = self.dataset.variables['x'][:]
        except KeyError:
            self.x = self.dataset.variables['lon'][:]

        # Now check Y values to see if we must flip the data
        try:
            self.y = self.dataset.variables['y'][:]
        except KeyError:
            self.y = self.dataset.variables['lat'][:]

        # test if 1D or 2D array
        if len(self.y.shape) == 1:
            if self.y[0] > self.y[-1]:
                self.flip = False
            else:
                self.flip = True
        else: # not sure if this works
            self.y = self.y[:][0]
            if self.y[0] > self.y[-1]:
                self.flip = False
            else:
                self.flip = True


        x = _pcrut.pcr2numpy(_pcrut.xcoordinate(_pcrut.boolean(_pcrut.cover(1.0))), NaN)[0, :]
        y = _pcrut.pcr2numpy(_pcrut.ycoordinate(_pcrut.boolean(_pcrut.cover(1.0))), NaN)[:, 0]

        # Get average cell size
        acc = diff(x).mean() * 0.25
        if self.flip:
            (self.latidx,) = logical_and(self.y[::-1] + acc >= y.min(), self.y[::-1] <= y.max() + acc).nonzero()
            (self.lonidx,) = logical_and(self.x + acc >= x.min(), self.x <= x.max() + acc).nonzero()
        else:
            (self.latidx,) = logical_and(self.y + acc >= y.min(), self.y <= y.max() + acc).nonzero()
            (self.lonidx,) = logical_and(self.x + acc >= x.min(), self.x <= x.max() + acc).nonzero()

        if len(self.lonidx) != len(x):
            logging.error("error in determining X coordinates in netcdf...")

        if len(self.latidx) != len(y):
            logging.error("error in determining X coordinates in netcdf...")

        for var in vars:
            try:
                self.alldat[var] = self.dataset.variables[var][self.fstep:self.maxsteps]
            except KeyError:
                self.alldat.pop(var, None)
                logging.warning("Variable " + var + " not found in netcdf file: " + netcdffile)
Example #13
            mapstackname.append(a)
            var.append(a)
            varname.append(a)

    # Use first timestep as clone-map
    logger = setlogger('pcr2netcdf.log', 'pcr2netcdf', thelevel=logging.DEBUG)

    count = 1
    below_thousand = count % 1000
    above_thousand = count // 1000
    clonemapname = str(mapstackname[0] + '%0' + str(8 - len(mapstackname[0])) +
                       '.f.%03.f') % (above_thousand, below_thousand)
    clonemap = os.path.join(mapstackfolder, clonemapname)
    _pcrut.setclone(clonemap)

    x = _pcrut.pcr2numpy(_pcrut.xcoordinate(_pcrut.boolean(_pcrut.cover(1.0))),
                         NaN)[0, :]
    y = _pcrut.pcr2numpy(_pcrut.ycoordinate(_pcrut.boolean(_pcrut.cover(1.0))),
                         NaN)[:, 0]

    start = dt.datetime.strptime(startstr, "%d-%m-%Y %H:%M:%S")
    end = dt.datetime.strptime(endstr, "%d-%m-%Y %H:%M:%S")
    if timestepsecs == 86400:
        timeList = date_range_peryear(start, end, tdelta="days")
    else:
        timeList = date_range_peryear(start, end, tdelta="hours")

    if inifile is not None:
        inimetadata = getnetcdfmetafromini(inifile)
        metadata.update(inimetadata)
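
For reference, a short sketch of how the clone-map name pattern in Example #13 expands for the first timestep; the stack prefix "P" is a hypothetical example.

count = 1
below_thousand = count % 1000
above_thousand = count // 1000
prefix = "P"  # hypothetical map-stack prefix
# 'P%07.f.%03.f' % (0, 1) -> 'P0000000.001', the classic 8.3 PCRaster stack name
clonemapname = str(prefix + '%0' + str(8 - len(prefix)) + '.f.%03.f') % (above_thousand, below_thousand)
print(clonemapname)  # P0000000.001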
Example #14
    def __init__(self, netcdffile, logging, vars=[]):
        """
        First attempt at a class to read netCDF files
        (converted with pcr2netcdf.py)

        netcdffile: file to read the forcing data from
        logging: python logging object
        vars: list of variables to get from file
        """

        self.fname = netcdffile
        if os.path.exists(netcdffile):
            self.dataset = netCDF4.Dataset(netcdffile, mode="r")
        else:
            msg = os.path.abspath(netcdffile) + " not found!"
            logging.error(msg)
            raise ValueError(msg)

        logging.info("Reading state input from netCDF file: " + netcdffile)
        self.alldat = {}
        a = pcr2numpy(cover(0.0), 0.0).flatten()
        # Determine steps to load in mem based on estimated memory usage
        floatspermb = 1048576 // 4
        maxmb = 40
        self.maxsteps = maxmb * len(a) // floatspermb + 1
        self.fstep = 0
        self.lstep = self.fstep + self.maxsteps

        self.datetime = self.dataset.variables["time"][:]
        if hasattr(self.dataset.variables["time"], "units"):
            self.timeunits = self.dataset.variables["time"].units
        else:
            self.timeunits = "Seconds since 1970-01-01 00:00:00"
        if hasattr(self.dataset.variables["time"], "calendar"):
            self.calendar = self.dataset.variables["time"].calendar
        else:
            self.calendar = "gregorian"
        self.datetimelist = netCDF4.num2date(self.datetime,
                                             self.timeunits,
                                             calendar=self.calendar)

        try:
            self.x = self.dataset.variables["x"][:]
        except KeyError:
            self.x = self.dataset.variables["lon"][:]

        # Now check Y values to see if we must flip the data
        try:
            self.y = self.dataset.variables["y"][:]
        except KeyError:
            self.y = self.dataset.variables["lat"][:]

        # test if 1D or 2D array
        if len(self.y.shape) == 1:
            if self.y[0] > self.y[-1]:
                self.flip = False
            else:
                self.flip = True
        else:  # not sure if this works
            self.y = self.y[:][0]
            if self.y[0] > self.y[-1]:
                self.flip = False
            else:
                self.flip = True

        x = _pcrut.pcr2numpy(
            _pcrut.xcoordinate(_pcrut.boolean(_pcrut.cover(1.0))), NaN)[0, :]
        y = _pcrut.pcr2numpy(
            _pcrut.ycoordinate(_pcrut.boolean(_pcrut.cover(1.0))), NaN)[:, 0]

        # Get average cell size
        acc = (
            diff(x).mean() * 0.25
        )  # non-exact match needed because of possible rounding problems
        if self.flip:
            (self.latidx, ) = logical_and(
                self.y[::-1] + acc >= y.min(),
                self.y[::-1] <= y.max() + acc).nonzero()
            (self.lonidx, ) = logical_and(self.x + acc >= x.min(),
                                          self.x <= x.max() + acc).nonzero()
        else:
            (self.latidx, ) = logical_and(self.y + acc >= y.min(),
                                          self.y <= y.max() + acc).nonzero()
            (self.lonidx, ) = logical_and(self.x + acc >= x.min(),
                                          self.x <= x.max() + acc).nonzero()

        if len(self.lonidx) != len(x):
            logging.error("error in determining X coordinates in netcdf...")
            logging.error("model expects: " + str(x.min()) + " to " +
                          str(x.max()))
            logging.error("got coordinates  netcdf: " + str(self.x.min()) +
                          " to " + str(self.x.max()))
            logging.error("got len from  netcdf x: " + str(len(x)) +
                          " expected " + str(len(self.lonidx)))
            raise ValueError("X coordinates in netcdf do not match model")

        if len(self.latidx) != len(y):
            logging.error("error in determining Y coordinates in netcdf...")
            logging.error("model expects: " + str(y.min()) + " to " +
                          str(y.max()))
            logging.error("got from  netcdf: " + str(self.y.min()) + " to " +
                          str(self.y.max()))
            logging.error("got len from  netcdf y: " + str(len(y)) +
                          " expected " + str(len(self.latidx)))
            raise ValueError("Y coordinates in netcdf do not match model")

        for var in vars:
            try:
                self.alldat[var] = self.dataset.variables[var][self.fstep:self.
                                                               maxsteps]
            except KeyError:
                self.alldat.pop(var, None)
                logging.warning("Variable " + var +
                                " not found in netcdf file: " + netcdffile)
Example #15
    def __init__(self, netcdffile, logging, vars=[]):
        """
        First attempt at a class to read netCDF files
        (converted with pcr2netcdf.py)

        netcdffile: file to read the forcing data from
        logging: python logging object
        vars: list of variables to get from file
        """

        if os.path.exists(netcdffile):
            self.dataset = netCDF4.Dataset(netcdffile, mode='r')
        else:
            msg = os.path.abspath(netcdffile) + " not found!"
            logging.error(msg)
            raise ValueError(msg)

        logging.info("Reading input from netCDF file: " + netcdffile + ": " +
                     str(self.dataset).replace('\n', ' '))
        self.alldat = {}
        a = pcr2numpy(cover(0.0), 0.0).flatten()
        # Determine steps to load in mem based on estimated memory usage
        floatspermb = 1048576 // 4
        maxmb = 40

        maxlentime = len(self.dataset.variables['time'])
        self.maxsteps = minimum(maxmb * len(a) // floatspermb + 1,
                                maxlentime - 1)
        self.fstep = 0
        self.lstep = self.fstep + self.maxsteps

        self.datetime = self.dataset.variables['time'][:]
        if hasattr(self.dataset.variables['time'], 'units'):
            self.timeunits = self.dataset.variables['time'].units
        else:
            self.timeunits = 'Seconds since 1970-01-01 00:00:00'
        if hasattr(self.dataset.variables['time'], 'calendar'):
            self.calendar = self.dataset.variables['time'].calendar
        else:
            self.calendar = 'gregorian'
        self.datetimelist = netCDF4.num2date(self.datetime,
                                             self.timeunits,
                                             calendar=self.calendar)

        try:
            self.x = self.dataset.variables['x'][:]
        except KeyError:
            self.x = self.dataset.variables['lon'][:]

        # Now check Y values to see if we must flip the data
        try:
            self.y = self.dataset.variables['y'][:]
        except KeyError:
            self.y = self.dataset.variables['lat'][:]

        # test if 1D or 2D array
        if len(self.y.shape) == 1:
            if self.y[0] > self.y[-1]:
                self.flip = False
            else:
                self.flip = True
        else:  # not sure if this works
            self.y = self.y[:][0]
            if self.y[0] > self.y[-1]:
                self.flip = False
            else:
                self.flip = True

        x = _pcrut.pcr2numpy(
            _pcrut.xcoordinate(_pcrut.boolean(_pcrut.cover(1.0))), NaN)[0, :]
        y = _pcrut.pcr2numpy(
            _pcrut.ycoordinate(_pcrut.boolean(_pcrut.cover(1.0))), NaN)[:, 0]

        # Get average cell size
        acc = diff(x).mean() * 0.25
        if self.flip:
            (self.latidx, ) = logical_and(
                self.y[::-1] + acc >= y.min(),
                self.y[::-1] <= y.max() + acc).nonzero()
            (self.lonidx, ) = logical_and(self.x + acc >= x.min(),
                                          self.x <= x.max() + acc).nonzero()
        else:
            (self.latidx, ) = logical_and(self.y + acc >= y.min(),
                                          self.y <= y.max() + acc).nonzero()
            (self.lonidx, ) = logical_and(self.x + acc >= x.min(),
                                          self.x <= x.max() + acc).nonzero()

        if len(self.lonidx) != len(x):
            logging.error("error in determining X coordinates in netcdf...")

        if len(self.latidx) != len(y):
            logging.error("error in determining X coordinates in netcdf...")

        for var in vars:
            try:
                self.alldat[var] = self.dataset.variables[var][self.fstep:self.
                                                               maxsteps]
            except KeyError:
                self.alldat.pop(var, None)
                logging.warning("Variable " + var +
                                " not found in netcdf file: " + netcdffile)
Example #16
    count = 1
    below_thousand = count % 1000
    above_thousand = count // 1000
    clonemapname = str(mapstackname[0] + "%0" + str(8 - len(mapstackname[0])) + ".f.%03.f") % (
        above_thousand,
        below_thousand,
    )
    clonemap = os.path.join(mapstackfolder, clonemapname)

    if Singlemap:
        clonemap = mapstackname[0]

    _pcrut.setclone(clonemap)

    x = _pcrut.pcr2numpy(_pcrut.xcoordinate(_pcrut.boolean(_pcrut.cover(1.0))), NaN)[0, :]
    y = _pcrut.pcr2numpy(_pcrut.ycoordinate(_pcrut.boolean(_pcrut.cover(1.0))), NaN)[:, 0]

    start = dt.datetime.strptime(startstr, "%d-%m-%Y %H:%M:%S")

    if Singlemap:
        end = start
    else:
        end = dt.datetime.strptime(endstr, "%d-%m-%Y %H:%M:%S")

    if timestepsecs == 86400:
        if perYear:
            timeList = date_range_peryear(start, end, tdelta="days")
        else:
            timeList = date_range(start, end, timestepsecs)
    else:
Example #17
    count = 1
    below_thousand = count % 1000
    above_thousand = count // 1000

    clonemapname = str(mapstackname[0] + '%0' + str(8 - len(mapstackname[0])) +
                       '.f.%03.f') % (above_thousand, below_thousand)
    clonemap = os.path.join(mapstackfolder, clonemapname)
    if Singlemap:
        clonemap = mapstackname[0]

    if IFormat == 'PCRaster':
        _pcrut.setclone(clonemap)

        x = _pcrut.pcr2numpy(
            _pcrut.xcoordinate(_pcrut.boolean(_pcrut.cover(1.0))), NaN)[0, :]
        y = _pcrut.pcr2numpy(
            _pcrut.ycoordinate(_pcrut.boolean(_pcrut.cover(1.0))), NaN)[:, 0]
    else:
        x, y, data, FillVal = _readMap(clonemap, IFormat, logger)

    start = dt.datetime.strptime(startstr, "%d-%m-%Y %H:%M:%S")

    if Singlemap:
        end = start
    else:
        end = dt.datetime.strptime(endstr, "%d-%m-%Y %H:%M:%S")

    if timestepsecs == 86400:
        if perYear:
            timeList = date_range_peryear(start, end, tdelta="days")