Example 1
def absolute(u, v, lon, lat):
    """absolute(u,v,lon,lat):
    Calculates the absolute vorticity (f + curl(u,v)) of a wind field using
    a basic centred difference scheme. The centred differencing means
    the returned array is reduced in size by 2 elements in each
    dimension.

    zeta((len(lat)-2,len(lon)-2)) = vorticity.absolute(u,v,lon,lat)
    """
    dx = numpy.zeros((len(lat), len(lon)-2))
    dy = numpy.zeros((len(lat)-2, len(lon)))
    du = numpy.zeros((len(lat)-2, len(lon)))
    dv = numpy.zeros((len(lat), len(lon)-2))
    zeta = numpy.zeros((len(lat)-2, len(lon)-2))

    for i in xrange(1, len(lon)-1):
        for j in xrange(0, len(lat)):
            dx[j,i-1] = metutils.convert((lon[i+1]-lon[i-1])*numpy.cos(numpy.pi*lat[j]/180.), "deg", "m")
            dv[j,i-1] = v[i+1,j]-v[i-1,j]

    for i in xrange(0, len(lon)):
        for j in xrange(1, len(lat)-1):
            dy[j-1,i] = metutils.convert((lat[j+1]-lat[j-1]), "deg", "m")
            du[j-1,i] = u[i,j+1]-u[i,j-1]

    for i in xrange(len(lat)-2):
        for j in xrange(len(lon)-2):
            zeta[i,j] = dv[i,j]/dx[i,j] - du[i,j]/dy[i,j] + metutils.coriolis(lat[i+1])

    return zeta
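A minimal usage sketch for the function above, assuming ``u`` and ``v`` are shaped ``(len(lon), len(lat))`` as implied by the indexing, and that ``numpy`` and the project's ``metutils`` module are importable; the grid and wind values are purely illustrative.

import numpy

lon = numpy.arange(150., 160., 1.)    # 1-degree longitude grid (illustrative)
lat = numpy.arange(-20., -10., 1.)    # 1-degree latitude grid (illustrative)

# Synthetic wind field: no zonal flow, meridional flow varying with longitude.
# Shaped (len(lon), len(lat)) to match the u[i, j] / v[i, j] indexing above.
u = numpy.zeros((len(lon), len(lat)))
v = numpy.tile(numpy.linspace(-5., 5., len(lon))[:, None], (1, len(lat)))

zeta = absolute(u, v, lon, lat)       # shape: (len(lat)-2, len(lon)-2)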
Example 2
def filterPressure(pressure,
                   inputPressureUnits='hPa',
                   missingValue=sys.maxint):
    """
    Filter pressure values to remove any non-physical values.

    :param pressure: input pressure values to check.
    :param str inputPressureUnits: The units of the pressure values.
                     Can be one of ``hPa``, ``Pa``, ``kPa``,
                     ``Pascals`` or ``mmHg``.
    :param missingValue: replace all null values in the input data
                         with this value.
    :type pressure: :class:`numpy.ndarray`
    :type missingValue: int or float (default ``sys.maxint``)

    :returns: :class:`numpy.ndarray` with only valid pressure values.

    """

    novalue_index = np.where(pressure == missingValue)
    pressure = metutils.convert(pressure, inputPressureUnits, "hPa")
    pressure[novalue_index] = missingValue

    # Convert any non-physical central pressure values to maximum integer
    # This is required because IBTrACS has a mix of missing value codes
    # (i.e. -999, 0, 9999) in the same global dataset.
    pressure = np.where((pressure < 600) | (pressure > 1100), missingValue,
                        pressure)
    return pressure
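A small hedged usage sketch for the function above; the values are purely illustrative and assume ``numpy`` is imported as ``np`` and ``sys`` is available, as in the surrounding code.

raw = np.array([1004.2, -999., 0., 950.5, sys.maxint], 'd')
clean = filterPressure(raw, inputPressureUnits='hPa', missingValue=sys.maxint)
# -999 and 0 fall outside the 600-1100 hPa range and are replaced with
# missingValue; entries already equal to missingValue are preserved.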
Example 3
def filterPressure(pressure, inputPressureUnits='hPa',
                   missingValue=sys.maxint):
    """
    Filter pressure values to remove any non-physical values.

    :param pressure: input pressure values to check.
    :param str inputPressureUnits: The units of the pressure values.
                     Can be one of ``hPa``, ``Pa``, ``kPa``,
                     ``Pascals`` or ``mmHg``.
    :param missingValue: replace all null values in the input data
                         with this value.
    :type pressure: :class:`numpy.ndarray`
    :type missingValue: int or float (default ``sys.maxint``)

    :returns: :class:`numpy.ndarray` with only valid pressure values.
    
    """

    novalue_index = np.where(pressure == missingValue)
    pressure = metutils.convert(pressure, inputPressureUnits, "hPa")
    pressure[novalue_index] = missingValue

    # Convert any non-physical central pressure values to maximum integer
    # This is required because IBTrACS has a mix of missing value codes
    # (i.e. -999, 0, 9999) in the same global dataset.
    pressure = np.where((pressure < 600) | (pressure > 1100),
                        missingValue, pressure)
    return pressure
Example 4
File: grid.py Project: squireg/tcrm
def grdSave(filename, data, lon, lat, delta, delimiter=' ', nodata=-9999,
            fmt='%.10e', coords='latlon'):
    """
    Save formatted data to an ascii grid format file.
    The files have 6 header lines describing the data, followed by the
    data in a gridded format.

    Headers:
    ncols
    nrows
    xllcorner
    yllcorner
    cellsize
    NODATA_value

    Usage:
    grdSave(filename, data, lon, lat, delta, delimiter=' ',
            nodata=-9999, fmt='%.10e', coords='latlon')
    """

    if filename and os.path.isfile(filename):
        if filename.endswith('.gz'):
            import gzip
            fh = gzip.open(filename, 'wb')
        else:
            fh = file(filename, 'w')
    elif hasattr(filename, 'seek'):
        fh = filename
    else:
        try:
            fh = open(filename,'w')
        except:
            raise ValueError('Filename must be a string or file handle')

    if coords == 'UTM':
        zone, xllcorner, yllcorner = LLtoUTM(lat.min(),lon.min())
        delta = metutils.convert(delta, "deg", "m")
    else:
        # Assume geographic coordinates
        xllcorner = lon.min()
        yllcorner = lat.min()

    fh.write('ncols         '+str(len(lon))+'\n')
    fh.write('nrows         '+str(len(lat))+'\n')
    fh.write('xllcorner     '+str(xllcorner)+'\n')
    fh.write('yllcorner     '+str(yllcorner)+'\n')
    fh.write('cellsize      '+str(delta)+'\n')
    fh.write('NODATA_value  '+str(nodata)+'\n')
    X = numpy.array(data)
    origShape = None
    if len(X.shape) == 1:
        origShape = X.shape
        X.shape = len(X), 1
    for row in X:
        fh.write(delimiter.join([fmt%val for val in row]) + '\n')
    fh.close()
    if origShape is not None:
        X.shape = origShape
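A hedged usage sketch for ``grdSave`` with synthetic data; the output filename, grid and values are purely illustrative.

lon = numpy.arange(130., 140., 0.5)
lat = numpy.arange(-20., -10., 0.5)
data = numpy.random.random((len(lat), len(lon)))   # one row per latitude
grdSave('example_grid.txt', data, lon, lat, delta=0.5)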
Example 5
def absolute(u, v, lon, lat):
    """
    Calculates the absolute vorticity (f + curl(u,v)) of a wind field using
    a basic centred difference scheme. The centred differencing means
    the returned array is reduced in size by 2 elements in each
    dimension.

    :param u: 2-d array of eastward vector component.
    :param v: 2-d array of northward vector component.
    :param lon: 1-d array of longitudes of grid that defines the vector field.
    :param lat: 1-d array of latitudes of grid that defines the vector field.

    :type u: :class:`numpy.ndarray`
    :type v: :class:`numpy.ndarray`
    :type lon: :class:`numpy.ndarray`
    :type lat: :class:`numpy.ndarray`

    :return: 2-d :class:`numpy.ndarray` of absolute vorticity values.

    """
    dx = np.zeros((len(lat), len(lon) - 2))
    dy = np.zeros((len(lat) - 2, len(lon)))
    du = np.zeros((len(lat) - 2, len(lon)))
    dv = np.zeros((len(lat), len(lon) - 2))
    zeta = np.zeros((len(lat) - 2, len(lon) - 2))

    for i in xrange(1, len(lon) - 1):
        for j in xrange(0, len(lat)):
            dx[j, i-1] = metutils.convert((lon[i+1] - lon[i-1]) * \
                                          np.cos(np.pi*lat[j]/180.),
                                          "deg", "m")
            dv[j, i - 1] = v[i + 1, j] - v[i - 1, j]

    for i in xrange(0, len(lon)):
        for j in xrange(1, len(lat) - 1):
            dy[j - 1, i] = metutils.convert((lat[j + 1] - lat[j - 1]), "deg",
                                            "m")
            du[j - 1, i] = u[i, j + 1] - u[i, j - 1]

    for i in xrange(len(lat) - 2):
        for j in xrange(len(lon) - 2):
            zeta[i, j] = dv[i, j]/dx[i, j] - du[i, j]/dy[i, j] + \
               metutils.coriolis(lat[i+1])

    return zeta
Example 6
def absolute(u, v, lon, lat):
    """
    Calculates the absolute vorticity (f + curl(u,v)) of a wind field using
    a basic centred difference scheme. The centred differencing means
    the returned array is reduced in size by 2 elements in each
    dimension.

    :param u: 2-d array of eastward vector component.
    :param v: 2-d array of northward vector component.
    :param lon: 1-d array of longitudes of grid that defines the vector field.
    :param lat: 1-d array of latitudes of grid that defines the vector field.

    :type u: :class:`numpy.ndarray`
    :type v: :class:`numpy.ndarray`
    :type lon: :class:`numpy.ndarray`
    :type lat: :class:`numpy.ndarray`

    :return: 2-d :class:`numpy.ndarray` of absolute vorticity values.

    """
    dx = np.zeros((len(lat), len(lon) - 2))
    dy = np.zeros((len(lat) - 2, len(lon)))
    du = np.zeros((len(lat) - 2, len(lon)))
    dv = np.zeros((len(lat), len(lon) - 2))
    zeta = np.zeros((len(lat) - 2, len(lon) - 2))

    for i in xrange(1, len(lon) - 1):
        for j in xrange(0, len(lat)):
            dx[j, i-1] = metutils.convert((lon[i+1] - lon[i-1]) * \
                                          np.cos(np.pi*lat[j]/180.),
                                          "deg", "m")
            dv[j, i-1] = v[i+1, j] - v[i-1, j]

    for i in xrange(0, len(lon)):
        for j in xrange(1, len(lat) - 1):
            dy[j-1, i] = metutils.convert((lat[j+1] - lat[j-1]), "deg", "m")
            du[j-1, i] = u[i, j+1] - u[i, j-1]

    for i in xrange(len(lat) - 2):
        for j in xrange(len(lon) - 2):
            zeta[i, j] = dv[i, j]/dx[i, j] - du[i, j]/dy[i, j] + \
               metutils.coriolis(lat[i+1])

    return zeta
Example 7
def filterPressure(pressure, inputPressureUnits='hPa',
                   missingValue=sys.maxint):
    """
    Filter pressure values to remove any non-physical values
    """

    novalue_index = np.where(pressure == missingValue)
    pressure = metutils.convert(pressure, inputPressureUnits, "hPa")
    pressure[novalue_index] = missingValue

    # Convert any non-physical central pressure values to maximum integer
    # This is required because IBTrACS has a mix of missing value codes
    # (i.e. -999, 0, 9999) in the same global dataset.
    pressure = np.where((pressure < 600) | (pressure > 1100),
                        missingValue, pressure)
    return pressure
Example 8
def ltmPressure(jdays, time, lon, lat, ncfile):
    """
    Extract pressure value from a daily long-term mean SLP dataset at the
    given day of year and lon,lat position.
    To use this function (and hence some form of daily LTM SLP data) requires
    knowledge of the day of year.

    :param jdays: Julian day (day of year) values.
    :param time: Time of day for each observation (fraction of a day).
    :param lon: Longitude of TC position.
    :param lat: Latitude of TC position.
    :param str ncfile: Path to netCDF file containing daily long-term mean
                       sea level pressure data.

    :type  jdays: :class:`numpy.ndarray`
    :type  time: :class:`numpy.ndarray`
    :type  lon: :class:`numpy.ndarray`
    :type  lat: :class:`numpy.ndarray`

    :returns: :class:`numpy.ndarray` of long-term mean sea level pressure
              values at the day of year and positions given.
    """
    jtime = jdays + np.modf(time)[0]
    coords = np.array([jtime, lat, lon])

    LOG.debug("Sampling data from MSLP data in {0}".format(ncfile))
    ncobj = nctools.ncLoadFile(ncfile)
    slpunits = getattr(ncobj.variables['slp'], 'units')

    data = nctools.ncGetData(ncobj, 'slp')
    # Get the MSLP by interpolating to the location of the TC:
    penv = interp3d.interp3d(data,
                             coords,
                             scale=[365., 180., 360.],
                             offset=[0., -90., 0.])
    penv = metutils.convert(penv, slpunits, 'hPa')
    del data
    ncobj.close()
    del ncobj

    return penv
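A hedged usage sketch for ``ltmPressure``; the netCDF filename is hypothetical and assumed to contain a daily long-term mean ``slp`` variable on a (time, lat, lon) grid as described in the docstring.

jdays = np.array([32., 32.])     # day of year of each observation
time = np.array([0.25, 0.5])     # time of day as a fraction of a day
lon = np.array([160.5, 161.0])
lat = np.array([-15.2, -15.6])
penv = ltmPressure(jdays, time, lon, lat, 'slp.day.ltm.nc')   # returns hPa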
Example 9
def ltmPressure(jdays, time, lon, lat, ncfile):
    """
    Extract pressure value from a daily long-term mean SLP dataset at the
    given day of year and lon,lat position.
    To use this function (and hence some form of daily LTM SLP data) requires
    knowledge of the day of year.

    :param jdays: Julian day (day of year) values.
    :param time: Time of day for each observation (fraction of a day).
    :param lon: Longitude of TC position.
    :param lat: Latitude of TC position.
    :param str ncfile: Path to netCDF file containing daily long-term mean
                       sea level pressure data.

    :type  jdays: :class:`numpy.ndarray`
    :type  time: :class:`numpy.ndarray` 
    :type  lon: :class:`numpy.ndarray`
    :type  lat: :class:`numpy.ndarray`

    :returns: :class:`numpy.ndarray` of long-term mean sea level pressure
              values at the day of year and positions given. 
    """
    jtime = jdays + np.modf(time)[0]
    coords = np.array([jtime, lat, lon])

    logger.debug("Sampling data from MSLP data in {0}".format(ncfile))
    ncobj = nctools.ncLoadFile(ncfile)
    slpunits = getattr(ncobj.variables['slp'], 'units')

    data = nctools.ncGetData(ncobj, 'slp')
    # Get the MSLP by interpolating to the location of the TC:
    penv = interp3d.interp3d(data, coords, scale=[365., 180., 360.],
                             offset=[0., -90., 0.])
    penv = metutils.convert(penv, slpunits, 'hPa')
    del data
    ncobj.close()
    del ncobj

    return penv
Example 10
def ltmPressure(jdays, time, lon, lat, ncfile):
    """
    Extract pressure value from a daily long-term mean SLP dataset at the
    given day of year and lon,lat position.
    To use this function (and hence some form of daily LTM SLP data) requires
    knowledge of the day of year.
    """
    jtime = jdays + np.modf(time)[0]
    coords = np.array([jtime, lat, lon])

    logger.debug("Sampling data from MSLP data in {0}".format(ncfile))
    ncobj = nctools.ncLoadFile(ncfile)
    slpunits = getattr(ncobj.variables['slp'], 'units')

    data = nctools.ncGetData(ncobj, 'slp')
    # Get the MSLP by interpolating to the location of the TC:
    penv = interp3d.interp3d(data, coords)
    penv = metutils.convert(penv, slpunits, 'hPa')
    del data
    ncobj.close()
    del ncobj

    return penv
Example 11
def dist2GC(cLon1, cLat1, cLon2, cLat2, lonArray, latArray, units="km"):
    """
    Calculate the distance between an array of points and the great circle
    joining two (other) points.
    All input values are in degrees.
    By default returns distance in km, other units specified by the
    'units' kwarg.

    Based on a cross-track error formulation from:
    http://williams.best.vwh.net/avform.htm#XTE
    """

    # Calculate distance and bearing from first point to array of points:
    dist_ = gridLatLonDist(cLon1, cLat1, lonArray, latArray, units="rad")
    bear_ = gridLatLonBear(cLon1, cLat1, lonArray, latArray)

    #bearing of the cyclone:
    cyc_bear_ = latLon2Azi([cLon1, cLon2], [cLat1, cLat2])

    dist2GC_ = np.arcsin(np.sin(dist_) * np.sin(bear_ - cyc_bear_))

    distance = metutils.convert(dist2GC_, "rad", units)
    return distance
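A hedged usage sketch for ``dist2GC``: cross-track distance of a small grid of points from the great circle through two hypothetical track positions.

lonArray = np.arange(150., 155., 0.5)
latArray = np.arange(-18., -13., 0.5)
xte = dist2GC(150., -15., 154., -16., lonArray, latArray, units="km")
# xte has shape (len(latArray), len(lonArray))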
Example 12
def dist2GC(cLon1, cLat1, cLon2, cLat2, lonArray, latArray, units="km"):
    """
    Calculate the distance between an array of points and the great
    circle joining two (other) points. All input values are in
    degrees. By default returns distance in km, other units specified
    by the 'units' kwarg.

    Based on a cross-track error formulation from:
    http://williams.best.vwh.net/avform.htm#XTE

    :param float cLon1: Longitude of first point.
    :param float cLat1: Latitude of first point.
    :param float cLon2: Longitude of second point.
    :param float cLat2: Latitude of second point.
    :param lonArray: :class:`numpy.ndarray` of longitudes for which
                     the distance to the line joining the two points
                     will be calculated.
    :param latArray: :class:`numpy.ndarray` of latitudes for which the
                      distance to the line joining the two points will
                      be calculated.

    :returns: 2-d array of distances between the array points and the
              line joining two points.
    :rtype: :class:`numpy.ndarray`
    """

    # Calculate distance and bearing from first point to array of points:
    dist_ = gridLatLonDist(cLon1, cLat1, lonArray, latArray, units="rad")
    bear_ = gridLatLonBear(cLon1, cLat1, lonArray, latArray)

    #bearing of the cyclone:
    cyc_bear_ = latLon2Azi([cLon1, cLon2], [cLat1, cLat2])

    dist2GC_ = np.arcsin(np.sin(dist_) * np.sin(bear_ - cyc_bear_))

    distance = metutils.convert(dist2GC_, "rad", units)
    return distance
Example 13
def dist2GC(cLon1, cLat1, cLon2, cLat2, lonArray, latArray, units="km"):
    """
    Calculate the distance between an array of points and the great
    circle joining two (other) points. All input values are in
    degrees. By default returns distance in km, other units specified
    by the 'units' kwarg.

    Based on a cross-track error formulation from:
    http://williams.best.vwh.net/avform.htm#XTE

    :param float cLon1: Longitude of first point.
    :param float cLat1: Latitude of first point.
    :param float cLon2: Longitude of second point.
    :param float cLat2: Latitude of second point.
    :param lonArray: :class:`numpy.ndarray` of longitudes for which
                     the distance to the line joining the two points
                     will be calculated.
    :param latArray: :class:`numpy.ndarray` of latitudes for which the
                      distance to the line joining the two points will
                      be calculated.

    :returns: 2-d array of distances between the array points and the
              line joining two points.
    :rtype: :class:`numpy.ndarray`
    """

    # Calculate distance and bearing from first point to array of points:
    dist_ = gridLatLonDist(cLon1, cLat1, lonArray, latArray, units="rad")
    bear_ = gridLatLonBear(cLon1, cLat1, lonArray, latArray)

    #bearing of the cyclone:
    cyc_bear_ = latLon2Azi([cLon1, cLon2], [cLat1, cLat2])

    dist2GC_ = np.arcsin(np.sin(dist_) * np.sin(bear_ - cyc_bear_))

    distance = metutils.convert(dist2GC_, "rad", units)
    return distance
Example 14
def grdSave(filename,
            data,
            lon,
            lat,
            delta,
            delimiter=' ',
            nodata=-9999,
            fmt='%.10e',
            coords='latlon'):
    """
    Save formatted data to an ascii grid format file.
    The files have 6 header lines describing the data, followed by the
    data in a gridded format.

    :param str filename: Path to the file to be written.
    :param data: 2-d array of data values to store.
    :param lon: Array of longitudes corresponding to data points.
    :param lat: Array of latitudes corresponding to data points.
    :param float delta: Spacing between grid points.
    :param str delimiter: Delimiter to put between data points (default ' ').
    :param float nodata: Value to indicate missing values (default -9999).
    :param str fmt: String format statement.
    :param str coords: Optionally store the data in UTM
                       coordinates. Default is to store the data in
                       geographic coordinates (``coords='latlon'``).
                       If ``coords='UTM'``, then the latitude &
                       longitudes are converted to the local UTM
                       coordinate system.

    :raises ValueError: If the ``filename`` is not a string or file handle.

    Usage::

     >>> grdSave(filename, data, lon, lat, delta, delimiter=' ',
                 nodata=-9999, fmt='%.10e', coords='latlon')
    """

    if filename and os.path.isfile(filename):
        if filename.endswith('.gz'):
            import gzip
            fh = gzip.open(filename, 'wb')
        else:
            fh = file(filename, 'w')
    elif hasattr(filename, 'seek'):
        fh = filename
    else:
        try:
            fh = open(filename, 'w')
        except:
            raise ValueError('Filename must be a string or file handle')

    if coords == 'UTM':
        zone, xllcorner, yllcorner = LLtoUTM(lat.min(), lon.min())
        delta = metutils.convert(delta, "deg", "m")
    else:
        # Assume geographic coordinates
        xllcorner = lon.min()
        yllcorner = lat.min()

    fh.write('ncols         ' + str(len(lon)) + '\n')
    fh.write('nrows         ' + str(len(lat)) + '\n')
    fh.write('xllcorner     ' + str(xllcorner) + '\n')
    fh.write('yllcorner     ' + str(yllcorner) + '\n')
    fh.write('cellsize      ' + str(delta) + '\n')
    fh.write('NODATA_value  ' + str(nodata) + '\n')
    X = numpy.array(data)
    origShape = None
    if len(X.shape) == 1:
        origShape = X.shape
        X.shape = len(X), 1
    for row in X:
        fh.write(delimiter.join([fmt % val for val in row]) + '\n')
    fh.close()
    if origShape is not None:
        X.shape = origShape
Example 15
def loadTrackFile(configFile, trackFile, source, missingValue=0,
                  calculateWindSpeed=True):
    """
    Load TC track data from the given input file, from a specified source.
    The configFile is a configuration file that contains a section called
    'source' that describes the data.
    This returns a collection of :class:`Track` objects that contains
    the details of the TC tracks in the input file.

    :param str configFile: Configuration file with a section ``source``.
    :param str trackFile: Path to a csv-formatted file containing TC data.
    :param str source: Name of the source format of the TC data. There
                        *must* be a section in ``configFile`` matching
                        this string, containing the details of the format
                        of the data.
    :param missingValue: Replace all null values in the input data with
                         this value (default=0).
    :param boolean calculateWindSpeed: Calculate maximum wind speed using
                                       a pressure-wind relation described
                                       in :func:`maxWindSpeed`

    :returns: A collection of :class:`Track` objects. 
              If any of the variables are not present in the input
              dataset, they are (where possible) calculated
              (date/time/windspeed), sampled from default datasets
              (e.g. environmental pressure) or set to the missing value.

    Example::

      >>> tracks = loadTrackFile('tcrm.ini', 'IBTRaCS.csv', 'IBTrACS' )

    """
    
    logger.info("Loading %s" % trackFile)
    inputData = colReadCSV(configFile, trackFile, source) #,
                          #nullValue=missingValue)

    config = ConfigParser()
    config.read(configFile)

    inputSpeedUnits = config.get(source, 'SpeedUnits')
    inputPressureUnits = config.get(source, 'PressureUnits')
    inputLengthUnits = config.get(source, 'LengthUnits')
    inputDateFormat = config.get(source, 'DateFormat')
    
    if config.getboolean('DataProcess', 'FilterSeasons'):
        startSeason = config.getint('DataProcess', 'StartSeason')        
        idx = np.where(inputData['season'] >= startSeason)[0]
        inputData = inputData[idx]
        
    # Determine the initial TC positions...
    indicator = getInitialPositions(inputData)


    # Sort date/time information
    if 'age' in inputData.dtype.names:
        year, month, day, hour, minute, datetimes = parseAge(inputData, indicator)
        timeElapsed = inputData['age']
    else:
        year, month, day, hour, minute, datetimes = parseDates(inputData, indicator,
                                                    inputDateFormat)
        timeElapsed = getTimeElapsed(indicator, year, month, day, hour, minute)
        
    # Time between observations:
    dt = getTimeDelta(year, month, day, hour, minute)

    # Calculate julian days
    jdays = julianDays(year, month, day, hour, minute)

    lat = np.array(inputData['lat'], 'd')
    lon = np.mod(np.array(inputData['lon'], 'd'), 360)
    delta_lon = np.diff(lon)
    delta_lat = np.diff(lat)

    # Split into separate tracks if large jump occurs (delta_lon > 10 degrees
    # or delta_lat > 5 degrees)
    # This avoids two tracks being accidentally combined when seasons and track
    # numbers match but basins are different as occurs in the IBTrACS dataset.
    # This problem can also be prevented if the 'tcserialno' column is
    # specified.
    indicator[np.where(delta_lon > 10)[0] + 1] = 1
    indicator[np.where(delta_lat > 5)[0] + 1] = 1

    pressure = filterPressure(np.array(inputData['pressure'], 'd'),
                              inputPressureUnits, missingValue)
    try:
        windspeed = np.array(inputData['vmax'], 'd')
        novalue_index = np.where(windspeed == sys.maxint)
        windspeed = metutils.convert(windspeed, inputSpeedUnits, "mps")
        windspeed[novalue_index] = missingValue
    except (ValueError,KeyError):
        logger.debug("No max wind speed data - all values will be zero")
        windspeed = np.zeros(indicator.size, 'f')
    assert lat.size == indicator.size
    assert lon.size == indicator.size
    assert pressure.size == indicator.size

    try:
        rmax = np.array(inputData['rmax'])
        novalue_index = np.where(rmax == missingValue)
        rmax = metutils.convert(rmax, inputLengthUnits, "km")
        rmax[novalue_index] = missingValue

    except (ValueError, KeyError):
        logger.debug("No radius to max wind data - all values will be zero")
        rmax = np.zeros(indicator.size, 'f')

    if 'penv' in inputData.dtype.names:
        penv = np.array(inputData['penv'], 'd')
    else:
        logger.debug("No ambient MSLP data in this input file")
        logger.debug("Sampling data from MSLP data defined in "
                    "configuration file")
        # Warning: using sampled data will likely lead to some odd behaviour
        # near the boundary of the MSLP grid boundaries - higher resolution
        # MSLP data will decrease this unusual behaviour.

        try:
            ncfile = cnfGetIniValue(configFile, 'Input', 'MSLPFile')
        except:
            logger.exception("No input MSLP file specified in configuration")
            raise
        time = getTime(year, month, day, hour, minute)
        penv = ltmPressure(jdays, time, lon, lat, ncfile)

    speed, bearing = getSpeedBearing(indicator, lon, lat, dt,
                                     missingValue=missingValue)

    if calculateWindSpeed:
        windspeed = maxWindSpeed(indicator, dt, lon, lat, pressure, penv)

    TCID = np.cumsum(indicator)

    data = np.empty(len(indicator),
                    dtype={'names': trackFields,
                           'formats': trackTypes})
    for key, value in zip(trackFields, [indicator, TCID, year, month,
                                        day, hour, minute, timeElapsed,
                                        datetimes, lon, lat, speed, bearing,
                                        pressure, windspeed, rmax, penv]):
        data[key] = value
        
    tracks = []
    n = np.max(TCID)
    for i in range(1, n + 1):
        track = Track(data[TCID == i])
        track.trackId = (i, n)
        track.trackfile = trackFile
        getMinPressure(track, missingValue)
        getMaxWind(track, missingValue)
        tracks.append(track)

    return tracks
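The function above reads its format description from the section of ``configFile`` named by ``source``. A hedged sketch of such a section follows (the section name matches the example call, the option values are illustrative, and any column-mapping options consumed by ``colReadCSV`` are omitted):

[IBTrACS]
SpeedUnits = kph
PressureUnits = hPa
LengthUnits = km
DateFormat = %Y-%m-%d %H:%M:%S

[DataProcess]
FilterSeasons = True
StartSeason = 1981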
Example 16
def maxWindSpeed(index, deltatime, lon, lat, pressure, penv,
                 gustfactor=0.9524):
    """
    Calculate the 10-minute-mean maximum wind speed from the central
    pressure deficit, using the method described in Holland et al. (2010).

    :param index: Array (values of 1 or 0) indicating the beginning of
                  a new TC in the input dataset.
    :param deltatime: Time difference (in hours) between each point in the
                      record.
    :param lon: Longitudes of TC positions.
    :param lat: Latitudes of TC positions.
    :param pressure: Central pressure estimate of TCs (hPa).
    :param penv: Environmental pressure estimates for each TC position (hPa).
    :param float gustfactor: Gust factor - default value represents converting
                             from a 1-minute sustained wind speed to a
                             10-minute mean wind speed. Based on Harper et
                             al. 2010, WMO-TD1555.
    :type index: :class:`numpy.ndarray`
    :type deltatime: :class:`numpy.ndarray`
    :type lon: :class:`numpy.ndarray`
    :type lat: :class:`numpy.ndarray`
    :type pressure: :class:`numpy.ndarray`
    :type penv: :class:`numpy.ndarray`

    :returns: :class:`numpy.ndarray` of estimated wind speed based on
              central pressure deficit.

    Example::
    
      >>> v = maxWindSpeed(indicator, dt, lon, lat, pressure, penv)

    """

    # Speed and bearing:
    speed, bearing = getSpeedBearing(index, lon, lat, deltatime)
    speed = metutils.convert(speed, 'kmh', 'mps')
    np.putmask(speed, speed > 10e+3, 0)

    # Pressure deficit:
    deltap = penv - pressure

    # Pressure rate of change
    dpt = np.zeros(index.size, 'f')
    dpt[1:] = np.diff(pressure)
    dpdt = dpt / deltatime
    np.putmask(dpdt, index, 0)
    np.putmask(dpdt, np.isnan(dpdt) |
               np.isinf(dpdt) |
               (np.abs(dpdt) > 5.), 0)

    # Estimated pressure at the radius of maximum wind:
    prmw = pressure + deltap / 3.7

    # Calculate thermodynamic variables at RMW:
    tsurf = 28.0 - 3 * (np.abs(lat) - 10.) / 20.
    qmix = 0.9 * (3.802 / prmw) * np.exp(17.67 * tsurf / (243.5 + tsurf))
    tvs = (tsurf + 273.15) * (1. + 0.81 * qmix)
    rho = prmw * 100. / (tvs * 287.04)

    chi = 0.6 * (1.0 - deltap / 215.)
    beta = -0.000044 * np.power(deltap, 2.) + \
        0.01 * deltap + 0.03 * dpdt - 0.014 * np.abs(lat) + \
        0.15 * np.power(speed, chi) + 1.

    # Holland's P-W relation derives a 1-minute mean wind speed, so we often
    # need to convert to some other averaging period. I use the recommendations
    # of Harper et al. (2010) WMO TD-1555:
    # Common values are (assuming "at-sea" conditions):
    # 10-min mean: 0.95 (default)
    # 3-second gust: 1.11

    v = gustfactor * np.sqrt(deltap * 100 * beta / (rho * np.exp(1.)))
    np.putmask(v, (np.isnan(v) |
                   np.isinf(v) |
                   (pressure >= 10e+7) |
                   (pressure <= 0) |
                   (speed >= 10e+7)), 0)

    return v
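A hedged sketch of a call to ``maxWindSpeed`` with synthetic single-track values (all numbers illustrative); it assumes ``getSpeedBearing`` and ``metutils`` from the same module are available, as used inside the function.

index = np.array([1, 0, 0])                 # start-of-track indicator
dt = np.array([6., 6., 6.])                 # hours between observations
lon = np.array([160.0, 159.5, 159.0])
lat = np.array([-12.0, -12.5, -13.0])
pressure = np.array([1000., 990., 980.])    # central pressure (hPa)
penv = np.array([1008., 1008., 1008.])      # environmental pressure (hPa)
v10 = maxWindSpeed(index, dt, lon, lat, pressure, penv)   # 10-min mean (m/s)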
Example 17
def grdSave(filename, data, lon, lat, delta, delimiter=' ', nodata=-9999,
            fmt='%.10e', coords='latlon'):
    """
    Save formatted data to an ascii grid format file.
    The files have 6 header lines describing the data, followed by the
    data in a gridded format.

    :param str filename: Path to the file to be written.
    :param data: 2-d array of data values to store.
    :param lon: Array of longitudes corresponding to data points.
    :param lat: Array of latitudes corresponding to data points.
    :param float delta: Spacing between grid points.
    :param str delimiter: Delimiter to put between data points (default ' ').
    :param float nodata: Value to indicate missing values (default -9999).
    :param str fmt: String format statement.
    :param str coords: Optionally store the data in UTM
                       coordinates. Default is to store the data in
                       geographic coordinates (``coords='latlon'``).
                       If ``coords='UTM'``, then the latitude &
                       longitudes are converted to the local UTM
                       coordinate system.
                       
    :raises ValueError: If the ``filename`` is not a string or file handle.

    Usage::

     >>> grdSave(filename, data, lon, lat, delta, delimiter=' ',
                 nodata=-9999, fmt='%.10e', coords='latlon')
    """

    if filename and os.path.isfile(filename):
        if filename.endswith('.gz'):
            import gzip
            fh = gzip.open(filename, 'wb')
        else:
            fh = file(filename, 'w')
    elif hasattr(filename, 'seek'):
        fh = filename
    else:
        try:
            fh = open(filename,'w')
        except:
            raise ValueError('Filename must be a string or file handle')

    if coords == 'UTM':
        zone, xllcorner, yllcorner = LLtoUTM(lat.min(),lon.min())
        delta = metutils.convert(delta, "deg", "m")
    else:
        # Assume geographic coordinates
        xllcorner = lon.min()
        yllcorner = lat.min()

    fh.write('ncols         '+str(len(lon))+'\n')
    fh.write('nrows         '+str(len(lat))+'\n')
    fh.write('xllcorner     '+str(xllcorner)+'\n')
    fh.write('yllcorner     '+str(yllcorner)+'\n')
    fh.write('cellsize      '+str(delta)+'\n')
    fh.write('NODATA_value  '+str(nodata)+'\n')
    X = numpy.array(data)
    origShape = None
    if len(X.shape) == 1:
        origShape = X.shape
        X.shape = len(X), 1
    for row in X:
        fh.write(delimiter.join([fmt%val for val in row]) + '\n')
    fh.close()
    if origShape is not None:
        X.shape = origShape
Example 18
def main():
    """
    Handle command line arguments and call processing functions

    """
    p = argparse.ArgumentParser()

    p.add_argument('-c', '--config_file', help="Configuration file")
    p.add_argument('-v',
                   '--verbose',
                   help="Verbose output",
                   action='store_true')
    p.add_argument('-y', '--year', help="Year to process (1979-2020)")

    args = p.parse_args()

    configFile = args.config_file
    config = ConfigParser()
    config.read(configFile)

    logFile = config.get('Logging', 'LogFile')
    logdir = dirname(realpath(logFile))

    # if log file directory does not exist, create it
    if not isdir(logdir):
        try:
            os.makedirs(logdir)
        except OSError:
            logFile = pjoin(os.getcwd(), 'pcmin.log')

    logLevel = config.get('Logging', 'LogLevel')
    verbose = config.getboolean('Logging', 'Verbose')
    datestamp = config.getboolean('Logging', 'Datestamp')
    if args.verbose:
        verbose = True
    if comm.size > 1 and comm.rank > 0:
        logFile += '-' + str(comm.rank)
        verbose = False

    if datestamp:
        base, ext = splitext(logFile)
        curdate = datetime.datetime.now()
        curdatestr = curdate.strftime('%Y%m%d%H%M')
        logfile = f"{base}.{curdatestr}.{ext.lstrip('.')}"

    logging.basicConfig(level=logLevel,
                        format="%(asctime)s: %(funcName)s: %(message)s",
                        filename=logFile,
                        filemode='w',
                        datefmt="%Y-%m-%d %H:%M:%S")

    if verbose:
        console = logging.StreamHandler(sys.stdout)
        console.setLevel(getattr(logging, logLevel))
        formatter = logging.Formatter(
            '%(asctime)s: %(funcName)s:  %(message)s',
            datefmt='%H:%M:%S',
        )
        console.setFormatter(formatter)
        LOGGER.addHandler(console)

    LOGGER.info(f"Started {sys.argv[0]} (pid {os.getpid()})")
    LOGGER.info(f"Log file: {logfile} (detail level {logLevel})")
    LOGGER.info(f"Code version: f{COMMIT}")

    tpath = config.get('Input', 'Temp')
    rpath = config.get('Input', 'Humidity')
    sstpath = config.get('Input', 'SST')
    slppath = config.get('Input', 'SLP')

    if args.year:
        year = int(args.year)
    else:
        year = 2015

    minLon = config.getfloat('Domain', 'MinLon')
    maxLon = config.getfloat('Domain', 'MaxLon')
    minLat = config.getfloat('Domain', 'MinLat')
    maxLat = config.getfloat('Domain', 'MaxLat')

    LOGGER.info(f"Domain: {minLon}-{maxLon}, {minLat}-{maxLat}")

    for month in range(1, 13):
        LOGGER.info(f"Processing {year}-{month}")
        startdate = datetime.datetime(year, month, 1)
        enddate = datetime.datetime(year, month, monthrange(year, month)[1])

        filedatestr = f"{startdate.strftime('%Y%m%d')}-{enddate.strftime('%Y%m%d')}"

        tfile = pjoin(tpath, f'{year}', f't_era5_oper_pl_{filedatestr}.nc')
        try:
            assert (os.path.isfile(tfile))
        except AssertionError:
            LOGGER.warning(f"Input file is missing: {tfile}")
            LOGGER.warning(f"Skipping month {month}")
            continue

        tobj = nctools.ncLoadFile(tfile)
        tvar = nctools.ncGetVar(tobj, 't')
        tvar.set_auto_maskandscale(True)

        rfile = pjoin(rpath, f'{year}', f'r_era5_oper_pl_{filedatestr}.nc')
        try:
            assert (os.path.isfile(rfile))
        except AssertionError:
            LOGGER.warning(f"Input file is missing: {rfile}")
            LOGGER.warning(f"Skipping month {month}")
            continue
        robj = nctools.ncLoadFile(rfile)
        rvar = nctools.ncGetVar(robj, 'r')
        rvar.set_auto_maskandscale(True)
        # This is actually relative humidity, we need to convert to mixing ratio
        # Calculate mixing ratio - this function returns mixing ratio in g/kg

        # Dimensions need to come from the pressure files
        # These have been clipped to the Australian region, so contain
        # a subset of the global data. The SST and MSLP data
        # are then clipped to the same domain
        tlon = nctools.ncGetDims(tobj, 'longitude')
        tlat = nctools.ncGetDims(tobj, 'latitude')
        LOGGER.debug(f"Latitude extents: {tlat.min()} - {tlat.max()}")
        LOGGER.debug(f"Longitude extents: {tlon.min()} - {tlon.max()}")

        varidx = np.where((tlon >= minLon) & (tlon <= maxLon))[0]
        varidy = np.where((tlat >= minLat) & (tlat <= maxLat))[0]

        templon = tlon[varidx]
        templat = tlat[varidy]

        LOGGER.info(f"Loading SST data")
        sstfile = pjoin(sstpath, f'{year}',
                        f'sst_era5_oper_sfc_{filedatestr}.nc')
        try:
            assert (os.path.isfile(sstfile))
        except AssertionError:
            LOGGER.warning(f"Input file is missing: {sstfile}")
            LOGGER.warning(f"Skipping month {month}")
            continue

        sstobj = nctools.ncLoadFile(sstfile)
        sstvar = nctools.ncGetVar(sstobj, 'sst')
        sstvar.set_auto_maskandscale(True)
        sstlon = nctools.ncGetDims(sstobj, 'longitude')
        sstlat = nctools.ncGetDims(sstobj, 'latitude')

        LOGGER.debug(f"SST latitude extents: {sstlat.min()} - {sstlat.max()}")
        LOGGER.debug(f"SST longitude extents: {sstlon.min()} - {sstlon.max()}")

        LOGGER.info("Loading SLP data")
        slpfile = pjoin(slppath, f'{year}',
                        f'msl_era5_oper_sfc_{filedatestr}.nc')
        try:
            assert (os.path.isfile(slpfile))
        except AssertionError:
            LOGGER.warning(f"Input file is missing: {slpfile}")
            LOGGER.warning(f"Skipping month {month}")
            continue
        slpobj = nctools.ncLoadFile(slpfile)
        slpvar = nctools.ncGetVar(slpobj, 'msl')
        slpvar.set_auto_maskandscale(True)

        # In the ERA5 data on NCI, surface variables are global,
        # pressure variables are only over Australian region
        LOGGER.info("Getting intersection of grids")
        lonx, sstidx, varidxx = np.intersect1d(sstlon,
                                               templon,
                                               return_indices=True)
        laty, sstidy, varidyy = np.intersect1d(sstlat,
                                               templat[::-1],
                                               return_indices=True)
        nx = len(varidx)
        ny = len(varidy)
        LOGGER.info("Loading and converting SST and SLP data")
        sst = metutils.convert(sstvar[:, sstidy, sstidx], sstvar.units, 'C')
        slp = metutils.convert(slpvar[:, sstidy, sstidx], slpvar.units, 'hPa')

        times = nctools.ncGetTimes(nctools.ncLoadFile(tfile))
        nt = len(times)
        LOGGER.debug(f"There are {nt} times in the data file")

        levels = nctools.ncGetDims(nctools.ncLoadFile(tfile), 'level')
        nz = len(levels)
        LOGGER.debug(f"There are {nz} vertical levels in the data file")

        # Create an array of the pressure variable that
        # matches the shape of the temperature and mixing ratio
        # variables.
        LOGGER.info("Creating temporary pressure array")
        pp = np.ones((nz, ny, nx))
        ppT = pp.T
        ppT *= levels

        pmin = np.zeros(sst.shape)
        vmax = np.zeros(sst.shape)
        status = MPI.Status()
        work_tag = 0
        result_tag = 1
        LOGGER.info("Calculating potential intensity")
        if (comm.rank == 0) and (comm.size > 1):
            w = 0
            p = comm.size - 1
            for d in range(1, comm.size):
                if w < nt:
                    LOGGER.debug(f"Sending time {w} to node {d}")
                    comm.send(w, dest=d, tag=work_tag)
                    w += 1
                else:
                    comm.send(None, dest=d, tag=work_tag)
                    p = w

            terminated = 0
            while (terminated < p):
                result, tdx = comm.recv(source=MPI.ANY_SOURCE,
                                        status=status,
                                        tag=MPI.ANY_TAG)
                pmin[tdx, :, :], vmax[tdx, :, :] = result
                LOGGER.debug(f"Mean PI: {np.nanmean(vmax[tdx, :, :]):.2f} m/s")
                d = status.source

                if w < nt:
                    LOGGER.debug(f"Sending time {times[w]} to node {d}")
                    comm.send(w, dest=d, tag=status.tag)
                    w += 1
                else:
                    # Exhausted all times, send empty packet:
                    comm.send(None, dest=d, tag=status.tag)
                    terminated += 1
        elif (comm.size > 1) and (comm.rank != 0):
            status = MPI.Status()
            W = None
            while (True):
                W = comm.recv(source=0, tag=work_tag, status=status)
                if W is None:
                    # Received an empty packet, so no work required
                    LOGGER.debug(
                        "No work to be done on this processor: {0}".format(
                            comm.rank))
                    break
                LOGGER.debug(f"Processing time {times[W]} on node {comm.rank}")
                t = metutils.convert(tvar[W, :, varidy, varidx], tvar.units,
                                     'C')
                r = metutils.rHToMixRat(rvar[W, :, varidy, varidx], t, pp, 'C')
                r = np.where(r < 0, 0, r)
                results = calculate(sst[W, :, :], slp[W, :, :], pp, t, r,
                                    levels)
                LOGGER.debug(f"Finished time {times[W]} on node {comm.rank}")
                comm.send((results, W), dest=0, tag=status.tag)
        elif (comm.size == 1) and (comm.rank == 0):
            # We're working on a single processor:
            for tdx in range(nt):
                LOGGER.debug(f"Processing time {times[W]}")
                t = metutils.convert(tvar[tdx, :, varidy, varidx], tvar.units,
                                     'C')
                r = metutils.rHToMixRat(rvar[tdx, :, varidy, varidx], t, pp,
                                        'C')
                r = np.where(r < 0, 0, r)
                pmin[tdx, :, :], vmax[tdx, :, :] = calculate(
                    sst[tdx, :, :], slp[tdx, :, :], pp, t, r, levels)

        if comm.rank == 0:
            sleep(5)
        comm.Barrier()
        LOGGER.info(f"Saving data for month: {month}")
        outputPath = config.get('Output', 'Path')
        try:
            os.makedirs(outputPath)
        except:
            pass
        outputFile = pjoin(outputPath, f'pcmin.{filedatestr}.nc')
        saveData(outputFile, pmin, vmax, lonx, laty, times)

    LOGGER.info("Finished calculating potential intensity")
Example 19
def gridLatLonDist(cLon, cLat, lonArray, latArray, units=None):
    """
    Generate a grid containing the spherical earth distance
    of the points defined by (lonarray, latarray) from the
    point defined by (clon, clat).
    (lonarray,latarray) and (clon,clat) are in degrees.
    Returns distances in km by default, other units specified by the
    'units' kwarg.

    Based on m_lldist.m by Rich Pawlowicz ([email protected])
    Modified by Craig Arthur 2006-11-13

    Input:
    cLon - longitude of the point to measure the distance from
    cLat - latitude of the point to measure the distance from
    lonArray - 1-d array of longitude values that will define the grid over
               which distances will be calculated
    latArray - 1-d array of latitude values that will define the grid over
               which distances will be calculated
    units - units of distance to be returned (default is kilometre)

    Output:
    dist - 2-d array containing the distance of the points defined in lonArray 
           and latArray from the point (cLon, cLat)

    Example:
    lonArray = np.arange(90.,100.,0.1)
    latArray = np.arange(-20.,-10.,0.1)
    dist = gridLatLonDist( 105., -15., lonArray, latArray,'km') 

    """

    # #CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC
    # cLat_cos = 0.0
    # cLat_sin = 0.0
    # lat = empty(len(latArray), 'd')
    # lon = empty(len(lonArray), 'd')

    # dLon_sin = empty(len(lonArray), 'd')
    # dLat_sin = empty(len(latArray), 'd')
    # lat_cos = empty(len(latArray), 'd')

    # dist = empty([len(latArray), len(lonArray)], 'd')

    # code = """
        # #include <math.h>

        # double radius = 6367.0;
        # double toRads = 0.017453292519943295;

        # double cLon_ = cLon;
        # double cLat_ = cLat;

        # cLon_ = cLon_*toRads;
        # cLat_ = cLat_*toRads;

        # cLat_cos = cos(cLat_);

        # for (int i = 0; i < NlonArray[0]; ++i)
        # {
            # lon(i) = lonArray(i)*toRads;
            # double dLon = (lon(i) - cLon_)/2.0;
            # dLon_sin(i) = sin(dLon);
        # }

        # for (int i = 0; i < NlatArray[0]; ++i)
        # {
            # lat(i) = latArray(i)*toRads;
            # lat_cos(i) = cos(lat(i));

            # double dLat = (lat(i) - cLat_)/2.0;
            # dLat_sin(i) = sin(dLat);
        # }

        # for (int j = 0; j < NlatArray[0]; ++j)
        # {
            # for (int i = 0; i < NlonArray[0]; ++i)
            # {
                 # double a = pow(dLat_sin(j), 2) + \
                            # cLat_cos*lat_cos(j)*pow(dLon_sin(i), 2);
                 # double c = 2.0*atan2(sqrt(fabs(a)), sqrt(1 - a));

                 # dist(j, i) = radius*c;
            # }
        # }
    # """
    # err = weave.inline(code,
                       # ['cLon', 'cLat', 'lonArray', 'latArray', 'lat', 'lon',
                        # 'dLon_sin', 'dLat_sin', 'lat_cos', 'dist', 'cLat_cos'],
                       # type_converters=converters.blitz,
                       # compiler = 'gcc')
    # #CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC

    radius = 6367.0

    lat = np.radians(latArray)
    lon = np.radians(lonArray)

    cLon = math.radians(cLon)
    cLat = math.radians(cLat)
    lon_, lat_ = np.meshgrid(lon, lat)

    dLon = lon_ - cLon
    dLat = lat_ - cLat

    a = np.square(np.sin(dLat / 2.0)) + \
        np.cos(cLat) * np.cos(lat_) * np.square(np.sin(dLon / 2.0))
    c = 2.0 * np.arctan2(np.sqrt(np.absolute(a)), np.sqrt(1 - a))
    dist = radius * c

    dist = metutils.convert(dist, "km", units)

    return dist
Example 20
def loadTrackFile(configFile,
                  trackFile,
                  source,
                  missingValue=0,
                  calculateWindSpeed=True):
    """
    Load TC track data from the given input file, from a specified source.
    The configFile is a configuration file that contains a section called
    'source' that describes the data.
    This returns a collection of :class:`Track` objects that contains
    the details of the TC tracks in the input file.

    :param str configFile: Configuration file with a section ``source``.
    :param str trackFile: Path to a csv-formatted file containing TC data.
    :param str source: Name of the source format of the TC data. There
                        *must* be a section in ``configFile`` matching
                        this string, containing the details of the format
                        of the data.
    :param missingValue: Replace all null values in the input data with
                         this value (default=0).
    :param boolean calculateWindSpeed: Calculate maximum wind speed using
                                       a pressure-wind relation described
                                       in :func:`maxWindSpeed`

    :returns: A collection of :class:`Track` objects.
              If any of the variables are not present in the input
              dataset, they are (where possible) calculated
              (date/time/windspeed), sampled from default datasets
              (e.g. environmental pressure) or set to the missing value.

    Example::

      >>> tracks = loadTrackFile('tcrm.ini', 'IBTRaCS.csv', 'IBTrACS' )

    """

    LOG.info("Loading %s" % trackFile)
    inputData = colReadCSV(configFile, trackFile, source)  #,
    #nullValue=missingValue)

    config = ConfigParser()
    config.read(configFile)

    inputSpeedUnits = config.get(source, 'SpeedUnits')
    inputPressureUnits = config.get(source, 'PressureUnits')
    inputLengthUnits = config.get(source, 'LengthUnits')
    inputDateFormat = config.get(source, 'DateFormat')

    if config.getboolean('DataProcess', 'FilterSeasons'):
        startSeason = config.getint('DataProcess', 'StartSeason')
        idx = np.where(inputData['season'] >= startSeason)[0]
        inputData = inputData[idx]

    # Determine the initial TC positions...
    indicator = getInitialPositions(inputData)

    # Sort date/time information
    if 'age' in inputData.dtype.names:
        year, month, day, hour, minute, datetimes = parseAge(
            inputData, indicator)
        timeElapsed = inputData['age']
    else:
        year, month, day, hour, minute, datetimes = parseDates(
            inputData, indicator, inputDateFormat)
        timeElapsed = getTimeElapsed(indicator, year, month, day, hour, minute)

    # Time between observations:
    dt = getTimeDelta(year, month, day, hour, minute)

    # Calculate julian days
    jdays = julianDays(year, month, day, hour, minute)

    lat = np.array(inputData['lat'], 'd')
    lon = np.mod(np.array(inputData['lon'], 'd'), 360)
    delta_lon = np.diff(lon)
    delta_lat = np.diff(lat)

    # Split into separate tracks if large jump occurs (delta_lon > 10 degrees
    # or delta_lat > 5 degrees)
    # This avoids two tracks being accidentally combined when seasons and track
    # numbers match but basins are different as occurs in the IBTrACS dataset.
    # This problem can also be prevented if the 'tcserialno' column is
    # specified.
    indicator[np.where(delta_lon > 10)[0] + 1] = 1
    indicator[np.where(delta_lat > 5)[0] + 1] = 1

    pressure = filterPressure(np.array(inputData['pressure'], 'd'),
                              inputPressureUnits, missingValue)
    try:
        windspeed = np.array(inputData['vmax'], 'd')
        novalue_index = np.where(windspeed == sys.maxint)
        windspeed = metutils.convert(windspeed, inputSpeedUnits, "mps")
        windspeed[novalue_index] = missingValue
    except (ValueError, KeyError):
        LOG.debug("No max wind speed data - all values will be zero")
        windspeed = np.zeros(indicator.size, 'f')
    assert lat.size == indicator.size
    assert lon.size == indicator.size
    assert pressure.size == indicator.size

    try:
        rmax = np.array(inputData['rmax'])
        novalue_index = np.where(rmax == missingValue)
        rmax = metutils.convert(rmax, inputLengthUnits, "km")
        rmax[novalue_index] = missingValue

    except (ValueError, KeyError):
        LOG.debug("No radius to max wind data - all values will be zero")
        rmax = np.zeros(indicator.size, 'f')

    if 'penv' in inputData.dtype.names:
        penv = np.array(inputData['penv'], 'd')
    else:
        LOG.debug("No ambient MSLP data in this input file")
        LOG.debug("Sampling data from MSLP data defined in "
                  "configuration file")
        # Warning: using sampled data will likely lead to some odd behaviour
        # near the boundary of the MSLP grid boundaries - higher resolution
        # MSLP data will decrease this unusual behaviour.

        try:
            ncfile = cnfGetIniValue(configFile, 'Input', 'MSLPFile')
        except:
            LOG.exception("No input MSLP file specified in configuration")
            raise
        time = getTime(year, month, day, hour, minute)
        penv = ltmPressure(jdays, time, lon, lat, ncfile)

    if 'poci' in inputData.dtype.names:
        poci = np.array(inputData['poci'], 'd')
    else:
        LOG.debug("Determining poci")
        eps = np.random.normal(0, scale=2.5717)
        poci = getPoci(penv, pressure, lat, jdays, eps)

    speed, bearing = getSpeedBearing(indicator,
                                     lon,
                                     lat,
                                     dt,
                                     missingValue=missingValue)

    if calculateWindSpeed:
        windspeed = maxWindSpeed(indicator, dt, lon, lat, pressure, poci)

    TCID = np.cumsum(indicator)

    data = np.empty(len(indicator),
                    dtype={
                        'names': trackFields,
                        'formats': trackTypes
                    })
    for key, value in zip(trackFields, [
            indicator, TCID, year, month, day, hour, minute, timeElapsed,
            datetimes, lon, lat, speed, bearing, pressure, windspeed, rmax,
            poci
    ]):
        data[key] = value

    tracks = []
    n = np.max(TCID)
    for i in range(1, n + 1):
        track = Track(data[TCID == i])
        track.trackId = (i, n)
        track.trackfile = trackFile
        getMinPressure(track, missingValue)
        getMaxWind(track, missingValue)
        tracks.append(track)

    return tracks
Example 21
def gridLatLonDist(cLon, cLat, lonArray, latArray, units=None):
    """
    Generate a grid containing the spherical earth distance
    of the points defined by (lonarray, latarray) from the
    point defined by (clon, clat).
    (lonarray,latarray) and (clon,clat) are in degrees.
    Returns distances in km by default, other units specified by the
    'units' kwarg.

    Based on m_lldist.m by Rich Pawlowicz ([email protected])
    Modified by Craig Arthur 2006-11-13


    :param float cLon: Longitude of the point to measure the distance from.
    :param float cLat: Latitude of the point to measure the distance from.
    :param lonArray: 1-d array of longitude values that will define the
                     grid over which distances will be calculated.
    :param latArray: 1-d array of latitude values that will define the
                     grid over which distances will be calculated.
    :param str units: Units of distance to be returned (default is kilometre)

    :returns: 2-d array containing the distance of the points defined in
             ``lonArray`` and ``latArray`` from the point
             (``cLon``, ``cLat``).

    Example::

        >>> lonArray = np.arange(90.,100.,0.1)
        >>> latArray = np.arange(-20.,-10.,0.1)
        >>> dist = gridLatLonDist( 105., -15., lonArray, latArray, 'km')

    """

    # #CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC
    # cLat_cos = 0.0
    # cLat_sin = 0.0
    # lat = empty(len(latArray), 'd')
    # lon = empty(len(lonArray), 'd')

    # dLon_sin = empty(len(lonArray), 'd')
    # dLat_sin = empty(len(latArray), 'd')
    # lat_cos = empty(len(latArray), 'd')

    # dist = empty([len(latArray), len(lonArray)], 'd')

    # code = """
    # #include <math.h>

    # double radius = 6367.0;
    # double toRads = 0.017453292519943295;

    # double cLon_ = cLon;
    # double cLat_ = cLat;

    # cLon_ = cLon_*toRads;
    # cLat_ = cLat_*toRads;

    # cLat_cos = cos(cLat_);

    # for (int i = 0; i < NlonArray[0]; ++i)
    # {
    # lon(i) = lonArray(i)*toRads;
    # double dLon = (lon(i) - cLon_)/2.0;
    # dLon_sin(i) = sin(dLon);
    # }

    # for (int i = 0; i < NlatArray[0]; ++i)
    # {
    # lat(i) = latArray(i)*toRads;
    # lat_cos(i) = cos(lat(i));

    # double dLat = (lat(i) - cLat_)/2.0;
    # dLat_sin(i) = sin(dLat);
    # }

    # for (int j = 0; j < NlatArray[0]; ++j)
    # {
    # for (int i = 0; i < NlonArray[0]; ++i)
    # {
    # double a = pow(dLat_sin(j), 2) + \
    # cLat_cos*lat_cos(j)*pow(dLon_sin(i), 2);
    # double c = 2.0*atan2(sqrt(fabs(a)), sqrt(1 - a));

    # dist(j, i) = radius*c;
    # }
    # }
    # """
    # err = weave.inline(code,
    # ['cLon', 'cLat', 'lonArray', 'latArray', 'lat', 'lon',
    # 'dLon_sin', 'dLat_sin', 'lat_cos', 'dist', 'cLat_cos'],
    # type_converters=converters.blitz,
    # compiler = 'gcc')
    # #CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC

    radius = 6367.0

    lat = np.radians(latArray)
    lon = np.radians(lonArray)

    cLon = math.radians(cLon)
    cLat = math.radians(cLat)
    lon_, lat_ = np.meshgrid(lon, lat)

    dLon = lon_ - cLon
    dLat = lat_ - cLat

    a = np.square(np.sin(dLat / 2.0)) + \
        np.cos(cLat) * np.cos(lat_) * np.square(np.sin(dLon / 2.0))
    c = 2.0 * np.arctan2(np.sqrt(np.absolute(a)), np.sqrt(1 - a))
    dist = radius * c

    dist = metutils.convert(dist, "km", units)

    return dist
Example 22
def maxWindSpeed(index,
                 deltatime,
                 lon,
                 lat,
                 pressure,
                 penv,
                 gustfactor=0.9524):
    """
    Calculate the 10-minute-mean maximum wind speed from the central
    pressure deficit, using the method described in Holland et al. (2010).

    :param index: Array (values of 1 or 0) indicating the beginning of
                  a new TC in the input dataset.
    :param deltatime: Time difference (in hours) between each point in the
                      record.
    :param lon: Longitudes of TC positions.
    :param lat: Latitudes of TC positions.
    :param pressure: Central pressure estimate of TCs (hPa).
    :param penv: Environmental pressure estimates for each TC position (hPa).
    :param float gustfactor: Gust factor - default value represents converting
                             from a 1-minute sustained wind speed to a
                             10-minute mean wind speed. Based on Harper et
                             al. 2010, WMO-TD1555.
    :type index: :class:`numpy.ndarray`
    :type deltatime: :class:`numpy.ndarray`
    :type lon: :class:`numpy.ndarray`
    :type lat: :class:`numpy.ndarray`
    :type pressure: :class:`numpy.ndarray`
    :type penv: :class:`numpy.ndarray`

    :returns: :class:`numpy.ndarray` of estimated wind speed based on
              central pressure deficit.

    Example::

      >>> v = maxWindSpeed(indicator, dt, lon, lat, pressure, penv)

    """

    # Speed and bearing:
    speed, bearing = getSpeedBearing(index, lon, lat, deltatime)
    speed = metutils.convert(speed, 'kmh', 'mps')
    np.putmask(speed, speed > 10e+3, 0)

    # Pressure deficit:
    deltap = penv - pressure

    # Pressure rate of change
    dpt = np.zeros(index.size, 'f')
    dpt[1:] = np.diff(pressure)
    dpdt = dpt / deltatime
    np.putmask(dpdt, index, 0)
    np.putmask(dpdt, np.isnan(dpdt) | np.isinf(dpdt) | (np.abs(dpdt) > 5.), 0)

    # Estimated pressure at the radius of maximum wind:
    prmw = pressure + deltap / 3.7

    # Calculate thermodynamic variables at RMW:
    tsurf = 28.0 - 3 * (np.abs(lat) - 10.) / 20.
    qmix = 0.9 * (3.802 / prmw) * np.exp(17.67 * tsurf / (243.5 + tsurf))
    tvs = (tsurf + 273.15) * (1. + 0.81 * qmix)
    rho = prmw * 100. / (tvs * 287.04)

    chi = 0.6 * (1.0 - deltap / 215.)
    beta = -0.000044 * np.power(deltap, 2.) + \
        0.01 * deltap + 0.03 * dpdt - 0.014 * np.abs(lat) + \
        0.15 * np.power(speed, chi) + 1.

    # Holland's P-W relation derives a 1-minute mean wind speed, so we often
    # need to convert to some other averaging period. I use the recommendations
    # of Harper et al. (2010) WMO TD-1555:
    # Common values are (assuming "at-sea" conditions):
    # 10-min mean: 0.95 (default)
    # 3-second gust: 1.11

    v = gustfactor * np.sqrt(deltap * 100 * beta / (rho * np.exp(1.)))
    np.putmask(v, (np.isnan(v) | np.isinf(v) | (pressure >= 10e+7) |
                   (pressure <= 0) | (speed >= 10e+7)), 0)

    return v