import numpy as np
import cdsapi
from ecmwfapi import ECMWFDataServer


def main():
    # 1. ERA-20C 1900-1979
    years = np.arange(1900, 1980)
    months = np.arange(1, 13)
    create_date = '/'.join('%d%02d01' % (year, month)
                           for year in years for month in months)
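    # The request string lists every month start, slash-separated:
    # '19000101/19000201/.../19791201'.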

    server = ECMWFDataServer()
    server.retrieve({
        "class": "e2",
        "dataset": "era20c",
        "date": create_date,
        "expver": "1",
        "levtype": "sfc",
        "param": "151.128/165.128/166.128",
        "stream": "moda",
        "type": "an",
        "target": "output",
        'grid': "1/1",

        'format' : "netcdf"
    })

    # 2. ERA5 1979-2018
    c = cdsapi.Client()

    data = c.retrieve(
        'reanalysis-era5-single-levels-monthly-means',
        {
            'product_type':'monthly_averaged_reanalysis',
            'variable':[
                '10m_u_component_of_wind','10m_v_component_of_wind','mean_sea_level_pressure'
            ],
            'year':[
                '1979','1980','1981',
                '1982','1983','1984',
                '1985','1986','1987',
                '1988','1989','1990',
                '1991','1992','1993',
                '1994','1995','1996',
                '1997','1998','1999',
                '2000','2001','2002',
                '2003','2004','2005',
                '2006','2007','2008',
                '2009','2010','2011',
                '2012','2013','2014',
                '2015','2016','2017',
                '2018'
            ],
            'month':[
                '01','02','03',
                '04','05','06',
                '07','08','09',
                '10','11','12'
            ],
            'time':'00:00',
            'format':'netcdf',
            'grid': "1/1",
    },"/Users/tfrederi/Downloads/ERA5.nc")
    return
def download_era_interim_data(year, grid=1.5, just_one_day=False):
    """ downloads data from ERA Interim """

    # FIXME: we are only checking that the file exists.
    # We need to check whether the data is actually available,
    # i.e., open the dataset, check the time dimension, and return if OK,
    # or keep going and download.
    # This will also help to identify cases where we previously downloaded
    # just_one_day but want to re-download the files for the full year!

    # FIXME: download only the region!!!

    if just_one_day:
        date_param = "%s0101" % year
    else:
        date_param = "%s0101/to/%s1231" % (year, year)

    server = ECMWFDataServer()

    server_config = {
        'dataset': "interim",
        'date': date_param,
        'stream': "oper",
        'time': "00:00:00/06:00:00/12:00:00/18:00:00",
        'step': "0",
        'type': "an",
        'grid': "%s/%s" % (grid, grid),
        'levtype': "sfc",
        'class': "ei",
        #'area'    : "%s/%s/%s/%s" % (20,-90,-40,-30)  # (north, west, south, east)
        'format': "netcdf",
    }

    sfc_an = {
        'viwve': 71.162,  # Vertical integral of eastward water vapour flux (ewvf)
        'viwvn': 72.162,  # Vertical integral of northward water vapour flux (nwvf)
        'vilwe': 88.162,  # Vertical integral of eastward cloud liquid water flux (eclwf)
        'vilwn': 89.162,  # Vertical integral of northward cloud liquid water flux (nclwf)
        'viiwe': 90.162,  # Vertical integral of eastward cloud frozen water flux (ecfwf)
        'viiwn': 91.162,  # Vertical integral of northward cloud frozen water flux (ncfwf)
        'sp': 134.128,  # Surface pressure
        'tcw': 136.128,  # Total column water
        'tcwv': 137.128,  # Total column water vapour
    }

    sfc_fc = {
        'tp': 228.128,  # Total precipitation
        'e': 182.128,  # Evaporation
    }

    ml_an = {
        'u': 131.128,  # U component of wind
        'v': 132.128,  # V component of wind
        'q': 133.128,  # Specific humidity
    }

    for key, value in sfc_an.items():
        filename = "%s/%s.%s.nc" % (wam2layers_config.data_dir, year, key)
        if not os.path.isfile(filename):
            server_config.update({'param': value, 'target': filename})
            server.retrieve(server_config)

    for key, value in sfc_fc.items():
        filename = "%s/%s.%s.nc" % (wam2layers_config.data_dir, year, key)
        if not os.path.isfile(filename):
            server_config.update({
                'param': value,
                'target': filename,
                'type': "fc",
                'time': "00:00:00/12:00:00",
                'step': "3/6/9/12"
            })
            server.retrieve(server_config)

    for key, value in ml_an.items():
        filename = "%s/%s.%s.nc" % (wam2layers_config.data_dir, year, key)
        if not os.path.isfile(filename):
            server_config.update({
                'param': "%s" % value,
                'target': filename,
                'type': "an",
                'levtype': "ml",
                'step': "0",
                'time': "00:00:00/06:00:00/12:00:00/18:00:00",
                'levelist': '/'.join(str(level) for level in wam2layers_config.levels)
            })
            server.retrieve(server_config)
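A hypothetical invocation, assuming a wam2layers_config module that defines data_dir and levels:

# hypothetical usage sketch: fetch all 2010 ERA-Interim fields at 1.5 degrees
download_era_interim_data(2010, grid=1.5, just_one_day=False)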
Example #3
def main():
    usage = """usage: %prog --start_date=YYYYMMDD [--end_date=YYYYMMDD] [--times=tt1/tt2/tt3] [--levels=nlevels]
                                                  [--area=north/west/south/east]  [--outputdir=output_directory] """
    parser = OptionParser(usage=usage)
    parser.add_option("--start_date",
                      dest="start_date",
                      help="start date YYYYMMDD",
                      metavar="start_date")
    parser.add_option("--end_date",
                      dest="end_date",
                      help="end_date YYYYMMDD",
                      metavar="end_date")
    parser.add_option("--times",
                      dest="times",
                      default="00/03/06/09/12/15/18/21",
                      help="times such as 00/12",
                      metavar="times")
    parser.add_option("--levels",
                      dest="levels",
                      default="60",
                      help="number of vertical levels",
                      metavar="levels")
    parser.add_option("--area",
                      dest="area",
                      default="90.0/-179.0/-90.0/180.0",
                      help="area defined as north/west/south/east with default 90.0/-179.0/-90.0/180.0",
                      metavar="area")
    parser.add_option("--outputdir",
                      dest="outputdir",
                      help="root directory for storing output files",
                      metavar="outputdir")
    (options, args) = parser.parse_args()

    if not options.start_date:
        parser.error("start date must be specified!")
    else:
        start_date = options.start_date

    if not options.end_date:
        end_date = start_date
    else:
        end_date = options.end_date

    if not options.outputdir:
        # if WORKDIR is defined, we will use it otherwise files
        # will be stored in the current directory
        outputdir = os.environ.get("WORKDIR", ".")
    else:
        outputdir = options.outputdir

    print "start date %s " % (start_date)
    print "end date %s " % (end_date)

    server = ECMWFDataServer()

    # Retrieve ERA interim data for running flexpart

    syear = int(start_date[:4])
    smonth = int(start_date[4:6])
    sday = int(start_date[6:])
    start = datetime.date(year=syear, month=smonth, day=sday)
    eyear = int(end_date[:4])
    emonth = int(end_date[4:6])
    eday = int(end_date[6:])

    end = datetime.date(year=eyear, month=emonth, day=eday)

    current_ym = ""
    ir_date = start
    retrieve = "no"
    for date in daterange(start, end):
        # if new year & month then we create a new directory to store output files
        if date.strftime("%Y%m") != current_ym and current_ym != "":
            retrieve = "yes"

        if date == end:
            retrieve = "yes"

        if retrieve == "yes":
            # we need to retrieve MARS data for this period (maximum one month)
            flexpart = EIFlexpart()
            dates = ir_date.strftime("%Y%m%d") + "/to/" + er_date.strftime(
                "%Y%m%d")
            current_outputdir = outputdir + "/" + ir_date.strftime(
                "%Y") + '/' + ir_date.strftime("%m") + '/'
            mkdir_p(current_outputdir)
            print "retrieve " + dates + " in dir " + current_outputdir
            flexpart.retrieve(server, dates, options.times, options.area,
                              options.levels, current_outputdir)
            ir_date = date
            retrieve = "no"

        er_date = date

        current_ym = date.strftime("%Y%m")
Example #4
#!/usr/bin/env python
from ecmwfapi import ECMWFDataServer
server = ECMWFDataServer()
server.retrieve({
    "class": "ea",
    "dataset": "era5",
    "date": "2014-01-01/to/2014-12-31",
    "expver": "1",
    "format": "netcdf",
    "area": "42/-125/32/-115",
    "levtype": "sfc",
    "number": "0",
    "param": "168.128/167.128/134.128/228.128/165.128/166.128/66.128/39.128",
    "step": "6",
    "stream": "enda",
    "time": "06:00:00",
    "type": "fc",
    "target": "cal_era5_2014.nc",
    "grid": "0.1/0.1",
})
Example #5
#!/usr/bin/env python
#
# (C) Copyright 2012-2013 ECMWF.
#
# This software is licensed under the terms of the Apache Licence Version 2.0
# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. 
# In applying this licence, ECMWF does not waive the privileges and immunities 
# granted to it by virtue of its status as an intergovernmental organisation nor
# does it submit to any jurisdiction.
#

# To run this example, you need an API key 
# available from https://api.ecmwf.int/v1/key/
import json
from ecmwfapi import ECMWFDataServer


parseFunc = json.loads
retrieveFunc = ECMWFDataServer().retrieve
Example #6
def download_interim_for_gssha(main_directory,
                               start_datetime,
                               end_datetime,
                               leftlon=-180,
                               rightlon=180,
                               toplat=90,
                               bottomlat=-90,
                               precip_only=False):
    """
    Function to download ERA-Interim data for GSSHA

    .. note:: https://software.ecmwf.int/wiki/display/WEBAPI/Access+ECMWF+Public+Datasets

    Args:
        main_directory(:obj:`str`): Location of the output for the forecast data.
        start_datetime(:obj:`str`): Datetime for download start.
        end_datetime(:obj:`str`): Datetime for download end.
        leftlon(Optional[:obj:`float`]): Left bound for longitude. Default is -180.
        rightlon(Optional[:obj:`float`]): Right bound for longitude. Default is 180.
        toplat(Optional[:obj:`float`]): Top bound for latitude. Default is 90.
        bottomlat(Optional[:obj:`float`]): Bottom bound for latitude. Default is -90.
        precip_only(Optional[bool]): If True, will only download precipitation.

    Example::

        from datetime import datetime
        from gsshapy.grid.era_to_gssha import download_interim_for_gssha

        era_interim_folder = '/era_interim'
        leftlon = -95
        rightlon = -75
        toplat = 35
        bottomlat = 30
        download_interim_for_gssha(era_interim_folder,
                                   datetime(2016, 1, 1),
                                   datetime(2016, 1, 31),
                                   leftlon, rightlon, toplat, bottomlat)

    """
    # parameters: https://software.ecmwf.int/wiki/display/CKB/Details+of+ERA-Interim+parameters

    # import here to make sure it is not required to run
    from ecmwfapi import ECMWFDataServer
    server = ECMWFDataServer()

    try:
        mkdir(main_directory)
    except OSError:
        pass

    download_area = "{toplat}/{leftlon}/{bottomlat}/{rightlon}".format(
        toplat=toplat, leftlon=leftlon, bottomlat=bottomlat, rightlon=rightlon)
    download_datetime = start_datetime
    interim_request = {
        'dataset': "interim",
        #  'oper' specifies the high resolution daily data, as opposed to monthly means, wave, eda edmm, etc.
        'stream': "oper",
        #  Surface level, as opposed to pressure level (pl) or model level (ml)
        'levtype': "sfc",
        # The native ERA-Interim resolution is ~80 km (0.75 degrees) globally
        # on a Gaussian grid; here we use a 0.5-degree lat/lon grid.
        'grid': "0.5/0.5",
        'area': download_area,
        'format': 'netcdf',
    }
    while download_datetime <= end_datetime:
        interim_request['date'] = download_datetime.strftime("%Y-%m-%d")
        if not precip_only:
            download_file = path.join(
                main_directory, "erai_gssha_{0}_an.nc".format(
                    download_datetime.strftime("%Y%m%d")))
            if not path.exists(download_file):
                #  We want instantaneous parameters, which are archived as type Analysis ('an') as opposed to forecast (fc)
                interim_request['type'] = "an"
                # For parameter codes see the ECMWF parameter database at http://apps.ecmwf.int/codes/grib/param-db
                interim_request['param'] = "2t/2d/sp/10u/10v/tcc"
                # step 0 is analysis, 3-12 is forecast
                interim_request['step'] = "0"
                # ERA Interim provides 6-hourly analysis
                interim_request['time'] = "00/06/12/18"
                interim_request['target'] = download_file
                server.retrieve(interim_request)

            download_file = path.join(
                main_directory, "erai_gssha_{0}_1_fc.nc".format(
                    download_datetime.strftime("%Y%m%d")))
            if not path.exists(download_file):
                interim_request['type'] = "fc"
                interim_request['param'] = "2t/2d/sp/10u/10v/tcc"
                interim_request['step'] = "3"
                interim_request['time'] = "00/06/12/18"
                interim_request['target'] = download_file
                server.retrieve(interim_request)

        download_file = path.join(
            main_directory, "erai_gssha_{0}_fc.nc".format(
                download_datetime.strftime("%Y%m%d")))
        if not path.exists(download_file):
            interim_request['type'] = "fc"
            interim_request['param'] = "tp/ssrd"
            interim_request['step'] = "3/6/9/12"
            interim_request['time'] = "00/12"
            interim_request['target'] = download_file
            server.retrieve(interim_request)
            # Convert the accumulated fields back to per-step increments (done below).
            # https://software.ecmwf.int/wiki/pages/viewpage.action?pageId=56658233
            # You need  total precipitation for every 6 hours.
            # Daily total precipitation (tp) is only available with a forecast base time 00:00 and 12:00,
            # so to get tp for every 6 hours you will need to extract (and for the second and fourth period calculate):
            # tp(00-06) = (time 00, step 6)
            # tp(06-12) = (time 00, step 12) minus (time 00, step 6)
            # tp(12-18) = (time 12, step 6)
            # tp(18-24) = (time 12, step 12) minus (time 12, step 6)
            # (Note the units for total precipitation is meters.)
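            # For example, accumulated tp at steps 3/6/9/12 of the 00 UTC run,
            # [1, 3, 4, 6] mm, becomes per-step increments [1, 2, 1, 2] mm:
            # index 0 is kept and indices 1-3 are replaced by successive
            # differences; the 12 UTC run (indices 4-7) is handled the same way.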
            tmp_download_file = download_file + '_tmp'
            with xr.open_dataset(download_file) as xd:
                diff_xd = xd.diff('time')
                xd.tp[1:4] = diff_xd.tp[:3]
                xd.tp[5:] = diff_xd.tp[4:]
                xd.ssrd[1:4] = diff_xd.ssrd[:3]
                xd.ssrd[5:] = diff_xd.ssrd[4:]
                xd.to_netcdf(tmp_download_file)
            remove(download_file)
            rename(tmp_download_file, download_file)

        download_file = path.join(
            main_directory, "erai_gssha_{0}_0_fc.nc".format(
                download_datetime.strftime("%Y%m%d")))
        if download_datetime <= start_datetime and not path.exists(
                download_file):
            loc_download_date = (download_datetime -
                                 timedelta(1)).strftime("%Y-%m-%d")
            interim_request['type'] = "fc"
            interim_request['param'] = "tp/ssrd"
            interim_request['step'] = "9/12"
            interim_request['time'] = "12"
            interim_request['target'] = download_file
            interim_request['date'] = loc_download_date
            server.retrieve(interim_request)
            # convert to incremental (see above)
            tmp_download_file = download_file + '_tmp'
            with xr.open_dataset(download_file) as xd:
                inc_xd = xd.diff('time')
                inc_xd.to_netcdf(tmp_download_file)
            remove(download_file)
            rename(tmp_download_file, download_file)
        download_datetime += timedelta(1)
def get_ecmwf_macc(filename, params, startdate, stopdate, lookup_params=True,
                   server=None, target=_ecmwf):
    """
    Download data from ECMWF MACC Reanalysis API.

    Parameters
    ----------
    filename : str
        full path of file where to save data, ``.nc`` appended if not given
    params : str or sequence of str
        keynames of parameter[s] to download
    startdate : datetime.datetime or datetime.date
        UTC date
    stopdate : datetime.datetime or datetime.date
        UTC date
    lookup_params : bool, default True
        optional flag, if ``False``, then codes are already formatted
    server : ecmwfapi.api.ECMWFDataServer
        optionally provide a server object, default is ``None``
    target : callable
        optional function that calls ``server.retrieve`` to pass to thread

    Returns
    -------
    t : thread
        a thread object, use it to check status by calling `t.is_alive()`

    Notes
    -----
    To download data from ECMWF requires the API client and a registration
    key. Please read the documentation in `Access ECMWF Public Datasets
    <https://confluence.ecmwf.int/display/WEBAPI/Access+ECMWF+Public+Datasets>`_.
    Follow the instructions in step 4 and save the ECMWF registration key
    as `$HOME/.ecmwfapirc` or set `ECMWF_API_KEY` as the path to the key.

    This function returns a daemon thread that runs in the background. Exiting
    Python will kill this thread, however this thread will not block the main
    thread or other threads. This thread will terminate when the file is
    downloaded or if the thread raises an unhandled exception. You may submit
    multiple requests simultaneously to break up large downloads. You can also
    check the status and retrieve downloads online at
    http://apps.ecmwf.int/webmars/joblist/. This is useful if you kill the
    thread. Downloads expire after 24 hours.

    .. warning:: Your request may be queued online for an hour or more before
        it begins to download

    Precipitable water :math:`P_{wat}` is equivalent to the total column of
    water vapor (TCWV), but the units given by ECMWF MACC Reanalysis are kg/m^2
    at STP (1-atm, 25-C). Divide by ten to convert to centimeters of
    precipitable water:

    .. math::
        P_{wat} \\left( \\text{cm} \\right) \
        = TCWV \\left( \\frac{\\text{kg}}{\\text{m}^2} \\right) \
        \\frac{100 \\frac{\\text{cm}}{\\text{m}}} \
        {1000 \\frac{\\text{kg}}{\\text{m}^3}}

    The keynames available for the ``params`` argument are given by
    :const:`pvlib.iotools.ecmwf_macc.PARAMS` which maps the keys to codes used
    in the API. The following keynames are available:

    =======  =========================================
    keyname  description
    =======  =========================================
    tcwv     total column water vapor in kg/m^2 at STP
    aod550   aerosol optical depth measured at 550-nm
    aod469   aerosol optical depth measured at 469-nm
    aod670   aerosol optical depth measured at 670-nm
    aod865   aerosol optical depth measured at 865-nm
    aod1240  aerosol optical depth measured at 1240-nm
    =======  =========================================

    If ``lookup_params`` is ``False`` then ``params`` must contain the codes
    preformatted according to the ECMWF MACC Reanalysis API. This is useful if
    you want to retrieve codes that are not mapped in
    :const:`pvlib.iotools.ecmwf_macc.PARAMS`.

    Specify a custom ``target`` function to modify how the ECMWF API function
    ``server.retrieve`` is called. The ``target`` function must have the
    following signature in which the parameter definitions are similar to
    :func:`pvlib.iotools.get_ecmwf_macc`. ::


        target(server, startdate, stopdate, params, filename) -> None

    Examples
    --------
    Retrieve the AOD measured at 550-nm and the total column of water vapor for
    November 1, 2012.

    >>> from datetime import date
    >>> from pvlib.iotools import get_ecmwf_macc
    >>> filename = 'aod_tcwv_20121101.nc'  # .nc extension added if missing
    >>> params = ('aod550', 'tcwv')
    >>> start = end = date(2012, 11, 1)
    >>> t = get_ecmwf_macc(filename, params, start, end)
    >>> t.is_alive()
    True

    """
    if not filename.endswith('nc'):
        filename += '.nc'
    if lookup_params:
        try:
            params = '/'.join(PARAMS.get(p) for p in params)
        except TypeError:
            params = PARAMS.get(params)
    startdate = startdate.strftime('%Y-%m-%d')
    stopdate = stopdate.strftime('%Y-%m-%d')
    if not server:
        server = ECMWFDataServer()
    t = threading.Thread(target=target, daemon=True,
                         args=(server, startdate, stopdate, params, filename))
    t.start()
    return t
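For reference, a minimal sketch of a custom target function with the signature described above; the request keys below are assumptions for illustration, not the library's built-in default request:

def my_macc_target(server, startdate, stopdate, params, filename):
    # hypothetical example: forward a MACC-style request to server.retrieve
    server.retrieve({
        'class': 'mc',
        'dataset': 'macc',
        'date': '%s/to/%s' % (startdate, stopdate),
        'levtype': 'sfc',
        'param': params,
        'time': '00:00:00',
        'step': '0',
        'type': 'an',
        'format': 'netcdf',
        'target': filename,
    })

# used as: get_ecmwf_macc('out.nc', ('aod550',), start, end, target=my_macc_target)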
Example #8
def download_era(start,
                 end,
                 parameters,
                 target,
                 product='ERA-Interim',
                 format='grib',
                 grid_size=None,
                 timesteps=[0, 6, 12, 18],
                 landmask=True):
    """
    Download ERA-Interim or ERA5 data

    Parameters
    ----------
    start : date
        start date
    end : date
        end date
    parameters : list
        parameter ids, see wiki
    target : str
        path at which to save the downloaded grib file
    product : str, optional
        Name of the model, "ERA-interim" (default) or "ERA5"
    format: str, optional
        format of the downloaded data, netcdf or grib (default)
    grid_size: [float,float], optional
        size of the grid in form (lon, lat), which the data is resampled to.
        If None is passed, the minimum grid for the chosen product is used.
    timesteps: list
        list of times for which data is downloaded
    landmask: bool
        If True, also download the land/sea mask
    """
    server = ECMWFDataServer()
    param_strings = []

    if product == 'ERA-Interim':
        dataset = 'interim'
        dataclass = 'ei'
    elif product == 'ERA5':
        dataset = 'era5'
        dataclass = 'ea'
    else:
        raise ValueError(
            'Unknown ECMWF product. Use "ecmwf_download -h" to show supported data sets'
        )

    if landmask and 172 not in parameters:
        parameters.append(172)

    for parameter in parameters:
        param_strings.append("%d.128" % parameter)

    timestep_strings = []
    for timestep in timesteps:
        timestep_strings.append("%02d" % timestep)

    param_string = '/'.join(param_strings)
    timestep_string = '/'.join(timestep_strings)
    date_string = "%s/to/%s" % (start.strftime("%Y-%m-%d"),
                                end.strftime("%Y-%m-%d"))

    grid_size = "%f/%f" % (grid_size[0], grid_size[1]) if grid_size else None

    # ATTENTION: When downloading netcdf files steps and times must not overlap!!
    # see: https://software.ecmwf.int/wiki/display/CKB/What+to+do+with+ECCODES+ERROR+%3A+Try+using+the+-T+option
    dl_params = {
        "class": dataclass,
        "dataset": dataset,
        "expver": "1",
        "stream": "oper",
        "type": "an",
        "levtype": "sfc",
        "param": param_string,
        "date": date_string,
        "time": timestep_string,
        "step": "0",
        "grid": grid_size,
        "format": format,
        "target": target
    }

    if not grid_size:
        if format == 'netcdf':
            if product == 'ERA5':
                dl_params['grid'] = "%f/%f" % (0.3, 0.3)
            else:
                dl_params['grid'] = "%f/%f" % (0.75, 0.75)
        else:
            del dl_params['grid']
    else:
        if (any(size < 0.75 for size in grid_size) and product == 'ERA-Interim') or \
           (any(size < 0.3 for size in grid_size) and product == 'ERA5'):
            raise Warning(
                'Custom grid smaller than original ERA data. See https://software.ecmwf.int/wiki/display/CKB/Does+downloading+data+at+higher+resolution+improve+the+output'
            )

    server.retrieve(dl_params)
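A hypothetical invocation of download_era (dates and parameter IDs are assumptions):

from datetime import date

# hypothetical usage sketch: 10 m winds (165/166 in table 128) for January 2000
download_era(date(2000, 1, 1), date(2000, 1, 31),
             parameters=[165, 166],
             target='era_interim_200001.grb')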
Example #9
def download_files(dt, file_type, t_hour, step, path_out):
    """
    Downloads necessary CAMS data for MAJA and converts them into MAJA input format
    """
    from ecmwfapi import ECMWFDataServer
    from datetime import datetime

    server = ECMWFDataServer()

    date_courante = str(dt.year) + '%02d' % dt.month + '%02d' % dt.day
    print('Current date =', date_courante)

    # make sure the returned paths are defined even if a file type is disabled
    path_aot = path_rh = path_mr = None

    if file_type['surface']:
        # Surface
        # Retrieve AOT at 550 nm for BC, SS, SU, DU, OM
        # and (if possible, i.e. from CAMS 46r1 on) AM, NI
        nom_aot = "CAMS_AOT_" + date_courante + 'UTC' + str(
            int(t_hour) + int(step)).zfill(2) + '0000.nc'
        path_aot = os.path.join(path_out, nom_aot)
        print('AOT output file name:', path_aot)

        models = "208.210/209.210/210.210/211.210/212.210"
        if dt >= datetime(2019, 7, 10):
            # This is the new format:
            models += "/250.210/251.210"
        # 208.210/209.210/210.210/211.210/212.210 : AOT at 550nm for BC, SS, OM, SU, DU
        server.retrieve({
            'stream': "oper",
            'class': "mc",
            'dataset': "cams_nrealtime",
            'expver': '0001',
            'step': step,
            'levtype': "SFC",
            'date': date_courante,
            'time': t_hour,
            'type': "fc",
            'param': models,
            'area': "G",
            'grid': "1.25/1.25",
            'format': "netcdf",
            'target': path_aot
        })

    if file_type['pressure']:
        # Pressure levels
        # Retrieve relative humidity (RH)
        nom_rh = "CAMS_RH_" + date_courante + 'UTC' + str(
            int(t_hour) + int(step)).zfill(2) + '0000.nc'
        path_rh = os.path.join(path_out, nom_rh)
        print('RH output file name:', path_rh)
        levellist = "1/2/3/5/7/10/20/30/50/70/100/150/200/250/300/400/500/600/700/800/850/900/925/950/1000"
        server.retrieve({
            'stream': "oper",
            'class': "mc",
            'dataset': "cams_nrealtime",
            'expver': "0001",
            'step': step,
            'levtype': "pl",
            "levelist": levellist,
            'date': date_courante,
            'time': t_hour,
            'type': "fc",
            'param': "157.128",
            'area': "G",
            'grid': "1.25/1.25",
            'format': "netcdf",
            'target': path_rh
        })

    if file_type['model']:
        # Model levels
        # Retrieve the mixing ratios:
        # 3 bins of DUST,
        # 3 bins of SEASALT,
        # hydrophilic and hydrophobic ORGANICMATTER,
        # hydrophilic and hydrophobic BLACKCARBON,
        # and SULFATE.
        nom_mr = "CAMS_MR_" + date_courante + 'UTC' + str(
            int(t_hour) + int(step)).zfill(2) + '0000.nc'
        path_mr = os.path.join(path_out, nom_mr)
        print('Mixing-ratio output file name:', path_mr)
        models = "1.210/2.210/3.210/4.210/5.210/6.210/7.210/8.210/9.210/10.210/11.210"
        levels = "1/to/60"
        if dt >= datetime(2019, 7, 10):
            # This is the new format:
            models += "/247.210/248.210/249.210"
            levels = "1/to/137"
        server.retrieve({
            'stream': "oper",
            'class': "mc",
            'dataset': "cams_nrealtime",
            'expver': "0001",
            'step': step,
            'levtype': "ml",
            "levelist": levels,
            'date': date_courante,
            'time': t_hour,
            'type': "fc",
            'param': models,
            'area': "G",
            'grid': "1.25/1.25",
            'format': "netcdf",
            'target': path_mr
        })
    return path_aot, path_rh, path_mr
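A hypothetical call of download_files (date, hour, step, and output directory are assumptions):

from datetime import datetime

# hypothetical usage sketch: fetch all three CAMS files for 1 March 2020, 00 UTC + 3 h
aot, rh, mr = download_files(datetime(2020, 3, 1),
                             {'surface': True, 'pressure': True, 'model': True},
                             '00', '3', '/tmp/cams')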
#!/usr/bin/env python
from ecmwfapi import ECMWFDataServer

server = ECMWFDataServer(url="https://api.ecmwf.int/v1",
                         key="",
                         email="*****@*****.**")

# Retrieve data in netCDF format
server.retrieve({
    'stream': "oper",
    'levtype': "sfc",
    'param': "167",
    'dataset': "interim",
    'step': "0",
    'grid': "0.5/0.5",
    'area': "90/-180/-90/179.5",
    'time': "00/06/12/18",
    'date': "2014-07-01/to/2014-07-31",
    'type': "an",
    'class': "ei",
    'format': "netcdf",
    'target': "C:\\Users\\Laurens\\test.nc"
})

#!/usr/bin/env python
from ecmwfapi import ECMWFDataServer
server = ECMWFDataServer()
server.retrieve({
    "class": "ei",
    "dataset": "interim",
    "date": "2001-01-01/to/2001-12-31",
def main(var,
         years=[2017, 2018],
         month_start=1,
         month_end=12,
         path='/mnt/netdisk1/stephan/WeatherBench/tigge/raw/',
         ens=False):

    server = ECMWFDataServer()
    months = range(month_start, month_end + 1)
    for year in years:
        for month in months:

            try:
                days = calendar.monthrange(year, month)[1]
                month = str(month).zfill(2)
                if var == 'z':
                    params = {
                        "class": "ti",
                        "dataset": "tigge",
                        "date": f"{year}-{month}-01/to/{year}-{month}-{days}",
                        "expver": "prod",
                        "grid": "0.703125/0.703125",
                        "levelist": "500",
                        "levtype": "pl",
                        "origin": "ecmf",
                        "param": "156",
                        "step": "0/6/12/18/24/30/36/42/48/54/60/66/72/78/84/90/96/102/108/114/120/126/132/138/144/150/156/162/168/174/180/186/192/198/204/210/216/222/228/234/240/246/252/258/264/270/276/282/288/294/300/306/312/318/324/330/336/342/348/354/360",
                        "time": "00:00:00/12:00:00",
                        "type": "cf",
                        "target": f"{path}/geopotential_500/geopotential_500{'_ens' if ens else ''}_{year}_{month}_raw.grib",
                    }
                elif var == 't':
                    params = {
                        "class": "ti",
                        "dataset": "tigge",
                        "date": f"{year}-{month}-01/to/{year}-{month}-{days}",
                        "expver": "prod",
                        "grid": "0.703125/0.703125",
                        "levelist": "850",
                        "levtype": "pl",
                        "origin": "ecmf",
                        "param": "130",
                        "step": "0/6/12/18/24/30/36/42/48/54/60/66/72/78/84/90/96/102/108/114/120/126/132/138/144/150/156/162/168/174/180/186/192/198/204/210/216/222/228/234/240/246/252/258/264/270/276/282/288/294/300/306/312/318/324/330/336/342/348/354/360",
                        "time": "00:00:00/12:00:00",
                        "type": "cf",
                        "target": f"{path}/temperature_850/temperature_850{'_ens' if ens else ''}_{year}_{month}_raw.grib",
                    }
                elif var in var_dict:
                    params = {
                        "class": "ti",
                        "dataset": "tigge",
                        "date": f"{year}-{month}-01/to/{year}-{month}-{days}",
                        "expver": "prod",
                        "grid": "0.703125/0.703125",
                        # "levelist": "850",
                        "levtype": "sfc",
                        "origin": "ecmf",
                        "param": var_dict[var],
                        # "step": "0/6/12/18/24/30/36/42/48/54/60/66/72/78/84/90/96/102/108/114/120/126/132/138/144/150/156/162/168/174/180/186/192/198/204/210/216/222/228/234/240/246/252/258/264/270/276/282/288/294/300/306/312/318/324/330/336/342/348/354/360",
                        "step": "0/6/12/18/24/30/36/42/48/54/60/66/72/78/84/90/96/102/108/114/120",
                        "time": "00:00:00/12:00:00",
                        "type": "cf",
                        "target": f"{path}/{var}/{var}{'_ens' if ens else ''}_{year}_{month}_raw.grib",
                    }
                if ens:
                    params['number'] = "1/2/3/4/5/6/7/8/9/10/11/12/13/14/15/16/17/18/19/20/21/22/23/24/25/26/27/28/29/30/31/32/33/34/35/36/37/38/39/40/41/42/43/44/45/46/47/48/49/50"
                    params['step'] = "0/24/48/72/96/120/144/168"
                    params['type'] = "pf"

                os.makedirs(os.path.dirname(params['target']), exist_ok=True)
                server.retrieve(params)

            except APIException:
                print(f'Damaged files {year}-{month}-01/to/{year}-{month}-{days}')
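A hypothetical call (the output path is an assumption):

# hypothetical usage sketch: 500 hPa geopotential control forecasts for early 2018
main('z', years=[2018], month_start=1, month_end=2, path='/tmp/tigge/raw', ens=False)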
Example #12
def download_eraint(
        target_path,
        start,
        end,
        variables,
        grid_size=None,
        type="fc",
        h_steps=(0, 6, 12, 18),
        grb=False,
        dry_run=False,
        steps=(0, ),
):
    """
    Download era interim data

    Parameters
    ----------
    target_path : str
        path at which to save the downloaded grib file
    start : date
        start date
    end : date
        end date
    variables : list
        parameter ids, see wiki
    type: str, optional (default: "fc")
        MARS data type, e.g. "fc" (forecast) or "an" (analysis)
    grid_size: [float,float], optional
        size of the grid in form (lon, lat), which the data is resampled to.
        If None is passed, the native ERA-Interim grid is used.
    h_steps: tuple, optional (default: (0, 6, 12, 18))
        List of full hours to download data at the selected dates
    grb: bool, optional (default: False)
        Download data as grb files instead of nc files
    dry_run: bool
        Do not download anything; this is just used for testing the functions
    steps: tuple, optional (default: (0,))
        forecast steps (hours) to download
    """
    if dry_run:
        warnings.warn("Dry run does not create connection to ECMWF")
        server = None
    else:
        server = ECMWFDataServer()

    param_strings = []

    dataset = "interim"
    dataclass = "ei"

    for variable in variables:
        param_strings.append(str(variable))

    timestep_strings = []
    for timestep in h_steps:
        timestep_strings.append("%02d" % timestep)

    param_string = "/".join(param_strings)
    timestep_string = "/".join(timestep_strings)
    date_string = "%s/to/%s" % (
        start.strftime("%Y-%m-%d"),
        end.strftime("%Y-%m-%d"),
    )

    grid_size = "%f/%f" % (grid_size[0], grid_size[1]) if grid_size else None

    step = "/".join([str(s) for s in steps])
    # ATTENTION: When downloading netcdf files steps and times
    # must not overlap!! see:
    # https://software.ecmwf.int/wiki/display/CKB/What+to+do+with+ECCODES+ERROR+%3A+Try+using+the+-T+option  # noqa: E501

    dl_params = {
        "class": dataclass,
        "dataset": dataset,
        "expver": "1",
        "stream": "oper",
        "type": type,
        "levtype": "sfc",
        "param": param_string,
        "date": date_string,
        "time": timestep_string,
        "step": step,
        "grid": grid_size,
        "format": "grib1" if grb else "netcdf",
        "target": target_path,
    }

    if not grid_size:
        if not grb:
            dl_params["grid"] = "%f/%f" % (0.75, 0.75)
        else:
            del dl_params["grid"]
    else:
        if any(size < 0.75 for size in grid_size):
            raise Warning(
                "Custom grid smaller than original ERA Interim resolution. "
                "See https://software.ecmwf.int/wiki/display/CKB/"
                "Does+downloading+data+at+higher+resolution+improve+the+output"  # noqa: E501
            )
    if not dry_run:
        server.retrieve(dl_params)
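A hypothetical invocation of download_eraint (dates and parameter ID are assumptions):

from datetime import date

# hypothetical usage sketch: 2 m temperature (167.128) analyses for one week
download_eraint('eraint_2m_temp.nc', date(2000, 1, 1), date(2000, 1, 7),
                variables=['167.128'], type='an')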
Example #13
def ecmwf(date_range='1979-01-01/to/2017-08-31',
          area='-40/-90/-90/90',
          type='an',
          step='0',
          time='00/06/12/18',
          params=['msl', 't2m', 'skt'],
          output_filename=None):
    """ Submits MARS request to retrieve ERA-Interim reanalysis fields as netCDF file.

    Arguments:
        date_range: for daily fields, format as, e.g., '1979-01-01/to/2017-08-31'
                    for monthly means of daily means, use [datetime(start_yr,start_mo,1),datetime(end_yr,end_mo,1)]
        area: subsetting area, format '-40/-90/-90/90' (N/W/S/E)
        type: 'an' for analysis or 'fc' for forecast
        step: '0' for analysis only, '6/12' or '3/6/9/12' for 6-hourly or 3-hourly forecasts from 0000 and 1200 UTC
              or None for monthly means (regardless, it will be ignored)
        time: analysis times, e.g. '00/06/12/18' for all analyses, or '00/12' if retrieving forecasts only
              or None for monthly means (regardless, it will be ignored)
        params: parameter abbreviations, to be translated into GRIB and Table 2 codes - see below for those available
                note: to find new codes, use parameter database: http://apps.ecmwf.int/codes/grib/param-db/
                      or use web interface and check "View the MARS request"
        output_filename: desired path + filename including '.nc' extension, to save locally
                         or None to save to temporary storage; download from: http://apps.ecmwf.int/webmars/joblist/
                note: if not downloading locally, cancel call using Ctrl-C after "Request is queued" appears
                      (otherwise file will be deleted almost instantly from ECMWF servers)

    Note: cancelling the call (Ctrl-C) after "Request is queued" appears is fine. It will prevent local download, though.

    Note: private login key required. See documentation for instructions on creating local login key.

    Note: file size limit is probably 20 GB. Check here: https://software.ecmwf.int/wiki/display/WEBAPI/News+feed

    Limited web API access:
        http://apps.ecmwf.int/datasets/data/interim-full-daily/levtype=sfc/
        http://apps.ecmwf.int/datasets/data/interim-full-moda/levtype=sfc/

    Documentation:
        https://software.ecmwf.int/wiki/display/WEBAPI/Access+ECMWF+Public+Datasets
        https://software.ecmwf.int/wiki/display/WEBAPI/Python+ERA-interim+examples
        https://software.ecmwf.int/wiki/display/UDOC/MARS+user+documentation
        https://software.ecmwf.int/wiki/display/UDOC/MARS+keywords
        http://apps.ecmwf.int/codes/grib/param-db

    Reference: Dee et al. 2011

    """
    param_codes = ''
    for param_idx, param in enumerate(params):
        # analysis parameters
        if param == 't2m': param_codes += '167.128'  # 2 metre temperature (K)
        elif param == 'sst':
            param_codes += '34.128'  # Sea surface temperature (K)
        elif param == 'skt':
            param_codes += '235.128'  # Skin temperature (K)
        elif param == 'd2m':
            param_codes += '168.128'  # 2 metre dewpoint temperature (K)
        elif param == 'msl':
            param_codes += '151.128'  # Mean sea level pressure (Pa)
        elif param == 'sp':
            param_codes += '134.128'  # Surface pressure (Pa)
        elif param == 'u10':
            param_codes += '165.128'  # 10 metre U wind component (m/s)
        elif param == 'v10':
            param_codes += '166.128'  # 10 metre V wind component (m/s)
        elif param == 'si10':
            param_codes += '207.128'  # 10 metre wind speed (m/s) [NOTE: in monthly means only]
        elif param == 'lcc':
            param_codes += '186.128'  # Low cloud cover (fractional coverage, 0 to 1)
        elif param == 'tcc':
            param_codes += '164.128'  # Total cloud cover (fractional coverage, 0 to 1)
        elif param == 'rsn':
            param_codes += '33.128'  # Snow density in snow layer (kg/m^3)
        elif param == 'sd':
            param_codes += '141.128'  # Snow depth in snow layer (m of water equivalent)
        elif param == 'sr':
            param_codes += '173.128'  # Climatological aerodynamic land surface roughness length (m)
        elif param == 'tsn':
            param_codes += '238.128'  # Temperature of snow layer (K)
            # forecast parameters (* indicates accumulated field; note downward fluxes are positive)
        elif param == 'sf':
            param_codes += '144.128'  # Snowfall (m of water equivalent) *
        elif param == 'sshf':
            param_codes += '146.128'  # Surface sensible heat flux (J/m^2) *
        elif param == 'slhf':
            param_codes += '147.128'  # Surface latent heat flux (J/m^2) *
        elif param == 'ssr':
            param_codes += '176.128'  # Surface net solar radiation [shortwave] (J/m^2) *
        elif param == 'str':
            param_codes += '177.128'  # Surface net thermal radiation [longwave] (J/m^2) *
        elif param == 'strd':
            param_codes += '175.128'  # Surface thermal radiation [longwave] downwards (J/m^2) *
        elif param == 'e':
            param_codes += '182.128'  # Evaporation (m of water equivalent) *
        elif param == 'tp':
            param_codes += '228.128'  # Total precipitation (m) *
        elif param == 'iews':
            param_codes += '229.128'  # Instantaneous eastward turbulent surface stress (N/m^2)
        elif param == 'inss':
            param_codes += '230.128'  # Instantaneous northward turbulent surface stress (N/m^2)
        elif param == 'blh':
            param_codes += '159.128'  # Boundary layer height (m)
        if param_idx < len(params) - 1: param_codes += '/'

    retrieve_dict = {
        "class": "ei",
        "dataset": "interim",
        "expver": "1",
        "format": "netcdf",
        "grid": "0.75/0.75",
        "levtype": "sfc",
        "param": param_codes,
        "type": type,
        'area': area,
        "target": output_filename,
        "use": 'frequent',
    }

    # monthly means of daily means
    if len(date_range) == 2:
        retrieve_dict['stream'] = 'moda'
        final_date_range = ''
        working_month = date_range[0]
        while working_month < date_range[1]:
            final_date_range += working_month.strftime('%Y%m%d')
            final_date_range += '/'
            working_month += relativedelta(months=+1)
        final_date_range += date_range[1].strftime('%Y%m%d')
        retrieve_dict['date'] = final_date_range

    # daily fields
    else:
        retrieve_dict['stream'] = 'oper'
        retrieve_dict['date'] = date_range
        retrieve_dict['step'] = step
        retrieve_dict['time'] = time

    server = ECMWFDataServer()
    server.retrieve(retrieve_dict)
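Two hypothetical calls matching the docstring above (dates and filenames are assumptions):

from datetime import datetime

# hypothetical usage sketch: monthly means of daily means, Jan 2010 - Dec 2015
ecmwf(date_range=[datetime(2010, 1, 1), datetime(2015, 12, 1)],
      params=['msl', 't2m'], output_filename='erai_moda.nc')

# hypothetical usage sketch: daily analyses with the default settings
ecmwf(output_filename='erai_daily.nc')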
def main(argv):

    try:
        opts, argv = getopt.getopt(argv, ":h:i:e:s:E:o:g:P:t:f:r:", [
            'help', '[outFile]', 'code', '[shapeFile]', 'start', 'end', '[tr]'
        ])
    except getopt.GetoptError:
        print('error in parameters for eraInterimDownload. Type eraInterimDownload.py -h for more detail on use')
        sys.exit(2)

    for opt, arg in opts:
        if opt == '-h':
            print('eraInterimDownload.py')
            print('    [mandatory] : ')
            print('        --start <dateStart YYYY-MM-DD>')
            print('        --end <dateEnd YYYY-MM-DD>')
            print('        --shapefile <shapefile> OR --tr <xmin,ymax,xmax,ymin>')
            print('    [optional] :')
            print('        --typeData <analyse, forecast> (default analyse)')
            print('        --grid <EraInterim grid size> (default 0.75)')
            print('        --outFolder <outfolder> (default /home/user/eraInterim)')
            print('        --proxy <True/False> (default False)')
            print('        --temporaryFile <True/False> (default False)')
            print('        --result <TxtFile / RasterFile> (default RasterFile)')
            print('')
            sys.exit()
        elif opt in ('-o', '--outFolder'):
            oFolder = arg
        elif opt in ('-i', '--start'):
            startDate = arg
        elif opt in ('-e', '--end'):
            endDate = arg
        elif opt in ('-s', '--shapefile'):
            pathToShapefile = arg
        elif opt in ('-E', '--tr'):
            extend = arg.split(',')
        elif opt in ('-g', '--grid'):
            grid = arg
        elif opt in ('-P', '--proxy'):
            proxy = arg
        elif opt in ('-t', '--typeData'):
            typeData = arg
        elif opt in ('-f', '--temporaryFile'):
            temporaryFile = arg
        elif opt in ('-r', '--result'):
            typeOutput = arg

    if len(sys.argv) < 7:
        print('eraInterimDownload.py')
        print('    -i <dateStart YYYY-MM-DD>')
        print('    -e <dateEnd YYYY-MM-DD>')
        print('    -s <shapefile>')
        print('  or')
        print('    -E <xmin,ymax,xmax,ymin>')
        print('')
        print('    [-t <analyse, forecast> (default analyse)]')
        print('    [-g <size of grid in 0.125/0.25/0.5/0.75/1.125/1.5/2/2.5/3> (default 0.75)]')
        print('    [-o <outfolder> (default /home/user/eraInterim)]')
        print('    [-P <proxy : True/False> (default False)]')
        print('    [-f <temporaryFile : True/False> (default False)]')
        print('    [-r <resultOutput : TxtFile/RasterFile> (default RasterFile)]')
        print('')
        print('For help use -h')
        sys.exit(2)

    try:
        oFolder
    except NameError:
        oFolder = os.path.expanduser('~')
        oFolder = oFolder + '/eraInterim'
        print "output folder not precised : downloaded eraInterim images on " + oFolder

    # check that the folder exists, or create it
    utils.checkForFolder(oFolder)

    try:
        startDate
    except NameError:
        exit('start date not specified')
    # check that startDate is a valid date
    startDate = utils.checkForDate(startDate)

    try:
        endDate
    except NameError:
        exit('end date not specified')
    # check that endDate is a valid date
    endDate = utils.checkForDate(endDate)

    if startDate > endDate:
        exit('startDate cannot be later than endDate')

    today = date.today()
    limitDate = today - timedelta(days=31 * 3)
    limitDate = date(limitDate.year, limitDate.month,
                     calendar.monthrange(limitDate.year, limitDate.month)[1])
    if startDate > limitDate or endDate > limitDate:
        exit('dates cannot be later than ' + limitDate.strftime('%Y-%m-%d'))

    try:
        pathToShapefile
    except NameError:
        try:
            extend
        except NameError:
            exit(
                'no area of interest has been specified. Please use -s or -E to declare it'
            )

    if 'pathToShapefile' in locals():
        extendArea = utils.convertShpToExtend(pathToShapefile)
    else:
        extendArea = extend

    extendArea = utils.checkForExtendValidity(extendArea)

    try:
        typeData
    except NameError:
        typeData = 'analyse'

    try:
        grid
    except NameError:
        grid = '0.75'
    grid = utils.checkForGridValidity(grid)

    try:
        proxy
    except NameError:
        proxy = False

    # proxy parameters needed
    if proxy:
        login = input('proxy login: ')
        pwd = input('proxy password: ')
        site = input('site (surf.cnes.fr): ')
        os.environ["http_proxy"] = "http://%s:%s@%s:8050" % (login, pwd, site)
        os.environ["https_proxy"] = "http://%s:%s@%s:8050" % (login, pwd, site)

    try:
        temporaryFile
    except NameError:
        temporaryFile = False

    try:
        typeOutput
    except NameError:
        typeOutput = 'RasterFile'
    """----------------------------------------"""

    # Create the parameter file on first use
    if not utils.checkForFile(os.path.expanduser('~') + '/.ecmwfapirc'):
        print('for the first connection you have to define your ECMWF key and password')
        print('cf https://apps.ecmwf.int/auth/login/')
        print('')
        u = input('user (mail) : ')
        k = input('key : ')
        utils.createParamFile(os.path.expanduser('~') + '/.ecmwfapirc', u, k)

    delta = endDate - startDate
    nbDays = delta.days + float(delta.seconds) / 86400 + 1

    # -------------------------- Load the rasters
    if typeData == "analyse":
        time = ['00', "12", "06", "18"]
        step = []
        nbBandByDay = len(time)
    else:
        time = ['00', "12"]
        step = [3, 6, 9, 12]
        nbBandByDay = (12 * len(time)) // len(step) + 1
    server = ECMWFDataServer()
    """ altitude de la grille EraInterim """
    # Only Forcast possible
    codeGeopot = [129]
    GeoFile = oFolder + "/129" + '_' + startDate.strftime(
        '%Y%m%d') + '_' + endDate.strftime('%Y%m%d') + '.nc'
    struct = utils.create_request_sfc(startDate, endDate, time, step, grid,
                                      extendArea, codeGeopot, GeoFile,
                                      typeData)
    server.retrieve(struct)
    Geo = utils.convertNETCDFtoDicArray(GeoFile)
    Geo = utils.convertGeoToAlt(Geo)
    # a bit pointless since it never changes ... but still!
    Geo = utils.computeDailyMax(Geo, nbBandByDay, typeData)
    """ Vitesse du vent """
    codeVent = [165, 166]
    vent = {}
    ventFile = []
    for i in codeVent:
        ventFile.append(oFolder + "/" + str(i) + '_' +
                        startDate.strftime('%Y%m%d') + '_' +
                        endDate.strftime('%Y%m%d') + '.nc')
        struct = utils.create_request_sfc(startDate, endDate, time, step, grid,
                                          extendArea, [i], ventFile[-1],
                                          typeData)
        server.retrieve(struct)
        vent[i] = utils.convertNETCDFtoDicArray(ventFile[-1])

    vent = utils.fusVentFromDict(vent, nbBandByDay)
    vent = utils.computeDailyMean(vent, nbBandByDay, typeData)
    """ Humidité relative """

    codePressure = [134]
    pressureFile = oFolder + "/134" + '_' + startDate.strftime(
        '%Y%m%d') + '_' + endDate.strftime('%Y%m%d') + '.nc'

    struct = utils.create_request_sfc(startDate, endDate, time, step, grid,
                                      extendArea, codePressure, pressureFile,
                                      typeData)
    server.retrieve(struct)
    pressure = utils.convertNETCDFtoDicArray(pressureFile)
    # ugh, this is ugly
    pressureMean = utils.convertPaToKgPa(pressure)
    pressure = utils.convertToHectoPascal(pressure)
    pressureMean = utils.computeDailyMean(pressureMean, nbBandByDay, typeData)

    codeT2m = [167]
    T2mFile = oFolder + "/167" + '_' + startDate.strftime(
        '%Y%m%d') + '_' + endDate.strftime('%Y%m%d') + '.nc'

    struct = utils.create_request_sfc(startDate, endDate, time, step, grid,
                                      extendArea, codeT2m, T2mFile, typeData)
    server.retrieve(struct)
    T2m = utils.convertNETCDFtoDicArray(T2mFile)

    Tmean = utils.computeDailyMean(T2m, nbBandByDay, typeData)
    Tmax = utils.computeDailyMax(T2m, nbBandByDay)
    Tmin = utils.computeDailyMin(T2m, nbBandByDay)

    T2m = utils.convertKToD(T2m)
    #T2m = utils.computeDailyMean(T2m,nbBandByDay,typeData)

    codeDewP = [168]
    DewPFile = oFolder + "/168" + '_' + startDate.strftime(
        '%Y%m%d') + '_' + endDate.strftime('%Y%m%d') + '.nc'

    struct = utils.create_request_sfc(startDate, endDate, time, step, grid,
                                      extendArea, codeDewP, DewPFile, typeData)
    server.retrieve(struct)
    DewP = utils.convertNETCDFtoDicArray(DewPFile)
    DewP = utils.convertKToD(DewP)
    #DewP = utils.computeDailyMean(DewP,nbBandByDay,typeData)

    humidity = utils.ComputeHumidityFromPT(pressure, T2m, DewP)
    #humidity = utils.computeDailyMean(humidity,nbBandByDay,typeData)
    Hmax = utils.computeDailyMax(humidity, nbBandByDay)
    Hmin = utils.computeDailyMin(humidity, nbBandByDay)
    Hmean = utils.computeDailyMean(humidity, nbBandByDay, typeData)
    """ ONLY FORCAST FOR THESE VAR"""
    typeData = "forcast"
    time = ['00', "12"]
    step = [3, 6, 9, 12]
    nbBandByDay = (12 * len(time)) // len(step) + 1
    """ Rayonnement global incident journalier """
    # Only Forcast possiblet
    codeRay = [176]
    RayFile = oFolder + "/176" + '_' + startDate.strftime(
        '%Y%m%d') + '_' + endDate.strftime('%Y%m%d') + '.nc'
    struct = utils.create_request_sfc(startDate, endDate, time, step, grid,
                                      extendArea, codeRay, RayFile, typeData)
    server.retrieve(struct)
    Ray = utils.convertNETCDFtoDicArray(RayFile)
    Ray = utils.computeDailyMean(Ray, nbBandByDay, typeData)
    Ray = utils.convertWToMJ(Ray)
    """ downward surface solar radiation """
    # Only Forcast possiblet
    codeRay = [169]
    RayFileDownShort = oFolder + "/169" + '_' + startDate.strftime(
        '%Y%m%d') + '_' + endDate.strftime('%Y%m%d') + '.nc'
    struct = utils.create_request_sfc(startDate, endDate, time, step, grid,
                                      extendArea, codeRay, RayFileDownShort,
                                      typeData)
    server.retrieve(struct)
    RayDownShort = utils.convertNETCDFtoDicArray(RayFileDownShort)
    RayDownShort = utils.computeDailyMean(RayDownShort, nbBandByDay, typeData)
    RayDownShort = utils.convertWToMJ(RayDownShort)
    """ downward surface thermal radiation """
    # Only Forcast possiblet
    codeRay = [175]
    RayFileDownLong = oFolder + "/175" + '_' + startDate.strftime(
        '%Y%m%d') + '_' + endDate.strftime('%Y%m%d') + '.nc'
    struct = utils.create_request_sfc(startDate, endDate, time, step, grid,
                                      extendArea, codeRay, RayFileDownLong,
                                      typeData)
    server.retrieve(struct)
    RayDownLong = utils.convertNETCDFtoDicArray(RayFileDownLong)
    RayDownLong = utils.computeDailyMean(RayDownLong, nbBandByDay, typeData)
    RayDownLong = utils.convertWToMJ(RayDownLong)
    """ Evaporation """
    codeEvap = [182]
    EvapFile = oFolder + "/182" + '_' + startDate.strftime(
        '%Y%m%d') + '_' + endDate.strftime('%Y%m%d') + '.nc'
    struct = utils.create_request_sfc(startDate, endDate, time, step, grid,
                                      extendArea, codeEvap, EvapFile, typeData)
    server.retrieve(struct)
    Evap = utils.convertNETCDFtoDicArray(EvapFile)
    Evap = utils.computeDailyMean(Evap, nbBandByDay, typeData)
    #Evap = utils.convertMToMm(Evap)
    """ Precipitation """
    # not needed for ET0, but exported anyway

    utils.checkForTimeValidity(time)
    utils.checkForStepValidity(step, typeData)
    codePrecipitation = [228]
    precipitationFile = oFolder + "/228" + '_' + startDate.strftime(
        '%Y%m%d') + '_' + endDate.strftime('%Y%m%d') + '.nc'

    struct = utils.create_request_sfc(startDate, endDate, time, step, grid,
                                      extendArea, codePrecipitation,
                                      precipitationFile)
    server.retrieve(struct)
    precipitation = utils.convertNETCDFtoDicArray(precipitationFile)
    precipitation = utils.computeDailyAccumulation(precipitation, nbBandByDay,
                                                   typeData)
    """ Grid of latitude [0],longitude[1] in WGS84"""
    geoTransform = utils.getGeoTransform(RayFile)
    shape = Ray[0].shape
    latlon = utils.getCentroidLatFromArray(shape, geoTransform, grid)
    """ --------------------- Compute ET0---------------------- """

    ET0_0 = {}
    ET0_1 = {}
    ET0_2 = {}
    DoyList = []
    DateList = []

    for i in range(0, int(nbDays)):
        # Julian day (day of year)
        J = utils.doy(startDate, i)
        dateEnCours = startDate + timedelta(days=i)
        DateList.append(dateEnCours)
        DoyList.append(J)
        Hmax[i] = np.where(Hmax[i] > 100, 100, Hmax[i])

        # --- Constants ---#
        #Solar constant
        Gsc = 0.0820  # [MJ.m-2.min-1]
        #Albedo - grass reference crop
        a = 0.23
        #Ratio of molecular weight of water vapour / dry air
        epsilon = 0.622
        #Latent heat of vaporisation
        Lv = 2.45  # [MJ.kg-1]
        # Specific heat at constant pressure [MJ.kg-1.°C-1]
        Cp = 1.013e-3
        # Stefan-Boltzmann constant [MJ.K-4.m-2.day-1] (FAO)
        StefBoltz = 4.903e-9

        # --- Equations ---#
        # Psychrometric constant [kPa.°C-1]
        cte_psy = (Cp * pressureMean[i]) / (epsilon * Lv)  # Equation 8 Chap 3 FAO
        # Mean saturation vapour pressure [kPa]
        #es = (utils.esat(pressureMean[i],Tmax[i]) + utils.esat(pressureMean[i],Tmin[i]))/2;    #Equation 12 Chap 3
        es = (utils.eocalc(Tmax[i] - 273.16) +
              utils.eocalc(Tmin[i] - 273.16)) / 2  # Equation 12 Chap 3
        # Slope of saturation vapour pressure curve at air temperature [kPa.°C-1]
        delta = utils.delta_calc(Tmean[i])  # Equation 13 Chap 3
        # Actual vapour pressure derived from relative humidity [kPa]
        #ea = (utils.esat(pressureMean[i]/100,Tmax[i]-273.16)*(Hmax[i]/100) + utils.esat(pressureMean[i]/100,Tmin[i]-273.16)*(Hmin[i]/100))/2;      # Equation 17 Chap 3
        ea = (utils.eocalc(Tmax[i] - 273.16) *
              (Hmax[i] / 100) + utils.eocalc(Tmin[i] - 273.16) *
              (Hmin[i] / 100)) / 2
        # Conversion of latitude from degrees to radians (latitude is index 0)
        phi = (np.pi / 180) * latlon[0]
        # Relative distance Earth-Sun
        dr = 1 + 0.033 * math.cos(2 * math.pi * J / 365)  # Equation 23 Chap 3
        # Solar declination
        d = 0.4093 * math.sin(2 * math.pi * J / 365 - 1.39)  # Equation 24 Chap 3
        # Sunset hour angle
        ws = np.arccos(-np.tan(phi) * math.tan(d))  # Equation 25 Chap 3
        """Classical calculation FAO """

        # Extraterestrial radiation for daily periods
        Ra = (24. * 60 / np.pi) * Gsc * dr * (
            ws * np.sin(phi) * np.sin(d) + np.cos(phi) * np.cos(d) * np.sin(ws)
        )  # Equation 21 Chap 3
        # Clear-sky solar radiation [MJ.m-2.day-1]
        Rso = (0.75 + 2e-5 * Geo[i]) * Ra  # Equation 37 Chap 3
        # Net solar (shortwave) radiation [MJ.m-2.day-1]
        Rns = (1 - a) * RayDownShort[i]  # Equation 38 Chap 3
        # Cloudiness factor of Equation 39
        f = (1.35 * (np.fmin(RayDownShort[i] / Rso, 1)) - 0.35)
        # Net longwave radiation [MJ.m-2.day-1]
        Rnl = StefBoltz * ((Tmax[i]**4 + Tmin[i]**4) / 2) * (
            0.34 - 0.14 * np.sqrt(ea)) * f  # Equation 39 Chap 3
        # Net radiation [MJ.m-2.day-1]
        Rn = Rns - Rnl  # Equation 40 Chap 3
        # Soil heat flux, negligible at daily scale
        G = 0  # Equation 42 Chap 3
        ET0_0[i] = (0.408 * delta * (Rn - G) + cte_psy * (900 /
                                                          (Tmean[i] + 273)) *
                    (es - ea) * vent[i]) / (delta + cte_psy *
                                            (1 + 0.34 * vent[i]))
        # Equation 6 Chap 4
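        # For reference, ET0_0 above is the FAO-56 Penman-Monteith equation
        # (Equation 6, Chapter 4 of FAO-56):
        #   ET0 = [0.408*delta*(Rn - G) + gamma*(900/(T + 273))*u2*(es - ea)]
        #         / [delta + gamma*(1 + 0.34*u2)]
        # where gamma is the psychrometric constant (cte_psy), T the mean daily
        # air temperature in degC, u2 the wind speed at 2 m in m.s-1, and Rn
        # and G are in MJ.m-2.day-1, giving ET0 in mm.day-1.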
        """ Considering product 176 = RN these equations are not needed """
        Rn = Ray[i]
        # Soil heat flux at daily scale
        G = 0
        # Equation 42 Chap 3
        ET0_1[i] = (0.408 * delta * (Rn - G) + cte_psy * (900 /
                                                          (Tmean[i] + 273)) *
                    (es - ea) * vent[i]) / (delta + cte_psy *
                                            (1 + 0.34 * vent[i]))
        # Equation 6 Chap 4
        """ Considering product 176 Evaporation """
        ET0_2[i] = Evap[i]

    if typeOutput == 'RasterFile':
        # Write the ET0 file
        geoTransform = utils.getGeoTransform(RayFile)
        shape = Ray[0].shape
        utils.writeTiffFromDicoArray(ET0_0, oFolder + "/tmp.tif", shape,
                                     geoTransform)
        if 'pathToShapefile' in locals():
            utils.reprojRaster(oFolder + "/tmp.tif", oFolder + "/ET0.tif",
                               shape, pathToShapefile)
            os.remove(oFolder + "/tmp.tif")
        else:
            utils.moveFile(oFolder + "/tmp.tif", oFolder + "/ET0.tif")

        # Write the precipitation file
        geoTransform = utils.getGeoTransform(precipitationFile)
        shape = precipitation[0].shape
        utils.writeTiffFromDicoArray(precipitation, oFolder + "/tmp.tif",
                                     shape, geoTransform)

        if 'pathToShapefile' in locals():
            utils.reprojRaster(oFolder + "/tmp.tif",
                               oFolder + "/precipitationAcc.tif", shape,
                               pathToShapefile)
            os.remove(oFolder + "/tmp.tif")
        else:
            utils.moveFile(oFolder + "/tmp.tif",
                           oFolder + "/precipitationAcc.tif")

    else:
        # Write the outputs as text files
        proj = utils.getProj(pathToShapefile)
        utils.WriteTxtFileForEachPixel(oFolder, ET0_0, ET0_1, ET0_2, DateList,
                                       DoyList, Ray, RayDownShort, RayDownLong,
                                       Tmean, Tmax, Tmin, Hmean, Hmax, Hmin,
                                       vent, precipitation, pressureMean, Geo,
                                       latlon, proj)
        utils.WritePointList(oFolder, latlon, proj)
    """ ------------------------------------------- """
    if (temporaryFile):
        # Write the altitude file
        geoTransform = utils.getGeoTransform(GeoFile)
        shape = Geo[0].shape
        utils.writeTiffFromDicoArray(Geo, oFolder + "/tmp.tif", shape,
                                     geoTransform)

        if 'pathToShapefile' in locals():
            utils.reprojRaster(oFolder + "/tmp.tif", oFolder + "/altitude.tif",
                               shape, pathToShapefile)
            os.remove(oFolder + "/tmp.tif")
        else:
            utils.moveFile(oFolder + "/tmp.tif", oFolder + "/altitude.tif")

        # Write the lat/lon file
        geoTransform = utils.getGeoTransform(RayFile)
        shape = Ray[0].shape
        utils.writeTiffFromDicoArray(latlon, oFolder + "/tmp.tif", shape,
                                     geoTransform)

        if 'pathToShapefile' in locals():
            utils.reprojRaster(oFolder + "/tmp.tif", oFolder + "/latLon.tif",
                               shape, pathToShapefile)
            os.remove(oFolder + "/tmp.tif")
        else:
            utils.moveFile(oFolder + "/tmp.tif", oFolder + "/latLon.tif")

        # Write the wind file --> to be removed
        geoTransform = utils.getGeoTransform(ventFile[-1])
        shape = vent[0].shape
        utils.writeTiffFromDicoArray(vent, oFolder + "/tmp.tif", shape,
                                     geoTransform)

        if 'pathToShapefile' in locals():
            utils.reprojRaster(oFolder + "/tmp.tif", oFolder + "/ventMean.tif",
                               shape, pathToShapefile)
            os.remove(oFolder + "/tmp.tif")
        else:
            utils.moveFile(oFolder + "/tmp.tif", oFolder + "/ventMean.tif")

        # Write the mean pressure file
        geoTransform = utils.getGeoTransform(pressureFile)
        shape = pressureMean[0].shape
        utils.writeTiffFromDicoArray(pressureMean, oFolder + "/tmp.tif", shape,
                                     geoTransform)

        if 'pathToShapefile' in locals():
            utils.reprojRaster(oFolder + "/tmp.tif",
                               oFolder + "/pressureMean.tif", shape,
                               pathToShapefile)
            os.remove(oFolder + "/tmp.tif")
        else:
            utils.moveFile(oFolder + "/tmp.tif", oFolder + "/pressureMean.tif")

        # Write the RHmax file
        geoTransform = utils.getGeoTransform(pressureFile)
        shape = Hmax[0].shape
        utils.writeTiffFromDicoArray(Hmax, oFolder + "/tmp.tif", shape,
                                     geoTransform)

        if 'pathToShapefile' in locals():
            utils.reprojRaster(oFolder + "/tmp.tif",
                               oFolder + "/humidityMax.tif", shape,
                               pathToShapefile)
            os.remove(oFolder + "/tmp.tif")
        else:
            utils.moveFile(oFolder + "/tmp.tif", oFolder + "/humidityMax.tif")

        # Write the RHmin file
        geoTransform = utils.getGeoTransform(pressureFile)
        shape = Hmin[0].shape
        utils.writeTiffFromDicoArray(Hmin, oFolder + "/tmp.tif", shape,
                                     geoTransform)

        if 'pathToShapefile' in locals():
            utils.reprojRaster(oFolder + "/tmp.tif",
                               oFolder + "/humidityMin.tif", shape,
                               pathToShapefile)
            os.remove(oFolder + "/tmp.tif")
        else:
            utils.moveFile(oFolder + "/tmp.tif", oFolder + "/humidityMin.tif")

        # Write the Tmax file
        geoTransform = utils.getGeoTransform(T2mFile)
        shape = Tmax[0].shape
        utils.writeTiffFromDicoArray(Tmax, oFolder + "/tmp.tif", shape,
                                     geoTransform)

        if 'pathToShapefile' in locals():
            utils.reprojRaster(oFolder + "/tmp.tif",
                               oFolder + "/TemperatureMax.tif", shape,
                               pathToShapefile)
            os.remove(oFolder + "/tmp.tif")
        else:
            utils.moveFile(oFolder + "/tmp.tif",
                           oFolder + "/TemperatureMax.tif")

        # Write the Tmin file
        geoTransform = utils.getGeoTransform(T2mFile)
        shape = Tmin[0].shape
        utils.writeTiffFromDicoArray(Tmin, oFolder + "/tmp.tif", shape,
                                     geoTransform)

        if 'pathToShapefile' in locals():
            utils.reprojRaster(oFolder + "/tmp.tif",
                               oFolder + "/TemperatureMin.tif", shape,
                               pathToShapefile)
            os.remove(oFolder + "/tmp.tif")
        else:
            utils.moveFile(oFolder + "/tmp.tif",
                           oFolder + "/TemperatureMin.tif")

        # Write the radiation file
        geoTransform = utils.getGeoTransform(RayFile)
        shape = Ray[0].shape
        utils.writeTiffFromDicoArray(Ray, oFolder + "/tmp.tif", shape,
                                     geoTransform)

        if 'pathToShapefile' in locals():
            utils.reprojRaster(oFolder + "/tmp.tif",
                               oFolder + "/RayonnementMean.tif", shape,
                               pathToShapefile)
            os.remove(oFolder + "/tmp.tif")
        else:
            utils.moveFile(oFolder + "/tmp.tif",
                           oFolder + "/RayonnementMean.tif")

    # Remove the intermediate files
    os.remove(pressureFile)
    os.remove(T2mFile)
    os.remove(DewPFile)
    os.remove(RayFile)
    os.remove(GeoFile)
    for i in ventFile:
        os.remove(i)
    os.remove(precipitationFile)
    os.remove(EvapFile)
    os.remove(RayFileDownLong)
    os.remove(RayFileDownShort)
Example #15
def download_needed_era_data(Grid, start_date, end_date, file_name):
    """
    This function will download the ERA interim data in the region
    specified by the input Py-ART Grid within the interval specified by
    start_date and end_date. This is useful for the batch processing of
    files since the ECMWF API is limited to 20 queued requests at a time.
    This is also useful if you want to store ERA interim data for future
    use without having to download it again.

    You need to have the ECMWF API and an ECMWF account set up in order to
    use this feature. Go to this website for instructions on installing the
    API and setting up your account:

    https://confluence.ecmwf.int/display/WEBAPI/Access+ECMWF+Public+Datasets

    Parameters
    ----------
    Grid: Py-ART Grid
        The input Py-ART Grid used to determine the download region.
    start_date: datetime
        The start date of the file to download.
    end_date: datetime
        The end date of the file to download.
    file_name: str
        The name of the destination file.
    """

    if ECMWF_AVAILABLE is False:
        raise ModuleNotFoundError(
            "The ECMWF API is not installed. Go to " +
            "https://confluence.ecmwf.int/display/WEBAPI" +
            "/Access+ECMWF+Public+Datasets" +
            " in order to use the auto download feature.")

    print("Download ERA Interim data...")
    # ERA interim data is in pressure coordinates
    # Retrieve u, v, w, and geopotential
    # Geopotential is needed to convert into height coordinates

    retrieve_dict = {}
    retrieve_dict['stream'] = "oper"
    retrieve_dict['levtype'] = "pl"
    retrieve_dict['param'] = "131.128/132.128/135.128/129.128"
    retrieve_dict['dataset'] = "interim"
    retrieve_dict['levelist'] = ("1/2/3/5/7/10/20/30/50/70/100/125/150/" +
                                 "175/200/225/250/300/350/400/450/500/" +
                                 "550/600/650/700/750/775/800/825/850/" +
                                 "875/900/925/950/975/1000")
    retrieve_dict['step'] = "0"
    retrieve_dict['time'] = "00/06/12/18"
    retrieve_dict['date'] = (start_date.strftime("%Y-%m-%d") + '/to/' +
                             end_date.strftime("%Y-%m-%d"))
    retrieve_dict['class'] = "ei"
    retrieve_dict['grid'] = "0.75/0.75"
    N = "%4.1f" % Grid.point_latitude["data"].max()
    S = "%4.1f" % Grid.point_latitude["data"].min()
    E = "%4.1f" % Grid.point_longitude["data"].max()
    W = "%4.1f" % Grid.point_longitude["data"].min()

    retrieve_dict['area'] = N + "/" + W + "/" + S + "/" + E
    retrieve_dict['format'] = "netcdf"
    retrieve_dict['target'] = file_name
    server = ECMWFDataServer()
    server.retrieve(retrieve_dict)
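
A minimal usage sketch, assuming a Py-ART grid loaded from disk; the grid file
name, dates, and output file below are placeholders:

# Hedged usage sketch: file names and dates are hypothetical.
import pyart
from datetime import datetime

grid = pyart.io.read_grid('radar_grid.nc')
download_needed_era_data(grid,
                         datetime(2011, 5, 20, 0),
                         datetime(2011, 5, 21, 0),
                         'era_interim_20110520.nc')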
    def get_data(self,
                 year,
                 month,
                 day,
                 hour,
                 key,
                 force=False,
                 reduced_set=False):
        ''' Retrieves wildfire data for a given date + time.
        
        THIS METHOD RETRIEVES DATA SYNCHRONOUSLY AND WILL NOT RETURN UNTIL DATA IS DOWNLOADED.
        IT SHOULD BE CALLED IN ITS OWN THREAD FOR ALMOST ALL PRACTICAL USES
        
        Sorry for shouting, but it's important to note.  Generally, you will want to use the
        bulk_download method to get data.
        
        :param year: The desired year of the data to retrieve
        :param month: The desired month of the data to retrieve
        :param day: The desired day of the data to retrieve
        :param hour: The desired hour of the data to retrieve (0, 6, 12, 18)
        :param key: ECMWF API credentials (key, email) passed to ECMWFDataServer
        :param force: Whether to download the data even if it already exists on disk
        :param reduced_set: Whether a reduced set of variables is being retrieved
        :type year: int
        :type month: int
        :type day: int
        :type hour: int
        :type key: tuple
        :type force: boolean
        :type reduced_set: boolean
        
        :returns: Nothing.  Returns when data download is finished.
        '''
        # Only download if file doesn't exist and we haven't forced a redownload
        if not self.need_to_download(year, month, day, hour,
                                     reduced_set) and not force:
            print('Already downloaded',
                  self._get_filename(year, month, day, hour, reduced_set),
                  'not redownloading')
            return

        server = ECMWFDataServer('https://api.ecmwf.int/v1', key[0], key[1])

        # Get the full set of variables?
        if reduced_set:
            variable_ids = WildfireTiggeDataRetriever.minimal_vars
        else:
            variable_ids = WildfireTiggeDataRetriever.all_vars

        # The dictionary for the ECMWF request
        request_params = {
            "class": "ti",
            "type": "cf",
            "dataset": "tigge",
            "expver": "prod",
            "grid": "0.5/0.5",
            "levtype": "sfc",
            "origin": "kwbc",
            "format": "netcdf",
            "step":
            "0/6/12/18/24/30/36/42/48/54/60/66/72/78/84/90/96/102/108/114/120/126/132/138/144/150/156/162/168/174/180/186/192/198/204/210/216/222/228/234/240",
            "date": __format_date__(year, month, day),
            "param": variable_ids,
            "time": __format_time__(hour),
            "target": self._get_filename(year, month, day, hour, reduced_set),
            "area": "14/-82/-57/-31"
        }
        server.retrieve(request_params)
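
Because get_data blocks until the download completes, the docstring advises
running it in its own thread; a minimal sketch, assuming a retriever instance
already exists and that key is a (key, email) credential pair (both
placeholders here):

# Hedged sketch: 'retriever' and the credentials are placeholders.
import threading

key = ('my-ecmwf-api-key', 'user@example.com')
worker = threading.Thread(target=retriever.get_data,
                          args=(2019, 7, 15, 12, key),
                          kwargs={'reduced_set': True})
worker.start()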
Example #17
def make_constraint_from_era_interim(Grid, file_name=None, vel_field=None):
    """
    This function will read ERA Interim in NetCDF format and add it 
    to the Py-ART grid specified by Grid. PyDDA will automatically download
    the ERA Interim data that you need for the scan. It will choose the domain
    that is enclosed by the analysis grid and the time period that is closest
    to the scan. It will then do a Nearest Neighbor interpolation of the 
    ERA-Interim u and v winds to the analysis grid. 

    You need to have the ECMWF API and an ECMWF account set up in order to
    use this feature. Go to this website for instructions on installing the
    API and setting up your account:

    https://confluence.ecmwf.int/display/WEBAPI/Access+ECMWF+Public+Datasets

    Parameters
    ----------
    Grid: Py-ART Grid
        The input Py-ART Grid to modify.
    file_name: str or None
        The netCDF file containing the ERA Interim data. Setting to None will
        invoke the API in order to attempt to download the data. If the web
        API is experiencing delays, it is better to use it to download the
        file and then refer to it here. If this file does not exist
        PyDDA will use the API to create the file.
    vel_field: str or None
        The name of the velocity field in the Py-ART grid. Set to None to
        have Py-DDA attempt to automatically detect it.

    Returns
    -------
    new_Grid: Py-ART Grid
        The Py-ART Grid with the ERA Interim data added into the
        "U_erainterim", "V_erainterim", and "W_erainterim" fields.

    """
    if vel_field is None:
        vel_field = pyart.config.get_field_name('corrected_velocity')

    if ECMWF_AVAILABLE is False and file_name is None:
        raise ModuleNotFoundError(
            "The ECMWF API is not installed. Go to " +
            "https://confluence.ecmwf.int/display/WEBAPI" +
            "/Access+ECMWF+Public+Datasets" +
            " in order to use the auto download feature.")

    grid_time = datetime.strptime(Grid.time["units"],
                                  "seconds since %Y-%m-%dT%H:%M:%SZ")
    hour_rounded_to_nearest_6 = int(6 * round(float(grid_time.hour) / 6))
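    # e.g. hour 02 -> 00 and hour 16 -> 18; hour 22 rounds to 24, which the
    # branch below handles by advancing to 00:00 of the next day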

    if hour_rounded_to_nearest_6 == 24:
        grid_time = grid_time + timedelta(days=1)
        grid_time = datetime(grid_time.year, grid_time.month, grid_time.day, 0,
                             grid_time.minute, grid_time.second)
    else:
        grid_time = datetime(grid_time.year, grid_time.month, grid_time.day,
                             hour_rounded_to_nearest_6, grid_time.minute,
                             grid_time.second)

    if file_name is not None:
        if not os.path.isfile(file_name):
            raise FileNotFoundError(file_name + " not found!")

    if file_name is None:
        print("Download ERA Interim data...")
        # ERA interim data is in pressure coordinates
        # Retrieve u, v, w, and geopotential
        # Geopotential is needed to convert into height coordinates

        retrieve_dict = {}
        retrieve_dict['stream'] = "oper"
        retrieve_dict['levtype'] = "pl"
        retrieve_dict['param'] = "131.128/132.128/135.128/129.128"
        retrieve_dict['dataset'] = "interim"
        retrieve_dict['levelist'] = ("1/2/3/5/7/10/20/30/50/70/100/125/150/" +
                                     "175/200/225/250/300/350/400/450/500/" +
                                     "550/600/650/700/750/775/800/825/850/" +
                                     "875/900/925/950/975/1000")
        retrieve_dict['step'] = "0"
        retrieve_dict['time'] = "%02d" % hour_rounded_to_nearest_6
        retrieve_dict['date'] = grid_time.strftime("%Y-%m-%d")
        retrieve_dict['class'] = "ei"
        retrieve_dict['grid'] = "0.75/0.75"
        N = "%4.1f" % Grid.point_latitude["data"].max()
        S = "%4.1f" % Grid.point_latitude["data"].min()
        E = "%4.1f" % Grid.point_longitude["data"].max()
        W = "%4.1f" % Grid.point_longitude["data"].min()

        retrieve_dict['area'] = N + "/" + W + "/" + S + "/" + E
        retrieve_dict['format'] = "netcdf"
        tfile = tempfile.NamedTemporaryFile()
        retrieve_dict['target'] = tfile.name
        file_name = tfile.name
        server = ECMWFDataServer()
        server.retrieve(retrieve_dict)
        time_step = 0

    ERA_grid = Dataset(file_name, mode='r')
    base_time = datetime.strptime(ERA_grid.variables["time"].units,
                                  "hours since %Y-%m-%d %H:%M:%S.%f")
    time_hours = ERA_grid.variables["time"][:]
    our_time = np.array(
        [base_time + timedelta(hours=int(x)) for x in time_hours])
    time_step = np.argmin(np.abs(our_time - grid_time))

    analysis_grid_shape = Grid.fields[vel_field]['data'].shape

    height_ERA = ERA_grid.variables["z"][:]
    u_ERA = ERA_grid.variables["u"][:]
    v_ERA = ERA_grid.variables["v"][:]
    w_ERA = ERA_grid.variables["w"][:]
    lon_ERA = ERA_grid.variables["longitude"][:]
    lat_ERA = ERA_grid.variables["latitude"][:]
    radar_grid_lat = Grid.point_latitude['data']
    radar_grid_lon = Grid.point_longitude['data']
    radar_grid_alt = Grid.point_z['data']
    u_flattened = u_ERA[time_step].flatten()
    v_flattened = v_ERA[time_step].flatten()
    w_flattened = w_ERA[time_step].flatten()

    the_shape = u_ERA.shape
    lon_mgrid, lat_mgrid = np.meshgrid(lon_ERA, lat_ERA)

    lon_mgrid = np.tile(lon_mgrid, (the_shape[1], 1, 1))
    lat_mgrid = np.tile(lat_mgrid, (the_shape[1], 1, 1))
    lon_flattened = lon_mgrid.flatten()
    lat_flattened = lat_mgrid.flatten()
    height_flattened = height_ERA[time_step].flatten()
    height_flattened -= Grid.radar_altitude["data"]

    u_interp = NearestNDInterpolator(
        (height_flattened, lat_flattened, lon_flattened),
        u_flattened,
        rescale=True)
    v_interp = NearestNDInterpolator(
        (height_flattened, lat_flattened, lon_flattened),
        v_flattened,
        rescale=True)
    w_interp = NearestNDInterpolator(
        (height_flattened, lat_flattened, lon_flattened),
        w_flattened,
        rescale=True)
    u_new = u_interp(radar_grid_alt, radar_grid_lat, radar_grid_lon)
    v_new = v_interp(radar_grid_alt, radar_grid_lat, radar_grid_lon)
    w_new = w_interp(radar_grid_alt, radar_grid_lat, radar_grid_lon)

    new_grid = deepcopy(Grid)

    u_dict = {'data': u_new, 'long_name': "U from ERA-Interim", 'units': "m/s"}
    v_dict = {'data': v_new, 'long_name': "V from ERA-Interim", 'units': "m/s"}
    w_dict = {'data': w_new, 'long_name': "W from ERA-Interim", 'units': "m/s"}

    new_grid.add_field("U_erainterim", u_dict, replace_existing=True)
    new_grid.add_field("V_erainterim", v_dict, replace_existing=True)
    new_grid.add_field("W_erainterim", w_dict, replace_existing=True)

    # Free up memory

    ERA_grid.close()

    if 'tfile' in locals():
        tfile.close()

    return new_grid
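
A minimal usage sketch, assuming the ERA Interim file was downloaded
beforehand; the file names are placeholders:

# Hedged usage sketch: file names are hypothetical.
import pyart

grid = pyart.io.read_grid('radar_grid.nc')
grid = make_constraint_from_era_interim(grid, file_name='era_interim.nc')
print(grid.fields['U_erainterim']['data'].shape)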
try:
    import netCDF4
except ImportError:

    class netCDF4:
        @staticmethod
        def Dataset(*a, **kw):
            raise ImportError(
                'Reading ECMWF data requires netCDF4 to be installed.')


try:
    from ecmwfapi import ECMWFDataServer
except ImportError:

    def ECMWFDataServer(*a, **kw):
        raise ImportError(
            'To download data from ECMWF requires the API client.\nSee https:/'
            '/confluence.ecmwf.int/display/WEBAPI/Access+ECMWF+Public+Datasets'
        )

    SERVER = None
else:
    SERVER = ECMWFDataServer()

#: map of ECMWF MACC parameter keynames and codes used in API
PARAMS = {
    "tcwv": "137.128",
    "aod550": "207.210",
    'aod469': '213.210',
    'aod670': '214.210',
    'aod865': '215.210',
    "aod1240": "216.210",
}
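
For a multi-parameter MARS request, these codes are joined with '/'; a minimal
sketch (the selection of keys is illustrative):

# Hypothetical parameter selection for a single MACC request.
wanted = ['tcwv', 'aod550', 'aod1240']
param_string = '/'.join(PARAMS[key] for key in wanted)
# param_string == "137.128/207.210/216.210"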


def _ecmwf(server, startdate, stopdate, params, targetname):
    # see http://apps.ecmwf.int/datasets/data/macc-reanalysis/levtype=sfc/
    server.retrieve({
Example #19
def download_era5_for_gssha(main_directory,
                            start_datetime,
                            end_datetime,
                            leftlon=-180,
                            rightlon=180,
                            toplat=90,
                            bottomlat=-90,
                            precip_only=False):
    """
    Function to download ERA5 data for GSSHA

    .. note:: https://software.ecmwf.int/wiki/display/WEBAPI/Access+ECMWF+Public+Datasets

    Args:
        main_directory(:obj:`str`): Location of the output for the forecast data.
        start_datetime(:obj:`datetime.datetime`): Datetime for download start.
        end_datetime(:obj:`datetime.datetime`): Datetime for download end.
        leftlon(Optional[:obj:`float`]): Left bound for longitude. Default is -180.
        rightlon(Optional[:obj:`float`]): Right bound for longitude. Default is 180.
        toplat(Optional[:obj:`float`]): Top bound for latitude. Default is 90.
        bottomlat(Optional[:obj:`float`]): Bottom bound for latitude. Default is -90.
        precip_only(Optional[bool]): If True, will only download precipitation.

    Example::

        from datetime import datetime
        from gsshapy.grid.era_to_gssha import download_era5_for_gssha

        era5_folder = '/era5'
        # dates and bounds are illustrative
        download_era5_for_gssha(era5_folder,
                                datetime(2016, 1, 1),
                                datetime(2016, 1, 2),
                                leftlon=-95,
                                rightlon=-75,
                                toplat=35,
                                bottomlat=30)

    """
    # parameters: https://software.ecmwf.int/wiki/display/CKB/ERA5_test+data+documentation#ERA5_testdatadocumentation-Parameterlistings

    # import here to make sure it is not required to run
    from ecmwfapi import ECMWFDataServer
    server = ECMWFDataServer()

    try:
        mkdir(main_directory)
    except OSError:
        pass

    download_area = "{toplat}/{leftlon}/{bottomlat}/{rightlon}".format(
        toplat=toplat, leftlon=leftlon, bottomlat=bottomlat, rightlon=rightlon)
    download_datetime = start_datetime
    while download_datetime <= end_datetime:
        download_file = path.join(
            main_directory,
            "era5_gssha_{0}.nc".format(download_datetime.strftime("%Y%m%d")))
        download_date = download_datetime.strftime("%Y-%m-%d")
        if not path.exists(download_file) and not precip_only:
            server.retrieve({
                'dataset': "era5_test",
                #  'oper' specifies the high resolution daily data, as opposed to monthly means, wave, eda edmm, etc.
                'stream': "oper",
                #  We want instantaneous parameters, which are archived as type Analysis ('an') as opposed to forecast (fc)
                'type': "an",
                #  Surface level, as opposed to pressure level (pl) or model level (ml)
                'levtype': "sfc",
                # For parameter codes see the ECMWF parameter database at http://apps.ecmwf.int/codes/grib/param-db
                'param': "2t/2d/sp/10u/10v/tcc",
                # The spatial resolution of ERA5 is 31 km globally on a Gaussian grid.
                # Here we use a 0.25-degree lat/lon grid, which is approximately equivalent to 31 km.
                'grid': "0.25/0.25",
                # ERA5 provides hourly analysis
                'time': "00/to/23/by/1",
                # area:  N/W/S/E
                'area': download_area,
                'date': download_date,
                'target': download_file,
                'format': 'netcdf',
            })

        era5_request = {
            'dataset': "era5_test",
            'stream': "oper",
            'type': "fc",
            'levtype': "sfc",
            'param': "tp/ssrd",
            'grid': "0.25/0.25",
            'area': download_area,
            'format': 'netcdf',
        }
        prec_download_file = path.join(
            main_directory, "era5_gssha_{0}_fc.nc".format(
                download_datetime.strftime("%Y%m%d")))
        loc_download_file0 = path.join(
            main_directory, "era5_gssha_{0}_0_fc.nc".format(
                download_datetime.strftime("%Y%m%d")))
        loc_download_file1 = path.join(
            main_directory, "era5_gssha_{0}_1_fc.nc".format(
                download_datetime.strftime("%Y%m%d")))
        loc_download_file2 = path.join(
            main_directory, "era5_gssha_{0}_2_fc.nc".format(
                download_datetime.strftime("%Y%m%d")))
        if download_datetime <= start_datetime and not path.exists(
                loc_download_file0):
            loc_download_date = (download_datetime -
                                 timedelta(1)).strftime("%Y-%m-%d")
            # precipitation 0000-0600
            era5_request['step'] = "6/to/12/by/1"
            era5_request['time'] = "18"
            era5_request['target'] = loc_download_file0
            era5_request['date'] = loc_download_date
            server.retrieve(era5_request)

        if download_datetime == end_datetime and not path.exists(
                loc_download_file1):
            loc_download_date = download_datetime.strftime("%Y-%m-%d")
            # precipitation 0600-1800
            era5_request['step'] = "1/to/12/by/1"
            era5_request['time'] = "06"
            era5_request['target'] = loc_download_file1
            era5_request['date'] = loc_download_date
            server.retrieve(era5_request)
        if download_datetime == end_datetime and not path.exists(
                loc_download_file2):
            loc_download_date = download_datetime.strftime("%Y-%m-%d")
            # precipitation 1800-2300
            era5_request['step'] = "1/to/5/by/1"
            era5_request['time'] = "18"
            era5_request['target'] = loc_download_file2
            era5_request['date'] = loc_download_date
            server.retrieve(era5_request)
        if download_datetime < end_datetime and not path.exists(
                prec_download_file):
            # precipitation 0600-0600 (next day)
            era5_request['step'] = "1/to/12/by/1"
            era5_request['time'] = "06/18"
            era5_request['target'] = prec_download_file
            era5_request['date'] = download_date
            server.retrieve(era5_request)

        download_datetime += timedelta(1)
Example #20
def retrieve_ERA_i_field(args):
    from ecmwfapi import ECMWFDataServer
    import numpy as np
    from datetime import datetime, timedelta
    import os
    server = ECMWFDataServer()

    base_path = "/Users/semvijverberg/surfdrive/Output_ERA/"
    start = datetime(args.startyear, args.startmonth, 1)
    end = datetime(args.endyear, args.endmonth, 1)
    datelist = [start.strftime('%Y-%m-%d')]
    while start <= end:
        if start.month < end.month:
            start += timedelta(days=31)
            datelist.append(
                datetime(start.year, start.month, 1).strftime('%Y-%m-%d'))
        else:
            start = datetime(start.year + 1, args.startmonth - 1, 1)
            datelist.append(
                datetime(start.year, start.month, 1).strftime('%Y-%m-%d'))
    datestring = "/".join(datelist)
    # stream "mnth": synoptic monthly means, i.e. monthly means of the
    # individual analysis steps (00:00, 06:00, 12:00, 18:00);
    # stream "moda": normal monthly means of daily means
    if args.stream == "mnth":
        time = "00:00:00/06:00:00/12:00:00/18:00:00"
    else:
        time = "00:00:00"

    server.retrieve({
        "dataset": "interim",
        "class": "ei",
        "date": datestring,
        "grid": args.grid,
        "levelist": "285",
        "levtype": "pt",  # potential temperature (isentropic level)
        "param": args.var_cf_code,  # e.g. potential vorticity, pressure, relative vorticity
        "stream": args.stream,
        "time": time,
        "type": "an",
        "format": "netcdf",
        "target": os.path.join(
            base_path, "{}_{}-{}.nc".format(args.var_cf_code, args.startyear,
                                            args.endyear)),
    })
    filename = os.path.join(
        base_path, "{}_{}-{}.nc".format(args.var_cf_code, args.startyear,
                                        args.endyear))
    return filename, " You have downloaded variable {} \n stream is set to {} \n all dates: {} \n".format(
        args.var_cf_code, args.stream, datelist)
Example #21
def retrieve_era_interim(param,
                         level,
                         year,
                         step='0',
                         area='Glb',
                         nc=False,
                         overwrite=False):
    """
    Function to retrieve ERA-Interim data from MARS via the ECMWF API.

    Args:
        param: string containing short name (e.g. '2t'; see param_dict)
        level: string containing level information (e.g. 'sfc', 'pl850')
        year: string containing year (e.g. '1979')
        step: string containing information about step (default '0')
        area: 'Glb' (global; default) or string containing bounds (N/W/S/E, e.g. '30/90/-10/120')
        nc: if True, request NetCDF format; if False (default), the files will be GRIB format
        overwrite: if True, replace existing files; if False (default), skip existing files

    Output file:
        GRIB file:
            data/ei_<area>/<param>_<code>_<level>/ei_<param>_<code>_<level>_<step>_<year>.grb
        or NetCDF file:
            data/ei_<area>/<param>_<code>_<level>_nc/ei_<param>_<code>_<level>_<step>_<year>_nc.nc
        where <code> is the parameter code (see param_dict) and <area> has '/' replaced by 'n'/'e'.

    Returns:
        0 if output file already exists
        Output filename if file is created
        -1 otherwise
    """
    print('param={}, level={}, year={}, step={}, area={}'.format(
        param, level, year, step, area))
    code = param_dict[param]
    if area == 'Glb':
        area_ne = 'Glb'
    else:
        area_ne = '{}n{}e{}n{}e'.format(*area.split('/'))
    if nc is True:
        out_dir = 'data/ei_{}/{}_{}_{}_nc'.format(area_ne, param, code, level)
        out_filename = 'ei_{}_{}_{}_{}_{}_nc.nc'.format(
            param, code, level, step, year)
    else:
        out_dir = 'data/ei_{}/{}_{}_{}'.format(area_ne, param, code, level)
        out_filename = 'ei_{}_{}_{}_{}_{}.grb'.format(param, code, level, step,
                                                      year)
    # Check if output file already exists
    if os.path.exists('{}/{}'.format(out_dir, out_filename)):
        if overwrite is False:
            print('{} already exists. Skipping.'.format(out_filename))
            return 0
    # Retrieval dictionary
    retrieval_dict = {
        'class': 'ei',
        'dataset': 'interim',
        'date': '{}-01-01/to/{}-12-31'.format(year, year),
        'expver': '1',
        'grid': '0.75/0.75',
        'param': '{}.128'.format(code),
        'step': step,
        'stream': 'oper',
        'target': 'data/temp_{}'.format(out_filename),  # temporary file location
    }
    if step == '0':
        retrieval_dict['type'] = 'an'
        retrieval_dict['time'] = '00:00:00/06:00:00/12:00:00/18:00:00'
    else:
        retrieval_dict['type'] = 'fc'
        retrieval_dict['time'] = '00:00:00/12:00:00'
    if level == 'sfc':
        retrieval_dict['levtype'] = 'sfc'
    elif level[0:2] == 'pl':
        retrieval_dict['levtype'] = 'pl'
        retrieval_dict['levelist'] = level[2:]
    if area != 'Glb':
        retrieval_dict['area'] = area
    if nc is True:
        retrieval_dict['format'] = 'netcdf'
    # Retrieve data to temporary location
    server = ECMWFDataServer()
    server.retrieve(retrieval_dict)
    # If temporary file exists, move it to output file location (and create directory if necessary)
    if os.path.exists('data/temp_{}'.format(out_filename)):
        if not os.path.exists(out_dir):
            os.makedirs(out_dir)
            print('Created {}/'.format(out_dir))
        shutil.move('data/temp_{}'.format(out_filename),
                    '{}/{}'.format(out_dir, out_filename))
        print('Written {}/{}'.format(out_dir, out_filename))
        result = '{}/{}'.format(out_dir, out_filename)
    else:
        result = -1
    return result
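
A minimal usage sketch, assuming param_dict (defined elsewhere in the module)
contains a '2t' entry:

# Hedged usage sketch: relies on the module-level param_dict.
result = retrieve_era_interim('2t', 'sfc', '1979')  # analysis, global, GRIB
result = retrieve_era_interim('2t', 'sfc', '1979', nc=True,
                              area='30/90/-10/120')  # NetCDF, regional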
Example #22
else:
    f3d = options.dir + options.fname + '.3d'
    f2d = options.dir + options.fname + '.2d'
    ftppt = options.dir + options.fname + '.2df'

if levtype == 'ml':
    f3d += '.ml'

file3d = options.dir + f3d
file2d = options.dir + f2d
filetppt = options.dir + ftppt

#datestr = str(year) + monthstr + firstdaystr + '/to/' + str(year) + monthstr + lastdaystr
print("Retrieve for: ", datestr)

server = ECMWFDataServer(verbose=False)

##wtype = "4v"   ##4D variational analysis is available as well as analysis.

wtype = "an"

if stream == 'oper':
    ##need to break each day into four time periods to keep 3d grib files at around 1.6 GB
    wtime1 = "00:00:00/01:00:00/02:00:00/03:00:00/04:00:00/05:00:00"
    wtime2 = "06:00:00/07:00:00/08:00:00/09:00:00/10:00:00/11:00:00"
    wtime3 = "12:00:00/13:00:00/14:00:00/15:00:00/16:00:00/17:00:00"
    wtime4 = "18:00:00/19:00:00/20:00:00/21:00:00/22:00:00/23:00:00"
    if options.getfullday:
        wtimelist = [wtime1 + '/' + wtime2 + '/' + wtime3 + '/' + wtime4]
    else:  #retrieve day in 4 different files with 6 hour increments.
        wtimelist = [wtime1, wtime2, wtime3, wtime4]
Example #23
 def run(self):
     server = ECMWFDataServer()
     settings = retrieve_settings(self.start_date, self.end_date, D2M)
     with self.output().temporary_path() as out_fname:
         settings["target"] = out_fname
         server.retrieve(settings)
Example #24
def downloadonefile(riqi):
    ts = time()
    print(riqi)
    filename="/mnt/HD/HD_a2/Public/ERA-Interim/SURF/u10/u10."+riqi+".grb"
    if(os.path.isfile(filename)): # skip if the file already exists
      print("ok",filename)
    else:
      server = ECMWFDataServer()
      server.retrieve({
        "class": "ei",
        "dataset": "interim",
        "date": riqi,
        "expver": "1",
        "grid": "0.75/0.75",
        "levtype": "sfc",
        "param": "165.128", #u10
        "step": "0",
        "stream": "oper",
        "time": "00:00:00/06:00:00/12:00:00/18:00:00",
        "type": "an",
        "target": filename,
      })

    filename="/mnt/HD/HD_a2/Public/ERA-Interim/SURF/v10/v10."+riqi+".grb"
    if(os.path.isfile(filename)): # skip if the file already exists
      print("ok",filename)
    else:
      server = ECMWFDataServer()
      server.retrieve({
        "class": "ei",
        "dataset": "interim",
        "date": riqi,
        "expver": "1",
        "grid": "0.75/0.75",
        "levtype": "sfc",
        "param": "166.128", #v10
        "step": "0",
        "stream": "oper",
        "time": "00:00:00/06:00:00/12:00:00/18:00:00",
        "type": "an",
        "target": filename,
      })

    filename="/mnt/HD/HD_a2/Public/ERA-Interim/SURF/2t/2t."+riqi+".grb"
    if(os.path.isfile(filename)): # skip if the file already exists
      print("ok",filename)
    else:
      server = ECMWFDataServer()
      server.retrieve({
        "class": "ei",
        "dataset": "interim",
        "date": riqi,
        "expver": "1",
        "grid": "0.75/0.75",
        "levelist": "1000/975/950/925/900/875/850/825/800/775/750/700/650/600/550/500/450/400/350/300/250/225/200/175/150/125/100/70",
        "levtype": "sfc",
        "param": "167.128", #2m气温
        "step": "0",
        "stream": "oper",
        "time": "00:00:00/06:00:00/12:00:00/18:00:00",
        "type": "an",
        "target": filename,
      })

    filename="/mnt/HD/HD_a2/Public/ERA-Interim/SURF/sst/sst."+riqi+".grb"
    if(os.path.isfile(filename)): # skip if the file already exists
      print("ok",filename)
    else:
      server = ECMWFDataServer()
      server.retrieve({
        "class": "ei",
        "dataset": "interim",
        "date": riqi,
        "expver": "1",
        "grid": "0.75/0.75",
        "levtype": "sfc",
        "param": "34.128", #sst
        "step": "0",
        "stream": "oper",
        "time": "00:00:00/06:00:00/12:00:00/18:00:00",
        "type": "an",
        "target": filename,
      })   

    filename="/mnt/HD/HD_a2/Public/ERA-Interim/SURF/sp/sp."+riqi+".grb"
    if(os.path.isfile(filename)): # skip if the file already exists
      print("ok",filename)
    else:
      server = ECMWFDataServer()
      server.retrieve({
        "class": "ei",
        "dataset": "interim",
        "date": riqi,
        "expver": "1",
        "grid": "0.75/0.75",
        "levelist": "1000/975/950/925/900/875/850/825/800/775/750/700/650/600/550/500/450/400/350/300/250/225/200/175/150/125/100/70",
        "levtype": "sfc",
        "param": "134.128", #Surface pressure
        "step": "0",
        "stream": "oper",
        "time": "00:00:00/06:00:00/12:00:00/18:00:00",
        "type": "an",
        "target": filename,
      })

    filename="/mnt/HD/HD_a2/Public/ERA-Interim/SURF/mslp/mslp."+riqi+".grb"
    if(os.path.isfile(filename)): # skip if the file already exists
      print("ok",filename)
    else:
      server = ECMWFDataServer()
      server.retrieve({
        "class": "ei",
        "dataset": "interim",
        "date": riqi,
        "expver": "1",
        "grid": "0.75/0.75",
        "levtype": "sfc",
        "param": "151.128", #mslp
        "step": "0",
        "stream": "oper",
        "time": "00:00:00/06:00:00/12:00:00/18:00:00",
        "type": "an",
        "target": filename,
      })

    filename="/mnt/HD/HD_a2/Public/ERA-Interim/SURF/tcw/tcw."+riqi+".grb"
    if(os.path.isfile(filename)): # skip if the file already exists
      print("ok",filename)
    else:
      server = ECMWFDataServer()
      server.retrieve({
        "class": "ei",
        "dataset": "interim",
        "date": riqi,
        "expver": "1",
        "grid": "0.75/0.75",
        "levtype": "sfc",
        "param": "136.128", #tcw
        "step": "0",
        "stream": "oper",
        "time": "00:00:00/06:00:00/12:00:00/18:00:00",
        "type": "an",
        "target": filename,
      })

    filename="/mnt/HD/HD_a2/Public/ERA-Interim/SURF/tcwv/tcwv."+riqi+".grb"
    if(os.path.isfile(filename)): # skip if the file already exists
      print("ok",filename)
    else:
      server = ECMWFDataServer()
      server.retrieve({
        "class": "ei",
        "dataset": "interim",
        "date": riqi,
        "expver": "1",
        "grid": "0.75/0.75",
        "levtype": "sfc",
        "param": "137.128", #tcwv
        "step": "0",
        "stream": "oper",
        "time": "00:00:00/06:00:00/12:00:00/18:00:00",
        "type": "an",
        "target": filename,
      })

    filename="/mnt/HD/HD_a2/Public/ERA-Interim/SURF/cape/cape."+riqi+".grb"
    if(os.path.isfile(filename)): # skip if the file already exists
      print("ok",filename)
    else:
      server = ECMWFDataServer()
      server.retrieve({
        "class": "ei",
        "dataset": "interim",
        "date": riqi,
        "expver": "1",
        "grid": "0.75/0.75",
        "levtype": "sfc",
        "param": "59.128",  #cape
        "step": "3/6/9/12",
        "stream": "oper",
        "time": "00:00:00/12:00:00",
        "type": "fc",
        "target": filename,
      })
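
The blocks above differ only in the output sub-directory, the parameter code,
and (for CAPE) the forecast time/step settings; a compact alternative is to
loop over a small table of those fields. A minimal refactor sketch under the
same paths and request conventions (the helper name is hypothetical):

# Hedged refactor sketch: downloads the same surface analysis fields in a loop.
SURF_ANALYSIS_VARS = [
    ('u10', '165.128'), ('v10', '166.128'), ('2t', '167.128'),
    ('sst', '34.128'), ('sp', '134.128'), ('mslp', '151.128'),
    ('tcw', '136.128'), ('tcwv', '137.128'),
]

def download_surface_analysis(riqi):
    server = ECMWFDataServer()
    for name, code in SURF_ANALYSIS_VARS:
        target = ('/mnt/HD/HD_a2/Public/ERA-Interim/SURF/'
                  '{0}/{0}.{1}.grb'.format(name, riqi))
        if os.path.isfile(target):
            print('ok', target)  # already downloaded, skip
            continue
        server.retrieve({
            'class': 'ei', 'dataset': 'interim', 'date': riqi,
            'expver': '1', 'grid': '0.75/0.75', 'levtype': 'sfc',
            'param': code, 'step': '0', 'stream': 'oper',
            'time': '00:00:00/06:00:00/12:00:00/18:00:00',
            'type': 'an', 'target': target,
        })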