def getDataECMWF(self, filename):
    """Retrieve an ERA-Interim field from the ECMWF MARS archive.

    Builds a MARS request dictionary from the instance's query attributes
    (param, start/end dates, area, resolution, levtype, type, level) and
    submits it to the ECMWF data server, writing the result to *filename*
    as NetCDF.

    Parameters
    ----------
    filename : str
        Target file the server writes the retrieved data to.
    """
    # create a new ECMWFServer object
    server = ECMWFDataServer()

    # Build a dictionary that provides the MARS-like information to the
    # ECMWF servers; start with fixed fields shared by all queries.
    self.MARSCode = {
        'stream'  : "oper",
        'dataset' : "interim",
        'param'   : str(self.param),
        'date'    : self.start + "/to/" + self.end,
        # MARS 'area' order is north/west/south/east.
        'area'    : str(self.latitude_max) + "/" + str(self.longitude_min) + "/"
                    + str(self.latitude_min) + "/" + str(self.longitude_max),
        'time'    : "00/12",
        'grid'    : str(self.resolution) + "/" + str(self.resolution),
        'target'  : filename,
        'levtype' : str(self.levtype),
        'type'    : str(self.type),
        'format'  : "netcdf"
    }

    # Forecast fields: request the 12-hour cumulated values.
    # (Use == for string comparison; the original `is "fc"` tested object
    # identity and only worked by accident of string interning.)
    if self.type == "fc":
        self.MARSCode['step'] = "12"

    # Analysis fields: request the instantaneous values.
    elif self.type == "an":
        self.MARSCode['step'] = '0'
        # Analysed fields on pressure levels also need the level requested.
        if self.levtype == "pl":
            self.MARSCode['levelist'] = str(self.level)

    # retrieve the data
    server.retrieve(self.MARSCode)
Exemple #2
0
def retrieve(request):
    """ Execute Mars request

    Parameters
    ----------
    request : dict
        MARS request dictionary; must contain a 'target' entry naming
        the output file.

    Returns
    -------
    bool
        True if the retrieval succeeded, False otherwise.
    """
    import os
    from ecmwfapi import ECMWFDataServer
    server = ECMWFDataServer()

    # Ensure the target directory exists.  The original code inverted the
    # test (only called makedirs when the directory already existed) and
    # passed the boolean result of os.path.isdir to os.makedirs.
    target_dir = os.path.dirname(request['target'])
    if target_dir and not os.path.isdir(target_dir):
        os.makedirs(target_dir)

    try:
        server.retrieve(request)
        print("Request was successful.")
        print(request['target'])
        return True

    except Exception as e:
        print(repr(e))
        return False
def fetch_era_data(variable_dict):
    """ Download data from ECMWF

        Parameters:
        -----------
        variable_dict : dict
          A MARS-style request dictionary passed straight through to the
          ECMWF API, eg:
           {"class": "ei",
            "dataset": "interim",
            "date": '19790101/19790201/19790301',
            "grid": "0.75/0.75",
            "levtype": "sfc",
            "param": '165.128',
            "stream": "moda",
            "target": 'ERA-Int_slp.mon.mean.nc',
            "type": "an",
            "format": "netcdf"
            }

            See ('https://software.ecmwf.int/wiki/display/WEBAPI/' +
                'Accessing+ECMWF+data+servers+in+batch')

        Returns:
        --------

          Downloads files to PWD.
    """
    ECMWFDataServer().retrieve(variable_dict)
def get(parameter, year):
    """Download one year of 3-hourly ERA-Interim forecast fields for a
    single surface parameter.

    Parameters
    ----------
    parameter : str
        Either a numeric parameter ID or a parameter name, resolved
        against parameter_info.csv (name in column 1, ID in column 3).
    year : str
        Four-digit year as a string.
    """
    # Resolve the parameter name/ID pair from the lookup table.
    if parameter.isdigit():
        try:
            paramDict = {row[3]:row[1] for row in csv.reader(open('parameter_info.csv','rU'))}
            paramName = paramDict[parameter]
            paramID = parameter
        except (IOError, KeyError):
            # KeyError: unknown ID.  IOError: lookup table missing.  The
            # original caught only IOError, so an unknown ID raised, and
            # after the print the undefined names below caused a
            # NameError; bail out early instead.
            print("Parameter ID not valid")
            return
    else:
        try:
            paramDict = {row[1]:row[3] for row in csv.reader(open('parameter_info.csv','rU'))}
            paramID = paramDict[parameter]
            paramName = parameter
        except (IOError, KeyError):
            print("Parameter name not valid")
            return

    param = paramID + ".128"

    outputDir = os.path.expandvars('$HOME/norgay/data/sources/ERAI/3HOURLY/'+paramName+'/')
    try:
        os.makedirs(outputDir)
    except OSError:
        # Already-existing directory is fine; re-raise anything else.
        if not os.path.isdir(outputDir):
            raise

    server = ECMWFDataServer()

    # NOTE(review): presumably the extra day at the start lets the 12 UTC
    # forecast steps cover 00 UTC on Jan 1 — confirm against downstream use.
    if year!='1979':
        startDate = str(int(year)-1) + '-12-31'
    else:
        startDate = year + '-01-01'

    # The archive used here ends mid-2015.
    if year=='2015':
        endDate = year + '-06-30'
    else:
        endDate = year + '-12-31'

    targetFile = "ERAI_{}_{}to{}.nc".format(paramName, startDate, endDate)

    server.retrieve({
        'stream'    : "oper",
        'levtype'   : "sfc",
        'param'     : param,
        'dataset'   : "interim",
        'step'      : "03/06/09/12",
        'grid'      : "0.25/0.25",
        'time'      : "00/12",
        'date'      : "{}/to/{}".format(startDate, endDate),
        'type'      : "fc",
        'class'     : "ei",
        'format'    : "netcdf",
        'target'    : outputDir+targetFile
    })
Exemple #5
0
def API(output_folder, DownloadType, string1, string2, string3, string4, string5, string6, string7, string8, string9, string10):
    """Submit an ERA-Interim MARS request built from the positional
    string arguments and write the result to data_interim.nc in
    *output_folder*.

    Parameters
    ----------
    output_folder : str
        Directory the process changes into before downloading.
    DownloadType : int
        1 or 2: plain request; 3: adds 'levelist': "1000".  Any other
        value performs no retrieval.
    string1..string10 : str
        stream, levtype, param, step, grid, time, date, type, class and
        area fields of the MARS request, in that order.

    Returns
    -------
    tuple
        An empty tuple (kept for backward compatibility with the
        original `return()`).
    """
    import os
    from ecmwfapi import ECMWFDataServer
    os.chdir(output_folder)
    server = ECMWFDataServer()

    # Single request dictionary; the original duplicated this block for
    # DownloadType 1/2 and 3 with only 'levelist' differing.
    request = {
        'stream'    : str(string1),
        'levtype'   : str(string2),
        'param'     : str(string3),
        'dataset'   : "interim",
        'step'      : str(string4),
        'grid'      : str(string5),
        'time'      : str(string6),
        'date'      : str(string7),
        'type'      : str(string8),     # http://apps.ecmwf.int/codes/grib/format/mars/type/
        'class'     : str(string9),     # http://apps.ecmwf.int/codes/grib/format/mars/class/
        'area'      : str(string10),
        'format'    : "netcdf",
        'target'    : "data_interim.nc"
    }

    if DownloadType == 3:
        # Pressure-level download fixed to the 1000 hPa level.
        request['levelist'] = "1000"

    if DownloadType in (1, 2, 3):
        server.retrieve(request)

    return ()
	
	
	
	
	
Exemple #6
0
def download(year, month):
    """Retrieve one month of ERA5 surface analyses over West Africa.

    Parameters
    ----------
    year, month : int or str
        Calendar year and month of the data to fetch.  The output is
        written to /prj/AMMA2050/ERA5/surface/ERA5_<year>_<month>_srfc.nc.
    """
    import calendar
    server = ECMWFDataServer()
    ym = str(year) + "_" + str(month).zfill(2)
    file = "/prj/AMMA2050/ERA5/surface/ERA5_" + ym + "_srfc.nc"
    # Real last day of the month.  The original hard-coded "-31", which is
    # an invalid end date for every month shorter than 31 days.
    last_day = calendar.monthrange(int(year), int(month))[1]
    start = str(year) + "-" + str(month).zfill(2) + "-01"
    end = str(year) + "-" + str(month).zfill(2) + "-" + str(last_day).zfill(2)
    server.retrieve({
        "class": "ea",
        "dataset": "era5",
        "date": start + "/to/" + end,
        "expver": "1",
        "grid": "0.25/0.25",
        "levtype": "sfc",
        "param":"59.128/60.162/61.162/62.162/63.162/71.162/72.162/78.128/79.128/79.162/80.162/81.162/82.162/83.162/84.162/86.162/89.228/129.128/134.128/137.128/151.128/159.128/164.128/165.128/166.128/167.128/168.128/172.128/186.128/187.128/188.128/231.128/232.128/235.128/246.228/247.228" ,
        "step": "0",
        "stream": "oper",
        "time": "06:00:00/09:00:00/12:00:00/15:00:00/18:00:00/21:00:00/00:00:00/03:00:00",
        "type": "an",
        "area": "25/-18.5/3.5/17",
        "format": "netcdf",
        "target": file
    })
Exemple #7
0
def download(year, month):
    """Retrieve one month of ERA5 pressure-level analyses over West Africa.

    Parameters
    ----------
    year, month : int or str
        Calendar year and month of the data to fetch.  The output is
        written to /prj/AMMA2050/ERA5/pressure_levels/ERA5_<year>_<month>_pl.nc.
    """
    import calendar
    server = ECMWFDataServer()
    ym = str(year) + "_" + str(month).zfill(2)
    file = "/prj/AMMA2050/ERA5/pressure_levels/ERA5_" + ym + "_pl.nc"
    # Real last day of the month.  The original hard-coded "-31", which is
    # an invalid end date for every month shorter than 31 days.
    last_day = calendar.monthrange(int(year), int(month))[1]
    start = str(year) + "-" + str(month).zfill(2) + "-01"
    end = str(year) + "-" + str(month).zfill(2) + "-" + str(last_day).zfill(2)
    server.retrieve({
        "class": "ea",
        "dataset": "era5",
        "date": start + "/to/" + end,
        "expver": "1",
        "grid": "0.25/0.25",
        "levtype": "pl",
        "levelist": "200/250/300/350/400/450/500/550/600/650/700/750/825/850/875/900/925/950/975",
        "param": "60.128/130.128/131.128/132.128/133.128/135.128/155.128/157.128",
        "step": "0",
        "stream": "oper",
        "time": "06:00:00/09:00:00/12:00:00/15:00:00/18:00:00/21:00:00/00:00:00/03:00:00",
        "type": "an",
        "area": "25/-18.5/3.5/17",
        "format": "netcdf",
        "target": file
    })
Exemple #8
0
def loop(y):
    """Download one year of 12 UTC ERA-Interim pressure-level analyses
    (PV, T, u, v, q, w, vorticity, divergence) over West Africa as NetCDF.

    Parameters
    ----------
    y : int or str
        Year to retrieve; the whole calendar year is requested.
    """
    target = "/localscratch/wllf030/cornkle/ERA-I/daily_" + str(y) + "_pl_12UTC.nc"
    request = {
        "class": "ei",
        "dataset": "interim",
        "date": str(y) + "-01-01/to/" + str(y) + "-12-31",
        "expver": "1",
        "grid": "0.75/0.75",
        "levtype": "pl",
        "levelist": "250/350/450/550/600/650/700/750/800/850/900/925/950",
        "param": "60.128/130.128/131.128/132.128/133.128/135.128/155.128/157.128",
        "step": "0",
        "stream": "oper",
        "time": "12:00:00",
        "type": "an",
        "area": "22/-18/3/15",
        "format": "netcdf",
        "target": target,
    }
    ECMWFDataServer().retrieve(request)
def retrieve_netcdf(Variables_available, monthlystr, namestr, location):
    """Retrieve an ERA-Interim surface NetCDF file in two passes:
    instantaneous analyses (00/06/12/18) followed by forecast steps
    (3/6/9/12 from the 00/12 runs).

    NOTE(review): both passes write to the same target *namestr*, so the
    second retrieval appears to overwrite the first — confirm intended.
    """
    from chosen_variables import chosen_variables
    from location import chosen_area
    from ecmwfapi import ECMWFDataServer

    server = ECMWFDataServer()

    # Fields shared by both requests.
    base = {
        "class": "ei",
        "dataset": "interim",
        "date": monthlystr,
        "expver": "1",
        "grid": "0.125/0.125",
        "area": chosen_area(location),
        "levtype": "sfc",
        "param": chosen_variables(Variables_available),
        "stream": "oper",
        "target": namestr,
        "format": "netcdf",
    }

    # Pass 1: instantaneous analysis fields.
    analysis = dict(base)
    analysis.update({"step": "0", "time": "00/06/12/18", "type": "an"})
    server.retrieve(analysis)

    # Pass 2: accumulated/forecast fields from the 00 and 12 UTC runs.
    forecast = dict(base)
    forecast.update({"step": "3/6/9/12", "time": "00/12", "type": "fc"})
    server.retrieve(forecast)
Exemple #10
0
def download_ECMWF(date, days=2):
    """Retrieve ERA-Interim geopotential/u/v analyses for a run of days.

    Parameters
    ----------
    date : datetime.date or datetime.datetime
        First day of the requested span.
    days : int, optional
        Number of consecutive days (inclusive), default 2.

    The output NetCDF is named after the first day and written into the
    module-level ECMWFdir directory.
    """
    last = date + datetime.timedelta(days=days - 1)
    datestr = '{0:04d}-{1:02d}-{2:02d}/to/{3:04d}-{4:02d}-{5:02d}'.format(
        date.year, date.month, date.day,
        last.year, last.month, last.day)
    ncstr = 'ECMWF_{0:04d}{1:02d}{2:02d}.nc'.format(
        date.year, date.month, date.day)

    # Single MARS request: 6-hourly z/u/v analyses on a 1x1 degree grid.
    request = {
        'stream': 'oper',
        'levtype': 'pl',
        'param': 'z/u/v',
        'dataset': 'interim',
        'step': '0',
        'grid': '1/1',
        'time': '00/06/12/18',
        'date': datestr,
        'type': 'an',
        'class': 'ei',
        'target': os.path.join(ECMWFdir, ncstr),
        'format': 'netcdf',
    }

    ECMWFDataServer().retrieve(request)
Exemple #11
0
"""
created on July 23, 2014

@author: Nikola Jajcay
"""

from ecmwfapi import ECMWFDataServer
 
server = ECMWFDataServer()

# Operational (class "od") 12 UTC analysis of eleven pressure-level fields
# for 2017-04-04, written to the file "output" (GRIB, since no "format"
# key is set).  Parameter codes:
# https://badc.nerc.ac.uk/data/ecmwf-e40/params.html
# Optional keys left disabled in the original: dataset, grid, area,
# format, padding.
request = {
    "stream": "oper",
    "levtype": "pl",
    "param": "60.128/129.128/130.128/131.128/132.128/133.128/135.128/138.128/155.128/157.128/203.128",
    "levelist": "100/300/500/800/850/900/925/950/1000",
    "step": "0",
    "time": "12:00:00",   # daily
    "date": "2017-04-04",
    "type": "an",         # an for analysis, fc for forecast
    "class": "od",        # e4 for era40, ei for interim
    "target": "output",   # filename
    "expver": 1,
}

server.retrieve(request)

Exemple #12
0
#! /home/meteoboy4/anaconda/bin/python
from ecmwfapi import ECMWFDataServer

# Years 1985-2014 inclusive, as strings used both in the date range and
# the output filename.
year = [str(1985 + offset) for offset in range(30)]
print(year)

server = ECMWFDataServer()

# One request per year: 6-hourly 300 hPa u-wind analyses on a 1x1 grid.
for yr in year:
    server.retrieve({
        'stream'   : "oper",
        'param'    : "131.128",
        'dataset'  : "interim",
        'step'     : "0",
        'grid'     : "1.0/1.0",
        'levelist' : "300",
        'levtype'  : "pl",
        'time'     : "00/06/12/18",
        'date'     : yr + "-01-01/to/" + yr + "-12-31",
        'expver'   : "1",
        'type'     : "an",
        'class'    : "ei",
        'format'   : "netcdf",
        'target'   : "/run/media/MeteoBoy4/Data/MData/ERA-Interim/1985-2014_ALEV/Daily4/U_wind_component/300hpa/" + yr + ".nc"
    })
#!/usr/bin/env python
from ecmwfapi import ECMWFDataServer
server = ECMWFDataServer()

# January 2013, 12 UTC temperature (130.128) analyses on 18 pressure
# levels over a small East-African box.
# NOTE(review): there is no "format": "netcdf" key, so despite the .nc
# target name the file will contain GRIB — confirm intended.
request = {
    "class": "ei",
    "dataset": "interim",
    "date": "2013-01-01/to/2013-01-31",
    "expver": "1",
    "grid": "0.125/0.125",
    "levelist": "400/450/500/550/600/650/700/750/775/800/825/850/875/900/925/950/975/1000",
    "levtype": "pl",
    "param": "130.128",
    "step": "0",
    "stream": "oper",
    "area": "-2/37/-4/38",
    "target": "2013001_2013031__temperature.nc",
    "time": "12",
    "type": "an",
}

server.retrieve(request)
Exemple #14
0
#!/usr/bin/env python26
from ecmwfapi import ECMWFDataServer
import os


def leap_year(year):
    """Return 1 if *year* is a Gregorian leap year, else 0.

    Fixes the original test, which treated every multiple of 4 as leap
    and therefore wrongly flagged century years such as 1900 and 2100.
    """
    if year % 4 == 0 and (year % 100 != 0 or year % 400 == 0):
        return 1
    return 0


server = ECMWFDataServer()

# Field to download and the short tag used in output filenames.
variables = 'Volumetric soil water layer 3'
out_name = 'SQ3'
# Two-digit month labels, January..December.
mon_name = [
    '01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12'
]
# Days per month in an ordinary (non-leap) year.
ord_mon = [
    '31', '28', '31', '30', '31', '30', '31', '31', '30', '31', '30', '31'
]
# Days per month in a leap year.
leap_mon = [
    '31', '29', '31', '30', '31', '30', '31', '31', '30', '31', '30', '31'
]
for year in range(2014, 2015):
    for mon in range(1, 6):
#!/usr/bin/python
from ecmwfapi import ECMWFDataServer
server = ECMWFDataServer()

# ERA-Interim v-wind (param 132) on six pressure levels over a small box
# in the NW Himalaya, 6-hourly for 1-5 Nov 2012, written as GRIB.
request = {
    'dataset' : 'interim',
    'date'    : '20121101/to/20121105',
    'stream'  : 'oper',
    'time'    : '00/06/12/18',
    'grid'    : '0.75/0.75',
    'step'    : '0',
    'levtype' : 'pl',
    'type'    : 'an',
    'class'   : 'ei',
    'param'   : '132',
    'area'    : '31.925/77.575/31.825/77.675',
    'levelist': '500/650/775/850/925/1000',
    'target'  : 'vpl.grb',
}

server.retrieve(request)
#
# (C) Copyright 2012-2013 ECMWF.
#
# This software is licensed under the terms of the Apache Licence Version 2.0
# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. 
# In applying this licence, ECMWF does not waive the privileges and immunities 
# granted to it by virtue of its status as an intergovernmental organisation nor
# does it submit to any jurisdiction.
#

from ecmwfapi import ECMWFDataServer

# To run this example, you need an API key 
# available from https://api.ecmwf.int/v1/key/
 
server = ECMWFDataServer()

# 6-hourly 10 m winds and 2 m temperature (165/166/167.128) analyses for
# July 2014, global 0.75 degree grid, saved as NetCDF.
request = {
    'stream'  : "oper",
    'levtype' : "sfc",
    'param'   : "165.128/166.128/167.128",
    'dataset' : "interim",
    'step'    : "0",
    'grid'    : "0.75/0.75",
    'time'    : "00/06/12/18",
    'date'    : "2014-07-01/to/2014-07-31",
    'type'    : "an",
    'class'   : "ei",
    'format'  : "netcdf",
    'target'  : "interim_2014-07-01to2014-07-31_00061218.nc",
}

server.retrieve(request)
Exemple #17
0
#!/usr/bin/env python
from ecmwfapi import ECMWFDataServer
server = ECMWFDataServer()

# Daily 00 UTC potential vorticity (60.128) on 14 pressure levels,
# 1979-2017, over a 50S-50N / 0-180E band, written as NetCDF.
request = {
    "class": "ei",
    "dataset": "interim",
    "date": "1979-01-01/to/2017-12-31",
    "expver": "1",
    "grid": "0.75/0.75",
    "area": "50/0/-50/180",
    "levelist": "100/150/200/250/350/450/550/650/750/800/850/900/950/1000",
    "levtype": "pl",
    "param": "60.128",
    "step": "0",
    "stream": "oper",
    "time": "00:00:00",
    "type": "an",
    "format": "netcdf",
    "target": "EI_AN0012_1979_2017_PV60128_3D.nc",
}

server.retrieve(request)
    def __init__(self):
        """
        Initialises all variables needed. Sets the environment for the current
         platform (production Linux box vs. local checkout), configures
         console and file logging, and parses command-line arguments.
        """

        # This is one of the more important variables. This setting defines how far back into history (in days) we go.
        # I.e. to do a whole year we would set this to 365 etc.
        self.BACKFILL_DAYS = 10

        self.home = expanduser("~")
        self.server = ECMWFDataServer()

        # Filenames that failed to process; reported at the end of a run.
        self.ERROR_FILES = []
        self.DATE = datetime.datetime.today().strftime('%Y-%m-%d')
        # "linux2" is sys.platform's value for Linux on Python 2, i.e. the
        # production host; anything else is treated as a local checkout.
        if sys.platform == "linux2":
            self.MOTU_CLIENT_DIR = "/home/ubuntu/eumetsat_archive/motu-client-python"
            self.FTP_DIR = "/home/ubuntu/mnt_s3_bucket"
            self.DATA_FILE = "/home/ubuntu/eumetsat_archive/data.json"
            self.OUTPUT_DIR = "/home/ubuntu/eumetsat_archive/temp"
            self.COLOUR_FOLDER = "/home/ubuntu/eumetsat_archive/colour_files/"
        else:
            self.OUTPUT_DIR = self.home + "/repos/eumetsat_archive/temp"
            self.DATA_FILE = "data.json"
            self.FTP_DIR = self.home + "/repos/eumetsat_archive/ftpdir"
            self.MOTU_CLIENT_DIR = self.home + "/repos/eumetsat_archive/motu-client-python"
            self.COLOUR_FOLDER = self.home + "/repos/eumetsat_archive/colour_files/"

        if not os.path.exists(self.OUTPUT_DIR):
            os.makedirs(self.OUTPUT_DIR)
        # Set up logging: console handler plus a per-day file under
        # <FTP_DIR>/logs, both attached to the root logger.
        self.logFormatter = logging.Formatter(
            "%(asctime)s [%(levelname)-5.5s]  %(message)s")
        self.logger = logging.getLogger()
        self.handler = logging.StreamHandler()
        self.formatter = logging.Formatter(
            '%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
        self.handler.setFormatter(self.formatter)
        self.logger.addHandler(self.handler)
        self.logger.setLevel(logging.INFO)
        self.logPath = self.FTP_DIR
        self.fileName = datetime.datetime.today().strftime('%Y-%m-%d')
        if not os.path.exists(self.logPath + "/logs"):
            os.makedirs(self.logPath + "/logs")
        self.fileHandler = logging.FileHandler("{0}/{1}.txt".format(
            self.logPath + "/logs", self.fileName))
        self.fileHandler.setFormatter(self.logFormatter)
        self.logger.addHandler(self.fileHandler)

        # Command-line state; empty strings mean "process all of data.json".
        self.fromdate = ""
        self.todate = ""
        self.datasetname = ""
        argscounter = 0
        self.counter = 0
        self.dataCount = 0

        # Get arguments. This enabled the scripts to be used via the following command;
        # python batchDownload.py -fromdate 20161010 -todate 20161010 -datasetname CERSAT-GLO-BLENDED_WIND_L4-V3-OBS_FULL_TIME_SERIE
        # Using -v will enable verbose logging, as well as maintaining the downloaded temp files.

        # NOTE(review): the else branch resets debug/log level for every
        # unrecognised token (including option *values*), so -v only
        # sticks if it is the last argument — confirm intended.
        for arg in sys.argv:
            if arg == '-v':
                self.debug = True
                self.logger.setLevel(logging.DEBUG)
            elif arg == '-fromdate':
                self.fromdate = sys.argv[argscounter + 1]
            elif arg == '-todate':
                self.todate = sys.argv[argscounter + 1]
            elif arg == '-datasetname':
                self.datasetname = sys.argv[argscounter + 1]
            elif arg == '-backfilldays':
                self.BACKFILL_DAYS = int(sys.argv[argscounter + 1])
            else:
                self.debug = False
                self.logger.setLevel(logging.INFO)
            argscounter = argscounter + 1
        pass
Exemple #19
0
# Template fragment: the var_* names below are placeholders substituted
# by an external tool before this script is executed.
month_end   = var_month_end
# Area
north       = var_north
south       = var_south
west        = var_west
east        = var_east
# Output folder, i.e. replace by your project name
out_path    = 'var_out'



##################################################
#os.mkdir(out_path)
#os.mkdir(out_path+"nc_output_files")
#os.mkdir(out_path+"txt_output_files")
server = ECMWFDataServer()
count  = 1
# Work out the month range for each year of the span: the first year
# starts at month_start, the last ends at month_end, and full years in
# between cover months 1-12.
for year in range(year_start, year_end+1):
    if len(range(year_start, year_end+1)) == 1:
        mt_start = month_start
        mt_end = month_end
    else:
        if year == year_start:
            mt_start = month_start
            mt_end = 12
        elif year == year_end:
            mt_start = 1
            mt_end = month_end
        else:
            mt_start = 1
            mt_end = 12
Exemple #20
0
# conda activate ERA5
from ecmwfapi import ECMWFDataServer

server = ECMWFDataServer()
server.retrieve({
    "class":
    "ea",
    "dataset":
    "era5",
    "expver":
    "1",
    "stream":
    "oper",
    "type":
    "an",
    "levtype":
    "sfc",
    "param":
    "165.128/166.128/151.128",
    "date":
    "2015-09-29/to/2015-10-03",
    "time":
    "00/01/02/03/04/05/06/07/08/09/10/11/12/13/14/15/16/17/18/19/20/21/22/23",
    "step":
    "0",
    "grid":
    "0.25/0.25",
    "area":
    "30.5/-81.5/18/-65.5",
    "format":
    "netcdf",
class Downloader:
    def __init__(self):
        """
        Initialises all variables needed. Sets the environment for the current
         platform (production Linux box vs. local checkout), configures
         console and file logging, and parses command-line arguments.
        """

        # This is one of the more important variables. This setting defines how far back into history (in days) we go.
        # I.e. to do a whole year we would set this to 365 etc.
        self.BACKFILL_DAYS = 10

        self.home = expanduser("~")
        self.server = ECMWFDataServer()

        # Filenames that failed to process; reported at the end of a run.
        self.ERROR_FILES = []
        self.DATE = datetime.datetime.today().strftime('%Y-%m-%d')
        # "linux2" is sys.platform's value for Linux on Python 2, i.e. the
        # production host; anything else is treated as a local checkout.
        if sys.platform == "linux2":
            self.MOTU_CLIENT_DIR = "/home/ubuntu/eumetsat_archive/motu-client-python"
            self.FTP_DIR = "/home/ubuntu/mnt_s3_bucket"
            self.DATA_FILE = "/home/ubuntu/eumetsat_archive/data.json"
            self.OUTPUT_DIR = "/home/ubuntu/eumetsat_archive/temp"
            self.COLOUR_FOLDER = "/home/ubuntu/eumetsat_archive/colour_files/"
        else:
            self.OUTPUT_DIR = self.home + "/repos/eumetsat_archive/temp"
            self.DATA_FILE = "data.json"
            self.FTP_DIR = self.home + "/repos/eumetsat_archive/ftpdir"
            self.MOTU_CLIENT_DIR = self.home + "/repos/eumetsat_archive/motu-client-python"
            self.COLOUR_FOLDER = self.home + "/repos/eumetsat_archive/colour_files/"

        if not os.path.exists(self.OUTPUT_DIR):
            os.makedirs(self.OUTPUT_DIR)
        # Set up logging: console handler plus a per-day file under
        # <FTP_DIR>/logs, both attached to the root logger.
        self.logFormatter = logging.Formatter(
            "%(asctime)s [%(levelname)-5.5s]  %(message)s")
        self.logger = logging.getLogger()
        self.handler = logging.StreamHandler()
        self.formatter = logging.Formatter(
            '%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
        self.handler.setFormatter(self.formatter)
        self.logger.addHandler(self.handler)
        self.logger.setLevel(logging.INFO)
        self.logPath = self.FTP_DIR
        self.fileName = datetime.datetime.today().strftime('%Y-%m-%d')
        if not os.path.exists(self.logPath + "/logs"):
            os.makedirs(self.logPath + "/logs")
        self.fileHandler = logging.FileHandler("{0}/{1}.txt".format(
            self.logPath + "/logs", self.fileName))
        self.fileHandler.setFormatter(self.logFormatter)
        self.logger.addHandler(self.fileHandler)

        # Command-line state; empty strings mean "process all of data.json".
        self.fromdate = ""
        self.todate = ""
        self.datasetname = ""
        argscounter = 0
        self.counter = 0
        self.dataCount = 0

        # Get arguments. This enabled the scripts to be used via the following command;
        # python batchDownload.py -fromdate 20161010 -todate 20161010 -datasetname CERSAT-GLO-BLENDED_WIND_L4-V3-OBS_FULL_TIME_SERIE
        # Using -v will enable verbose logging, as well as maintaining the downloaded temp files.

        # NOTE(review): the else branch resets debug/log level for every
        # unrecognised token (including option *values*), so -v only
        # sticks if it is the last argument — confirm intended.
        for arg in sys.argv:
            if arg == '-v':
                self.debug = True
                self.logger.setLevel(logging.DEBUG)
            elif arg == '-fromdate':
                self.fromdate = sys.argv[argscounter + 1]
            elif arg == '-todate':
                self.todate = sys.argv[argscounter + 1]
            elif arg == '-datasetname':
                self.datasetname = sys.argv[argscounter + 1]
            elif arg == '-backfilldays':
                self.BACKFILL_DAYS = int(sys.argv[argscounter + 1])
            else:
                self.debug = False
                self.logger.setLevel(logging.INFO)
            argscounter = argscounter + 1
        pass

    def load_file(self):
        """
        Load the JSON dataset catalogue into self.jsondata and count the
        entries flagged for processing (drives progress reporting).
        """

        with open(self.DATA_FILE) as data_file:
            self.jsondata = simplejson.load(data_file)
        self.dataCount = self.dataCount + sum(
            1 for entry in self.jsondata if entry["process"] == True)

    def run(self):
        """
        Entry point.  If -fromdate/-todate/-datasetname were all supplied
        on the command line, process just that dataset over that range;
        otherwise walk the whole data.json catalogue.
        """

        self.load_file()

        have_args = (self.fromdate != "" and self.todate != ""
                     and self.datasetname != "")
        if not have_args:
            self.processJsonData()
            return

        fromdate = datetime.datetime.strptime(self.fromdate, "%Y%m%d").date()
        toDate = datetime.datetime.strptime(self.todate, "%Y%m%d").date()
        self.DATE = fromdate.strftime('%Y-%m-%d')
        item = self.findItem(self.datasetname)
        self.logger.info("==== Processing " + self.datasetname.upper() +
                         " =====")
        ##TODO: In here at some point let's get the proper dates to enable using ranges.
        ##i.e. transofrm fromdate / todate to actual dates maybe?
        self.processItem(item, fromdate, toDate)

    def findItem(self, datasetname):
        """
        Return the catalogue entry whose "dataset" field matches
        *datasetname*, or None if there is no match.
        """
        ##TODO: Enable the use of variables.
        matches = (entry for entry in self.jsondata
                   if entry["dataset"] == datasetname)
        return next(matches, None)

    def processItem(self, item, fromdate, toDate):
        """ This is the main part of the program. Here we retrieve the parameters from the
        object that we get from the JSON file.

        :param item: object from the JSON file.
        :param fromdate: from which date to start processing
        :param toDate: what the end date for processing should be
        """

        dataset = item["dataset"]
        # Optional catalogue keys: fall back to defaults when absent.
        # NOTE(review): the bare excepts also swallow non-KeyError
        # failures, and if "variable" is missing `variable` stays unbound
        # and would raise NameError in the useMotu/ftp branches — confirm
        # the catalogue always sets it for those types.
        try:
            variable = item["variable"]
        except:
            pass
        try:
            te = item["te"]
        except:
            # Whole-globe extent by default.
            te = " -180 -90 180 90"

        variableName = ""
        try:
            variableName = "_" + item["variableName"]
        except:
            variableName = ""

        try:
            z = item["depth"]
        except:
            # -99 acts as the "no specific depth" sentinel downstream.
            z = -99

        try:
            ts = item["ts"]
        except:
            ts = "8192 0"

        dateformat = item["dateformat"]
        # Output location on the FTP share:
        # <FTP_DIR>/<DATASET><_VARIABLE>/<YYYY-MM-DD>.png
        ftpdatafolder = self.FTP_DIR + "/" + dataset.upper(
        ) + variableName.upper()
        file = ftpdatafolder + "/" + fromdate.strftime('%Y-%m-%d') + ".png"

        # If the file exists - don't do anything.
        if not os.path.exists(file):
            self.logger.info(
                fromdate.strftime('%Y-%m-%d') +
                ".png doesn't exist. Backfilling...")
            # Reformat the dates into the dataset's own date format string.
            fromdate = fromdate.strftime(dateformat)
            toDate = toDate.strftime(dateformat)
            if item["type"] == 'useMotu':

                # If this is a dataset coming from the MOTU download service, handle it accordingly.
                username = item["username"]
                password = item["password"]
                m = item["m"]
                s = item["s"]

                self.getMotuDataset(username, password, m, s, dataset,
                                    fromdate, toDate, variable, te, z,
                                    variableName, ts)

            elif item["type"] == 'ecmwf':

                # If this uses the ECMWF python download service, handle it accordingly.
                self.logger.info("Processing ECMWF data")
                ecmfwdataset = item["ecmwf_dataset"]
                try:
                    time = item["time"]
                except:
                    time = "12:00:00"
                self.getECMWFDataset(dataset, ecmfwdataset, fromdate,
                                     item["param"], "", ts, item["expver"],
                                     time)

            elif item["type"] == 'ftp':

                # Otherwise it's an FTP download.
                url = item["url"]
                self.getFTPDataset(url, dataset, variable, fromdate, te, ts)
        else:
            self.logger.info("Already got " + fromdate.strftime('%Y-%m-%d') +
                             ".png Moving on.")

    def cropData(self, dataset):
        """
        For the MACC datasets, they all need to be cropped to a certain size, this is because there is a column
        of data on the far right which contains header information, and renders as null data.

        :param dataset: catalogue dataset name; upper-cased to locate the
            PNG for self.DATE on the FTP share, which is cropped in place.
        """

        self.logger.info("Cropping")
        ftpdatafolder = dataset.upper()
        filename = str(self.FTP_DIR) + '/' + str(ftpdatafolder) + '/' + str(
            self.DATE) + '.png'
        # In-place crop: "convert IN -crop WxH+0+0 +repage OUT".  The
        # original interpolated a stray "+ " before the output filename,
        # giving ImageMagick an invalid lone "+" argument.
        cropCommand = "convert " + filename + " -crop 8183x4096+0+0 +repage " + filename
        # NOTE(review): shell=True with an interpolated path is fine for
        # trusted config, but a list argv with shell=False would be safer.
        subprocess.call(cropCommand, shell=True)

    def processJsonData(self):
        """
        If we are processing the entire data.json file then this is where we do it.
        For every entry flagged with "process", backfill from `daysAgo`
        back to BACKFILL_DAYS, then report any failures collected in
        self.ERROR_FILES.
        """
        self.logger.info(" Found " + str(self.dataCount) +
                         " datasets to process. Starting process.")
        for item in self.jsondata:
            # If we want to process the data according to the 'process' flag on the object.
            if item["process"] == True:
                self.counter = self.counter + 1
                dataset = item["dataset"]
                self.logger.info("=========== " + str(self.counter) + " / " +
                                 str(self.dataCount) + " : " + dataset +
                                 " =============")
                # Optional "gap": number of extra days each item spans.
                try:
                    gap = item["gap"]
                except:
                    gap = 0
                variableName = ""
                try:
                    variableName = "_" + item["variableName"]
                except:
                    variableName = ""
                daysago = item["daysAgo"]

                self.logger.info("==== Processing " + dataset.upper() +
                                 variableName + " =====")
                # Walk backwards day by day from `daysago` to BACKFILL_DAYS.
                for num in range(daysago, self.BACKFILL_DAYS + 1):
                    now = datetime.datetime.today()
                    numSubtractionDays = datetime.timedelta(days=num)
                    fromdate = now - numSubtractionDays
                    toDate = fromdate + datetime.timedelta(days=gap)
                    self.DATE = fromdate.strftime('%Y-%m-%d')
                    self.processItem(item, fromdate, toDate)

        if len(self.ERROR_FILES) > 0:
            self.logger.error(str(len(self.ERROR_FILES)) + " file(s) failed.:")
            for file in self.ERROR_FILES:
                self.logger.info(file)
        else:
            # Only clean up temp files when everything succeeded.
            self.logger.info("All successful.")
            self.cleanup()

        self.logger.info("=====================================")
        self.logger.info("=====================================")

    def getMotuDataset(self, username, password, m, s, dataset, fromdate,
                       todate, variable, te, depth, variableName, ts):
        """
        Function to retrieve datasets that are supported by the MOTU python client.
        Downloads the NetCDF (unless already cached in OUTPUT_DIR), then
        renders it to a PNG and moves it to the FTP share.  Failures are
        recorded in self.ERROR_FILES rather than raised.
        """

        filename = dataset + variableName + "_" + self.DATE
        tempfilename = self.OUTPUT_DIR + "/" + filename + ".nc"
        if os.path.exists(tempfilename):
            self.logger.info(tempfilename +
                             " already exists so just reprocessing it.")
        else:
            self.logger.info("Downloading " + filename)
            # -q silences motu-client unless we are running verbose.
            if self.debug:
                motucommand = "python " + self.MOTU_CLIENT_DIR + "/motu-client.py "
            else:
                motucommand = "python " + self.MOTU_CLIENT_DIR + "/motu-client.py -q "

            motucommand = (motucommand + " -u " + username + " -p " +
                           password + " -m " + m + " -s " + s + " -d " +
                           dataset + " -x -180 -X 180 -y -90 -Y 90 " + " -t " +
                           fromdate + " -T " + todate + " -v " + variable +
                           " -o " + self.OUTPUT_DIR + " -f " + filename +
                           ".nc")
            # Occasionally datasets are requested at specific depths. If this is the case
            # we create a range for the depth variable.
            if depth != -99:
                motucommand = (motucommand + " -z " + str(depth) + " -Z " +
                               str(depth + 0.0002))

            # NOTE(review): shell=True with an interpolated command exposes
            # the password in the process list; a list argv would be safer.
            self.logger.debug(motucommand)
            dataset = dataset + variableName
            subprocess.check_output(motucommand, shell=True)

        try:
            self.processImagery(dataset, filename, variable, te, self.DATE, ts)
            self.moveToFtpFolder(dataset, filename + ".png", fromdate)
            self.cleanup()
        except Exception:
            # Was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt; narrowed while keeping the best-effort
            # record-and-continue behaviour.
            self.logger.error("Error processing " + filename)
            self.ERROR_FILES.append(filename)

    def getECMWFDataset(self, dataset, ecmfwdataset, fromdate, param, te, ts,
                        expver, time):
        """
        Download an ECMWF dataset through the ECMWF python api and publish it.

        The most important request field is "param", which selects the
        variable to download; "grid" sets the resolution. The request is a
        plain MARS dictionary handed to self.server.retrieve().
        """

        self.logger.info("Processing " + dataset)
        filename = dataset + "_" + self.DATE
        tempfilename = self.OUTPUT_DIR + "/" + filename + ".nc"
        if os.path.exists(tempfilename):
            self.logger.info(tempfilename +
                             " already exists so just reprocessing it.")
        else:
            # Build the MARS request once and log its repr, instead of
            # maintaining a second, hand-formatted string copy of the same
            # dictionary (the two copies had already started to drift apart).
            request = {
                "class": "mc",
                "dataset": ecmfwdataset,
                "date": fromdate,
                "expver": expver,
                "grid": "0.1/0.1",
                "levtype": "sfc",
                "param": param,
                "step": "3",
                "stream": "oper",
                "time": time,
                "type": "fc",
                "format": "netcdf",
                "target": self.OUTPUT_DIR + "/" + filename + ".nc"
            }
            self.logger.debug("server.retrieve(" + repr(request) + ")")
            self.server.retrieve(request)

        self.processImagery(dataset, filename, "", " 0 -90 360 90", self.DATE,
                            ts)
        self.moveToFtpFolder(dataset, filename + ".png", fromdate)
        self.cropData(dataset)
        self.cleanup()

    def getFTPDataset(self, url, dataset, variable, fromdate, te, ts):
        """
        Download a netcdf file from an FTP/HTTP site with wget, then convert
        it to a PNG and publish it.

        url + dataset + "_" + fromdate + ".nc" must resolve to the remote
        file; locally the file is renamed to use self.DATE.
        """

        filename = dataset + "_" + fromdate
        formattedFileName = dataset + "_" + self.DATE
        tempfilename = self.OUTPUT_DIR + "/" + formattedFileName + ".nc"
        if os.path.exists(tempfilename):
            self.logger.info(tempfilename +
                             " already exists so just reprocessing it.")
        else:
            self.logger.debug("Downloading " + filename)

            # Build the command once; -q silences wget outside debug mode.
            quiet = "" if self.debug else "-q "
            wgetCommand = ("wget " + quiet + url + filename + ".nc -O " +
                           self.OUTPUT_DIR + "/" + formattedFileName + ".nc")
            if self.debug:
                self.logger.info(wgetCommand)

            subprocess.call(wgetCommand, shell=True)
        self.processImagery(dataset, formattedFileName, variable, te, fromdate,
                            ts)
        self.moveToFtpFolder(dataset, formattedFileName + ".png", fromdate)
        self.cleanup()

    def processImagery(self, dataset, filename, myvariable, te, fromdate, ts):
        """
        Convert a downloaded netcdf file to a GeoTIFF (gdalwarp) and then to
        a colour-mapped PNG (gdaldem color-relief) using the colour table
        <COLOUR_FOLDER>/<dataset>.txt.

        myvariable -- netcdf subdataset name, or "" for the whole file
        te         -- target extent; callers supply it with its own leading
                      space, so it is concatenated directly after "-te"
        ts         -- target size ("width height")
        """

        if os.path.exists(self.OUTPUT_DIR + "/" + filename + ".nc"):
            if myvariable:
                myvariable = ":" + myvariable
            else:
                myvariable = ""
            self.logger.info("Processing " + filename)
            # The original built these commands with backslash-continued
            # string literals that embedded long runs of accidental spaces;
            # the shell collapsed them, but the code was unreadable. These
            # single-space versions are shell-equivalent.
            quiet = "" if self.debug else "-q "
            gdalwarpcommand = ("gdalwarp " + quiet +
                               " -of GTiff -t_srs epsg:4326" +
                               " -te" + te +
                               " -ts " + ts +
                               " NETCDF:" + self.OUTPUT_DIR + "/" + filename +
                               ".nc" + myvariable + " " +
                               self.OUTPUT_DIR + "/" + filename + ".tif")
            gdaldemcommand = ("gdaldem color-relief " + quiet +
                              " -of PNG -alpha " +
                              self.OUTPUT_DIR + "/" + filename + ".tif " +
                              self.COLOUR_FOLDER + dataset + ".txt  " +
                              self.OUTPUT_DIR + "/" + filename + ".png")
            self.logger.debug(gdalwarpcommand)
            self.logger.debug(gdaldemcommand)
            subprocess.call(gdalwarpcommand, shell=True)
            subprocess.call(gdaldemcommand, shell=True)

        else:
            self.logger.info(filename + " doesn't exist. Moving on.")

    def moveToFtpFolder(self, target_foldername, filename, fromdate):
        """
        Move the generated PNG from the temporary OUTPUT_DIR into
        FTP_DIR/<TARGET_FOLDERNAME>/<DATE>.png (creating the folder if
        needed) and make it world-readable.
        """

        self.logger.info("Moving")
        ftpdatafolder = target_foldername.upper()
        if not os.path.exists(self.FTP_DIR + '/' + ftpdatafolder):
            os.makedirs(self.FTP_DIR + '/' + ftpdatafolder)
        origin = str(self.OUTPUT_DIR) + '/' + str(filename)
        destination = str(self.FTP_DIR) + '/' + str(ftpdatafolder) + '/' + str(
            self.DATE) + '.png'
        self.logger.debug('from: ' + origin)
        self.logger.debug('to: ' + destination)
        subprocess.call("mv " + origin + " " + destination, shell=True)
        subprocess.call("chmod 644 " + destination, shell=True)
        self.logger.info("Processing finished")
        # Best-effort removal of the intermediate PNG; it may legitimately
        # not exist, so only filesystem errors are ignored. (The original
        # bare "except:" would also have hidden KeyboardInterrupt and
        # programming errors.)
        try:
            self.logger.debug("Removing: " + self.OUTPUT_DIR + "/" +
                              target_foldername + "_" + fromdate + ".png")
            os.remove(self.OUTPUT_DIR + "/" + target_foldername + "_" +
                      fromdate + ".png")
        except OSError:
            pass

    def cleanup(self):
        """
        Remove every temporary file in OUTPUT_DIR, unless we're in debug
        mode (debug runs keep their intermediates for inspection).
        """

        # Original read "if self.debug: pass / else: ..."; inverted for
        # clarity. Only filesystem errors are swallowed now.
        if not self.debug:
            try:
                for f in os.listdir(self.OUTPUT_DIR):
                    os.remove(self.OUTPUT_DIR + "/" + f)
                self.logger.info("Temp files deleted")
            except OSError:
                self.logger.info("Temp files not deleted")
                pass
Exemple #22
0
from ecmwfapi import ECMWFDataServer

if __name__ == '__main__':
    # Download one year of TIGGE surface forecast fields from ECMWF,
    # subset to south-east Australia (area is N/W/S/E in degrees).
    server = ECMWFDataServer()

    server.retrieve({
        "class": "ti",
        "dataset": "tigge",
        "date": "2012-01-01/to/2012-12-31",
        "expver": "prod",
        "grid": "0.5/0.5",
        "levtype": "sfc",
        "origin": "ecmf",
        # Parameter ids 59/151/165/166/167 -- see the ECMWF parameter
        # database for their meaning; TODO confirm before changing.
        "param": "59/151/165/166/167",
        "area": "-33/135/-41/150",
        "step": "0/6/12/18/24/30/36/42/48/54/60/66/72",
        "time": "00:00:00",
        "type": "fc",
        "target": "2012fc",
    })

    # NOTE(review): this second request is truncated -- its dict is never
    # closed and the rest of the script is missing from this file.
    server.retrieve({
        "class": "ti",
        "dataset": "tigge",
        "date": "2012-01-01/to/2012-12-31",
        "expver": "prod",
        "grid": "0.5/0.5",
        "levtype": "sfc",
        "origin": "ecmf",
        "param": "59/151/165/166/167",
        "area": "-33/135/-41/150",
#!/usr/bin/env python

# Use this script to download ERA Interim data needed to compute hydrological (not atmospheric) variables.
# Make sure the home folder contains .ecmwfapirc, see https://software.ecmwf.int/wiki/display/WEBAPI/Access+ECMWF+Public+Datasets
# To install 'ecmwfapi': sudo pip install https://software.ecmwf.int/wiki/download/attachments/56664858/ecmwf-api-client-python.tgz
from ecmwfapi import ECMWFDataServer
server = ECMWFDataServer()

# Set year/month/day to be downloaded. This will save all data into one file for conversion.
# Thus, do not request too long an interval!
start_year = '2016'
start_month = '01-01'
end_year = '2016'
stop_month = '12-31'
# Set time to be downloaded (usually 12:00:00)
# Possible values: '00:00:00/06:00:00/12:00:00/18:00:00'
time = '12:00:00';
# Set output file.
output_file = 'i:/GlobalModel/ERAinterim/Surface/OriginalForHydro/ERA_SM_and_Snow.nc'


# prepare/declare arguments for downloading (will be modified later)
# (params 39.128-42.128 and 141.128 -- presumably the soil-moisture layers
# and snow fields given the output filename; TODO confirm against the
# ECMWF parameter database)
args_sl = {
    "class": "ei",
    "dataset": "interim",
    "date": "2015-02-01/to/2016-12-31",
    "expver": "1",
    "grid": "0.75/0.75",
    "levtype": "sfc",
    "param": "39.128/40.128/41.128/42.128/141.128",
    "step": "0",
# NOTE(review): the script is truncated here -- the args_sl dict is never
# closed and the retrieval call is missing from this file.
Exemple #24
0
#!/usr/bin/env python

# ECMWF parameter id notes left by the original author (partial):
# Surface temp 167.128
# Pressure 54.128 levtype =
# Relative humidity 157.128
# specific humidity .128
# total cloud cover .128
from ecmwfapi import ECMWFDataServer



#periode = "1979-01-01/to/2018-07-31"
periode = "1990-12-01/to/1990-12-08"


# Download ERA-Interim total cloud cover (param 164.128, per the target
# filename) analyses over Europe, four times daily, as netcdf.
server = ECMWFDataServer()
server.retrieve({
    "class": "ei",
    "dataset": "interim",
    "expver": "1",
    "stream": "oper",
    "type": "an", # use fc for forecast data
    "levtype": "sfc", #surface variable
    "param": "164.128",
    "date": periode,
    "time": "00:00:00/06:00:00/12:00:00/18:00:00",
    #"step": "0", # timestep is 3 hours
    "grid": "0.75/ 0.75", # 0.75/0.75 is the recommended resolution for grib; 1.0/1.0 is recommended when not global.
    "area": "75/-15/30/42",     # Europe --> N/W/S/E lat long degrees
    "format":"netcdf",
    "target": "./filesML/total_cloud_cover_Europa_sp.nc"
# NOTE(review): truncated here -- the closing "})" of this call is missing
# from this file.
#!/usr/bin/env python
# Fetch a single day (2010-07-01) of ERA-Interim surface analyses for a
# long list of parameters, four times daily, written to a GRIB file.
from ecmwfapi import ECMWFDataServer

server = ECMWFDataServer()

request = {
    'class'     : "ei",
    'dataset'   : "interim",
    'type'      : "an",
    'levtype'   : "sfc",
    'param'     : "172/134/151/165/166/167/168/169/235/33/34/31/141/139/170/183/236/39/40/41/42",
    'date'      : "20100701/to/20100701",
    'time'      : "00/06/12/18",
    'step'      : "0",
    'grid'      : "128",
    'target'    : "ERA-Int_sfc_20100701.grib",
    # Options present but disabled in the original script:
    # 'stream'    : "oper",
    # 'format'    : "netcdf",
}
server.retrieve(request)
Exemple #26
0
# NOTE(review): fragment -- the imports (os, ECMWFDataServer) and the
# "area" dict used below are defined earlier in the original script and
# are missing from this file.
code = {'u10': '165.128', 'v10': '166.128'}  # ECMWF param ids for the u10/v10 variables
time = "2008-01-01_2018-01-01"
download_name = 'uv_{}.nc'.format(time)
data_path = "/home/qxs/bma/"
download_file = data_path + download_name
param = ''
vars = ['u10', 'v10']
# Build the "/"-separated param list, e.g. "165.128/166.128/".
for var in vars:
    param = param + str(code[var]) + '/'
try:
    os.makedirs(data_path + time + '/')
except OSError:
    # Directory already exists (or cannot be created) -- carry on either way.
    pass
finally:
    # Only download when the target file is not already on disk.
    if not os.path.exists(download_file):
        server = ECMWFDataServer()
        server.retrieve({
            'class': "ei",
            'dataset': "interim",
            'stream': "oper",
            'levtype': "sfc",
            'type': "an",
            'time': "00/12",
            'step': "0",
            'grid': "0.5/0.5",
            'area': area["china"],
            'param': param[:-1],  # drop the trailing "/"
            'date': "{}/to/{}".format("20080101", "20180101"),
            'format': "netcdf",
            'target': download_file
        })
#!/usr/bin/env python
# Download ERA-Interim forecasts of param 228.128 ("TP" per the target
# filename) for 1979-2017, step 3 from the 00Z and 12Z runs, as netcdf.
from ecmwfapi import ECMWFDataServer

server = ECMWFDataServer()

request = {
    'class': "ei",
    'dataset': "interim",
    'type': "fc",
    'stream': "oper",
    'expver': "1",
    'levtype': "sfc",
    'param': "228.128",
    'step': "3",
    'grid': "0.75/0.75",
    'time': "00:00:00/12:00:00",
    'date': "1979-01-01/to/2017-12-31",
    'format': "netcdf",
    'target': "EI_1979_2017_TP228128.nc",
}
server.retrieve(request)
Exemple #28
0
#!/usr/bin/env python
# Download ERA-Interim analyses on model levels 46-60 (params
# 130.128/131.128/132.128/138.128) for a single date given on the command
# line as YYYY-MM-DD, saved as netcdf under 2015/.
from ecmwfapi import ECMWFDataServer

import sys

# Use a descriptive name instead of shadowing the builtin "str",
# which the original script did.
date_arg = sys.argv[1]

server = ECMWFDataServer()
server.retrieve({
    "class": "ei",
    "dataset": "interim",
    "date": date_arg,
    "expver": "1",
    "levelist": "46/to/60",
    "levtype": "ml",
    "param": "130.128/131.128/132.128/138.128",
    "step": "0",
    "stream": "oper",
    "target": "2015/" + date_arg.replace('-', '') + "_ml.nc",
    "format": "netcdf",
    "time": "00/06/12/18",
    "type": "an",
    "area": "75/-20/30/70",
    "grid": "0.36/0.36",
})
# NOTE(review): fragment -- lonmin/lonmax/latmin/latmax, dl, yearStart,
# yearEnd, monthStart and monthEnd are defined earlier in the original
# script and are missing from this file. The print statements below are
# Python 2 syntax.
# Pad the bounding box by dl degrees on every side.
lonmin = str(float(lonmin)-dl)
lonmax = str(float(lonmax)+dl)
latmin = str(float(latmin)-dl)
latmax = str(float(latmax)+dl)
print 'lonmin-dl = ',lonmin
print 'lonmax+dl =',lonmax 
print 'latmin-dl =',latmin
print 'latmax+dl =',latmax



######################
# Retrieve data
######################

server = ECMWFDataServer()

### Loop on the years and months, clamping the month range for the first
### and last year of the request.
for year in range(yearStart, yearEnd+1):
   
   if year==yearStart and year==yearEnd:
      monthStartYear = monthStart
      monthEndYear   = monthEnd
   elif year==yearStart and year!=yearEnd:
      monthStartYear = monthStart
      monthEndYear   = 12
   elif year!=yearStart and year==yearEnd:
      monthStartYear = 1
      monthEndYear   = monthEnd
   else:
      monthStartYear = 1
# NOTE(review): truncated here -- the rest of the loop body is missing
# from this file.
Exemple #30
0
#!/usr/bin/env python
# Download ERA-Interim analysed temperature (param 130.128, per the target
# filename) on 18 pressure levels for January 2013, subset to a small
# area, at 0.125 degree resolution.
from ecmwfapi import ECMWFDataServer

server = ECMWFDataServer()
server.retrieve({
    "class": "ei",
    "dataset": "interim",
    "date": "2013-01-01/to/2013-01-31",
    "expver": "1",
    "grid": "0.125/0.125",
    "levelist":
    "400/450/500/550/600/650/700/750/775/800/825/850/875/900/925/950/975/1000",
    "levtype": "pl",
    "param": "130.128",
    "step": "0",
    "stream": "oper",
    "area": "-2/37/-4/38",
    # Without an explicit format, MARS delivers GRIB even though the target
    # name ends in .nc; request netcdf so the extension is honest.
    "format": "netcdf",
    "target": "2013001_2013031__temperature.nc",
    "time": "12",
    "type": "an",
})
Exemple #31
0
#!/usr/bin/env python
# Download ERA-Interim surface analyses (params 134.128/167.128) for a
# single date given on the command line as YYYY-MM-DD, saved as netcdf
# under 2013/.
from ecmwfapi import ECMWFDataServer

import sys

# Use a descriptive name instead of shadowing the builtin "str",
# which the original script did.
date_arg = sys.argv[1]

server = ECMWFDataServer()
server.retrieve({
    "class": "ei",
    "dataset": "interim",
    "date": date_arg,
    "expver": "1",
    # NOTE(review): "levelist" is meaningless with levtype "sfc" and looks
    # like a leftover from a model-level request; kept so the request body
    # is unchanged, but confirm and remove.
    "levelist": "46/to/60",
    "levtype": "sfc",
    "param": "134.128/167.128",
    "step": "0",
    "stream": "oper",
    "target": "2013/" + date_arg.replace('-', '') + "_sfc.nc",
    "format": "netcdf",
    "time": "00/06/12/18",
    "type": "an",
    "area": "60/70/10/150",
    "grid": "0.36/0.36",
})
Exemple #32
0
from ecmwfapi import ECMWFDataServer
from pathlib import Path
from uclgeog.geog_data import procure_dataset

# NOTE(review): this fragment uses np (numpy) with no import visible here,
# and passes an undefined name "ofile" to procure_dataset below (the file
# variable is called ecmwf_file) -- both look like bugs; confirm upstream.
ecmwf_file = 'europe_data_2016_2017.nc'

# pull the years info from ifile
# if the file is multiple years eg europe_data_2010_2011.nc
# then split it into multiple files
years = np.array(Path(ecmwf_file).stem.split('_'))[2:].astype(int)

if not (Path('data') / ecmwf_file).exists():
    # try to get it from UCL servers
    done = procure_dataset(ofile, verbose=True)
    if not done:
        server = ECMWFDataServer()
        print('requesting data ... may take some time')
        server.retrieve({
            "class": "ei",
            "dataset": "interim",
            "date": f"{years[0]}-01-01/to/{years[1]+1}-01-01",  # Time period
            "expver": "1",
            "levtype": "sfc",
            "param":
            "2t",  # Parameters. Here we use 2m Temperature (2t)  See the ECMWF parameter database, at http://apps.ecmwf.int/codes/grib/param-db
            "stream": "oper",
            "type": "an",
            # NOTE(review): "00/60/12/18" is almost certainly a typo for
            # "00/06/12/18" (six-hourly analysis times); confirm.
            "time": "00/60/12/18",
            "step": "0",
            "area":
            "75/-20/10/60",  # Subset or clip to an area, here to Europe. Specify as North/West/South/East in Geographic lat/long degrees. Southern latitudes and Western longitudes must be given as negative numbers.
# NOTE(review): truncated here -- the request dict is never closed in
# this file.
Exemple #33
0
# -*- coding: utf-8 -*-
"""
Created on Thu Mar 19 12:20:00 2020
ERA-Interim api request template
@author: Michael Tadesse
"""
import os

# Work inside the folder where the downloaded netcdf files are collected.
os.chdir('D:\\data\\era_interim\\era_interim_netcdf')

from ecmwfapi import ECMWFDataServer

server = ECMWFDataServer()

# One year (2007) of param 34.128 analyses ("sst" per the target filename),
# four times daily on the 0.75 degree grid, saved as netcdf.
request = {
    "class": "ei",
    "dataset": "interim",
    "date": '2007-01-01/to/2007-12-31',
    "expver": "1",
    "grid": "0.75/0.75",
    "levtype": "sfc",
    "param": '34.128',
    "step": "0",
    "stream": "oper",
    "time": "00:00:00/06:00:00/12:00:00/18:00:00",
    "type": "an",
    "format": "netcdf",
    "target": 'era_interim_sst_2007_.nc',
}
server.retrieve(request)
                # NOTE(review): fragment of a nested loop -- dm, dy,
                # month_name_list, pathto, variable, region, level_type,
                # level, paso and region_limits come from enclosing scopes
                # missing from this file. string is set to "" so
                # string.join(...) is ''-separator concatenation; the
                # eval()-based lookup of the starts_<month> lists is fragile
                # (a dict of lists would avoid eval entirely).
                string=""
                range_dates=""
                range_hdates=""
                range_end=eval(string.join(["len(starts_", month_name_list[dm-1] , ")"]))
                # Build "/"-separated hdate (reforecast year dy) and date
                # (model year 2017) lists from the month's start days.
                for kd in range(1,range_end):
                    range_hdates += string.join([string.join([str(dy),eval(string.join(["starts_" , month_name_list[dm-1] , "[kd-1]"]))]),"/"])
                    range_dates += string.join([string.join([str(2017),eval(string.join(["starts_", month_name_list[dm-1] , "[kd-1]"]))]),"/"])
                    continue 
                # adds final date without the final /
                range_hdates += string.join([str(dy),eval(string.join(["starts_" , month_name_list[dm-1] , "[range_end-1]"]))])
                range_dates += string.join([str(2017),eval(string.join(["starts_",month_name_list[dm-1] , "[range_end-1]"]))])

                file_name=[ pathto, "ECMWF_", variable, "_", region, "_pf_reforecast_", str(dy),"_", month_name_list[dm-1] , ".grib"];

                from ecmwfapi import ECMWFDataServer
                server = ECMWFDataServer()
                server.retrieve({
                            "class": "s2",
                            "dataset": "s2s",
                            "hdate":  range_hdates,
                            "date": range_dates,
                            "origin": "ecmf",
                            "expver": "prod",
                            "levtype": level_type,
                            "level":level,
                            "param": "167",
                            "step": paso,
                            "stream": "enfh",
                            "target": string.join(file_name),
                            "area": region_limits,
                            "time": "00",
# NOTE(review): truncated here -- the request dict is never closed in
# this file.
Exemple #35
0
#!/usr/bin/env python
from ecmwfapi import ECMWFDataServer
server = ECMWFDataServer()

# get all of the Nov-Mar data
# NOTE(review): YEAR is never used below -- the request date is hard-coded
# to "2016-", so every iteration of the YEAR loop requests the same dates;
# confirm whether YEAR should appear in the "date" field.
for YEAR in range(1996, 2017):
    for MONTH in [11, 12, 1, 2, 3]:
        qmonth = str(MONTH).zfill(
            2
        )  # in some months 9 starts are available, i am only using the first 8 for each month.
        if MONTH == 1:
            day_list = [4, 7, 11, 14, 18, 21, 25, 28]
        if MONTH == 2:
            day_list = [1, 4, 8, 11, 15, 18, 22, 25]  #,29]
        if MONTH == 3:
            day_list = [3, 7, 10, 14, 17, 21, 24, 28]  #,31]
        if MONTH == 11:
            day_list = [3, 7, 10, 14, 17, 21, 24, 28]
        if MONTH == 12:
            day_list = [1, 5, 8, 12, 15, 19, 22, 26]  # ,29]

        for DAY in day_list:  # these are the days the fcast starts at.
            server.retrieve({
                "class":
                "s2",
                "dataset":
                "s2s",
                "date":
                "2016-" + str(MONTH).zfill(2) + "-" + str(DAY).zfill(2),
                "expver":
                "prod",
# NOTE(review): truncated here -- the request dict is never closed in
# this file.
Exemple #36
0
#!/usr/bin/env python
# Download ERA-Interim eastward (71.162) and northward (72.162) vapour
# flux analyses, June-December of 2015-2017, over area 11/70/5/76 (N/W/S/E),
# one GRIB file per year.

from ecmwfapi import ECMWFDataServer

server = ECMWFDataServer()

for year in (2015, 2016, 2017):
    request = {
        "class": "ei",
        "dataset": "interim",
        "date": "{y}-06-01/to/{y}-12-31".format(y=year),
        "expver": "1",
        "grid": "0.25/0.25",
        "levtype": "sfc",
        "param": "71.162/72.162",
        "step": "0",
        "stream": "oper",
        "time": "00:00:00/06:00:00/12:00:00/18:00:00",
        "area": "11/70/5/76",
        "type": "an",
        "target": "mf_{y}.grib".format(y=year),
    }
    server.retrieve(request)
def main(argv):

    try:
        opts,argv = getopt.getopt(argv,":h:i:e:s:E:o:g:P:t:f:r:",['help','[outFile]','code','[shapeFile]','start','end','[tr]'])
    except getopt.GetoptError:
        print 'error in parameter for eraInterimDownload. type eraInterimDownload.py -help for more detail on use '
        sys.exit(2)
    
    for opt, arg in opts:
        if opt == '-h':
            print 'eraInterimDownload.py  '
            print '    [mandatory] : '
            print '        --init <dateStart YYYY-MM-DD>'
            print '        --end <dateEnd YY-MM-DD>'
            print '        --shapefile <shapefile> OU -Extend < xmin,ymax,xmax,ymin>'
            print '    [optional] :'
            print '        --typeData  < analyse , forcast> (default forcast)'
            print '        --grid <EraInterim Time> (default 0.75)'
            print '        --outfile <outfolder> (default /home/user/eraInterim)'
            print '        --proxy <True/False> (default False)'
            print '        --temporaryFile <True/False> (default False)'
            print '        --result < TxtFile / RasterFile> default RasterFile'
            print ''
            sys.exit() 
        elif opt in ('-o','--outFolder'):
            oFolder = arg
        elif opt in ('-i','--start'):
            startDate = arg
        elif opt in ('-e','--end'):
            endDate = arg
        elif opt in ('-s','--shapefile'):
            pathToShapefile = arg
        elif opt in ('-E','--tr'):
            extend = arg.split(',')
        elif opt in ('-g','--grid'):
            grid = arg
        elif opt in ('-P','--proxy'):
            proxy = arg
        elif opt in ('-t','--typeData'):
            typeData = arg
        elif opt in ('-f','--temporaryFile'):
            temporaryFile = arg
        elif opt in ('-r','--result'):
            typeOutput = arg
    
    if len(sys.argv) < 7:
        print 'eraInterimDownload.py'
        print '    -i <dateStart YYYY-MM-DD> '
        print '    -e <dateEnd YY-MM-DD>'
        print '    -s <shapefile> '
        print '  or'
        print '    -E < xmin,ymax,xmax,ymin>]'
        print ''
        print '    [-t < analyse , forcast> (default analyse)]'
        print '    [-g <size of grid in 0.125/0.25/0.5/0.75/1.125/1.5/2/2.5/3> (default0.75)]'
        print '    [-o <outfolder> (default /home/user/eraInterim)]'
        print '    [-P <proxy : True/False> (default False)]'
        print '    [-f <temporaryFile : True/False> (default False)]'
        print '    [-r <resultOutput : TxtFile/RasterFile> (default RasterFile)]'
        print ''
        print 'For help on interimCode -help'
        sys.exit(2)
        
    try:
        oFolder
    except NameError:
        oFolder = os.path.expanduser('~')
        oFolder = oFolder + '/eraInterim'
        print "output folder not precised : downloaded eraInterim images on "+oFolder
    
    # verification du folder/or creation if not exists
    utils.checkForFolder(oFolder) 
        
    try:
        startDate
    except NameError:
        exit ('init Date not precised')
    # verification si sartDate est une date
    startDate=utils.checkForDate(startDate) 
    
    try:
        endDate
    except NameError:
        exit ('end Date not specified')
    # verification si sartDate est une date
    endDate=utils.checkForDate(endDate) 
    
    if (startDate>endDate):
        exit('startDate could not be greater than endDate')

    today=date.today()
    limitDate = today - timedelta(days=31*3)
    limitDate=date(limitDate.year,limitDate.month,calendar.monthrange(limitDate.year,limitDate.month)[1])
    if (startDate>limitDate or endDate>limitDate ):
        exit('date could not exceed 2014-12-31')
    
    try:
        pathToShapefile
    except NameError:
        try:
            extend
        except NameError:
            exit ('no Area of interest have been specified. please use -shp or -tr to declare it')
    
    if 'pathToShapefile' in locals():
        extendArea=utils.convertShpToExtend(pathToShapefile)
    else:
        extendArea=extend

    extendArea=utils.checkForExtendValidity(extendArea)
      
    try:
        typeData
    except NameError:
        typeData='analyse'

    try:
        grid
    except NameError:
        grid='0.75'
    grid=utils.checkForGridValidity(grid)
            
    try:
        proxy
    except NameError:
        proxy=False

    #Proxy parameteres needed
    if(proxy):
        login = raw_input('login proxy : ')
        pwd = raw_input('password proxy :  : ')
        site = raw_input('site (surf.cnes.fr) : ')
        os.environ["http_proxy"] = "http://%s:%s@%s:8050"%(login,pwd,site)
        os.environ["https_proxy"] = "http://%s:%s@%s:8050"%(login,pwd,site)
    
    try:
        temporaryFile
    except NameError:
        temporaryFile=False
    
    try:
        typeOutput
    except NameError:
        typeOutput='RasterFile'
    """----------------------------------------"""
    
    
    #Create param if first Time
    if (not utils.checkForFile(os.path.expanduser('~')+'/.ecmwfapirc')):
        print ('for first connexion you have to define yout key and password on ecmwf')
        print ('cf  https://apps.ecmwf.int/auth/login/')
        print ('')
        u = raw_input('user (mail) : ')
        k = raw_input('keys : ')
        utils.createParamFile(os.path.expanduser('~')+'/.ecmwfapirc',u,k)
        
    delta = endDate - startDate
    nbDays = delta.days + float(delta.seconds) / 86400 + 1

    #--------------------------On charge les rasters
    if typeData == "analyse":
        time = ['00',"12","06","18"]
        step = []
        nbBandByDay=len(time)
    else:
        time = ['00',"12"]
        step = [3,6,9,12]
        nbBandByDay=(12*len(time))/(len(step))+1
    server = ECMWFDataServer()
    
    
    """ altitude de la grille EraInterim """
    # Only Forcast possible
    codeGeopot= [129]
    GeoFile = oFolder+"/129"+'_'+startDate.strftime('%Y%m%d')+'_'+endDate.strftime('%Y%m%d')+'.nc'
    struct=utils.create_request_sfc(startDate, endDate, time, step, grid, extendArea, codeGeopot, GeoFile,typeData)
    server.retrieve(struct)
    Geo = utils.convertNETCDFtoDicArray(GeoFile)
    Geo = utils.convertGeoToAlt(Geo)
    #un peu inutile car ne change pas ... mais bon! 
    Geo = utils.computeDailyMax(Geo, nbBandByDay, typeData)
    
    """ Vitesse du vent """
    codeVent= [165,166]
    vent={}
    ventFile=[]
    for i in codeVent:
        ventFile.append(oFolder+"/"+str(i)+'_'+startDate.strftime('%Y%m%d')+'_'+endDate.strftime('%Y%m%d')+'.nc')
        struct=utils.create_request_sfc(startDate, endDate, time, step, grid, extendArea, [i], ventFile[-1],typeData)
        server.retrieve(struct)
        vent[i]=utils.convertNETCDFtoDicArray(ventFile[-1])
    
     
    vent = utils.fusVentFromDict(vent,nbBandByDay)
    vent=utils.computeDailyMean(vent,nbBandByDay,typeData)

    """ Humidité relative """
    
    codePressure= [134]
    pressureFile = oFolder+"/134"+'_'+startDate.strftime('%Y%m%d')+'_'+endDate.strftime('%Y%m%d')+'.nc'

    struct=utils.create_request_sfc(startDate, endDate, time, step, grid, extendArea, codePressure, pressureFile,typeData)
    server.retrieve(struct)
    pressure = utils.convertNETCDFtoDicArray(pressureFile)
    #oulalal c'est moche
    pressureMean = utils.convertPaToKgPa(pressure)
    pressure = utils.convertToHectoPascal(pressure)
    pressureMean = utils.computeDailyMean(pressureMean,nbBandByDay,typeData)
    
    codeT2m= [167]
    T2mFile = oFolder+"/167"+'_'+startDate.strftime('%Y%m%d')+'_'+endDate.strftime('%Y%m%d')+'.nc'

    struct=utils.create_request_sfc(startDate, endDate, time, step, grid, extendArea, codeT2m, T2mFile,typeData)
    server.retrieve(struct)
    T2m = utils.convertNETCDFtoDicArray(T2mFile)

    Tmean = utils.computeDailyMean(T2m, nbBandByDay, typeData)
    Tmax = utils.computeDailyMax(T2m,nbBandByDay)
    Tmin = utils.computeDailyMin(T2m,nbBandByDay)
    
    T2m = utils.convertKToD(T2m)
    #T2m = utils.computeDailyMean(T2m,nbBandByDay,typeData)
    
    codeDewP= [168]
    DewPFile = oFolder+"/168"+'_'+startDate.strftime('%Y%m%d')+'_'+endDate.strftime('%Y%m%d')+'.nc'

    struct=utils.create_request_sfc(startDate, endDate, time, step, grid, extendArea, codeDewP, DewPFile,typeData)
    server.retrieve(struct)
    DewP = utils.convertNETCDFtoDicArray(DewPFile)
    DewP = utils.convertKToD(DewP)
    #DewP = utils.computeDailyMean(DewP,nbBandByDay,typeData)

    humidity = utils.ComputeHumidityFromPT(pressure,T2m,DewP)
    #humidity = utils.computeDailyMean(humidity,nbBandByDay,typeData)
    Hmax = utils.computeDailyMax(humidity,nbBandByDay)
    Hmin = utils.computeDailyMin(humidity,nbBandByDay)
    Hmean = utils.computeDailyMean(humidity,nbBandByDay,typeData)
    

    """ ONLY FORCAST FOR THESE VAR"""
    typeData="forcast"
    time = ['00',"12"]
    step = [3,6,9,12]
    nbBandByDay=(12*len(time))/(len(step))+1
    
    """ Rayonnement global incident journalier """
    # Only Forcast possiblet
    codeRay= [176]
    RayFile = oFolder+"/176"+'_'+startDate.strftime('%Y%m%d')+'_'+endDate.strftime('%Y%m%d')+'.nc'
    struct=utils.create_request_sfc(startDate, endDate, time, step, grid, extendArea, codeRay, RayFile,typeData)
    server.retrieve(struct)
    Ray = utils.convertNETCDFtoDicArray(RayFile)
    Ray = utils.computeDailyMean(Ray,nbBandByDay,typeData)
    Ray = utils.convertWToMJ(Ray)
    
    """ downward surface solar radiation """
    # Only Forcast possiblet
    codeRay= [169]
    RayFileDownShort = oFolder+"/169"+'_'+startDate.strftime('%Y%m%d')+'_'+endDate.strftime('%Y%m%d')+'.nc'
    struct=utils.create_request_sfc(startDate, endDate, time, step, grid, extendArea, codeRay, RayFileDownShort,typeData)
    server.retrieve(struct)
    RayDownShort = utils.convertNETCDFtoDicArray(RayFileDownShort)
    RayDownShort = utils.computeDailyMean(RayDownShort,nbBandByDay,typeData)
    RayDownShort = utils.convertWToMJ(RayDownShort)
    
    """ downward surface thermal radiation """
    # Only Forcast possiblet
    codeRay= [175]
    RayFileDownLong = oFolder+"/175"+'_'+startDate.strftime('%Y%m%d')+'_'+endDate.strftime('%Y%m%d')+'.nc'
    struct=utils.create_request_sfc(startDate, endDate, time, step, grid, extendArea, codeRay, RayFileDownLong,typeData)
    server.retrieve(struct)
    RayDownLong = utils.convertNETCDFtoDicArray(RayFileDownLong)
    RayDownLong = utils.computeDailyMean(RayDownLong,nbBandByDay,typeData)
    RayDownLong = utils.convertWToMJ(RayDownLong)

    """ Evaporation """
    codeEvap= [182]
    EvapFile = oFolder+"/182"+'_'+startDate.strftime('%Y%m%d')+'_'+endDate.strftime('%Y%m%d')+'.nc'
    struct=utils.create_request_sfc(startDate, endDate, time, step, grid, extendArea, codeEvap, EvapFile,typeData)
    server.retrieve(struct)
    Evap = utils.convertNETCDFtoDicArray(EvapFile)
    Evap = utils.computeDailyMean(Evap,nbBandByDay,typeData)
    #Evap = utils.convertMToMm(Evap)
    
    
    """ Precipitation """
    #NOT NEEDED FOR ETO BUT Exported
    
    utils.checkForTimeValidity(time)
    utils.checkForStepValidity(step,typeData)
    codePrecipitation= [228]
    precipitationFile = oFolder+"/228"+'_'+startDate.strftime('%Y%m%d')+'_'+endDate.strftime('%Y%m%d')+'.nc'

    struct=utils.create_request_sfc(startDate, endDate, time, step, grid, extendArea, codePrecipitation, precipitationFile)
    server.retrieve(struct)
    precipitation = utils.convertNETCDFtoDicArray(precipitationFile)
    precipitation=utils.computeDailyAccumulation(precipitation,nbBandByDay,typeData)
    
    
    """ Grid of latitude [0],longitude[1] in WGS84"""
    geoTransform=utils.getGeoTransform(RayFile)
    shape=Ray[0].shape
    latlon = utils.getCentroidLatFromArray(shape,geoTransform,grid)

    """ --------------------- Compute ET0---------------------- """
    
    ET0_0={}
    ET0_1={}
    ET0_2={}
    DoyList=[]
    DateList=[]
    
    for i in range(0,int(nbDays)):
        #jour Julien
        J = utils.doy(startDate,i)
        dateEnCours=startDate+ timedelta(days=i)
        DateList.append(dateEnCours)
        DoyList.append(J)
        Hmax[i] = np.where(Hmax[i]>100,100,Hmax[i])
        
        # --- Constants ---# 
        #Solar constant
        Gsc = 0.0820 # [MJ.m-2.min-1]
        #Albedo - grass reference crop
        a = 0.23
        #Ratio of molecular weight of water vapor/dry air
        epsilon=0.622 
        #Latente heat of vaporisation
        Lv=2.45 # [MJ.kg-1]
        # Specific heat at constant pressure
        Cp= 1.013e-3; # [MJ.kg-1.°C-1]
        # Stefan-Boltzmann constant [MJ.K-4.m-2.day-1]
        StefBoltz=4.903e-9; #FAO
        
        # --- Equations ---# 
        # Psychometric constant [kPa.°C-1]
        cte_psy = (Cp*pressureMean[i])/(epsilon*Lv) # Equation 8 Chap 3 FAO
        #Mean sturation vapor presure [kPa]
        #es = (utils.esat(pressureMean[i],Tmax[i]) + utils.esat(pressureMean[i],Tmin[i]))/2;    #Equation 12 Chap 3
        es = (utils.eocalc(Tmax[i]-273.16)+utils.eocalc(Tmin[i]-273.16))/2    #Equation 12 Chap 3
        # Slope of saturation vapour pressure curve at air temperature [kPa.°C-1]
        delta = utils.delta_calc(Tmean[i]);                    # Equation 13 Chap 3
        # Actual vapour pressure derived from relative humidity [kPa]
        #ea = (utils.esat(pressureMean[i]/100,Tmax[i]-273.16)*(Hmax[i]/100) + utils.esat(pressureMean[i]/100,Tmin[i]-273.16)*(Hmin[i]/100))/2;      # Equation 17 Chap 3
        ea = (utils.eocalc(Tmax[i]-273.16)*(Hmax[i]/100)+utils.eocalc(Tmin[i]-273.16)*(Hmin[i]/100))/2
        # Conversion of latitude from degrees to radians
        phi = (np.pi/180)* latlon[1];     
        # Relative distance Earth-Sun
        dr = 1 + 0.033*math.cos(2*math.pi*J/365);         # Equation 23 Chap 3
        # Solar declination
        d = 0.4093*math.sin(2*math.pi*J/365 - 1.39);      # Equation 24 Chap 3
        # sunset hour angle
        ws = np.arccos(-np.tan(phi)*math.tan(d));                # Equation 25 Chap 3
        
        """Classical calculation FAO """
        
        # Extraterestrial radiation for daily periods
        Ra = (24.*60/np.pi)*Gsc*dr*(ws*np.sin(phi)*np.sin(d) + np.cos(phi)*np.cos(d)*np.sin(ws))    # Equation 21 Chap 3
        # Clear sky solar radiation [MJ.m-2.day-1]
        Rso = (0.75 + 2e-5*Geo[i])*Ra;                 # Equation 37 Chap 3
        # Net solar radiation [MJ.m-2.day-1]
        Rns = (1 - a)*RayDownShort[i];                          # Equation 38 Chap 3
        #
        f=(1.35*(np.fmin(RayDownShort[i]/Rso,1)) - 0.35);
        # Net longwave radiation [MJ.m-2.day-1]
        Rnl = StefBoltz*((Tmax[i]**4 + Tmin[i]**4)/2)*(0.34 - 0.14*np.sqrt(ea))*f; # Equation 39 Chap 3
        # Net Radiation [MJ.m-2.day-1]
        Rn =Rns - Rnl;                              # Equation 40 Chap 3
        G = 0;                                      # Equation 42 Chap 3
        ET0_0[i] = ( 0.408*delta*( Rn-G )+ cte_psy*( 900/(Tmean[i] + 273) )*(es - ea)*vent[i] )/( delta + cte_psy*(1 + 0.34*vent[i]) );  # Equation 6 Chap 4
        
        
        """ Considering product 176 = RN these equations are not needed """
        Rn = Ray[i]
        # Soil heat flux at daily scale
        G = 0;                                      # Equation 42 Chap 3
        ET0_1[i] = ( 0.408*delta*( Rn-G )+ cte_psy*( 900/(Tmean[i] + 273) )*(es - ea)*vent[i] )/( delta + cte_psy*(1 + 0.34*vent[i]) );  # Equation 6 Chap 4
    
        """ Considering product 176 Evaporation """
        ET0_2[i] = Evap[i]
    
    if typeOutput=='RasterFile':
        #On ecrit le fichier ET0 
        geoTransform=utils.getGeoTransform(RayFile)
        shape=Ray[0].shape
        utils.writeTiffFromDicoArray(ET0_0,oFolder+"/tmp.tif",shape,geoTransform)
        if 'pathToShapefile' in locals():
            utils.reprojRaster(oFolder+"/tmp.tif", oFolder+"/ET0.tif",shape, pathToShapefile)
            os.remove(oFolder+"/tmp.tif")
        else : 
            utils.moveFile(oFolder+"/tmp.tif", oFolder+"/ET0.tif")

        #On écrit le fichier Precipitation
        geoTransform=utils.getGeoTransform(precipitationFile)
        shape=precipitation[0].shape
        utils.writeTiffFromDicoArray(precipitation,oFolder+"/tmp.tif",shape,geoTransform)
    
        if 'pathToShapefile' in locals():
            utils.reprojRaster(oFolder+"/tmp.tif", oFolder+"/precipitationAcc.tif",shape, pathToShapefile)
            os.remove(oFolder+"/tmp.tif")
        else : 
            utils.moveFile(oFolder+"/tmp.tif", oFolder+"/precipitationAcc.tif")
        
    else:
        #On ecrit le fichier au format Txt
        proj=utils.getProj(pathToShapefile)
        utils.WriteTxtFileForEachPixel(oFolder,ET0_0,ET0_1,ET0_2,DateList,DoyList,Ray,RayDownShort,RayDownLong,Tmean,Tmax,Tmin,Hmean,Hmax,Hmin,vent,precipitation,pressureMean,Geo,latlon,proj)
        utils.WritePointList(oFolder,latlon,proj)
    
    """ ------------------------------------------- """
    if(temporaryFile):
        #On ecrit le fichier latlon 
        geoTransform=utils.getGeoTransform(GeoFile)
        shape=Geo[0].shape
        utils.writeTiffFromDicoArray(Geo,oFolder+"/tmp.tif",shape,geoTransform)
    
        if 'pathToShapefile' in locals():
            utils.reprojRaster(oFolder+"/tmp.tif", oFolder+"/altitude.tif",shape, pathToShapefile)
            os.remove(oFolder+"/tmp.tif")
        else : 
            utils.moveFile(oFolder+"/tmp.tif", oFolder+"/altitude.tif")
    
        #On ecrit le fichier latlon 
        geoTransform=utils.getGeoTransform(RayFile)
        shape=Ray[0].shape
        utils.writeTiffFromDicoArray(latlon,oFolder+"/tmp.tif",shape,geoTransform)
    
        if 'pathToShapefile' in locals():
            utils.reprojRaster(oFolder+"/tmp.tif", oFolder+"/latLon.tif",shape, pathToShapefile)
            os.remove(oFolder+"/tmp.tif")
        else : 
            utils.moveFile(oFolder+"/tmp.tif", oFolder+"/latLon.tif")
        
        
        #On ecrit le fichier vent --> a enlever
        geoTransform=utils.getGeoTransform(ventFile[-1])
        shape=vent[0].shape
        utils.writeTiffFromDicoArray(vent,oFolder+"/tmp.tif",shape,geoTransform)
    
        if 'pathToShapefile' in locals():
            utils.reprojRaster(oFolder+"/tmp.tif", oFolder+"/ventMean.tif",shape, pathToShapefile)
            os.remove(oFolder+"/tmp.tif")
        else : 
            utils.moveFile(oFolder+"/tmp.tif", oFolder+"/ventMean.tif")
        
        #On ecrit le fichier Rhmin
        geoTransform=utils.getGeoTransform(pressureFile)
        shape=pressureMean[0].shape
        utils.writeTiffFromDicoArray(pressureMean,oFolder+"/tmp.tif",shape,geoTransform)
    
        if 'pathToShapefile' in locals():
            utils.reprojRaster(oFolder+"/tmp.tif", oFolder+"/pressureMean.tif",shape, pathToShapefile)
            os.remove(oFolder+"/tmp.tif")
        else : 
            utils.moveFile(oFolder+"/tmp.tif", oFolder+"/pressureMean.tif")
        
        #On ecrit le fichier Rhmax
        geoTransform=utils.getGeoTransform(pressureFile)
        shape=Hmax[0].shape
        utils.writeTiffFromDicoArray(Hmax,oFolder+"/tmp.tif",shape,geoTransform)
    
        if 'pathToShapefile' in locals():
            utils.reprojRaster(oFolder+"/tmp.tif", oFolder+"/humidityMax.tif",shape, pathToShapefile)
            os.remove(oFolder+"/tmp.tif")
        else : 
            utils.moveFile(oFolder+"/tmp.tif", oFolder+"/humidityMax.tif")
        
        #On ecrit le fichier Rhmin
        geoTransform=utils.getGeoTransform(pressureFile)
        shape=Hmin[0].shape
        utils.writeTiffFromDicoArray(Hmin,oFolder+"/tmp.tif",shape,geoTransform)
    
        if 'pathToShapefile' in locals():
            utils.reprojRaster(oFolder+"/tmp.tif", oFolder+"/humidityMin.tif",shape, pathToShapefile)
            os.remove(oFolder+"/tmp.tif")
        else : 
            utils.moveFile(oFolder+"/tmp.tif", oFolder+"/humidityMin.tif")
    
        #On ecrit le fichier Tmax
        geoTransform=utils.getGeoTransform(T2mFile)
        shape=Tmax[0].shape
        utils.writeTiffFromDicoArray(Tmax,oFolder+"/tmp.tif",shape,geoTransform)
    
        if 'pathToShapefile' in locals():
            utils.reprojRaster(oFolder+"/tmp.tif", oFolder+"/TemperatureMax.tif",shape, pathToShapefile)
            os.remove(oFolder+"/tmp.tif")
        else : 
            utils.moveFile(oFolder+"/tmp.tif", oFolder+"/TemperatureMax.tif")
    
        #On ecrit le fichier Tmin
        geoTransform=utils.getGeoTransform(T2mFile)
        shape=Tmin[0].shape
        utils.writeTiffFromDicoArray(Tmin,oFolder+"/tmp.tif",shape,geoTransform)
    
        if 'pathToShapefile' in locals():
            utils.reprojRaster(oFolder+"/tmp.tif", oFolder+"/TemperatureMin.tif",shape, pathToShapefile)
            os.remove(oFolder+"/tmp.tif")
        else : 
            utils.moveFile(oFolder+"/tmp.tif", oFolder+"/TemperatureMin.tif")
        
        #On ecrit le fichier Rayonnement
        geoTransform=utils.getGeoTransform(RayFile)
        shape=Ray[0].shape
        utils.writeTiffFromDicoArray(Ray,oFolder+"/tmp.tif",shape,geoTransform)
    
        if 'pathToShapefile' in locals():
            utils.reprojRaster(oFolder+"/tmp.tif", oFolder+"/RayonnementMean.tif",shape, pathToShapefile)
            os.remove(oFolder+"/tmp.tif")
        else : 
            utils.moveFile(oFolder+"/tmp.tif", oFolder+"/RayonnementMean.tif")
    
    #on supprime les fichier intermédiare !
    os.remove(pressureFile)
    os.remove(T2mFile)
    os.remove(DewPFile)
    os.remove(RayFile)
    os.remove(GeoFile)
    for i in ventFile:
        os.remove(i)
    os.remove(precipitationFile)
    os.remove(EvapFile)
    os.remove(RayFileDownLong)
    os.remove(RayFileDownShort)
#!/usr/bin/env python
"""
Download ERA-Interim example dataset using the ECMWF Python API
(requires API key).

See https://software.ecmwf.int/wiki/display/WEBAPI/Access+ECMWF+Public+Datasets

"""
from ecmwfapi import ECMWFDataServer

server = ECMWFDataServer()

# MARS "date" list: the first day of every month from January 1979 through
# January 2017, as "/"-separated YYYYMMDD entries.  Building it
# programmatically replaces the original ~450-entry hard-coded literal
# (which had been wrapped across lines and was unmaintainable).
dates = "/".join("%d%02d01" % (year, month)
                 for year in range(1979, 2017)
                 for month in range(1, 13))
dates += "/20170101"

server.retrieve({
    "class": "ei",
    "dataset": "interim",
    "date": dates,
    "expver": "1",
    "grid": "0.75/0.75",
    "levtype": "sfc",
    "param": "134.128/165.128/166.128/167.128",
    "stream": "moda",          # monthly means of daily means
    "type": "an",
    'format': "netcdf",
    'target': "ERA-Interim-MonthlyAvg-TUVP.nc"
})
Exemple #39
0
#!/usr/bin/env python
"""Download one day (2015-01-01) of ERA-Interim surface analyses as NetCDF."""
from ecmwfapi import ECMWFDataServer

server = ECMWFDataServer()

# NOTE: the original request also passed 'levelist': "46/to/60" together with
# 'levtype': "sfc".  A level list is only meaningful for model/pressure-level
# data, so it has been removed from this surface-field request.
server.retrieve({
    "class": "ei",
    "dataset": "interim",
    "date": "2015-01-01",
    "expver": "1",
    "levtype": "sfc",
    "param": "134.128/167.128",
    "step": "0",
    "stream": "oper",
    "target": "20150101_sfc.nc",
    "format": "netcdf",
    "time": "00/06/12/18",      # all four analysis times
    "type": "an",
    "area": "75/-20/30/70",     # N/W/S/E
    "grid": "0.36/0.36",
})
Exemple #40
0
#!/usr/bin/env python26
from ecmwfapi import ECMWFDataServer
import os


def leap_year(year):
    """Return 1 if *year* is a Gregorian leap year, else 0.

    Fixes the original logic, which lacked the century rule and therefore
    wrongly reported years such as 1900 and 2100 as leap years.
    """
    return int(year % 4 == 0 and (year % 100 != 0 or year % 400 == 0))


server = ECMWFDataServer()

mon_name = [
    '01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12'
]
ord_mon = [
    '31', '28', '31', '30', '31', '30', '31', '31', '30', '31', '30', '31'
]
leap_mon = [
    '31', '29', '31', '30', '31', '30', '31', '31', '30', '31', '30', '31'
]

for year in range(1989, 2014):
    for mon in range(1, 13):
        # if (year<2008 and mon<10):
        #  continue
from ecmwfapi import ECMWFDataServer
server = ECMWFDataServer()
server.retrieve({
    "class": "ei",
    "dataset": "interim",
    "date": "2006-06-01/to/2010-09-30",
    "expver": "1",
    "grid": "0.75/0.75",
    "levtype": "sfc",
    "param": "59.128/134.128/143.128/146.128/147.128/151.128/159.128/165.128/166.128/167.128/168.128/182.128/201.128/202.128/228.128/235.128/244.128",
    "step": "6",
    "stream": "oper",
    "time": "12:00:00",
    "type": "fc",
    "area": "22/-12/8/12",
    "format": "netcdf",
    "target": "/localscratch/wllf030/cornkle/ERA-I/daily_2006-2010_12UTCsrfc_forecast.nc"
})
# convert the args to useful info
lonmin = args.bound[1]
lonmax = args.bound[3]
latmin = args.bound[2]
latmax = args.bound[0]
outfile = args.outfile[0]
syear = args.syear[0]
eyear = args.eyear[0]
grid = args.grid[0]

# load module
from ecmwfapi import ECMWFDataServer

# run
server = ECMWFDataServer()
server.retrieve({
    "class": "ei",
    "dataset": "interim",
    "date": "%d-01-01/to/%d-12-31" % (syear, eyear),
    "expver": "1",
    "grid": "%f/%f" % (grid, grid),
    "levtype": "sfc",
    "param": "71.162/72.162",
    "step": "0",
    "stream": "oper",
    "time": "00:00:00/06:00:00/12:00:00/18:00:00",
    "type": "an",
    "format": "netcdf",  # delete this if for some reason you want GRIB...
    "area":
    "%f/%f/%f/%f" % (latmax, lonmin, latmin, lonmax),  # NORTH/WEST/SOUTH/EAST
Exemple #43
0
    print " python get_erai.py <country>"
    print "where <country> can be;"
    print " Australia"
    print " USA"
    sys.exit

# Configure the retrieval region, output directory and year span from the
# country name given on the command line.  `erai_info` is assumed to be a
# partially filled MARS request dict defined earlier in the file.
if sys.argv[1].lower()=="australia":
    erai_info["area"] = "-10/110/-45/155"   # N/W/S/E bounding box
    target_directory = "/mnt/OzFlux/ERAI/"
    start_year = 2014
    end_year = 2016
elif sys.argv[1].lower()=="usa":
    erai_info["area"] = "70/229.5/30/300"   # N/W/S/E bounding box
    target_directory = "/mnt/AmeriFlux/ERAI/"
    start_year = 1991
    end_year = 2016
else:
    print "Unrecognised country option entered on command line"
    print "Valid country options are:"
    print " australia"
    print " usa"
    sys.exit()

server = ECMWFDataServer()
# NOTE(review): range(start_year, end_year, 1) stops at end_year - 1, so the
# final year is never downloaded -- confirm whether end_year is meant to be
# inclusive.
year_list = range(start_year,end_year,1)
for year in year_list:
    print " Processing year: ",str(year)
    # Each request spans Jan 1 of `year` to Jan 1 of the following year, so
    # consecutive yearly files overlap by one day.
    erai_info["date"] = str(year)+"-01-01/to/"+str(year+1)+"-01-01"
    erai_info["target"] = target_directory+"ERAI_"+str(year)+".nc"
    server.retrieve(erai_info)
Exemple #44
0
                'significant_height_of_combined_wind_waves_and_swell'
                ],
            'area': [lat_st, lon_st, lat_en, lon_en],
            # 'year': '2021',
            # 'month': ['01', '02'],
            # 'day' : [f'{i:02d}' for i in range(1, 32)],
        },
        Path(dir_save) / file_nc_name
    )




# Disabled legacy path: the older ECMWFDataServer route for the same
# download.  Kept for reference only (`if False` makes it dead code).
# `common`, `l` (logger) and `file_date_prefix` are defined earlier in
# the file.
if False: # old
    from ecmwfapi import ECMWFDataServer
    server = ECMWFDataServer()
    l.info('part 1')
    # Analysis ('an') fields at the four synoptic times, step 0.
    server.retrieve({**common, **{
        'step': '0',
        'time': '00:00/06:00/12:00/18:00',
        'type': 'an',
        'target': file_date_prefix + 'analysis.nc',
        }})
    l.info('part 2')
    # Forecast ('fc') fields from the 00 and 12 UTC runs at steps 3 and 9.
    server.retrieve({**common, **{
        'step': '3/9',  # '3/6/9/12'
        'time': '00:00/12:00',
        'type': 'fc',
        'target': file_date_prefix + 'forecast.nc',
        }})
Exemple #45
0
#!/usr/bin/env python
"""Download one ERA-20C surface analysis for today's day/month in 2010."""
from ecmwfapi import ECMWFDataServer
import datetime

# Read the clock once so day and month always describe the same instant
# (the original called today() twice, which could straddle midnight), and
# let strftime do the zero-padding instead of manual "< 10" checks.
today = datetime.datetime.today()
day = today.strftime("%d")
month = today.strftime("%m")

server = ECMWFDataServer()
server.retrieve({
    "class": "e2",
    "dataset": "era20c",
    "date": "2010-" + month + "-" + day,
    "expver": "1",
    "levtype": "sfc",
    "param": "15.128/134.128/137.128/167.128",
    "stream": "oper",
    "time": "12:00:00",
    "type": "an",
    "target": "mars.grib",
})
Exemple #46
0
# -*- coding: utf-8 -*-
"""
Created on Mon Aug 28 16:54:48 2017

@author: sebastian
"""

#!/usr/bin/env python
from ecmwfapi import ECMWFDataServer

server = ECMWFDataServer()

# ERA-Interim surface analysis (params 165.128/166.128) at 12 UTC for
# 2015-2016 over a small 1x1 degree box, delivered as NetCDF.
request = {
    'stream': "oper",
    'levtype': "sfc",
    'param': "165.128/166.128",
    'dataset': "interim",
    'step': "0",
    'grid': "0.5/0.5",
    'time': "12",
    'date': "2015-01-01/to/2016-12-31",
    'type': "an",
    'class': "ei",
    'area': "50/8/49/9",
    'format': "netcdf",
    'target': "/home/sebastian/analysis_raw.nc",
}

server.retrieve(request)
#!/usr/bin/python
"""Fetch a global ERA-Interim pressure-level analysis for one date/time.

Usage: get_erainterim.py <idate: YYYYMMDDHH> <grid> <fname>
"""
from ecmwfapi import ECMWFDataServer
import sys

server = ECMWFDataServer()

# Fail early with a clear message instead of an IndexError when arguments
# are missing.  Exceptions are raised with the call form ValueError(...):
# the original `raise ValueError, "..."` statement form is Python-2-only
# and a SyntaxError on Python 3.
if len(sys.argv) < 4:
    raise ValueError("usage: get_erainterim.py idate grid fname")

idate = sys.argv[1]
if len(idate) != 10:
    raise ValueError("get_erainterim.py idate grid")
grid = sys.argv[2]
fname = sys.argv[3]

server.retrieve({
        'dataset' : "interim",
        'levtype' : 'pl',
        'date'    : idate[:8],    # YYYYMMDD part
        'time'    : idate[-2:],   # HH part
        'levelist': "1000/975/950/925/900/875/850/825/800/775/750/700/650/600/550/500/450/400/350/300/250/225/200/175/150/125/100/70/50/30/20/10/7/5/3/2/1",
        'type'    : "an",
        'param'   : 'z/u/v/t/q/o3/clwc/ciwc',
        'grid'    : '{0}/{0}'.format(grid),
        'area'    : "90/0/-90/360",   # whole globe, N/W/S/E
        'target'  : fname
        })

Exemple #48
0
#!/usr/bin/env python26
from ecmwfapi import ECMWFDataServer
import os


def leap_year(year):
    """Return 1 if *year* is a Gregorian leap year, else 0.

    Fixes the original logic, which lacked the century rule and therefore
    wrongly reported years such as 1900 and 2100 as leap years.
    """
    return int(year % 4 == 0 and (year % 100 != 0 or year % 400 == 0))


server = ECMWFDataServer()

variables = 'Soil temperature level 1'
out_name = 'ST1'
mon_name = [
    '01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12'
]
ord_mon = [
    '31', '28', '31', '30', '31', '30', '31', '31', '30', '31', '30', '31'
]
leap_mon = [
    '31', '29', '31', '30', '31', '30', '31', '31', '30', '31', '30', '31'
]

for year in range(2014, 2015):
    for mon in range(1, 6):
from ecmwfapi import ECMWFDataServer
import numpy as np
import pdb

# Build the MARS "date" list: the first of every month for 1979-2017.
# Each per-year fragment ends with "/"; concatenate them all and drop the
# final trailing separator.
stri = "".join(
    "{0}0101/{0}0201/{0}0301/{0}0401/{0}0501/{0}0601/{0}0701/{0}0801/{0}0901/{0}1001/{0}1101/{0}1201/".format(yy)
    for yy in np.arange(1979, 2018, 1)
)[:-1]

server = ECMWFDataServer()

# Monthly-mean ('mnth' stream) surface analysis fields over an African
# window, written as NetCDF.
server.retrieve({
    "class": "ei",
    "dataset": "interim",
    "date": stri,
    "expver": "1",
    "grid": "0.75/0.75",
    "time": "00:00:00/06:00:00/12:00:00/18:00:00",
    "levtype": "sfc",
    "param": "81.162/134.128/137.128/164.128/165.128/166.128/167.128/168.128/207.128",
    "stream": "mnth",
    "type": "an",
    "step" : 0,
    "area": "34/-23/-42/55",
    "format": "netcdf",
    "target": "/localscratch/wllf030/cornkle/ERA-I/monthly/monthly_synop_1979-2017_srfc_full.nc"
})

#!/usr/bin/env python
from ecmwfapi import ECMWFDataServer

server = ECMWFDataServer()

# Hourly (00-23 UTC) ERA5 surface analyses of parameter 165.128 for
# January 2008 over a European window, delivered as NetCDF.
request = {
    "class": "ea",
    "dataset": "era5",
    "date": "2008-01-01/to/2008-01-31",
    "expver": "1",
    "levtype": "sfc",
    "param": "165.128",
    "grid": "0.25/0.25",
    "area": "72.5/-22/26.5/45.5",
    "stream": "oper",
    "time": "00:00:00/01:00:00/02:00:00/03:00:00/04:00:00/05:00:00/06:00:00/07:00:00/08:00:00/09:00:00/10:00:00/11:00:00/12:00:00/13:00:00/14:00:00/15:00:00/16:00:00/17:00:00/18:00:00/19:00:00/20:00:00/21:00:00/22:00:00/23:00:00",
    "type": "an",
    "format": "netcdf",
    "target": "ERA5_10mUwindanalysis_2008.nc",
}
server.retrieve(request)
from ecmwfapi import ECMWFDataServer
import xarray as xr
import numpy as np

# Output path for the daily 12 UTC pressure-level extraction.  Renamed from
# `file`, which shadowed the Python builtin.
# NOTE(review): the file name says 2004-2014 but the request below spans
# 1983-03-01 to 2014-09-30 -- confirm which is intended.
target_file = "/localscratch/wllf030/cornkle/ERA-I/daily_2004-2014_pl.nc"
server = ECMWFDataServer()
server.retrieve({
    "class": "ei",
    "dataset": "interim",
    "date": "1983-03-01/to/2014-09-30",
    "expver": "1",
    "grid": "0.75/0.75",
    "levtype": "pl",
    "levelist": "600/650/700/850/925/950",
    "param": "60.128/130.128/131.128/132.128/133.128/135.128/155.128/157.128",
    "step": "0",
    "stream": "oper",
    "time": "12:00:00",
    "type": "an",
    "area": "22/-15/3/15",   # N/W/S/E
    "format": "netcdf",
    "target": target_file
})


Exemple #52
0
#!/usr/bin/env python
"""Download one day of ERA-Interim model-level fields (levels 46-60)."""
from ecmwfapi import ECMWFDataServer

import sys

# The date (e.g. "2006-01-15") comes from the command line.  Renamed from
# `str`, which shadowed the builtin str type.
date_arg = sys.argv[1]

server = ECMWFDataServer()
server.retrieve({
    "class": "ei",
    "dataset": "interim",
    "date": date_arg,
    "expver": "1",
    "levelist": "46/to/60",
    "levtype": "ml",
    "param": "130.128/131.128/132.128/138.128",
    "step": "0",
    "stream": "oper",
    # Output goes into the hard-coded "2006/" directory, named YYYYMMDD_ml.nc.
    "target": "2006/" + date_arg.replace('-', '') + "_ml.nc",
    "format": "netcdf",
    "time": "00/06/12/18",
    "type": "an",
    "area": "75/-20/30/70",   # N/W/S/E
    "grid": "0.36/0.36",
})
Exemple #53
0
#!/usr/bin/env python
from ecmwfapi import ECMWFDataServer

server = ECMWFDataServer()

# Single ERA-Interim surface field (param 129.128), 12 UTC analysis on
# 1989-01-01, 0.25-degree grid over the box 60N-10N / 70E-150E, written to
# China.nc as NetCDF.
request = {
    "class": "ei",
    "dataset": "interim",
    "date": "1989-01-01",
    "expver": "1",
    "grid": "0.25/0.25",
    "levtype": "sfc",
    "param": "129.128",
    "step": "0",
    "stream": "oper",
    "target": "China.nc",
    "time": "12",
    "area": "60/70/10/150",
    "type": "an",
    "format": "netcdf",
}
server.retrieve(request)

Exemple #54
0
from ecmwfapi import ECMWFDataServer

server = ECMWFDataServer()

# Global 0.125-degree ERA-Interim field (param 129.128) on lower-tropospheric
# pressure levels at the four analysis times of 2008-01-03, written as GRIB.
# NOTE(review): the original request literal was truncated and the call was
# never closed; the closing "})" has been restored so the statement is
# syntactically valid.
server.retrieve({
    'use': "infrequent",
    'stream': "oper",
    'dataset': "interim",
    'step': "0",
    'levtype': "pl",
    'date': "2008-01-03/to/2008-01-03",
    'time': "00/06/12/18",
    'levelist': "750/775/800/825/850/875/900/925/950/1000",
    'type': "an",
    'param': "129.128",
    'area': "90/00/-90/360",   # whole globe, N/W/S/E
    'grid': "0.125/0.125",
    'target': "/home/j_timmermans/Simulations/Matlab/SEBS/SEBS4SIGMA/Data/TMP/ECMWF/Z_profile.grib"
})
Exemple #55
0
#!/usr/bin/env python
from ecmwfapi import ECMWFDataServer

server = ECMWFDataServer()

# Model-level fields 131.128/132.128 on levels 46-60 (per the target file
# name, the U and V components) for 2015-01-01 at the four analysis times,
# 0.36-degree grid over the box 60N-10N / 70E-150E, written as NetCDF.
request = {
    "class": "ei",
    "dataset": "interim",
    "date": "2015-01-01",
    "expver": "1",
    "levelist": "46/to/60",
    "levtype": "ml",
    "param": "131.128/132.128",
    "step": "0",
    "stream": "oper",
    "target": "20150101_U_V_ml.nc",
    "time": "00/06/12/18",
    "type": "an",
    "area": "60/70/10/150",
    "format": "netcdf",
    "grid": "0.36/0.36",
}
server.retrieve(request)
Exemple #56
0
# Build the list of per-avalanche download jobs: one small bounding box and
# one GRIB target per row of the avalanche catalogue.  Rows whose GRIB file
# already exists (in the main folder or any of the batch folders) are skipped.
from multiprocessing import Pool
from math import ceil, floor
from ecmwfapi import ECMWFDataServer
import pandas as pd
from datetime import date
import helpers
import os.path


def parsedate(ts):
    """Convert a POSIX timestamp (seconds) into a datetime.date."""
    return date.fromtimestamp(int(ts))


server = ECMWFDataServer()
data = pd.read_csv('data/avalanches.csv',
                   parse_dates=['date_posix_ts'],
                   date_parser=parsedate)
params = []
for i in range(0, data.shape[0]):
    # Starting-zone coordinates are converted from the Swiss CH1903 system
    # to WGS84 lat/lon (helpers is a project-local module).
    [lat, lon] = helpers.CH1903toWGS1984(data.loc[i]['starting_zone_X'],
                                         data.loc[i]['starting_zone_Y'])
    # Bounding box as N/W/S/E, rounded outward to 3-decimal precision.
    area = str(ceil(lat * 1000) / 1000) + '/' + str(
        floor(lon * 1000) / 1000) + '/' + str(
            floor(lat * 1000) / 1000) + '/' + str(ceil(lon * 1000) / 1000)
    target = './data/grib/avalanche' + str(i) + '.grb'
    file_exists = os.path.isfile('./data/grib/avalanche' + str(i) + '.grb')
    for j in range(0, 11):
        # Also count files already downloaded into any batch folder 0-10.
        file_exists |= os.path.isfile('./data/grib_batch_' + str(j) +
                                      '/avalanche' + str(i) + '.grb')
    if (not file_exists):
        params.append({'area': area, 'target': target})
Exemple #57
0
def ECMWF_retrieve(Hind_start, Hind_end, area_range, area_name):
    """Download ERA-Interim wave and wind hindcast data from ECMWF.

    Parameters
    ----------
    Hind_start : str
        Start date of the hindcast download, e.g. '1999-01-01'.
    Hind_end : str
        End date of the hindcast download, e.g. '1999-01-31'.
    area_range : str
        Download area as "N/W/S/E", e.g. "90/-80/0/10" (North Atlantic).
    area_name : str
        Label used in the output file names.

    Notes
    -----
    Two GRIB files are written into hard-coded folders: one with the wave
    fields (significant wave height, mean wave period, mean wave direction)
    and one with the wind fields (U and V components).  The parameter/stream
    codes are looked up in the module-level `datasets` mapping.
    """
    # Construct the MARS date interval and the two output file names.
    dateinterval = Hind_start + '/to/' + Hind_end
    AREA = area_range
    wave_grib_name = 'wave_' + area_name + Hind_start + '_' + Hind_end + '.grib'
    wind_grib_name = 'wind_' + area_name + Hind_start + '_' + Hind_end + '.grib'

    # Define the server used for both downloads.
    server = ECMWFDataServer()

    # Hard-coded destination folders for the wave and wind files.
    wave_folder = '/Users/wengang/Documents/Python/Spatio_temporal_model/Downloaded_data/GribWaveData/'
    wind_folder = '/Users/wengang/Documents/Python/Spatio_temporal_model/Downloaded_data/GribWindData/'

    filename_wave = wave_folder + wave_grib_name
    filename_wind = wind_folder + wind_grib_name

    # Wave request: significant wave height, mean wave period, wave direction.
    params = datasets['significant_wave_height']['param'] + '/' + datasets[
        'mean_wave_period']['param'] + '/' + datasets['mean_wave_dir']['param']
    stream = datasets['significant_wave_height']['stream']
    server.retrieve({
        "class": "ei",
        "dataset": "interim",
        "date": dateinterval,  #"2014-07-01/to/2014-07-31",
        "expver": "1",
        "grid": "0.75/0.75",
        "repres": "ll",
        "levtype": "sfc",
        "param": params,
        "stream": stream,
        "time": "00:00:00/06:00:00/12:00:00/18:00:00",
        "type": "an",
        "area": AREA,
        "target": filename_wave,
    })
    # Fixed: this message previously said "wind data" for the wave file.
    # print(...) is used so the line is valid on both Python 2 and Python 3.
    print("wave data is downloaded:" + wave_grib_name)

    # Wind request: U and V wind components (sea-surface temperature is
    # intentionally left out -- see the commented alternative below).
    params = datasets['u_wind']['param'] + '/' + datasets['v_wind']['param']
    #params =  datasets['sea_surface_temp']['param'] + '/' +  datasets['u_wind']['param'] + '/' +  datasets['v_wind']['param']

    stream = datasets['sea_surface_temp']['stream']
    server.retrieve({
        "class": "ei",
        "dataset": "interim",
        "date": dateinterval,  #"2014-07-01/to/2014-07-31",
        "expver": "1",
        "grid": "0.75/0.75",
        "repres": "ll",
        "levtype": "sfc",
        "param": params,
        "stream": stream,
        "time": "00:00:00/06:00:00/12:00:00/18:00:00",
        "type": "an",
        "area": AREA,
        "target": filename_wind,
    })
    print("wind data is downloaded:" + wind_grib_name)
Exemple #58
0
from ecmwfapi import ECMWFDataServer
import numpy as np

y = np.arange(1979, 2018, 1)
stri = ''

for yy in y:

    stri = stri + "{0}0101/{0}0201/{0}0301/{0}0401/{0}0501/{0}0601/{0}0701/{0}0801/{0}0901/{0}1001/{0}1101/{0}1201/".format(
        yy)

stri = stri[0:-1]

server = ECMWFDataServer()
server.retrieve({
    "class":
    "ei",
    "dataset":
    "interim",
    "date":
    stri,
    "expver":
    "1",
    "grid":
    "0.75/0.75",
    "levtype":
    "pl",
    "param":
    "130.128/131.128/132.128/133.128/135.128/157.128",  #"60.128/129.128/130.128/131.128/132.128/133.128/135.128/155.128/157.128",
    "levelist":
    "200/250/300/350/400/450/500/550/600/650/700/750/800/850/900/925/950",
def get(year, month, outdir='.'):
    """
    Get the ECMWF ERA-20C FVCOM forcing data for a given year and month.

    Two MARS requests are issued: one for pressure-levels analysis fields
    (GRIB parameters 129/130/131/132/133/138/155 on 37 levels from 1 to
    1000 hPa, 3-hourly) and one for surface analysis fields (6-hourly).
    The requested period is padded by four days either side of the month.

    Parameters
    ----------
    year : int
        Year for which to download data.
    month : int
        Month for which to download data.
    outdir : str, optional
        Output directory for the files. Defaults to the current directory.

    Returns
    -------
    files : tuple
        File paths for the pressure-levels and surface data (in that order).
        A file that already exists on disk is not downloaded again.

    """

    server = ECMWFDataServer()

    # Buffer by four days either side of the requested month so consumers
    # have data spanning the month boundaries.
    date = datetime.datetime(year, month, 1, 0, 0, 0)
    dom = calendar.monthrange(year, month)[-1]
    start_date = date - datetime.timedelta(days=4)
    end_date = date + datetime.timedelta(dom + 4)
    s_start = start_date.strftime('%Y-%m-%d')
    s_end = end_date.strftime('%Y-%m-%d')

    prefixes = ('{:04d}-{:02d}_ML.grb2', '{:04d}-{:02d}_SFC.grib')
    files = (os.path.join(outdir, prefixes[0].format(year, month)),
             os.path.join(outdir, prefixes[1].format(year, month)))

    if not os.path.exists(files[0]):
        try:
            # Pressure levels data
            server.retrieve({
                "class": "e2",
                "dataset": "era20c",
                "date": "{}/to/{}".format(s_start, s_end),
                "expver": "1",
                "levelist": "1/2/3/5/7/10/20/30/50/70/100/125/150/175/200/225/250/300/350/400/450/500/550/600/650/700/750/775/800/825/850/875/900/925/950/975/1000",
                "levtype": "pl",
                "param": "129.128/130.128/131.128/132.128/133.128/138.128/155.128",
                "stream": "oper",
                "target": files[0],
                "time": "00/03/06/09/12/15/18/21",
                "type": "an",
            })
        except Exception:
            # Remove the partial pressure-levels file (not files[1], which
            # this request never touched) so a retry starts cleanly; the
            # file may not exist at all if the request failed up front.
            if os.path.exists(files[0]):
                os.remove(files[0])

    if not os.path.exists(files[1]):
        try:
            # Surface data
            server.retrieve({
                "class": "e2",
                "dataset": "era20c",
                "date": "{}/to/{}".format(s_start, s_end),
                "expver": "1",
                "levtype": "sfc",
                "param": "31.128/34.128/39.128/40.128/41.128/42.128/134.128/139.128/"
                         "141.128/151.128/165.128/166.128/167.128/168.128/170.128/"
                         "183.128/235.128/236.128",
                "stream": "oper",
                "target": files[1],
                "time": "00/06/12/18",
                "type": "an",
            })
        except Exception:
            # Same clean-up for the surface file.
            if os.path.exists(files[1]):
                os.remove(files[1])

    return files
Exemple #60
0
from ecmwfapi import ECMWFDataServer

server = ECMWFDataServer()

# ERA-Interim forecast of parameter 167.128 at step 3, 1-3 August 2017,
# over area 75/-20/10/60 on a 0.75-degree grid, delivered as NetCDF.
request = {
    "class": "ei",
    "dataset": "interim",
    "expver": "1",
    "stream": "oper",
    "type": "fc",
    "levtype": "sfc",
    "param": "167.128",
    "date": "2017-08-01/to/2017-08-03",
    "time": "00:00:00",
    "step": "3",
    "grid": "0.75/0.75",
    "area": "75/-20/10/60",
    "format": "netcdf",
    "target": "test.nc"
}
server.retrieve(request)