Example #1
0
 def test_function_parameter_dataset_lons(self):
     """parameter_dataset should return the expected longitude array."""
     rcmed.urllib2.urlopen = self.return_text
     result = rcmed.parameter_dataset(self.dataset_id, self.parameter_id,
                                      self.min_lat, self.max_lat,
                                      self.min_lon, self.max_lon,
                                      self.start_time, self.end_time)
     self.assert1DArraysEqual(result.lons, self.lons)
def _load_dataset(dataset_config_data):
    """Load a dataset from the source named in its configuration.

    :param dataset_config_data: Dataset configuration mapping. Must contain a
        'data_source' key of 'local', 'rcmed', 'esgf', or 'dap', plus the
        keys that source requires (e.g. 'path' and 'variable' for 'local').
        An optional 'optional_args' mapping is forwarded as keyword args.
    :type dataset_config_data: dict

    :returns: The loaded dataset object, or None when the configuration
        describes an unsupported multi-file local dataset (or an unknown
        'data_source', in which case no branch matches).
    """
    if dataset_config_data['data_source'] == 'local':
        if dataset_config_data['file_count'] > 1:
            logger.error(
                'Multi-file datasets are currently not supported. Cancelling load '
                'of the following dataset: {}'.format(dataset_config_data))
            return None

        return local.load_file(dataset_config_data['path'],
                               dataset_config_data['variable'],
                               **dataset_config_data.get('optional_args', {}))
    elif dataset_config_data['data_source'] == 'rcmed':
        return rcmed.parameter_dataset(
            dataset_config_data['dataset_id'],
            dataset_config_data['parameter_id'],
            dataset_config_data['min_lat'], dataset_config_data['max_lat'],
            # BUG FIX: 'min_lon' was passed twice; the sixth positional
            # argument is the maximum longitude.
            dataset_config_data['min_lon'], dataset_config_data['max_lon'],
            dataset_config_data['start_time'], dataset_config_data['end_time'],
            **dataset_config_data.get('optional_args', {}))
    elif dataset_config_data['data_source'] == 'esgf':
        return esgf.load_dataset(
            dataset_config_data['dataset_id'], dataset_config_data['variable'],
            dataset_config_data['esgf_username'],
            dataset_config_data['esgf_password'],
            **dataset_config_data.get('optional_args', {}))
    elif dataset_config_data['data_source'] == 'dap':
        # BUG FIX: the dict was being *called* (TypeError at runtime); use
        # .get() like every other branch to fetch optional keyword args.
        return dap.load(dataset_config_data['url'],
                        dataset_config_data['variable'],
                        **dataset_config_data.get('optional_args', {}))
Example #3
0
def _load_dataset(dataset_config_data):
    """Load a dataset from the source named in its configuration.

    :param dataset_config_data: Dataset configuration mapping. Must contain a
        'data_source' key of 'local', 'rcmed', 'esgf', or 'dap', plus the
        keys that source requires (e.g. 'path' and 'variable' for 'local').
        An optional 'optional_args' mapping is forwarded as keyword args.
    :type dataset_config_data: dict

    :returns: The loaded dataset object, or None when the configuration
        describes an unsupported multi-file local dataset (or an unknown
        'data_source', in which case no branch matches).
    """
    if dataset_config_data['data_source'] == 'local':
        if dataset_config_data['file_count'] > 1:
            logger.error(
                'Multi-file datasets are currently not supported. Cancelling load '
                'of the following dataset: {}'.format(dataset_config_data)
            )
            return None

        return local.load_file(dataset_config_data['path'],
                               dataset_config_data['variable'],
                               **dataset_config_data.get('optional_args', {}))
    elif dataset_config_data['data_source'] == 'rcmed':
        return rcmed.parameter_dataset(dataset_config_data['dataset_id'],
                                       dataset_config_data['parameter_id'],
                                       dataset_config_data['min_lat'],
                                       dataset_config_data['max_lat'],
                                       dataset_config_data['min_lon'],
                                       # BUG FIX: 'min_lon' was passed twice;
                                       # this argument is the max longitude.
                                       dataset_config_data['max_lon'],
                                       dataset_config_data['start_time'],
                                       dataset_config_data['end_time'],
                                       **dataset_config_data.get('optional_args', {}))
    elif dataset_config_data['data_source'] == 'esgf':
        return esgf.load_dataset(dataset_config_data['dataset_id'],
                                 dataset_config_data['variable'],
                                 dataset_config_data['esgf_username'],
                                 dataset_config_data['esgf_password'],
                                 **dataset_config_data.get('optional_args', {}))
    elif dataset_config_data['data_source'] == 'dap':
        # BUG FIX: the dict was being *called* (TypeError at runtime); use
        # .get() like every other branch to fetch optional keyword args.
        return dap.load(dataset_config_data['url'],
                        dataset_config_data['variable'],
                        **dataset_config_data.get('optional_args', {}))
Example #4
0
 def test_function_parameter_dataset_values(self):
     """parameter_dataset should return the expected data values."""
     rcmed.urlopen = self.return_text
     result = rcmed.parameter_dataset(self.dataset_id, self.parameter_id,
                                      self.min_lat, self.max_lat,
                                      self.min_lon, self.max_lon,
                                      self.start_time, self.end_time)
     self.assert1DArraysEqual(result.values.flatten(),
                              self.values.flatten())
Example #5
0
 def test_function_parameter_dataset_lats_monthly(self):
     """parameter_dataset should return the expected latitudes for a monthly dataset."""
     self.dataset_id = 6
     self.parameter_id = 32
     self.end_time_for_url = "20021031T0000Z"
     rcmed.urlopen = self.return_text
     result = rcmed.parameter_dataset(self.dataset_id, self.parameter_id,
                                      self.min_lat, self.max_lat,
                                      self.min_lon, self.max_lon,
                                      self.start_time, self.end_time)
     self.assert1DArraysEqual(result.lats, self.lats)
Example #6
0
 def test_function_parameter_dataset_times(self):
     """parameter_dataset should return the expected time array."""
     rcmed.urllib2.urlopen = self.return_text
     result = rcmed.parameter_dataset(self.dataset_id, self.parameter_id,
                                      self.min_lat, self.max_lat,
                                      self.min_lon, self.max_lon,
                                      self.start_time, self.end_time)
     self.assert1DArraysEqual(result.times, self.times)
Example #7
0
 def test_function_parameter_dataset_values(self):
     """Verify that a user-supplied ``name`` is attached to the dataset.

     NOTE(review): despite the method name, this test asserts on the
     dataset *name* and duplicates test_function_parameter_dataset_name —
     confirm whether it should assert on ``values`` instead.
     """
     rcmed.urllib2.urlopen = self.return_text
     ds = rcmed.parameter_dataset(self.dataset_id,
                                  self.parameter_id,
                                  self.min_lat,
                                  self.max_lat,
                                  self.min_lon,
                                  self.max_lon,
                                  self.start_time,
                                  self.end_time,
                                  name='foo')
     # assertEquals is a deprecated alias (removed in Python 3.12).
     self.assertEqual(ds.name, 'foo')
Example #8
0
 def test_function_parameter_dataset_name(self):
     """A user-supplied ``name`` should be attached to the returned dataset."""
     rcmed.urllib2.urlopen = self.return_text
     ds = rcmed.parameter_dataset(self.dataset_id,
                                  self.parameter_id,
                                  self.min_lat,
                                  self.max_lat,
                                  self.min_lon,
                                  self.max_lon,
                                  self.start_time,
                                  self.end_time,
                                  name='foo')
     # assertEquals is a deprecated alias (removed in Python 3.12).
     self.assertEqual(ds.name, 'foo')
Example #9
0
def _load_rcmed_dataset_object(dataset_info, eval_bounds):
    ''' Create an ocw.dataset.Dataset object from supplied data.

    :param dataset_info: The necessary data to load a RCMED dataset with
        ocw.data_source.rcmed. Must be of the form:
        {
            'dataset_id': The dataset id to grab from RCMED.
            'parameter_id': The variable id value used by RCMED.
            'name': Optional dataset name
        }
    :type dataset_info: Dictionary

    :param eval_bounds: The time, lat, and lon bounds values for this Evaluation.
        Must be of the form:
        {
            'start_time': request.query.start_time,
            'end_time': request.query.end_time,
            'lat_min': request.query.lat_min,
            'lat_max': request.query.lat_max,
            'lon_min': request.query.lon_min,
            'lon_max': request.query.lon_max
        }
    :type eval_bounds: Dictionary

    :returns: An ocw.dataset.Dataset object containing the requested information.

    :raises KeyError: If the required keys aren't present in the dataset_info or
        eval_bounds objects.
    '''
    # The RCMED API requires integer ids.
    dataset = rcmed.parameter_dataset(int(dataset_info['dataset_id']),
                                      int(dataset_info['parameter_id']),
                                      eval_bounds['lat_min'],
                                      eval_bounds['lat_max'],
                                      eval_bounds['lon_min'],
                                      eval_bounds['lon_max'],
                                      eval_bounds['start_time'],
                                      eval_bounds['end_time'])

    # If a name is passed for the dataset, use it. Otherwise, look up the
    # parameter's long name in the RCMED parameter metadata.
    if 'name' in dataset_info:
        name = dataset_info['name']
    else:
        for m in rcmed.get_parameters_metadata():
            if m['parameter_id'] == str(dataset_info['parameter_id']):
                name = m['longname']
                break
        else:
            # If we can't find a name for the dataset, default to something...
            name = "RCMED dataset"

    dataset.name = name

    return dataset
Example #10
0
def _load_rcmed_dataset_object(dataset_info, eval_bounds):
    ''' Create an ocw.dataset.Dataset object from supplied data.

    :param dataset_info: The necessary data to load a RCMED dataset with
        ocw.data_source.rcmed. Must be of the form:
        {
            'dataset_id': The dataset id to grab from RCMED.
            'parameter_id': The variable id value used by RCMED.
            'name': Optional dataset name
        }
    :type dataset_info: Dictionary

    :param eval_bounds: The time, lat, and lon bounds values for this Evaluation.
        Must be of the form:
        {
            'start_time': request.query.start_time,
            'end_time': request.query.end_time,
            'lat_min': request.query.lat_min,
            'lat_max': request.query.lat_max,
            'lon_min': request.query.lon_min,
            'lon_max': request.query.lon_max
        }
    :type eval_bounds: Dictionary

    :returns: An ocw.dataset.Dataset object containing the requested information.

    :raises KeyError: If the required keys aren't present in the dataset_info or
        eval_bounds objects.
    '''
    # The RCMED API requires integer ids.
    dataset = rcmed.parameter_dataset(int(dataset_info['dataset_id']),
                                      int(dataset_info['parameter_id']),
                                      eval_bounds['lat_min'],
                                      eval_bounds['lat_max'],
                                      eval_bounds['lon_min'],
                                      eval_bounds['lon_max'],
                                      eval_bounds['start_time'],
                                      eval_bounds['end_time'])

    # If a name is passed for the dataset, use it. Otherwise, look up the
    # parameter's long name in the RCMED parameter metadata.
    if 'name' in dataset_info:
        name = dataset_info['name']
    else:
        for m in rcmed.get_parameters_metadata():
            if m['parameter_id'] == str(dataset_info['parameter_id']):
                name = m['longname']
                break
        else:
            # If we can't find a name for the dataset, default to something...
            name = "RCMED dataset"

    dataset.name = name

    return dataset
Example #11
0
 def test_function_parameter_dataset_lats_monthly(self):
     """parameter_dataset should return the expected latitudes for a monthly dataset."""
     self.dataset_id = 6
     self.parameter_id = 32
     self.end_time_for_url = "20021031T0000Z"
     rcmed.urllib2.urlopen = self.return_text
     result = rcmed.parameter_dataset(self.dataset_id, self.parameter_id,
                                      self.min_lat, self.max_lat,
                                      self.min_lon, self.max_lon,
                                      self.start_time, self.end_time)
     self.assert1DArraysEqual(result.lats, self.lats)
Example #12
0
    def test_dataset_origin(self):
        """The returned dataset's origin metadata should record the query."""
        rcmed.urllib2.urlopen = self.return_text
        ds = rcmed.parameter_dataset(self.dataset_id,
                                     self.parameter_id,
                                     self.min_lat,
                                     self.max_lat,
                                     self.min_lon,
                                     self.max_lon,
                                     self.start_time,
                                     self.end_time,
                                     name='foo')

        # assertEquals is a deprecated alias (removed in Python 3.12).
        self.assertEqual(ds.origin['source'], 'rcmed')
        self.assertEqual(ds.origin['dataset_id'], self.dataset_id)
        self.assertEqual(ds.origin['parameter_id'], self.parameter_id)
Example #13
0
    def test_dataset_origin(self):
        """The returned dataset's origin metadata should record the query."""
        rcmed.urllib2.urlopen = self.return_text
        ds = rcmed.parameter_dataset(self.dataset_id,
                                     self.parameter_id,
                                     self.min_lat,
                                     self.max_lat,
                                     self.min_lon,
                                     self.max_lon,
                                     self.start_time,
                                     self.end_time,
                                     name='foo')

        # assertEquals is a deprecated alias (removed in Python 3.12).
        self.assertEqual(ds.origin['source'], 'rcmed')
        self.assertEqual(ds.origin['dataset_id'], self.dataset_id)
        self.assertEqual(ds.origin['parameter_id'], self.parameter_id)
import ssl

# Fall back to an unverified HTTPS context when available — presumably a
# workaround for the RCMED server's certificate; verify before keeping.
if hasattr(ssl, '_create_unverified_context'):
    ssl._create_default_https_context = ssl._create_unverified_context

# rectangular boundary
min_lat = 15.75
max_lat = 55.75
min_lon = -125.75
max_lon = -66.75

start_time = datetime(1998, 1, 1)
end_time = datetime(1998, 12, 31)

# Fetch dataset 3 / parameter 36 (TRMM, per the plot filename below) for 1998.
TRMM_dataset = rcmed.parameter_dataset(3, 36, min_lat, max_lat, min_lon, max_lon,
                                       start_time, end_time)

Cuba_and_Bahamas_bounds = Bounds(
    boundary_type='countries', countries=['Cuba', 'Bahamas'])
# to mask out the data over Mexico and Canada
TRMM_dataset2 = dsp.subset(
    TRMM_dataset, Cuba_and_Bahamas_bounds, extract=False)

# Plot the time-mean (axis 0) of the subset dataset.
plotter.draw_contour_map(ma.mean(TRMM_dataset2.values, axis=0), TRMM_dataset2.lats,
                         TRMM_dataset2.lons, fname='TRMM_without_Cuba_and_Bahamas')

NCA_SW_bounds = Bounds(boundary_type='us_states', us_states=[
                       'CA', 'NV', 'UT', 'AZ', 'NM', 'CO'])
# NOTE(review): comment looked copy-pasted from the subset above; this call
# subsets to the NCA Southwest states with extract=True.
TRMM_dataset3 = dsp.subset(TRMM_dataset2, NCA_SW_bounds, extract=True)
Example #15
0
# Load the reference (observation) dataset described by the config file.
# NOTE: this is Python 2 code (print statements, raw_input).
ref_data_info = config['datasets']['reference']
ref_lat_name = None
ref_lon_name = None
# Optional overrides for the names of the lat/lon variables in the file.
if 'latitude_name' in ref_data_info.keys():
    ref_lat_name = ref_data_info['latitude_name']
if 'longitude_name' in ref_data_info.keys():
    ref_lon_name = ref_data_info['longitude_name']
print 'Loading observation dataset:\n',ref_data_info
ref_name = ref_data_info['data_name']
if ref_data_info['data_source'] == 'local':
    ref_dataset = local.load_file(ref_data_info['path'],
                                  ref_data_info['variable'], name=ref_name,
                                  lat_name=ref_lat_name, lon_name=ref_lon_name)
elif ref_data_info['data_source'] == 'rcmed':
      ref_dataset = rcmed.parameter_dataset(ref_data_info['dataset_id'],
                                            ref_data_info['parameter_id'],
                                            min_lat, max_lat, min_lon, max_lon,
                                            start_time, end_time)
elif ref_data_info['data_source'] == 'ESGF':
      # ESGF requires interactive credentials.
      username=raw_input('Enter your ESGF OpenID:\n')
      password=raw_input('Enter your ESGF password:\n')
      ds = esgf.load_dataset(dataset_id = ref_data_info['dataset_id'],
                             variable = ref_data_info['variable'],
                             esgf_username=username,
                             esgf_password=password)
      ref_dataset = ds[0]
else:
    print ' '
if temporal_resolution == 'daily' or temporal_resolution == 'monthly':
    ref_dataset =  dsp.normalize_dataset_datetimes(ref_dataset, temporal_resolution)
# Optional scaling of the loaded values (e.g. unit conversion factors).
if 'multiplying_factor' in ref_data_info.keys():
    ref_dataset.values = ref_dataset.values*ref_data_info['multiplying_factor']
Example #16
0
# Download the model files if they are not already present locally.
if not path.exists(FILE_2):
    urllib.urlretrieve(FILE_LEADER + FILE_2, FILE_2)

if not path.exists(FILE_3):
    urllib.urlretrieve(FILE_LEADER + FILE_3, FILE_3)
""" Step 1: Load Local NetCDF File into OCW Dataset Objects and store in list"""
target_datasets.append(local.load_file(FILE_1, varName, name="KNMI"))
target_datasets.append(local.load_file(FILE_2, varName, name="REGCM"))
target_datasets.append(local.load_file(FILE_3, varName, name="UCT"))
""" Step 2: Fetch an OCW Dataset Object from the data_source.rcmed module """
print(
    "Working with the rcmed interface to get CRU3.1 Monthly Mean Precipitation"
)
# the dataset_id and the parameter id were determined from
# https://rcmes.jpl.nasa.gov/content/data-rcmes-database
CRU31 = rcmed.parameter_dataset(10, 37, LAT_MIN, LAT_MAX, LON_MIN, LON_MAX,
                                START, END)
""" Step 3: Processing Datasets so they are the same shape """
print("Processing datasets ...")
CRU31 = dsp.normalize_dataset_datetimes(CRU31, 'monthly')
print("... on units")
CRU31 = dsp.water_flux_unit_conversion(CRU31)

# Bring every target model onto the same bounds, units, and time stamps.
for member, each_target_dataset in enumerate(target_datasets):
    target_datasets[member] = dsp.subset(target_datasets[member], EVAL_BOUNDS)
    target_datasets[member] = dsp.water_flux_unit_conversion(
        target_datasets[member])
    target_datasets[member] = dsp.normalize_dataset_datetimes(
        target_datasets[member], 'monthly')

print("... spatial regridding")
new_lats = np.arange(LAT_MIN, LAT_MAX, gridLatStep)
    urllib.urlretrieve(FILE_LEADER + FILE_2, FILE_2)
# NOTE(review): the line above is the body of an `if not path.exists(FILE_2):`
# guard whose header lies outside this excerpt.

if not path.exists(FILE_3):
    urllib.urlretrieve(FILE_LEADER + FILE_3, FILE_3)

""" Step 1: Load Local NetCDF File into OCW Dataset Objects and store in list"""
target_datasets.append(local.load_file(FILE_1, varName, name="KNMI"))
target_datasets.append(local.load_file(FILE_2, varName, name="REGCM"))
target_datasets.append(local.load_file(FILE_3, varName, name="UCT"))


""" Step 2: Fetch an OCW Dataset Object from the data_source.rcmed module """
print("Working with the rcmed interface to get CRU3.1 Daily Precipitation")
# the dataset_id and the parameter id were determined from
# https://rcmes.jpl.nasa.gov/content/data-rcmes-database
CRU31 = rcmed.parameter_dataset(
    10, 37, LAT_MIN, LAT_MAX, LON_MIN, LON_MAX, START, END)


""" Step 3: Processing datasets so they are the same shape ... """
print("Processing datasets so they are the same shape")
CRU31 = dsp.water_flux_unit_conversion(CRU31)
CRU31 = dsp.normalize_dataset_datetimes(CRU31, 'monthly')

# Bring every target model onto the same bounds, units, and time stamps.
for member, each_target_dataset in enumerate(target_datasets):
    target_datasets[member] = dsp.subset(target_datasets[member], EVAL_BOUNDS)
    target_datasets[member] = dsp.water_flux_unit_conversion(target_datasets[
                                                             member])
    target_datasets[member] = dsp.normalize_dataset_datetimes(
        target_datasets[member], 'monthly')

print("... spatial regridding")
# Compute the temporal overlap between the CRU and KNMI datasets:
# latest common start, earliest common end.
start_time = max([cru_start, knmi_start])
# Grab the Min End Time
end_time = min([cru_end, knmi_end])
print("Overlap computed to be: %s to %s" % (start_time.strftime("%Y-%m-%d"),
                                            end_time.strftime("%Y-%m-%d")))
print("We are going to grab the first %s year(s) of data" % YEARS)
# Truncate the overlap to the first YEARS years.
end_time = datetime.datetime(
    start_time.year + YEARS, start_time.month, start_time.day)
print("Final Overlap is: %s to %s" % (start_time.strftime("%Y-%m-%d"),
                                      end_time.strftime("%Y-%m-%d")))

print("Fetching data from RCMED...")
cru31_dataset = rcmed.parameter_dataset(dataset_id,
                                        parameter_id,
                                        min_lat,
                                        max_lat,
                                        min_lon,
                                        max_lon,
                                        start_time,
                                        end_time)

# Step 3: Resample Datasets so they are the same shape.
print("CRU31_Dataset.values shape: (times, lats, lons) - %s" %
      (cru31_dataset.values.shape,))
print("KNMI_Dataset.values shape: (times, lats, lons) - %s" %
      (knmi_dataset.values.shape,))
print("Our two datasets have a mis-match in time. We will subset on time to %s years\n" % YEARS)

# Create a Bounds object to use for subsetting
new_bounds = Bounds(lat_min=min_lat, lat_max=max_lat, lon_min=min_lon,
                    lon_max=max_lon, start=start_time, end=end_time)
knmi_dataset = dsp.subset(knmi_dataset, new_bounds)
Example #19
0
import ssl

# Fall back to an unverified HTTPS context when available — presumably a
# workaround for the RCMED server's certificate; verify before keeping.
if hasattr(ssl, '_create_unverified_context'):
    ssl._create_default_https_context = ssl._create_unverified_context

# rectangular boundary
min_lat = 15.75
max_lat = 55.75
min_lon = -125.75
max_lon = -66.75

start_time = datetime(1998, 1, 1)
end_time = datetime(1998, 12, 31)

# Fetch dataset 3 / parameter 36 (TRMM, per the plot filename below) for 1998.
TRMM_dataset = rcmed.parameter_dataset(3, 36, min_lat, max_lat, min_lon,
                                       max_lon, start_time, end_time)

Cuba_and_Bahamas_bounds = Bounds(boundary_type='countries',
                                 countries=['Cuba', 'Bahamas'])
# to mask out the data over Mexico and Canada
TRMM_dataset2 = dsp.subset(TRMM_dataset,
                           Cuba_and_Bahamas_bounds,
                           extract=False)

# Plot the time-mean (axis 0) of the subset dataset.
plotter.draw_contour_map(ma.mean(TRMM_dataset2.values, axis=0),
                         TRMM_dataset2.lats,
                         TRMM_dataset2.lons,
                         fname='TRMM_without_Cuba_and_Bahamas')

NCA_SW_bounds = Bounds(boundary_type='us_states',
                       us_states=['CA', 'NV', 'UT', 'AZ', 'NM', 'CO'])
# Load local knmi model data
knmi_dataset = local.load_file(FILE_1, "tasmax")
knmi_dataset.name = "AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_tasmax"

wrf311_dataset = local.load_file(FILE_2, "tasmax")
wrf311_dataset.name = "AFRICA_UC-WRF311_CTL_ERAINT_MM_50km-rg_1989-2008_tasmax"



""" Step 2: Fetch an OCW Dataset Object from the data_source.rcmed module """
print("Working with the rcmed interface to get CRU3.1 Daily-Max Temp")
metadata = rcmed.get_parameters_metadata()

# Select the CRU3.1 daily-max-temp parameter record (id "39").
cru_31 = [m for m in metadata if m['parameter_id'] == "39"][0]

""" The RCMED API uses the following function to query, subset and return the 
raw data from the database:

rcmed.parameter_dataset(dataset_id, parameter_id, min_lat, max_lat, min_lon, 
                        max_lon, start_time, end_time)

The first two required params are in the cru_31 variable we defined earlier
"""
# Must cast to int since the rcmed api requires ints
dataset_id = int(cru_31['dataset_id'])
parameter_id = int(cru_31['parameter_id'])

#  The spatial_boundaries() function returns the spatial extent of the dataset
min_lat, max_lat, min_lon, max_lon = wrf311_dataset.spatial_boundaries()

#  There is a boundary alignment issue with the datasets.  To mitigate this
# Download necessary NetCDF file if not present
if path.exists(MODEL):
    pass
else:
    urllib.urlretrieve(FILE_LEADER + MODEL, MODEL)
""" Step 1: Load Local NetCDF File into OCW Dataset Objects """
print("Loading %s into an OCW Dataset Object" % (MODEL, ))
knmi_dataset = local.load_file(MODEL, "tasmax")
print("KNMI_Dataset.values shape: (times, lats, lons) - %s \n" %
      (knmi_dataset.values.shape, ))
""" Step 2: Fetch an OCW Dataset Object from the data_source.rcmed module """
print("Working with the rcmed interface to get CRU3.1 Daily-Max Temp")
metadata = rcmed.get_parameters_metadata()

# Select the CRU3.1 daily-max-temp parameter record (id "39").
cru_31 = [m for m in metadata if m['parameter_id'] == "39"][0]
""" The RCMED API uses the following function to query, subset and return the 
raw data from the database:

rcmed.parameter_dataset(dataset_id, parameter_id, min_lat, max_lat, min_lon, 
                        max_lon, start_time, end_time)

The first two required params are in the cru_31 variable we defined earlier
"""
# Must cast to int since the rcmed api requires ints
dataset_id = int(cru_31['dataset_id'])
parameter_id = int(cru_31['parameter_id'])

print("We are going to use the Model to constrain the Spatial Domain")
#  The spatial_boundaries() function returns the spatial extent of the dataset
# NOTE(review): the print call below is truncated — its closing paren lies
# outside this excerpt.
print(
    "The KNMI_Dataset spatial bounds (min_lat, max_lat, min_lon, max_lon) are: \n"
Example #22
0
# Snap the longitude bounds inward to whole degrees.
min_lon = math.ceil(min_lon)
max_lon = math.floor(max_lon)

print("Calculating the Maximum Overlap in Time for the datasets")

cru_start = datetime.datetime.strptime(cru_31['start_date'], "%Y-%m-%d")
cru_end = datetime.datetime.strptime(cru_31['end_date'], "%Y-%m-%d")
# Set the Time Range to be the year 1989
start_time = datetime.datetime(1989, 1, 1)
end_time = datetime.datetime(1989, 12, 1)

print("Fetching data from RCMED...")
cru31_dataset = rcmed.parameter_dataset(dataset_id,
                                        parameter_id,
                                        min_lat,
                                        max_lat,
                                        min_lon,
                                        max_lon,
                                        start_time,
                                        end_time)

# Step 3: Resample Datasets so they are the same shape.

# Running Temporal Rebin early helps negate the issue of datasets being on different
# days of the month (1st vs. 15th)
print("Temporally Rebinning the Datasets to an Annual Timestep")
# To run annual temporal Rebinning,
knmi_dataset = dsp.temporal_rebin(knmi_dataset, temporal_resolution='annual')
dataset_start, dataset_end = knmi_dataset.temporal_boundaries()
# Clamp the requested range to what the model actually covers.
start_time = max([start_time, dataset_start])
end_time = min([end_time, dataset_end])
Example #23
0
# Spatial domain of the evaluation, read from the config file.
# NOTE: this is Python 2 code (print statements).
space_info = config['space']
min_lat = space_info['min_lat']
max_lat = space_info['max_lat']
min_lon = space_info['min_lon']
max_lon = space_info['max_lon']
""" Step 1: Load the reference data """
ref_data_info = config['datasets']['reference']
print 'Loading observation dataset:\n', ref_data_info
ref_name = ref_data_info['data_name']
if ref_data_info['data_source'] == 'local':
    ref_dataset = local.load_file(ref_data_info['path'],
                                  ref_data_info['variable'],
                                  name=ref_name)
elif ref_data_info['data_source'] == 'rcmed':
    ref_dataset = rcmed.parameter_dataset(ref_data_info['dataset_id'],
                                          ref_data_info['parameter_id'],
                                          min_lat, max_lat, min_lon, max_lon,
                                          start_time, end_time)
else:
    print ' '
    # TO DO: support ESGF

ref_dataset = dsp.normalize_dataset_datetimes(ref_dataset, temporal_resolution)
# Optional scaling of the loaded values (e.g. unit conversion factors).
if 'multiplying_factor' in ref_data_info.keys():
    ref_dataset.values = ref_dataset.values * ref_data_info[
        'multiplying_factor']
""" Step 2: Load model NetCDF Files into OCW Dataset Objects """
model_data_info = config['datasets']['targets']
print 'Loading model datasets:\n', model_data_info
if model_data_info['data_source'] == 'local':
    model_datasets, model_names = local.load_multiple_files(
        file_path=model_data_info['path'],
Example #24
0
def run_screen(model_datasets, models_info, observations_info,
               overlap_start_time, overlap_end_time, overlap_min_lat,
               overlap_max_lat, overlap_min_lon, overlap_max_lon,
               temp_grid_setting, spatial_grid_setting, working_directory, plot_title):
     '''Generates screen to show running evaluation process.

     :param model_datasets: list of model dataset objects
     :type model_datasets: list
     :param models_info: list of dictionaries that contain information for each model
     :type models_info: list
     :param observations_info: list of dictionaries that contain information for each observation
     :type observations_info: list
     :param overlap_start_time: overlap start time between model and obs start time
     :type overlap_start_time: datetime
     :param overlap_end_time: overlap end time between model and obs end time
     :type overlap_end_time: float
     :param overlap_min_lat: overlap minimum lat between model and obs minimum lat
     :type overlap_min_lat: float
     :param overlap_max_lat: overlap maximum lat between model and obs maximum lat
     :type overlap_max_lat: float
     :param overlap_min_lon: overlap minimum lon between model and obs minimum lon
     :type overlap_min_lon: float
     :param overlap_max_lon: overlap maximum lon between model and obs maximum lon
     :type overlap_max_lon: float
     :param temp_grid_setting: temporal grid option such as hourly, daily, monthly and annually
     :type temp_grid_setting: string
     :param spatial_grid_setting:
     :type spatial_grid_setting: string
     :param working_directory: path to a directory for storring outputs
     :type working_directory: string
     :param plot_title: Title for plot
     :type plot_title: string
     '''

     option = None
     if option != "0":
          ready_screen("manage_obs_screen")
          y = screen.getmaxyx()[0]
          screen.addstr(2, 2, "Evaluation started....")
          screen.refresh()

          OUTPUT_PLOT = "plot"

          dataset_id = int(observations_info[0]['dataset_id'])       #just accepts one dataset at this time
          parameter_id = int(observations_info[0]['parameter_id'])  #just accepts one dataset at this time

          new_bounds = Bounds(overlap_min_lat, overlap_max_lat, overlap_min_lon, overlap_max_lon, overlap_start_time, overlap_end_time)
          model_dataset = dsp.subset(new_bounds, model_datasets[0])   #just accepts one model at this time

          #Getting bound info of subseted model file to retrive obs data with same bound as subseted model
          new_model_spatial_bounds = model_dataset.spatial_boundaries()
          new_model_temp_bounds = model_dataset.time_range()
          new_min_lat = new_model_spatial_bounds[0]
          new_max_lat = new_model_spatial_bounds[1]
          new_min_lon = new_model_spatial_bounds[2]
          new_max_lon = new_model_spatial_bounds[3]
          new_start_time = new_model_temp_bounds[0]
          new_end_time = new_model_temp_bounds[1]

          screen.addstr(4, 4, "Retrieving data...")
          screen.refresh()

          #Retrieve obs data
          obs_dataset = rcmed.parameter_dataset(
                                        dataset_id,
                                        parameter_id,
                                        new_min_lat,
                                        new_max_lat,
                                        new_min_lon,
                                        new_max_lon,
                                        new_start_time,
                                        new_end_time)
          screen.addstr(4, 4, "--> Data retrieved.")
          screen.refresh()

          screen.addstr(5, 4, "Temporally regridding...")
          screen.refresh()
          if temp_grid_setting.lower() == 'hourly':
               days = 0.5
          elif temp_grid_setting.lower() == 'daily':
               days = 1
          elif temp_grid_setting.lower() == 'monthly':
               days = 31
          else:
               days = 365
          model_dataset = dsp.temporal_rebin(model_dataset, timedelta(days))
          obs_dataset = dsp.temporal_rebin(obs_dataset, timedelta(days))
          screen.addstr(5, 4, "--> Temporally regridded.")
          screen.refresh()

          new_lats = np.arange(new_min_lat, new_max_lat, spatial_grid_setting)
          new_lons = np.arange(new_min_lon, new_max_lon, spatial_grid_setting)

          screen.addstr(6, 4, "Spatially regridding...")
          screen.refresh()
          spatial_gridded_model = dsp.spatial_regrid(model_dataset, new_lats, new_lons)
          spatial_gridded_obs = dsp.spatial_regrid(obs_dataset, new_lats, new_lons)
          screen.addstr(6, 4, "--> Spatially regridded.")
          screen.refresh()

          screen.addstr(7, 4, "Setting up metrics...")
          screen.refresh()
          bias = metrics.Bias()
          bias_evaluation = evaluation.Evaluation(spatial_gridded_model, [spatial_gridded_obs], [bias])
          screen.addstr(7, 4, "--> Metrics setting done.")
          screen.refresh()

          screen.addstr(8, 4, "Running evaluation.....")
          screen.refresh()
          bias_evaluation.run()
          results = bias_evaluation.results[0][0]
          screen.addstr(8, 4, "--> Evaluation Finished.")
          screen.refresh()

          screen.addstr(9, 4, "Generating plots....")
          screen.refresh()
          lats = new_lats
          lons = new_lons

          gridshape = (1, 1)
          sub_titles = [""]   #No subtitle set for now

          if not os.path.exists(working_directory):
               os.makedirs(working_directory)

          for i in range(len(results)):
               fname = working_directory + OUTPUT_PLOT + str(i)
               plotter.draw_contour_map(results[i], lats, lons, fname,
                               gridshape=gridshape, ptitle=plot_title,
                               subtitles=sub_titles)
          screen.addstr(9, 4, "--> Plots generated.")
          screen.refresh()
          screen.addstr(y-2, 1, "Press 'enter' to Exit: ")
          option = screen.getstr()