def test_load_file_with_captured_warnings(self):
     test_filename = tests.get_data_path(('NetCDF', 'testing', 'units.nc'))
     with warnings.catch_warnings(record=True) as filtered_warnings:
         with suppress_unit_warnings():
             iris.load(test_filename)
     filtered_warnings_list = [str(w.message) for w in filtered_warnings]
     self.assertEqual(len(filtered_warnings_list), 0)
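The suppress_unit_warnings helper used above is assumed to come from the module under test; a minimal sketch of such a context manager (hypothetical, for illustration only) might look like this:

import contextlib
import warnings

@contextlib.contextmanager
def suppress_unit_warnings():
    # Ignore unit-related warnings for the duration of the block.
    # (The message pattern below is an assumption, not Iris's actual text.)
    with warnings.catch_warnings():
        warnings.filterwarnings('ignore', message='.*units.*')
        yield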
Example #2
 def test_deprecated_callbacks(self):
     # Test that callback functions which return values are still supported, but that deprecation warnings are generated.
     
     def returns_cube(cube, field, filename):
         return cube
         
     def returns_no_cube(cube, field, filename):
         return iris.io.NO_CUBE
         
     fname = tests.get_data_path(["PP", "trui", "air_temp_init", "200812011200__qwqu12ff.initanl.pp"])
     
     # Catch all warnings for returns_cube
     with warnings.catch_warnings(record=True) as generated_warnings_cube:
         warnings.simplefilter("always")
         r = iris.load(fname, callback=returns_cube)
         
         # Test that our warnings are present in the generated warnings:
         gen_warnings_cube = [str(x.message) for x in generated_warnings_cube]
         self.assertIn(iris.io.CALLBACK_DEPRECATION_MSG, gen_warnings_cube, "Callback deprecation warning message not issued.")
     
     # Catch all warnings for returns_no_cube
     with warnings.catch_warnings(record=True) as generated_warnings_no_cube:
         warnings.simplefilter("always")  
         r = iris.load(fname, callback=returns_no_cube)
         
         # Test that our warnings are present in the generated warnings:
         gen_warnings_no_cube = [str(x.message) for x in generated_warnings_no_cube]
         self.assertIn(iris.io.CALLBACK_DEPRECATION_MSG, gen_warnings_no_cube, "Callback deprecation warning message not issued.")
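For contrast, a non-deprecated callback mutates the cube in place and raises iris.exceptions.IgnoreCubeException to drop a cube, rather than returning a value; a brief sketch (not part of the test above):

import iris.exceptions

def modern_callback(cube, field, filename):
    # Drop cubes we cannot name; otherwise annotate in place and return None.
    if cube.name() == 'unknown':
        raise iris.exceptions.IgnoreCubeException()
    cube.attributes['source_file'] = filename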
Example #3
 def test_unhandled_grid_type(self):
     self.filename = tests.get_data_path(("FF", "n48_multi_field"))
     with self.temp_filename() as temp_path:
         shutil.copyfile(self.filename, temp_path)
         ffv = um.FieldsFileVariant(temp_path, mode=um.FieldsFileVariant.UPDATE_MODE)
         ffv.fields[3].lbuser4 = 60
         ffv.close()
         with mock.patch("warnings.warn") as warn_fn:
             iris.load(temp_path)
         self.assertIn("Assuming the data is on a P grid.", warn_fn.call_args[0][0])
Example #4
 def test_load(self):
             
     cubes = iris.load(tests.get_data_path(('GRIB', 'rotated_uk', "uk_wrongparam.grib1")))
     self.assertCML(cubes, ("grib_load", "rotated.cml"))
     
     cubes = iris.load(tests.get_data_path(('GRIB', "time_processed", "time_bound.grib2")))
     self.assertCML(cubes, ("grib_load", "time_bound.cml"))
     
     cubes = iris.load(tests.get_data_path(('GRIB', "3_layer_viz", "3_layer.grib2")))
     cubes = iris.cube.CubeList([cubes[1], cubes[0], cubes[2]])
     self.assertCML(cubes, ("grib_load", "3_layer.cml"))
Example #5
    def test_iris_loading(self):
        ff32_fname = tests.get_data_path(('FF', 'n48_multi_field.ieee32'))
        ff64_fname = tests.get_data_path(('FF', 'n48_multi_field'))

        ff32_cubes = iris.load(ff32_fname)
        ff64_cubes = iris.load(ff64_fname)

        for ff32, ff64 in zip(ff32_cubes, ff64_cubes):
            # load the data
            _, _ = ff32.data, ff64.data
            self.assertEqual(ff32, ff64)
Example #6
    def setUp(self):
        # Call parent setup.
        super(TestFastCallbackLocationInfo, self).setUp()

        # Create a basic load test case.
        self.callback_collations = []
        self.callback_filepaths = []

        def fast_load_callback(cube, collation, filename):
            self.callback_collations.append(collation)
            self.callback_filepaths.append(filename)

        flds = self.fields(c_t='11112222', c_h='11221122', phn='01010101')
        self.test_filepath = self.save_fieldcubes(flds)
        iris.load(self.test_filepath, callback=fast_load_callback)
Example #7
 def test_duplicate(self):
     paths = (
         tests.get_data_path(["PP", "aPPglob1", "global.pp"]),
         tests.get_data_path(["PP", "aPPglob1", "gl?bal.pp"]),
     )
     cubes = iris.load(paths)
     self.assertEqual(len(cubes), 2)
Example #8
 def test_load_global_xyzt_gems_iter(self):
     # Test loading stepped single xyzt CF-netCDF file (multi-cube).
     for i, cube in enumerate(iris.load(
         tests.get_data_path(('NetCDF', 'global', 'xyz_t',
                              'GEMS_CO2_Apr2006.nc')))):
         self.assertCML(cube, ('netcdf',
                               'netcdf_global_xyzt_gems_iter_%d.cml' % i))
Example #9
 def test_time_mean(self):
     # This test for time-mean fields also tests negative forecast time.
     source_grib = tests.get_data_path(("GRIB", "time_processed",
                                        "time_bound.grib2"))
     cubes = iris.load(source_grib)
     expect_diffs = {'totalLength': (21232, 21227),
                     'productionStatusOfProcessedData': (0, 255),
                     'scaleFactorOfRadiusOfSphericalEarth': (MDI,
                                                             0),
                     'shapeOfTheEarth': (0, 1),
                     'scaledValueOfRadiusOfSphericalEarth': (MDI,
                                                             6367470),
                     'longitudeOfLastGridPoint': (356249908, 356249809),
                     'latitudeOfLastGridPoint': (-89999938, -89999944),
                     'typeOfGeneratingProcess': (0, 255),
                     'generatingProcessIdentifier': (128, 255),
                     'typeOfTimeIncrement': (2, 255)
                     }
     self.skip_keys.append('stepType')
     self.skip_keys.append('stepTypeInternal')
     with self.temp_filename(suffix='.grib2') as temp_file_path:
         iris.save(cubes, temp_file_path)
         self.assertGribMessageDifference(source_grib, temp_file_path,
                                          expect_diffs, self.skip_keys,
                                          skip_sections=[2])
Example #10
 def test_lbproc(self):
     data_path = tests.get_data_path(("PP", "meanMaxMin", "200806081200__qwpb.T24.pp"))
     # Set up standard name and T+24 constraint
     constraint = iris.Constraint("air_temperature", forecast_period=24)
     cubes = iris.load(data_path, constraint)
     cubes = iris.cube.CubeList([cubes[0], cubes[3], cubes[1], cubes[2], cubes[4]])
     self.assertCML(cubes, ("pp_rules", "lbproc_mean_max_min.cml"))
Example #11
    def test_units(self):
        # Test exercising graceful cube and coordinate units loading.
        cube0, cube1 = iris.load(tests.get_data_path(('NetCDF', 'testing',
                                                      'units.nc')))

        self.assertCML(cube0, ('netcdf', 'netcdf_units_0.cml'))
        self.assertCML(cube1, ('netcdf', 'netcdf_units_1.cml'))
Example #12
    def test_name2_field(self):
        filepath = tests.get_data_path(('NAME', 'NAMEII_field.txt'))
        name_cubes = iris.load(filepath)
        # Check gribapi version, because we currently have a known load/save
        # problem with gribapi 1v14 (at least).
        gribapi_ver = gribapi.grib_get_api_version()
        gribapi_fully_supported_version = (
            StrictVersion(gribapi_ver) < StrictVersion('1.13'))
        for i, name_cube in enumerate(name_cubes):
            if not gribapi_fully_supported_version:
                data = name_cube.data
                if np.min(data) == np.max(data):
                    msg = ('NAMEII cube #{}, "{}" has empty data : '
                           'SKIPPING test for this cube, as save/load will '
                           'not currently work with gribabi > 1v12.')
                    warnings.warn(msg.format(i, name_cube.name()))
                    continue

            with self.temp_filename('.grib2') as temp_filename:
                iris.save(name_cube, temp_filename)
                grib_cube = iris.load_cube(temp_filename, callback=name_cb)
                self.check_common(name_cube, grib_cube)
                self.assertCML(
                    grib_cube, tests.get_result_path(
                        ('integration', 'name_grib', 'NAMEII',
                         '{}_{}.cml'.format(i, name_cube.name()))))
Example #13
 def test_lbproc(self):
     data_path = tests.get_data_path(('PP', 'meanMaxMin', '200806081200__qwpb.T24.pp'))
     # Set up standard name and T+24 constraint
     constraint = iris.Constraint('air_temperature', forecast_period=24)
     cubes = iris.load(data_path, constraint)
     cubes = iris.cube.CubeList([cubes[0], cubes[3], cubes[1], cubes[2], cubes[4]]) 
     self.assertCML(cubes, ('pp_rules', 'lbproc_mean_max_min.cml'))
Example #14
    def get_variable_names(self, filenames, data_type=None):
        """
        This is exactly the same as the inherited version, except that it also excludes the mlev dimension.
        """
        import iris
        import cf_units as unit
        variables = []

        cubes = iris.load(filenames, callback=self.load_multiple_files_callback)

        for cube in cubes:
            is_time_lat_lon_pressure_altitude_or_has_only_1_point = True
            for dim in cube.dim_coords:
                units = dim.units
                if dim.points.size > 1 and \
                        not units.is_time() and \
                        not units.is_time_reference() and \
                        not units.is_vertical() and \
                        not units.is_convertible(unit.Unit('degrees')) and \
                        dim.var_name != 'mlev':
                    is_time_lat_lon_pressure_altitude_or_has_only_1_point = False
                    break
            if is_time_lat_lon_pressure_altitude_or_has_only_1_point:
                if cube.var_name:
                    variables.append(cube.var_name)
                else:
                    variables.append(cube.name())

        return set(variables)
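The dimension filtering above leans on the cf_units Unit predicates; a standalone illustration of what they return (assuming cf_units is installed):

import cf_units

time_unit = cf_units.Unit('hours since 1970-01-01')
lat_unit = cf_units.Unit('degrees_north')
print(time_unit.is_time_reference())                      # True
print(lat_unit.is_convertible(cf_units.Unit('degrees')))  # True
print(cf_units.Unit('Pa').is_vertical())                  # True (pressure counts as vertical)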
Example #15
 def test_y_fastest(self):
     cubes = iris.load(tests.get_data_path(("GRIB", "y_fastest", "y_fast.grib2")))
     self.assertCML(cubes, ("grib_load", "y_fastest.cml"))
     iplt.contourf(cubes[0])
     iplt.gcm(cubes[0]).drawcoastlines()
     plt.title("y changes fastest")
     self.check_graphic()
Example #16
    def get_variable_names(self, filenames, data_type=None):
        import iris
        import cf_units as unit
        from cis.utils import single_warnings_only
        # Removes warnings and prepares for future Iris change
        iris.FUTURE.netcdf_promote = True

        variables = []

        # Filter the warnings so that they only appear once - otherwise you get lots of repeated warnings
        with single_warnings_only():
            cubes = iris.load(filenames)

        for cube in cubes:
            is_time_lat_lon_pressure_altitude_or_has_only_1_point = True
            for dim in cube.dim_coords:
                units = dim.units
                if dim.points.size > 1 and \
                        not units.is_time() and \
                        not units.is_time_reference() and \
                        not units.is_vertical() and \
                        not units.is_convertible(unit.Unit('degrees')):
                    is_time_lat_lon_pressure_altitude_or_has_only_1_point = False
                    break
            if is_time_lat_lon_pressure_altitude_or_has_only_1_point:
                name = cube.var_name or cube.name()
                if name == 'unknown' and 'STASH' in cube.attributes:
                    name = '{}'.format(cube.attributes['STASH'])
                variables.append(name)

        return set(variables)
Example #17
 def test_duplicate(self):
     paths = (
         tests.get_data_path(['PP', 'aPPglob1', 'global.pp']),
         tests.get_data_path(['PP', 'aPPglob1', 'gl?bal.pp'])
     )
     cubes = iris.load(paths)
     self.assertEqual(len(cubes), 2)
Example #18
    def get_variable_names(self, filenames, data_type=None):
        import iris
        import cf_units as unit
        from cis.utils import single_warnings_only

        variables = []

        # Filter the warnings so that they only appear once - otherwise you get lots of repeated warnings
        with single_warnings_only():
            cubes = iris.load(filenames)

        for cube in cubes:
            is_time_lat_lon_pressure_altitude_or_has_only_1_point = True
            for dim in cube.dim_coords:
                units = dim.units
                if dim.points.size > 1 and \
                        not units.is_time() and \
                        not units.is_time_reference() and \
                        not units.is_vertical() and \
                        not units.is_convertible(unit.Unit('degrees')):
                    is_time_lat_lon_pressure_altitude_or_has_only_1_point = False
                    break
            if is_time_lat_lon_pressure_altitude_or_has_only_1_point:
                if cube.var_name:
                    variables.append(cube.var_name)
                else:
                    variables.append(cube.name())

        return set(variables)
Example #19
    def event_means(self):
        """Calculate time mean and ntime for each event in time domain.

        Create cube_event_means and cube_event_ntimes attributes."""
        # Check that time domain is of type 'event'
        self.tdomain.time_domain_type()
        if self.tdomain.type!='event':
            raise UserWarning("Warning: time domain type is '{0.tdomain.type}'.  It must be 'event'.".format(self))
        # Load list of cubes
        x1=iris.load(self.filein1,self.name)
        # Loop over events in time domain
        cube_event_means=[]
        cube_event_ntimes=[]
        for eventc in self.tdomain.partial_date_times:
            # Create time constraint
            time_beg=eventc[0]
            time_end=eventc[1]
            print('time_beg: {0!s}'.format(time_beg))
            print('time_end: {0!s}'.format(time_end))
            time_constraint=iris.Constraint(time=lambda cell: time_beg <=cell<= time_end)
            with iris.FUTURE.context(cell_datetime_objects=True):
                x2=x1.extract(time_constraint)
            x3=x2.concatenate_cube()
            ntime=x3.coord('time').shape[0]
            cube_event_ntimes.append(ntime)
            x4=x3.collapsed('time',iris.analysis.MEAN)
            cube_event_means.append(x4)
        self.cube_event_means=cube_event_means
        self.cube_event_ntimes=cube_event_ntimes
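The per-event extraction above can be sketched outside the class as follows (hypothetical cube list x1; note that in Iris >= 2 time cells compare against datetime-like objects by default, so the iris.FUTURE context is no longer needed):

import iris
import iris.analysis

# Constrain to a single event window using the cell's datetime-like point.
tc = iris.Constraint(time=lambda cell: cell.point.year == 2000 and cell.point.month == 1)
event = x1.extract(tc).concatenate_cube()
ntime = event.coord('time').shape[0]
event_mean = event.collapsed('time', iris.analysis.MEAN)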
Example #20
 def test_round_trip(self):
     cube, = iris.load(self.fname)
     with self.temp_filename(suffix='.nc') as filename:
         iris.save(cube, filename, unlimited_dimensions=[])
         round_cube, = iris.load_raw(filename)
         self.assertEqual(len(round_cube.cell_measures()), 1)
         self.assertEqual(round_cube.cell_measures()[0].measure, 'area')
Example #21
def cosp_plot_column_2D(fnc, varname='equivalent_reflectivity_factor', level=0, column = 0, time = 0):
    """
    Function that plots one column of lat/lon data.
    """

    plt.interactive(True)
    fig=plt.figure()
    ax = fig.add_subplot(111)
    # Read cube
    z=iris.load(fnc)
    z=z[0]

    # Get coords
    c = z.coord('column')
    t = z.coord('time')

    # Select time and column
    y=z.extract(iris.Constraint(column=c.points[column]))
    y=y.extract(iris.Constraint(time=t.points[time]))
    # Select level. Not managed to make constrain with 'atmospheric model level'
    y=y[level]

    color_map = mpl_cm.get_cmap('Paired')
    qplt.pcolormesh(y,cmap=color_map,vmin=-20,vmax=20)
    plt.gca().coastlines()

    return
Example #22
    def _add_available_aux_coords(self, cube, filenames):
        from iris.aux_factory import HybridPressureFactory
        from iris.coords import AuxCoord
        from iris.exceptions import CoordinateNotFoundError
        import iris

        if cube.coords('hybrid A coefficient at layer midpoints'):

            # First convert the hybrid coefficients to hPa, so that air pressure will be in hPa
            cube.coord('hybrid A coefficient at layer midpoints').convert_units('hPa')

            try:
                surface_pressure = cube.coord('surface pressure')
            except iris.exceptions.CoordinateNotFoundError as e:
                # If there isn't a surface pressure coordinate we can try and pull out the lowest pressure level
                with demote_warnings():
                    surface_pressure_cubes = iris.load(filenames, 'atmospheric pressure at interfaces',
                                                       callback=self.load_multiple_files_callback)
                surface_pressure_cube = surface_pressure_cubes.concatenate_cube()[:, -1, :, :]
                surface_pressure = AuxCoord(points=surface_pressure_cube.data, long_name='surface pressure', units='Pa')
                cube.add_aux_coord(surface_pressure, (0, 2, 3))
 
            surface_pressure.convert_units('hPa')
 
            if len(cube.coords(long_name='hybrid level at layer midpoints')) > 0:
                cube.add_aux_factory(HybridPressureFactory(delta=cube.coord('hybrid A coefficient at layer midpoints'),
                                                           sigma=cube.coord('hybrid B coefficient at layer midpoints'),
                                                          surface_air_pressure=surface_pressure))
Example #23
def load_all_steps(pp_name):
    cubes=iris.load(pp_name)
    
    #########################################
    log.info("pp_name="+str(step_file))
    #########################################

    for cube in cubes:
        #capturing stash code from pp file
        stash_code=ukl.get_stash(cube)
        #print stash_code
        stashcodes.append(stash_code)
        #print stashcodes
        if stash_code in vd.variable_reference_stash:
            if not isinstance(cube.long_name,str):
                cube.long_name=vd.variable_reference_stash[stash_code].long_name
                # print 'added long_name',cube.long_name, 'to', stash_code
                if not isinstance(cube._var_name,str):
                    if not vd.variable_reference_stash[stash_code].short_name=='':
                        cube._var_name=vd.variable_reference_stash[stash_code].short_name
                        # print 'added short_name as cube._var_name',cube._var_name, 'to', stash_code
        #########################################
        log.info("cube.long_name= "+str(cube.long_name))
        #########################################
        
        folder_NETCDF=output_files_directory+'All_time_steps/'
        if cube._standard_name:
            saving_name=folder_NETCDF+'All_time_steps_'+stash_code+'_'+cube._standard_name+'.nc'
        elif isinstance(cube.long_name,str):
            saving_name=folder_NETCDF+'All_time_steps_'+stash_code+'_'+cube.long_name+'.nc'
        else:
            saving_name=folder_NETCDF+'All_time_steps_'+stash_code+'.nc'

        iris.save(cube,saving_name, netcdf_format="NETCDF4")
Example #24
    def test_FAIL_pseudo_levels(self):
        # Show how pseudo levels are handled.
        flds = self.fields(c_t='000111222',
                           pse='123123123')
        file = self.save_fieldcubes(flds)
        results = iris.load(file)
        expected = CubeList(flds).merge()

# NOTE: this problem is now fixed : Structured load gives the same answer.
#
#        if not self.do_fast_loads:
#            expected = CubeList(flds).merge()
#        else:
#            # Structured loading doesn't understand pseudo-level.
#            # The result is rather horrible...
#
#            # First get a cube over 9 timepoints.
#            flds = self.fields(c_t='012345678',
#                               pse=1)  # result gets level==2, not clear why.
#
#            # Replace the time coord with an AUX coord.
#            nine_timepoints_cube = CubeList(flds).merge_cube()
#            co_time = nine_timepoints_cube.coord('time')
#            nine_timepoints_cube.remove_coord(co_time)
#            nine_timepoints_cube.add_aux_coord(AuxCoord.from_coord(co_time),
#                                               0)
#            # Set the expected timepoints equivalent to '000111222'.
#            nine_timepoints_cube.coord('time').points = \
#                np.array([0.0, 0.0, 0.0, 24.0, 24.0, 24.0, 48.0, 48.0, 48.0])
#            # Make a cubelist with this single cube.
#            expected = CubeList([nine_timepoints_cube])

        self.assertEqual(results, expected)
Example #25
 def test_phenomena(self):
     # Show that different phenomena are merged into distinct cubes.
     flds = self.fields(c_t='1122', phn='0101')
     file = self.save_fieldcubes(flds)
     results = iris.load(file)
     expected = CubeList(flds).merge()
     self.assertEqual(results, expected)
Example #26
    def test_FAIL_scalar_vector_concatenate(self):
        # Structured load can produce a scalar coordinate from one file, and a
        # matching vector one from another file, but these won't "combine".
        # We'd really like to fix this one...
        single_timepoint_fld, = self.fields(c_t='1')
        multi_timepoint_flds = self.fields(c_t='23')
        file_single = self.save_fieldcubes([single_timepoint_fld],
                                           basename='single')
        file_multi = self.save_fieldcubes(multi_timepoint_flds,
                                          basename='multi')

        results = iris.load((file_single, file_multi))
        if not self.do_fast_loads:
            # This is what we'd LIKE to get (what iris.load gives).
            expected = CubeList(multi_timepoint_flds +
                                [single_timepoint_fld]).merge()
        else:
            # This is what we ACTUALLY get at present.
            # It can't combine the scalar and vector time coords.
            expected = CubeList([CubeList(multi_timepoint_flds).merge_cube(),
                                 single_timepoint_fld])
            # NOTE: in this case, we need to sort the results to ensure a
            # repeatable ordering, because ??somehow?? the random temporary
            # directory name affects the ordering of the cubes in the result !
            results = CubeList(sorted(results,
                                      key=lambda cube: cube.shape))

        self.assertEqual(results, expected)
Example #27
def getJobs(db):
    try:
        q = tinydb.Query()
        # Reconstructed query (the original line was garbled): select rows
        # that have not been saved and were not successful.
        rows = db.search((q.saved == None) & (q.success == False))


        info = manifest.runnames[file.split("_")[-2]]
        newfiles = []
        variables = info["variables"]

        for variable in info["variables"]:
            print "Ingesting " + variable
            try:
                thisdata = iris.load_cube(file, variable)
                stem, fname = os.path.split(file)
                newname = file.split("_")[-2] + "_" + variable + "_" + fname.split("_")[0] + "_piotr_" + fname.split("_")[-1].replace("grib2", "nc")
                iris.save(thisdata, os.path.join(stem, newname))
                #postJob(newname)
            except iris.exceptions.ConstraintMismatchError:
                print variable + " not found in file " + file + ". continuing"
                cubes = iris.load(file)
                print cubes

    except:
        raise
    finally:
        print "Removing file", file
Example #28
 def test_basic(self):
     # Show that basic load merging works.
     flds = self.fields(c_t='123', cft='000', ctp='123', c_p=0)
     file = self.save_fieldcubes(flds)
     results = iris.load(file)
     expected = CubeList(flds).merge()
     self.assertEqual(results, expected)
Example #29
  def showOpenDialog(self):
    """
    Handles the loading of a file, and calls functions to set up the GUI accordingly and then display the cube
    """
    
    self.filename, _ = QtGui.QFileDialog.getOpenFileName(self, 'Open File', '/project/avd/iris/resources/public_sample_data_1.0')
    self.clearAll()
    
    QApplication.setOverrideCursor(QtCore.Qt.WaitCursor)
    self.statusBar().showMessage('Loading Cube')
    try:
      self.cubes = iris.load(self.filename)
    except ValueError as e:
      flags = QtGui.QMessageBox.StandardButton.Ok
      QtGui.QMessageBox.critical(self, 'Unable to Load Cube: File type could not be read', str(e), flags)
      self.statusBar().showMessage('Load Failed')
      QApplication.restoreOverrideCursor()
      return  # nothing was loaded; avoid using undefined self.cubes below
    QApplication.restoreOverrideCursor()
    
    for self.cube in self.cubes:
      self.selectCube.addItem(self.cube.name())        #fills the selectCube combo box with the cubes from the file
    if len(self.cubes) == 1:
      self.selectCube.setEnabled(False)
    else:
      self.selectCube.setEnabled(True)
    self.cubeLoaded = True
    
    self.printCubeBrowser.setText(str(self.cube))

    self.setDimensionCombos()
    self.update()
Example #30
    def test_netcdf_hybrid_height(self):
        # Test saving a CF-netCDF file which contains a hybrid height
        # (i.e. dimensionless vertical) coordinate.
        # Read PP input file.
        names = ['air_potential_temperature', 'surface_altitude']
        file_in = tests.get_data_path(
            ('PP', 'COLPEX', 'small_colpex_theta_p_alt.pp'))
        cube = iris.load_cube(file_in, names[0])

        # Write Cube to netCDF file.
        with self.temp_filename(suffix='.nc') as file_out:
            iris.save(cube, file_out)

            # Check the netCDF file against CDL expected output.
            self.assertCDL(file_out,
                           ('netcdf', 'netcdf_save_hybrid_height.cdl'))

            # Read netCDF file.
            cubes = iris.load(file_out)
            cubes_names = [c.name() for c in cubes]
            self.assertEqual(cubes_names, names)

            # Check the PP read, netCDF write, netCDF read mechanism.
            self.assertCML(cubes.extract(names[0])[0],
                           ('netcdf', 'netcdf_save_load_hybrid_height.cml'))
Example #31
def load_cubelist(
    filepath: Union[str, List[str]],
    constraints: Optional[Union[Constraint, str]] = None,
    no_lazy_load: bool = False,
) -> CubeList:
    """Load cubes from filepath(s) into a cubelist. Strips off all
    var names except for "threshold"-type coordinates, where this is different
    from the standard or long name.

    Args:
        filepath:
            Filepath(s) that will be loaded.
        constraints:
            Constraint to be applied when loading from the input filepath.
            This can be in the form of an iris.Constraint or could be a string
            that is intended to match the name of the cube.
            The default is None.
        no_lazy_load:
            If True, bypass cube deferred (lazy) loading and load the whole
            cube into memory. This can increase performance at the cost of
            memory. If False (default) then lazy load.

    Returns:
        CubeList that has been created from the input filepath given the
        constraints provided.
    """
    # Remove legacy metadata prefix cube if present
    constraints = (
        iris.Constraint(cube_func=lambda cube: cube.long_name != "prefixes")
        & constraints
    )

    # Load each file individually to avoid partial merging (not used
    # iris.load_raw() due to issues with time representation)
    with iris_nimrod_patcher():
        if isinstance(filepath, str):
            cubes = iris.load(filepath, constraints=constraints)
        else:
            cubes = iris.cube.CubeList([])
            for item in filepath:
                cubes.extend(iris.load(item, constraints=constraints))

    if not cubes:
        message = "No cubes found using constraints {}".format(constraints)
        raise ValueError(message)

    # Remove var_name from cubes and coordinates (except where needed to
    # describe probabilistic data)
    cubes = strip_var_names(cubes)

    for cube in cubes:

        # Remove metadata attributes pointing to legacy prefix cube
        cube.attributes.pop("bald__isPrefixedBy", None)

        # Ensure the probabilistic coordinates are the first coordinates within
        # a cube and are in the specified order.
        enforce_coordinate_ordering(cube, ["realization", "percentile", "threshold"])
        # Ensure the y and x dimensions are the last within the cube.
        y_name = cube.coord(axis="y").name()
        x_name = cube.coord(axis="x").name()
        enforce_coordinate_ordering(cube, [y_name, x_name], anchor_start=False)
        if no_lazy_load:
            # Force cube's data into memory by touching the .data attribute.
            # pylint: disable=pointless-statement
            cube.data

    return cubes
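A usage sketch for load_cubelist (the file path is hypothetical, and the IMPROVER-style helpers imported above are assumed to be available):

cubes = load_cubelist("forecast.nc", constraints="air_temperature", no_lazy_load=True)
for cube in cubes:
    print(cube.name(), cube.shape)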
Example #32
from matplotlib.colors import LogNorm
from mpl_toolkits.basemap import Basemap
from matplotlib.patches import Polygon

import iris
import iris.plot as iplt
import iris.quickplot as qplt

c5h8_stash = ['m01s34i027']

filename = '/exports/csce/datastore/geos/users/s1731217/output_ukca/u-av256/apm.pp/*'

stash_constraint = iris.AttributeConstraint(STASH=c5h8_stash[0])
latitude_constraint = iris.Constraint(coord_values={'latitude':lambda cell: 17.5 < cell < 45})
longitude_constraint = iris.Constraint(coord_values={'longitude':lambda cell: 107.5 < cell < 125})
cube = iris.load(filename, stash_constraint).extract(latitude_constraint & longitude_constraint)


print cube

print cube[0]

cube_ppb = cube[0]*29.0*1.0e9/65.0

print cube_ppb

fig= plt.figure(figsize=(15, 8))

plt.subplot(3,4,1)
plt.title('C5H8 Jan 2014',  fontsize=5)
tick_levels = [0.01, 0.1, 0.2, 0.4, 0.6, 0.8, 1, 1.5, 2, 2.5, 3, 3.5, 4]
Example #33
 def insert_netcdf(self, path):
     for cube in iris.load(path):
         for coord in cube.coords():
             self.insert_coordinate(path, cube.var_name, coord.name())
Example #34
 def _handle_load(self, sender):
     """Load button action."""
     fpfs = [fp.files for fp in self.file_pickers]
     selected_files = reduce(list.__add__, (list(files) for files in fpfs))
     self._cubes = iris.load(selected_files)
     self.update_cubes_list()
Example #35
import numpy as np
import iris
import matplotlib.pyplot as plt

cube1 = iris.load('japp_default_new.nc')[0]
cube2 = iris.load('japp_lehtinen_rc_units.nc')[0]

level_num = cube1.coord('model_level_number').points
plt.figure()
plt.xlabel('Japp')
plt.ylabel('Model_level_number')
plt.plot((cube1[:, 72, 96].data), level_num, 'b*-', (cube2[:, 72, 96].data),
         level_num, 'r*-')
plt.title('Japp_both_at (0,0)')
plt.legend(('default', 'lehtinen'))
plt.grid(True)
#plt.show()
#plt.show()

plt.figure()
plt.xlabel('Japp_default')
plt.ylabel('Model_level_number')
plt.plot((cube1[:, 72, 96].data), level_num, 'b*-')
plt.title('Japp_default_at (0,0)')
#plt.show()

plt.figure()
plt.xlabel('Japp_lehtinen')
plt.ylabel('Model_level_number')
plt.plot((cube2[:, 72, 96].data), level_num, 'b*-')
plt.title('Japp_lehtinen_at (0,0)')
Example #36
 def load_forecast(self):
     return iris.load(self.forecast_filename)
Example #37
def main():

# Plot diagnostics, model and pressure levels etc. to plot on for looping through

 plot_type='mean'
 plot_diags=['temp', 'sp_hum']
 plot_levels = [925, 850, 700, 500] 
 #plot_levels = [925]
 #experiment_ids = ['djznq', 'djzns', 'dklyu', 'dkmbq', 'dklwu', 'dklzq' ]
 experiment_ids = ['dkjxq']
#experiment_id = 'djzny'

 p_levels = [1000, 950, 925, 850, 700, 500, 400, 300, 250, 200, 150, 100, 70, 50, 30, 20, 10]


###### Unrotate global model data ##############################

######### Regrid to global, and difference  #######
############################################################################

## Load global wind and orography
 
 fw_global = '/nfs/a90/eepdw/Mean_State_Plot_Data/pp_files/djzn/djzny/30201_mean.pp'
 fo_global = '/nfs/a90/eepdw/Mean_State_Plot_Data/pp_files/djzn/djzny/33.pp'
    
 u_global,v_global = iris.load(fw_global)
 oro_global = iris.load_cube(fo_global)
    
# Unrotate global coordinates

 cs_glob = u_global.coord_system('CoordSystem')
 cs_glob_v = v_global.coord_system('CoordSystem')

 cs_glob_oro = oro_global.coord_system('CoordSystem')

 lat_g = u_global.coord('grid_latitude').points
 lon_g = u_global.coord('grid_longitude').points

 lat_g_oro = oro_global.coord('grid_latitude').points
 lon_g_oro = oro_global.coord('grid_longitude').points
    
 if cs_glob!=cs_glob_v:
    print 'Global model u and v winds have different poles of rotation'

# Unrotate global winds

 if isinstance(cs_glob, iris.coord_systems.RotatedGeogCS):
        print ' Global Model - Winds - djzny - Unrotate pole %s' % cs_glob
        lons_g, lats_g = np.meshgrid(lon_g, lat_g)
        lons_g,lats_g = iris.analysis.cartography.unrotate_pole(lons_g,lats_g, cs_glob.grid_north_pole_longitude, cs_glob.grid_north_pole_latitude)
        
        lon_g=lons_g[0]
        lat_g=lats_g[:,0]

 for i, coord in enumerate (u_global.coords()):
            if coord.standard_name=='grid_latitude':
                lat_dim_coord_uglobal = i
            if coord.standard_name=='grid_longitude':
                lon_dim_coord_uglobal = i

 csur_glob=cs_glob.ellipsoid
 u_global.remove_coord('grid_latitude')
 u_global.remove_coord('grid_longitude')
 u_global.add_dim_coord(iris.coords.DimCoord(points=lat_g, standard_name='grid_latitude', units='degrees', coord_system=csur_glob), lat_dim_coord_uglobal)
 u_global.add_dim_coord(iris.coords.DimCoord(points=lon_g, standard_name='grid_longitude', units='degrees', coord_system=csur_glob), lon_dim_coord_uglobal)

#print u_global
    
 v_global.remove_coord('grid_latitude')
 v_global.remove_coord('grid_longitude')
 v_global.add_dim_coord(iris.coords.DimCoord(points=lat_g, standard_name='grid_latitude', units='degrees', coord_system=csur_glob),  lat_dim_coord_uglobal)
 v_global.add_dim_coord(iris.coords.DimCoord(points=lon_g, standard_name='grid_longitude', units='degrees', coord_system=csur_glob),  lon_dim_coord_uglobal)
    
 #print v_global

# Unrotate global model

 if isinstance(cs_glob_oro, iris.coord_systems.RotatedGeogCS):
        print ' Global Model - Orography - djzny - Unrotate pole %s - Winds and other diagnostics may have different number of grid points' % cs_glob_oro
        lons_go, lats_go = np.meshgrid(lon_g_oro, lat_g_oro)
        lons_go,lats_go = iris.analysis.cartography.unrotate_pole(lons_go,lats_go, cs_glob_oro.grid_north_pole_longitude, cs_glob_oro.grid_north_pole_latitude)
        
        lon_g_oro=lons_go[0]
        lat_g_oro=lats_go[:,0]

 for i, coord in enumerate (oro_global.coords()):
            if coord.standard_name=='grid_latitude':
                lat_dim_coord_og = i
            if coord.standard_name=='grid_longitude':
                lon_dim_coord_og = i 

 csur_glob_oro=cs_glob_oro.ellipsoid
 oro_global.remove_coord('grid_latitude')
 oro_global.remove_coord('grid_longitude')
 oro_global.add_dim_coord(iris.coords.DimCoord(points=lat_g_oro, standard_name='grid_latitude', units='degrees', coord_system=csur_glob_oro), lat_dim_coord_og)
 oro_global.add_dim_coord(iris.coords.DimCoord(points=lon_g_oro, standard_name='grid_longitude', units='degrees', coord_system=csur_glob_oro), lon_dim_coord_og)

   ###############################################################################
####################  Load global heights and temp/sp_hum #####################

 
 f_glob_h = '/nfs/a90/eepdw/Mean_State_Plot_Data/Mean_Heights_Temps_etc/408_pressure_levels_interp_pressure_djzny_%s' % (plot_type)
 
######################################################################################
 with h5py.File(f_glob_h, 'r') as i:
     mh = i['%s' % plot_type]
     mean_heights_global = mh[...]

######################################################################################
## Loop through experiment id's ######################################################
 
    
 for  pl in plot_diags:
  plot_diag=pl

  f_glob_d = '/nfs/a90/eepdw/Mean_State_Plot_Data/Mean_Heights_Temps_etc/%s_pressure_levels_interp_djzny_%s' % (plot_diag, plot_type)
  
  with h5py.File(f_glob_d, 'r') as i:
   mg = i['%s' % plot_type]
   mean_var_global = mg[...]

  for experiment_id in experiment_ids:
    expmin1 = experiment_id[:-1]

 

    ###############################################################################
####################  Load global heights and temp/sp_hum #####################

    fname_h = '/nfs/a90/eepdw/Mean_State_Plot_Data/Mean_Heights_Temps_etc/408_pressure_levels_interp_pressure_%s_%s' % (experiment_id, plot_type)
    fname_d = '/nfs/a90/eepdw/Mean_State_Plot_Data/Mean_Heights_Temps_etc/%s_pressure_levels_interp_%s_%s' % (plot_diag, experiment_id, plot_type)
    # print fname_h
    # print fname_d
#  Height data file
    with h5py.File(fname_h, 'r') as i:
        mh = i['%s' % plot_type]
        mean_heights = mh[...]
    # print mean_heights.shape
    with h5py.File(fname_d, 'r') as i:
        mh = i['%s' % plot_type]
        mean_var = mh[...]
    # print mean_var.shape

    f_oro =  '/nfs/a90/eepdw/Mean_State_Plot_Data/pp_files/%s/%s/409.pp' % (expmin1, experiment_id)
    oro = iris.load_cube(f_oro)

    #print oro
    
    for i, coord in enumerate (oro.coords()):
        if coord.standard_name=='grid_latitude':
            lat_dim_coord_oro = i
        if coord.standard_name=='grid_longitude':
            lon_dim_coord_oro = i

    fu = '/nfs/a90/eepdw/Mean_State_Plot_Data/pp_files/%s/%s/30201_mean.pp' % (expmin1, experiment_id)
       
    u_wind,v_wind = iris.load(fu)
    
# Wind may have different number of grid points so need to do unrotate again for wind grid points

    lat_w = u_wind.coord('grid_latitude').points
    lon_w = u_wind.coord('grid_longitude').points
    p_levs = u_wind.coord('pressure').points

    lat = oro.coord('grid_latitude').points
    lon = oro.coord('grid_longitude').points
    

    cs_w = u_wind.coord_system('CoordSystem')
    cs = oro.coord_system('CoordSystem')

    if isinstance(cs_w, iris.coord_systems.RotatedGeogCS):
        print ' Wind - %s - Unrotate pole %s' % (experiment_id, cs_w)
        lons_w, lats_w = np.meshgrid(lon_w, lat_w)
        lons_w,lats_w = iris.analysis.cartography.unrotate_pole(lons_w,lats_w, cs_w.grid_north_pole_longitude, cs_w.grid_north_pole_latitude)
        
        lon_w=lons_w[0]
        lat_w=lats_w[:,0]

        csur_w=cs_w.ellipsoid

        for i, coord in enumerate (u_wind.coords()):
            if coord.standard_name=='grid_latitude':
                lat_dim_coord_uwind = i
            if coord.standard_name=='grid_longitude':
                lon_dim_coord_uwind = i
       
        u_wind.remove_coord('grid_latitude')
        u_wind.remove_coord('grid_longitude')
        u_wind.add_dim_coord(iris.coords.DimCoord(points=lat_w, standard_name='grid_latitude', units='degrees', coord_system=csur_w),lat_dim_coord_uwind )
        u_wind.add_dim_coord(iris.coords.DimCoord(points=lon_w, standard_name='grid_longitude', units='degrees', coord_system=csur_w), lon_dim_coord_uwind)

        v_wind.remove_coord('grid_latitude')
        v_wind.remove_coord('grid_longitude')
        v_wind.add_dim_coord(iris.coords.DimCoord(points=lat_w, standard_name='grid_latitude', units='degrees', coord_system=csur_w), lat_dim_coord_uwind)
        v_wind.add_dim_coord(iris.coords.DimCoord(points=lon_w, standard_name='grid_longitude', units='degrees', coord_system=csur_w),lon_dim_coord_uwind )
        
    if isinstance(cs, iris.coord_systems.RotatedGeogCS):
        print ' 409.pp  - %s - Unrotate pole %s' % (experiment_id, cs)
        lons, lats = np.meshgrid(lon, lat) 

        lon_low= np.min(lons)
        lon_high = np.max(lons)
        lat_low = np.min(lats)
        lat_high = np.max(lats)

        lon_corners, lat_corners = np.meshgrid((lon_low, lon_high), (lat_low, lat_high))

        lons,lats = iris.analysis.cartography.unrotate_pole(lons,lats, cs.grid_north_pole_longitude, cs.grid_north_pole_latitude)
        lon_corner_u,lat_corner_u = iris.analysis.cartography.unrotate_pole(lon_corners, lat_corners, cs.grid_north_pole_longitude, cs.grid_north_pole_latitude)
        #lon_highu,lat_highu = iris.analysis.cartography.unrotate_pole(lon_high, lat_high, cs.grid_north_pole_longitude, cs.grid_north_pole_latitude)

        lon=lons[0]
        lat=lats[:,0]

        lon_low = lon_corner_u[0,0]
        lon_high = lon_corner_u[0,1]
        lat_low = lat_corner_u[0,0]
        lat_high = lat_corner_u[1,0]
                
        for i, coord in enumerate (oro.coords()):
            if coord.standard_name=='grid_latitude':
                lat_dim_coord_oro = i
            if coord.standard_name=='grid_longitude':
                lon_dim_coord_oro = i

        csur=cs.ellipsoid  
     
        oro.remove_coord('grid_latitude')
        oro.remove_coord('grid_longitude')
        oro.add_dim_coord(iris.coords.DimCoord(points=lat, standard_name='grid_latitude', units='degrees', coord_system=csur), lat_dim_coord_oro)
        oro.add_dim_coord(iris.coords.DimCoord(points=lon, standard_name='grid_longitude', units='degrees', coord_system=csur), lon_dim_coord_oro)

    else:

        lons, lats = np.meshgrid(lon, lat)
        lons_w, lats_w = np.meshgrid(lon_w, lat_w)

        lon_low= np.min(lons)
        lon_high = np.max(lons)
        lat_low = np.min(lats)
        lat_high = np.max(lats)
        


############## Regrid and Difference #################################

  # Regrid Height and Temp/Specific humidity to global grid
    h_regrid = np.empty((len(lat_g_oro), len(lon_g_oro)))
    v_regrid = np.empty((len(lat_g_oro), len(lon_g_oro)))

    for p in plot_levels:
  
        ### Search for pressure level match
    
        s = np.searchsorted(p_levels[::-1], p)
        h_regrid = scipy.interpolate.griddata((lats.flatten(),lons.flatten()),mean_heights[:,:,-(s+1)].flatten() , (lats_go,lons_go),method='linear')


        v_regrid = scipy.interpolate.griddata((lats.flatten(),lons.flatten()),mean_var[:,:,-(s+1)].flatten() , (lats_go,lons_go),method='linear')
   
# Difference heights

        plt_h = np.where(np.isnan(h_regrid), np.nan, h_regrid - mean_heights_global[:,:,-(s+1)])

#Difference temperature/specific humidity
   
        plt_v = np.where(np.isnan(v_regrid), np.nan, v_regrid - mean_var_global[:,:,-(s+1)])
    
    # Set u,v for winds, linear interpolate to approx. 2 degree grid
        sc =  np.searchsorted(p_levs, p)

                        
    ##### Does not work on iris1.0 as on Leeds computers. Does work on later versions

        #u_interp = u_wind[sc,:,:]
        #v_interp = v_wind[sc,:,:].
        #sample_points = [('grid_latitude', np.arange(lat_low,lat_high,2)), ('grid_longitude', np.arange(lon_low,lon_high,2))]

        #u = iris.analysis.interpolate.linear(u_interp, sample_points, extrapolation_mode='linear')
        #v = iris.analysis.interpolate.linear(v_interp, sample_points).data


    ##### Does work on Iris 1.0

        # 2 degree lats lon lists for wind regridding on plot

        lat_wind_1deg = np.arange(lat_low,lat_high, 2)
        lon_wind_1deg = np.arange(lon_low,lon_high, 2)

    ### Regrid winds to global, difference, and then regrid to 2 degree spacing

        fl_la_lo = (lats_w.flatten(),lons_w.flatten())

        # print u_wind[sc,:,:].data.flatten().shape

        u_wind_rg_to_glob = scipy.interpolate.griddata(fl_la_lo, u_wind[sc,:,:].data.flatten(), (lats_g, lons_g), method='linear')
        v_wind_rg_to_glob = scipy.interpolate.griddata(fl_la_lo, v_wind[sc,:,:].data.flatten(), (lats_g, lons_g), method='linear')

        u_w=u_wind_rg_to_glob-u_global[sc,:,:].data
        v_w=v_wind_rg_to_glob-v_global[sc,:,:].data

        #u_interp = u_wind[sc,:,:].data
        #v_interp = v_wind[sc,:,:].data
       
        lons_wi, lats_wi = np.meshgrid(lon_wind_1deg, lat_wind_1deg)

        fl_la_lo = (lats_g.flatten(),lons_g.flatten())

        u = scipy.interpolate.griddata(fl_la_lo, u_w.flatten(), (lats_wi, lons_wi), method='linear')
        v = scipy.interpolate.griddata(fl_la_lo, v_w.flatten(), (lats_wi, lons_wi), method='linear')
       


        
#######################################################################################

### Plotting #########################################################################

    

        #m_title = 'Height of %s-hPa level (m)' % (p)

# Set pressure height contour min/max
        if p == 925:
            clev_min = -24.
            clev_max = 24.
        elif p == 850:
            clev_min = -24.
            clev_max = 24.
        elif p == 700:
            clev_min = -24.
            clev_max = 24.
        elif p == 500:
            clev_min = -24.
            clev_max = 24.
        else:
            print 'Contour min/max not set for this pressure level'

# Set potential temperature min/max       
        if p == 925:
            clevpt_min = -10.
            clevpt_max = 10.
        elif p == 850:
            clevpt_min = -3.
            clevpt_max = 3.
        elif p == 700:
            clevpt_min = -3.
            clevpt_max = 3.
        elif p == 500:
            clevpt_min = -3.
            clevpt_max = 3.
        else:
            print 'Potential temperature min/max not set for this pressure level'

 # Set specific humidity min/max       
        if p == 925:
            clevsh_min = -0.0025
            clevsh_max = 0.0025
        elif p == 850:
            clevsh_min = -0.0025
            clevsh_max = 0.0025
        elif p == 700:
            clevsh_min = -0.0025
            clevsh_max = 0.0025
        elif p == 500:
            clevsh_min = -0.0025
            clevsh_max = 0.0025
        else:
            print 'Specific humidity min/max not set for this pressure level'

       #clevs_col = np.arange(clev_min, clev_max)
        clevs_lin = np.linspace(clev_min, clev_max, num=24)


        m =\
Basemap(llcrnrlon=lon_low,llcrnrlat=lat_low,urcrnrlon=lon_high,urcrnrlat=lat_high, rsphere = 6371229)

        #x, y = m(lons, lats)
        x,y = m(lons_go,lats_go)
        print x.shape
        x_w,y_w = m(lons_wi, lats_wi)
        fig=plt.figure(figsize=(8,8))
        ax = fig.add_axes([0.05,0.05,0.9,0.85], axisbg='#262626')

        m.drawcoastlines(color='#262626')  
        m.drawcountries(color='#262626')  
        m.drawcoastlines(linewidth=0.5)
        #m.fillcontinents(color='#CCFF99')
        m.drawparallels(np.arange(-80,81,10),labels=[1,1,0,0])
        m.drawmeridians(np.arange(0,360,10),labels=[0,0,0,1])
    
        cs_lin = m.contour(x,y, plt_h, clevs_lin,colors='#262626',linewidths=0.5)
       
        cmap=plt.cm.RdBu_r
        #cmap.set_bad('#262626', 1.)
        #cmap.set_over('#262626')
        #cmap.set_under('#262626')
        

        if plot_diag=='temp':
            
             plt_v = np.ma.masked_outside(plt_v, clevpt_max+20,  clevpt_min-20)

             cs_col = m.contourf(x,y, plt_v,  np.linspace(clevpt_min, clevpt_max), cmap=cmap, extend='both')
             
             cbar = m.colorbar(cs_col,location='bottom',pad="5%", format = '%d')  
             cbar.set_ticks(np.arange(clevpt_min,clevpt_max+2,2.))
             cbar.set_ticklabels(np.arange(clevpt_min,clevpt_max+2,2.))
             cbar.set_label('K')  
             plt.suptitle('Difference from global model (Model - global ) of Height, Potential Temperature and Wind Vectors  at %s hPa'% (p), fontsize=10)  

        elif plot_diag=='sp_hum':
             plt_v = np.ma.masked_outside(plt_v, clevsh_max+20,  clevsh_min-20)

             cs_col = m.contourf(x,y, plt_v,  np.linspace(clevsh_min, clevsh_max), cmap=cmap, extend='both')
             cbar = m.colorbar(cs_col,location='bottom',pad="5%", format = '%.3f') 
             cbar.set_label('kg/kg')
             
             plt.suptitle('Difference from global model (Model - Global Model ) of Height, Specific Humidity and Wind Vectors  at %s hPa'% (p), fontsize=10) 

        wind = m.quiver(x_w,y_w, u, v, scale=150,color='#262626' )
        qk = plt.quiverkey(wind, 0.1, 0.1, 5, '5 m/s', labelpos='W')
                
        plt.clabel(cs_lin, fontsize=10, fmt='%d', color='black')

        #plt.title('%s\n%s' % (m_title, model_name_convert_title.main(experiment_id)), fontsize=10)
        plt.title('\n'.join(wrap('%s' % (model_name_convert_title.main(experiment_id)), 80)), fontsize=10)
        
        #plt.show()  
        if not os.path.exists('/nfs/a90/eepdw/Mean_State_Plot_Data/Figures/%s/%s' % (experiment_id, plot_diag)): os.makedirs('/nfs/a90/eepdw/Mean_State_Plot_Data/Figures/%s/%s' % (experiment_id, plot_diag))
        plt.savefig('/nfs/a90/eepdw/Mean_State_Plot_Data/Figures/%s/%s/geop_height_difference_120LAM_%shPa_%s_%s.png' % (experiment_id, plot_diag, p, experiment_id, plot_diag), format='png', bbox_inches='tight')
Example #38
import iris
import netCDF4 as nc

import climtools_lib as ctl
import climdiags as cd

from matplotlib.colors import LogNorm

#######################################
nr = 1
ifile = '/data-hobbes/fabiano/OBS/ERA/ERA40+Int_daily_1957-2018_zg500_remap25_meters.nc'

print('IRIS read\n')
for i in range(nr):
    t1 = datetime.now()
    fh = iris.load(ifile)
    data = fh[0].data
    print(data.shape, np.max(data))
    t2 = datetime.now()

    diff = (t2-t1).total_seconds()
    print('Data loaded in {:8.3f} s\n'.format(diff))

del data
print('NetCDF read\n')
for i in range(nr):
    t1 = datetime.now()
    fh = nc.Dataset(ifile, mode='r')
    data = list(fh.variables.values())[-1][:]
    print(data.shape, np.max(data))
    t2 = datetime.now()
Example #39
emissions_file='/group_workspaces/jasmin2/ukca/vol1/mkoehler/emissions/combined_1960/0.5x0.5/combined_sources_BC_fossil_1960_360d.nc'
#
# STASH code emissions are associated with
#  301-320: surface
#  m01s00i310: Black carbon fossil fuel surface emissions
#
#  321-340: full atmosphere
#
stash='m01s00i310'

# --- BELOW THIS LINE, NOTHING SHOULD NEED TO BE CHANGED ---

species_name='BC_fossil'

# this is the grid we want to regrid to, e.g. N96 ENDGame
grd=iris.load(grid_file)[0]
grd.coord(axis='x').guess_bounds()
grd.coord(axis='y').guess_bounds()

# This is the original data
ems=iris.load_cube(emissions_file)

# make intersection between 0 and 360 longitude to ensure that 
# the data is regridded correctly
nems = ems.intersection(longitude=(0, 360))

# make sure that we use the same coordinate system, otherwise regrid won't work
nems.coord(axis='x').coord_system=grd.coord_system()
nems.coord(axis='y').coord_system=grd.coord_system()

# now guess the bounds of the new grid prior to regridding
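# A plausible continuation of the truncated snippet above (assumed, not from
# the original source): guess bounds on the intersected field, then do an
# area-weighted regrid onto the target grid.
import iris.analysis  # for the AreaWeighted scheme

nems.coord(axis='x').guess_bounds()
nems.coord(axis='y').guess_bounds()
ems_regridded = nems.regrid(grd, iris.analysis.AreaWeighted())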
Example #40
            longitude=lambda c: left_lon <= c.point <= right_lon)
        #OUTPATH = '/scratch/vportge/plots/Climpact/'+MIN_OR_MAX+'_LST_in_cold_window/'+REGION+'/'
        OUTPATH = '/scratch/vportge/plots/warm_window_10_3/Climpact/' + MIN_OR_MAX + '_LST_in_cold_window/' + REGION + '/'

        for TIMERANGE in possible_times:
            if TIMERANGE == 'ANN':
                TITLE_TIME = 'annually'
            elif TIMERANGE == 'MON':
                TITLE_TIME = 'monthly'
            elif TIMERANGE == 'DAY':
                TITLE_TIME = 'daily'

            FPATH = glob.glob(INDIR + INAME + '_' + TIMERANGE + '*-' +
                              MIN_OR_MAX + '*.nc')
            #Load in the data for the index and delete 'file_created' attribute.
            data = iris.load(FPATH)
            for i in range(len(data)):
                del data[i].attributes['file_created']

            #concatenate data so that all tiles are sticked together.
            data = data.concatenate_cube()
            #Extract only a subregion of the whole region.
            data = data.extract(lat_constraint)
            data = data.extract(lon_constraint)

            #needed for collapsing the data
            data.coord('latitude').guess_bounds()
            data.coord('longitude').guess_bounds()

            #Those indices had problems due to thresholds.
            if INAME not in ['hw', 'tnx', 'txx', 'tx95t', 'tnm', 'tmm']:
Example #41
    lons = coords[1].points
    nj, ni = lats.shape
    # number of grid lines
    nc = 20
    njstep, nistep = max(1, nj // nc), max(1, ni // nc)
    for j in range(0, nj, njstep):
        y = lats[j, :]
        x = lons[j, :]
        pylab.plot(x, y, lineType)
    for i in range(0, ni, nistep):
        y = lats[:, i]
        x = lons[:, i]
        pylab.plot(x, y, lineType)


srcCubes = iris.load(args.src_file)
srcCube = None
for cb in srcCubes:
    if cb.var_name == 'pointData':
        srcCube = cb

dstCubes = iris.load(args.dst_file)
dstCube = None
for cb in dstCubes:
    if cb.var_name == 'pointData':
        dstCube = cb

#plotCellAreas(srcCube)
plotGrid(srcCube, 'g-')
plotGrid(dstCube, 'r-')
Example #42
#air density is not the same
#cube_unit_beijing = cube[0]*1.284*1.0e9
#cube_unit_shijiazhuang = cube[0]*1.275*1.0e9
#cube_unit_shanghai = cube[0]*1.248*1.0e9
#cube_unit_nanjing = cube[0]*1.252*1.0e9
#cube_unit_guangzhou = cube[0]*1.213*1.0e9
#cube_unit_hk = cube[0]*1.209*1.0e9
#
#cube_beijing_no2 = iris.analysis.interpolate.extract_nearest_neighbour(cube_unit_beijing, [('latitude', 39.90), ('longitude', 116.41)])
#cube_shijiazhuang_no2 = iris.analysis.interpolate.extract_nearest_neighbour(cube_unit_shijiazhuang, [('latitude', 38.04), ('longitude', 114.51)])
#cube_shanghai_no2 = iris.analysis.interpolate.extract_nearest_neighbour(cube_unit_shanghai, [('latitude', 31.23), ('longitude', 121.47)])
#cube_nanjing_no2 = iris.analysis.interpolate.extract_nearest_neighbour(cube_unit_nanjing, [('latitude', 32.06), ('longitude', 118.80)])
#cube_guangzhou_no2 = iris.analysis.interpolate.extract_nearest_neighbour(cube_unit_guangzhou, [('latitude', 23.13), ('longitude', 113.26)])
#cube_hongkong_no2 = iris.analysis.interpolate.extract_nearest_neighbour(cube_unit_hk, [('latitude', 22.33), ('longitude', 114.19)])

cube2 = iris.load(filename2)

cube_unit_beijing_nudging = cube2[0] * 1.284 * 1.0e9
cube_unit_shijiazhuang_nudging = cube2[0] * 1.275 * 1.0e9
cube_unit_shanghai_nudging = cube2[0] * 1.248 * 1.0e9
cube_unit_nanjing_nudging = cube2[0] * 1.252 * 1.0e9
cube_unit_guangzhou_nudging = cube2[0] * 1.213 * 1.0e9
cube_unit_hk_nudging = cube2[0] * 1.209 * 1.0e9

cube_beijing_no2_nudging = iris.analysis.interpolate.extract_nearest_neighbour(
    cube_unit_beijing_nudging, [('latitude', 39.90), ('longitude', 116.41)])
cube_shijiazhuang_no2_nudging = iris.analysis.interpolate.extract_nearest_neighbour(
    cube_unit_shijiazhuang_nudging, [('latitude', 38.04),
                                     ('longitude', 114.51)])
cube_shanghai_no2_nudging = iris.analysis.interpolate.extract_nearest_neighbour(
    cube_unit_shanghai_nudging, [('latitude', 31.23), ('longitude', 121.47)])
Example #43
#directory='/nfs/a201/earhg/UKCA/tests-2016/xmmlla/'
directory='/group_workspaces/jasmin2/crescendo/hgordon/u-an970-2/'
directory='/group_workspaces/jasmin/clarify/users/hgordon/u-ap203/'
directory='/group_workspaces/jasmin/clarify/users/hgordon/u-ar931/'
directory='/group_workspaces/jasmin2/crescendo/hgordon/u-ax337/'

directory='/group_workspaces/jasmin2/asci/eeara/'

#folder= directory+'umnsa_glm_nc_time/'
folder=directory+'All_months/'# +'All_time_steps/' #+'All_months'
#folder=directory+'20160801'
saving_folder_l1=directory+'L1/'
ukl.create_folder(saving_folder_l1)

# Reading necessary cubes
potential_temperature=iris.load(ukl.Obtain_name(folder,'m01s00i004'))[0]
air_pressure=iris.load(ukl.Obtain_name(folder,'m01s00i408'))[0]

p0 = iris.coords.AuxCoord(1000.0,
                          long_name='reference_pressure',
                          units='hPa')
p0.convert_units(air_pressure.units)

Rd=287.05 # J/kg/K
cp=1005.46 # J/kg/K
Rd_cp=Rd/cp

print '..................HELLOOOO...................'

temperature=potential_temperature*(air_pressure/p0)**(Rd_cp)
print temperature.data[0,0,0]
Example #44
trial1 = iris.cube.CubeList()
trial2 = iris.cube.CubeList()

path = '/group_workspaces/jasmin2/gassp/eeara/CARIBIC/'  #ANANTH

mpath1 = '/group_workspaces/jasmin2/asci/eeara/model_runs/u-by920/'  # biogenic nucelation branch
mpath1 = '/group_workspaces/jasmin2/asci/eeara/model_runs/u-by921/'  # baseline simulation branch
#mpath1='/gws/nopw/j04/gassp/hgordon/u-be424-noColinFix/L1/'

alt_path = '/group_workspaces/jasmin2/asci/eeara/model_runs/u-bf834/L1/'
#alt_path='/gws/nopw/j04/gassp/hgordon/u-be424-noColinFix/L1/'
#mpath2='/group_workspaces/jasmin2/gassp/eeara/model_runs/u-bc244/L1/'
cutoff = 2  # this is a radius...12nm is a diameter!!

altitude_path = alt_path + 'L1_rad_accsol_Radius_of_mode_accsol.nc'
cube = iris.load(altitude_path)
cube = cube[0]
alt_data = cube.coord('altitude').points
alt_data_2 = cube.coord('altitude').points
alt_data = alt_data[:, 72, 96]
alt_data = alt_data / 1000

sigma = [1.59, 1.59, 1.40, 2.0, 1.59, 1.59, 2.0]

pref = 1.013e5
tref = 293.0
zboltz = 1.3807e-23
staird = pref / (tref * 287.058)

#f=plt.figure()
indx = [10, 20, 30, 35, 40, 45, 50, 55]
Example #45
0
 def Read_cube(self, string):
     try:
         self.cube = iris.load(ukl.Obtain_name(self.path, string))[0]
     except Exception:
         # Fall back to the sibling 'L1/' processed-output folder.
         self.cube = iris.load(
             ukl.Obtain_name(self.path[:-15] + 'L1/', string))[0]
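 # Added usage sketch (hypothetical instance, not from the original source):
 #     reader.Read_cube('m01s00i004')       # tries self.path first
 #     print(reader.cube.summary(shorten=True))
 # The except branch retries in the sibling 'L1/' processed-output folder.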
Example #46
0
 def setUp(self):
     file = tests.get_data_path(('PP', 'aPProt1', 'rotatedMHtimecube.pp'))
     cubes = iris.load(file)
     self.cube = cubes[0]
     self.assertCML(self.cube, ('analysis', 'original.cml'))
Example #47
0
def outflow_grid(k=0,
                 levels=3,
                 hoursafterinit=[42, 36, 42, 24],
                 thlevs=[[320, 325, 330], [325, 330, 335], [310, 315, 320],
                         [310, 315, 320]],
                 folder='IOP3/T42',
                 strapp=''):
    "This presently defines the grid of points from the sime time that the region is defined"
    "But by loading the forward trajectories this could easily be adapted to"
    "define the grid at any time along the trajectory"

    #save_dir = '/glusterfs/msc/users_2018/bn826011/NAWDEX/Final/'
    save_dir = '/storage/silver/scenario/bn826011/WCB_outflow/Final/'

    basetime = [
        datetime.datetime(2016, 9, 22, 12),
        datetime.datetime(2016, 9, 26, 12),
        datetime.datetime(2016, 9, 30, 12),
        datetime.datetime(2016, 10, 3, 12)
    ]
    basetime_str = basetime[k].strftime('%Y%m%d_%H')

    TrB = load(save_dir + folder +
               '/{}_TrajectoryEnsemble_backward'.format(basetime_str) + strapp)

    start_time = [datetime.timedelta(hours=0)]
    # trajectory at definition time

    datadir = '/export/cloud/migrated-NCASweather/ben/nawdex/mi-ar482/{}/'.format(
        basetime_str)

    #hoursafterinit = [42, 42, 42, 72, 96]#42

    time = basetime[k] + datetime.timedelta(hours=hoursafterinit[k])

    mapping = {}
    leadtimehr = int((time - basetime[k]).total_seconds()) // 3600
    fn = 'prodm_op_gl-mn_{0}_d{1:03d}_thgrid.pp'.\
        format(basetime_str, 12 * (leadtimehr // 12))
    mapping[time] = datadir + fn

    #    dexs = iris.load('/export/cloud/NCASweather/ben/nawdex/mi-ar482/20160922_12/' +
    #            'prodm_op_gl-mn_20160922_12_d*_thsfcs_5K.nc', 'dimensionless_exner_function')
    #    dexs[-1] = iris.util.new_axis(dexs[-1], 'time')
    #    dex = dexs.concatenate_cube()
    #
    #    temps = iris.load('/export/cloud/NCASweather/ben/nawdex/mi-ar482/20160922_12/' +
    #            'prodm_op_gl-mn_20160922_12_d*_thsfcs_5K.nc', 'air_temperature')
    #    temps[-1] = iris.util.new_axis(temps[-1], 'time')
    #    temp = temps.concatenate_cube()
    #
    #    lvls = ('air_potential_temperature', thlevs[k])
    #
    #    altd = convert.calc('altitude', iris.cube.CubeList([dex, temp]), levels = lvls)
    #    # 3D caltra works on altitude not theta levels
    #    # however I don't know how to get altitude?!
    #    # I think leo's data must've have altitude as a coordinate

    cubes = iris.load(mapping[time], iris.Constraint(time=time))

    plt.figure(figsize=(10, 14))

    zc = iris.load(
        '/export/cloud/migrated-NCASweather/ben/nawdex/mi-ar482/' +
        basetime_str + '/prodm_op_gl-mn_' + basetime_str + '_d*_thsfcs_5K.nc',
        'altitude')
    zc = zc[:-1].concatenate_cube()
    # the last time step has different metadata?

    for kl in range(levels):

        theta_level = thlevs[k][kl]

        trajectories = TrB.select('air_potential_temperature',
                                  '==',
                                  theta_level,
                                  time=start_time)

        x = trajectories.x[:, 0]
        y = trajectories.y[:, 0]
        #

        #        tlev_cstrnt = iris.Constraint(air_potential_temperature = theta_level)
        #
        #        altdth = altd.extract(tlev_cstrnt)

        lvls = ('air_potential_temperature', [theta_level])

        w = convert.calc('upward_air_velocity', cubes, levels=lvls)
        # I think fairly arbitrary cube

        #        z = grid.make_cube(w, 'altitude')
        #        # altitude
        #
        #        print z
        #
        #        lvls = ('air_potential_temperature', [theta_level])
        #
        #        coord_name, values = lvls

        # I now need some way to interpolate altitude to desired theta level
        #        z = interpolate.to_level(z, **{coord_name: values})

        #        z = convert.calc('altitude', iris.cube.CubeList([z]), levels = lvls)

        glon, glat = grid.get_xy_grids(w)
        gridpoints = np.array([glon.flatten(), glat.flatten()]).transpose()

        points = np.array([x, y]).transpose()
        pth = Path(points)

        # Mask all points that are not contained in the circuit
        mask = np.logical_not(
            pth.contains_points(gridpoints).reshape(glat.shape))

        tlev_cstrnt = iris.Constraint(air_potential_temperature=theta_level)
        time_cstrnt = iris.Constraint(time=time)

        #try this for altitude
        z = zc.extract(tlev_cstrnt & time_cstrnt)

        plt.subplot(levels, 2, 2 * kl + 1)
        plt.contourf(mask, cmap='gray')

        masked_lon = []
        masked_lat = []
        alt_list = []
        for i, col in enumerate(mask):
            for j, point in enumerate(col):
                if not point:
                    lat = glat[i, j]
                    lon = glon[i, j]
                    alt = z.data[i, j]
                    masked_lon.append(lon)
                    masked_lat.append(lat)
                    alt_list.append(alt)

        plt.subplot(levels, 2, 2 * kl + 2)
        plt.scatter(masked_lon,
                    masked_lat,
                    s=2,
                    c='k',
                    marker='.',
                    edgecolor='k')

        lt = len(masked_lon)

        points3d = np.zeros([lt, 3])
        points3d[:, 0] = np.array(masked_lon)
        points3d[:, 1] = np.array(masked_lat)
        points3d[:, 2] = np.array(alt_list)

        pointsth = np.zeros([lt, 3])
        pointsth[:, 0] = np.array(masked_lon)
        pointsth[:, 1] = np.array(masked_lat)
        pointsth[:, 2] = theta_level * np.ones([lt])

        if kl == 0:
            outflow_volume = points3d
            outflow_volume_th = pointsth
        else:
            outflow_volume = np.concatenate((outflow_volume, points3d))
            outflow_volume_th = np.concatenate((outflow_volume_th, pointsth))

    np.save(
        '/storage/silver/scenario/bn826011/WCB_outflow/Final/' + folder +
        '/inflow/' + basetime_str + strapp + 'initial_grid.npy',
        outflow_volume)
    np.save(
        '/storage/silver/scenario/bn826011/WCB_outflow/Final/' + folder +
        '/inflow/' + basetime_str + strapp + 'initial_grid_thlevs.npy',
        outflow_volume_th)

    #plt.savefig('/glusterfs/msc/users_2018/bn826011/NAWDEX/Final/' + folder + '/' + basetime_str + strapp + '_masks.jpg')

    plt.show()
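
# Added sketch (toy data, not part of outflow_grid): the masking step above
# uses matplotlib's Path.contains_points for point-in-polygon tests.  A
# minimal self-contained version of the same technique:
def _demo_circuit_mask():
    import numpy as np
    from matplotlib.path import Path

    circuit = Path([(0, 0), (1, 0), (1, 1), (0, 1)])  # unit-square circuit
    gx, gy = np.meshgrid(np.linspace(-0.5, 1.5, 5), np.linspace(-0.5, 1.5, 5))
    pts = np.column_stack([gx.ravel(), gy.ravel()])
    inside = circuit.contains_points(pts).reshape(gx.shape)
    return np.logical_not(inside)  # True where a point falls outside the circuit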
Example #48
0
 def load_nc_file(self):
     ''' Method to read a NetCDF file '''
     if not self.quiet:
         print('>> Reading NetCDF file ' + self.ncfile)
     self.cubes = iris.load(self.ncfile)
Example #49
0
 def test_monotonic(self):
     cubes = iris.load(
         tests.get_data_path(
             ('NetCDF', 'testing', 'test_monotonic_coordinate.nc')))
     self.assertCML(cubes, ('netcdf', 'netcdf_monotonic.cml'))
def main():

 #experiment_ids = ['djzns', 'djznq', 'djzny', 'djznw', 'dkhgu', 'dkjxq', 'dklyu', 'dkmbq', 'dklwu', 'dklzq','dkbhu' ] 
 
 experiment_ids = ['dkbhu', 'djznu', 'dkhgu' ] 
 for experiment_id in experiment_ids:
 
  expmin1 = experiment_id[:-1]
  pfile = '%s%s/%s/%s.pp' % (pp_file_dir, expmin1, experiment_id, pp_file)

  ofile = '%s%s/%s/33.pp' % (pp_file_dir, expmin1, experiment_id)

 
 
 # Get min/max latitude and longitude and unrotate them to get the corner
 # coordinates, so the plot can be cropped automatically rather than being
 # left with blank strips at the edges.
  oc = iris.load_cube(ofile)

  lats = oc.coord('grid_latitude').points
  lons = oc.coord('grid_longitude').points

  print(lons)
  
  cs = oc.coord_system('CoordSystem')
  if isinstance(cs, iris.coord_systems.RotatedGeogCS):

      print('Rotated CS %s' % cs)
     
      lon_low= np.min(lons)
      lon_high = np.max(lons)
      lat_low = np.min(lats)
      lat_high = np.max(lats)

      lon_corners, lat_corners = np.meshgrid((lon_low, lon_high), (lat_low, lat_high))
      
      lon_corner_u,lat_corner_u = unrotate.unrotate_pole(lon_corners, lat_corners, cs.grid_north_pole_longitude, cs.grid_north_pole_latitude)
      lon_low = lon_corner_u[0,0]
      lon_high = lon_corner_u[0,1]
      lat_low = lat_corner_u[0,0]
      lat_high = lat_corner_u[1,0]

  else: 
      lon_low= np.min(lons)
      lon_high = np.max(lons)
      lat_low = np.min(lats)
      lat_high = np.max(lats)

  lon_low_tick=lon_low -(lon_low%divisor)
  lon_high_tick=math.ceil(lon_high/divisor)*divisor

  lat_low_tick=lat_low - (lat_low%divisor)
  lat_high_tick=math.ceil(lat_high/divisor)*divisor
 
  print(lat_high_tick)
  print(lat_low_tick)

 
  plist = iris.load(pfile)

  pcubef=np.empty(oc.shape, np.float32)
  
  latitude = iris.coords.DimCoord(oc.coord('grid_latitude').points, standard_name='grid_latitude',
                    units='degrees')
  longitude = iris.coords.DimCoord(oc.coord('grid_longitude').points, standard_name='grid_longitude',
                     units='degrees')

#latitude = iris.coords.DimCoord(oc.coord('grid_latitude').points, standard_name='grid_latitude',
                    #iris.coord_systems.RotatedGeogCS(76.0, 263.0, ellipsoid=iris.coord_systems.GeogCS(6371229.0)))
  #longitude = iris.coords.DimCoord(oc.coord('grid_longitude').points, standard_name='grid_longitude',
                     #iris.coord_systems.RotatedGeogCS(76.0, 263.0, ellipsoid=iris.coord_systems.GeogCS(6371229.0))))           

  for pcube in plist:
      armin = np.searchsorted(oc.coord('grid_longitude').points, min(pcube.coord('grid_longitude').points))
      armax = np.searchsorted(oc.coord('grid_longitude').points, max(pcube.coord('grid_longitude').points))
      pcubef[:,armin:armax+1] = pcube.data

  pc = iris.cube.Cube(pcubef, standard_name=pcube.standard_name,
                      units=pcube.units,
                      dim_coords_and_dims=[(oc.coord('grid_latitude'), 0),
                                           (oc.coord('grid_longitude'), 1)])
  print(pc)

  plt.figure(figsize=(8,8))
         
  cmap = plt.cm.YlOrRd
    
  ax = plt.axes(projection=ccrs.PlateCarree())
  ax.set_extent((lon_low, lon_high,
                 lat_low + degs_crop_bottom, lat_high - degs_crop_top))
  
  clevs = np.linspace(min_contour, max_contour,16)
  cont = iplt.contourf(pc, clevs, cmap=cmap, extend='both')
                     
  #plt.clabel(cont, fmt='%d')
  #ax.stock_img()
  ax.coastlines(resolution='110m', color='#262626') 
                     
  gl = ax.gridlines(draw_labels=True,linewidth=0.5, color='#262626', alpha=0.5, linestyle='--')
  gl.xlabels_top = False
  gl.ylabels_right = False

  gl.xlines = True
  gl.ylines = True
  dx, dy = 10, 10

  gl.xlocator = mticker.FixedLocator(range(int(lon_low_tick),int(lon_high_tick)+dx,dx))
  gl.ylocator = mticker.FixedLocator(range(int(lat_low_tick),int(lat_high_tick)+dy,dy))
  gl.xformatter = LONGITUDE_FORMATTER
  gl.yformatter = LATITUDE_FORMATTER
  
  gl.xlabel_style = {'size': 12, 'color':'black'}
  #gl.xlabel_style = {'color': '#262626', 'weight': 'bold'}
  gl.ylabel_style = {'size': 12, 'color':'black'}         

  cbar = plt.colorbar(cont, orientation='horizontal', pad=0.05, extend='both', format = '%d')
  #cbar.set_label('') 
  cbar.set_label(pcube.units, fontsize=10)
  cbar.set_ticks(np.arange(min_contour, max_contour+tick_interval,tick_interval))
  ticks = (np.arange(min_contour, max_contour+tick_interval,tick_interval))
  cbar.set_ticklabels(['%d' % i for i in ticks])
  main_title=pcube.standard_name.title().replace('_',' ')
  model_info=re.sub('(.{68}. )', '\\1\n', str(model_name_convert_title.main(experiment_id)), 0, re.DOTALL)
  model_info = re.sub(r'[(\']', ' ', model_info)
  model_info = re.sub(r'[\',)]', ' ', model_info)
  print(model_info)

  if not os.path.exists('%s%s/%s' % (save_path, experiment_id, pp_file)): os.makedirs('%s%s/%s' % (save_path, experiment_id, pp_file))

  plt.savefig('%s%s/%s/%s_%s_notitle.png' % (save_path, experiment_id, pp_file, experiment_id, pp_file), format='png', bbox_inches='tight')

  plt.title('\n'.join(wrap('%s\n%s' % (main_title, model_info), 1000,replace_whitespace=False)), fontsize=16)
 
  #plt.show()
 
  plt.savefig('%s%s/%s/%s_%s.png' % (save_path, experiment_id, pp_file, experiment_id, pp_file), format='png', bbox_inches='tight')
  
  plt.close()
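
# Added sketch (assumed pole parameters, not from this script): the corner
# unrotation above relies on a project-local unrotate.unrotate_pole helper;
# the same transform can be done with cartopy, already imported here as ccrs.
def _demo_unrotate_corners():
    import numpy as np
    import cartopy.crs as ccrs

    rot = ccrs.RotatedPole(pole_longitude=177.5, pole_latitude=37.5)
    lon_c, lat_c = np.meshgrid((-10.0, 10.0), (-5.0, 5.0))
    # transform_points returns an (..., 3) array: x, y, z in the target CRS.
    unrot = ccrs.PlateCarree().transform_points(rot, lon_c, lat_c)
    return unrot[..., 0], unrot[..., 1]  # unrotated lons and lats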
Example #51
0
 def test_duplication(self):
     cubes = iris.load(self._data_path)
     self.assertRaises(iris.exceptions.DuplicateDataError,
                       (cubes + cubes).merge)
     cubes2 = (cubes + cubes).merge(unique=False)
     self.assertEqual(len(cubes2), 2 * len(cubes))
def _extr_var_n_calc_abs_tas(short_name, var, cfg, filepath, out_dir):
    """Extract variable."""
    # load tas anomaly, climatology and sftlf
    with catch_warnings():
        filterwarnings(
            action='ignore',
            message='Ignoring netCDF variable .* invalid units .*',
            category=UserWarning,
            module='iris',
        )
        filterwarnings(
            action='ignore',
            message='.* not used since it\ncannot be safely cast to variable'
            ' data type *',
            category=UserWarning,
            module='iris',
        )
        cubes = iris.load(filepath)

    # tas anomaly
    raw_var = var.get('raw', short_name)
    cube_anom = cubes.extract(utils.var_name_constraint(raw_var))[0]

    # tas climatology
    raw_var_clim = var.get('rawclim', short_name)
    cube_clim = cubes.extract(utils.var_name_constraint(raw_var_clim))[0]
    # the time span of the climatology is only recorded in the long_name
    climstart, climend = [
        int(x) for x in re.findall(r"\d{4}", cube_clim.long_name)
    ]

    # redo the broken time coordinate
    cube_anom, cube_clim = reinit_broken_time(cube_anom, cube_clim, climstart,
                                              climend)

    # derive absolute tas values
    cube_abs = calc_abs_temperature(cube_anom, cube_clim, short_name)

    # fix coordinates
    logger.info("Fixing coordinates")
    attrs = cfg['attributes']
    attrs['mip'] = var['mip']
    short_names = [short_name, var['short_anom']]
    for s_name, cube in zip(short_names, [cube_abs, cube_anom]):
        cmor_info = cfg['cmor_table'].get_variable(var['mip'], s_name)

        utils.fix_coords(cube)
        if 'height2m' in cmor_info.dimensions:
            utils.add_height2m(cube)

        cube.units = var['raw_units']
        if s_name != 'tasa':
            cube.convert_units(cmor_info.units)

        utils.fix_var_metadata(cube, cmor_info)

    # save temperature data
    logger.info("Saving temperature data")
    comments = {
        'tas':
        "Temperature time-series calculated from the anomaly "
        "time-series by adding the temperature climatology "
        "for {}-{}".format(climstart, climend),
        'tasa':
        "Temperature anomaly with respect to the period"
        " {}-{}".format(climstart, climend)
    }

    for s_name, cube in zip(short_names, [cube_abs, cube_anom]):
        attrs['comment'] = comments[s_name]
        utils.set_global_atts(cube, attrs)
        utils.save_variable(cube,
                            s_name,
                            out_dir,
                            attrs,
                            unlimited_dimensions=['time'])

    # sftlf
    # extract sftlf
    raw_var_sftlf = var.get('rawsftlf', short_name)
    cube_sftlf = cubes.extract(utils.var_name_constraint(raw_var_sftlf))[0]

    # fix coordinates
    utils.fix_coords(cube_sftlf)

    # cmorize sftlf units
    cmor_info_sftlf = cfg['cmor_table'].get_variable(var['rawsftlf_mip'],
                                                     var['rawsftlf_varname'])
    attrs_sftlf = cfg['attributes']
    attrs_sftlf['mip'] = var['rawsftlf_mip']
    if 'rawsftlf_units' in var:
        cube_sftlf.units = var['rawsftlf_units']
        cube_sftlf.convert_units(cmor_info_sftlf.units)

    # fix metadata and save
    logger.info("Saving sftlf")
    utils.fix_var_metadata(cube_sftlf, cmor_info_sftlf)
    utils.set_global_atts(cube_sftlf, attrs_sftlf)
    utils.save_variable(cube_sftlf, var['rawsftlf_varname'], out_dir,
                        attrs_sftlf)
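
# Added sketch (toy data, not the cmorizer's calc_abs_temperature): deriving
# absolute values amounts to adding a repeating monthly climatology onto the
# anomaly time series:
def _demo_abs_from_anomaly():
    import numpy as np

    anom = np.zeros(24)  # 24 monthly anomalies (toy values)
    months = np.linspace(0.0, 2.0 * np.pi, 12, endpoint=False)
    clim = 273.15 + 10.0 * np.sin(months)  # 12-month climatology, K
    return anom + np.tile(clim, 24 // 12)  # absolute temperatures, shape (24,)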
#times_range=np.argwhere((times_ceres >= tdi) & (times_ceres <=tde))
#times_range=np.logical_and([times_ceres >= tdi],[times_ceres <=tde])[0]

coord = np.zeros([len(grid_lon_2[0, ].flatten()), 2])
coord[:, 0] = grid_lon_2[0, ].flatten()
coord[:, 1] = grid_lat_2[0, ].flatten()
#X,Y=np.meshgrid(model_lons, model_lats)
#grid_z0 = sc.interpolate.griddata(coord, sat_SW, (X,Y), method='nearest')
grid_z1 = sc.interpolate.griddata(coord,
                                  mean_run2, (grid_lon_12, grid_lat_12),
                                  method='linear')

plt.imshow(grid_z1[:, :])
plt.colorbar()

run12 = iris.load(jle.store + 'sample_12km.nc')[0]
run2 = iris.load(jle.store + 'sample_2km.nc')[0]
#run2 = iris.load(jle.store + 'sample_12km.nc')[0]  # debug leftover: this would overwrite run2 with the 12 km sample

regrided_cube = run2.regrid(run12, iris.analysis.Linear())

plt.imshow(regrided_cube[0, :, :].data)
plt.imshow(run2[0, :, :].data)
plt.colorbar()
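
# Added sketch (synthetic cubes, nothing read from jle.store): the regrid
# call above is the Iris replacement for the hand-rolled griddata step
# earlier in this snippet.
def _demo_regrid():
    import numpy as np
    import iris.analysis
    from iris.coords import DimCoord
    from iris.cube import Cube

    def make_cube(n):
        lat = DimCoord(np.linspace(-45.0, 45.0, n),
                       standard_name='latitude', units='degrees')
        lon = DimCoord(np.linspace(0.0, 90.0, n),
                       standard_name='longitude', units='degrees')
        return Cube(np.random.rand(n, n),
                    dim_coords_and_dims=[(lat, 0), (lon, 1)])

    fine, coarse = make_cube(50), make_cube(10)
    return fine.regrid(coarse, iris.analysis.Linear())  # shape (10, 10)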

#rotated_air_temp = global_air_temp.regrid(rotated_psl, iris.analysis.Linear())

#%%

bm = Basemap()  # default: projection='cyl'
Example #54
0
 def test_colpex(self):
     cubes = iris.load(self._data_path)
     self.assertEqual(len(cubes), 3)
     self.assertCML(cubes, ("COLPEX", "small_colpex_theta_p_alt.cml"))
Example #55
0
 def test_monotonic(self):
     cubes = iris.load(
         tests.get_data_path(
             ('NetCDF', 'testing', 'test_monotonic_coordinate.nc')))
     cubes = sorted(cubes, key=lambda cube: cube.var_name)
     self.assertCML(cubes, ('netcdf', 'netcdf_monotonic.cml'))
Example #56
0
 def test_normal_cubes(self):
     cubes = iris.load(self._data_path)
     self.assertEqual(len(cubes), self._num_cubes)
     self.assertCML(cubes, ["merge", self._prefix + ".cml"])
Example #57
0
sat_lon = longitude.flatten()
sat_lat = latitude.flatten()
sat_data = data.flatten()
#for att in sds.attributes():
#    print att
plt.imshow(data)
plt.colorbar()
plt.show()
#%%
sim_path = '/nfs/a201/eejvt/CASIM/SO_KALLI/'
#sub_folder = 'L1/'
sub_folder = 'All_time_steps/'
#code = 'top_temp'
code = 'm01s01i208'
#cloud_top = iris.load(
#    ukl.Obtain_name(sim_path + 'TRY2/ALL_ICE_PROC/' + sub_folder, code))[0]
cloud_top = iris.load(
    ukl.Obtain_name(sim_path + 'TRY2/LARGE_DOMAIN/' + sub_folder, code))[0]
mb = netCDF4.Dataset(
    path + 'modis/' + 'MYD06_L2.A2014343.1325.006.2014344210847.nc', 'r')
mb.variables['Cloud_Mask_1km']
#plt.imshow(mb.variables['Cloud_Fraction'])
#sat_dat = mb.variables['Cloud_Fraction'][:, ].flatten()
sat_dat = mb.variables['Cloud_Mask_1km'][:, ].flatten()
coord = np.zeros([len(sat_lon), 2])
coord[:, 0] = sat_lon
coord[:, 1] = sat_lat
cm = plt.cm.RdBu_r
model_lons, model_lats = stc.unrotated_grid(cloud_top)
X, Y = np.meshgrid(model_lons, model_lats)
from importlib import reload  # Python 3: reload is no longer a builtin
reload(stc)
Example #58
0

if __name__ == "__main__":

    import doctest
    import warnings
    warnings.simplefilter("ignore")
    doctest.testmod()
    from pyaerocom.io.testfiles import get
    from pyaerocom import GriddedData
    files = get()
    data = GriddedData(files['models']['aatsr_su_v4.3'], var_name="od550aer")
    lons = data.grid.coord("longitude")
    try:
        get_lon_constraint(lon_range=(170, -160), meridian_centre=True)
    except ValueError:
        print("Expected behaviour")

    from iris import load
    cubes = load(files['models']['aatsr_su_v4.3'])
    lons = cubes[0].coord("longitude").points
    meridian_centre = lons.max() > 180
    c = get_constraint(var_names="od550aer",
                       lon_range=(50, 150),
                       lat_range=(20, 60),
                       time_range=("2008-02-01", "2008-02-05"))

    cube_crop = cubes.extract(c)[0]

    print(start_stop_str(2010, '15-2-2018', ts_type='monthly'))
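
    # Added sketch (plain iris, not pyaerocom's helpers): the lat/lon window
    # that get_constraint builds can be written directly as iris.Constraint
    # predicates over coordinate cells:
    import iris
    crop = (iris.Constraint(latitude=lambda cell: 20 <= cell <= 60)
            & iris.Constraint(longitude=lambda cell: 50 <= cell <= 150))
    # cube_crop2 = cubes.extract(crop)[0]  # same crop as 'c' above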
Example #59
0
import numpy as np
import iris

file1 = '/group_workspaces/jasmin2/gassp/eeara/2008/bb295a.pm2008apr.pp'
file2 = '/group_workspaces/jasmin2/gassp/eeara/ba471a.p42014apr.pp'
print('start')
cube1 = iris.load(file1)
print('\ncube1 completed\n')
cube2 = iris.load(file2)
print('completed')

# Collect the cube names from each file for comparison.
name1 = [c.name() for c in cube1]
name2 = [c.name() for c in cube2]
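
# Added sketch (not in the original): with both name lists built, a natural
# next step is the set difference between the two files' cube names:
only_in_1 = sorted(set(name1) - set(name2))
only_in_2 = sorted(set(name2) - set(name1))
print(len(only_in_1), len(only_in_2))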
#username=getpass.getuser()

plt.interactive(0)
#files_directory='/nfs/a201/'+username+'/UKCA_TEST_FILES/tebxd/'
#saving_folder=files_directory+'PLOTS'
files_directory = output_files_directory
folders = [files_directory + 'All_time_steps/', files_directory + 'L1/']
#folders=[files_directory+'L1/']

for data_folder in folders:
    saving_folder = data_folder + 'PLOTS/'
    ukl.create_folder(saving_folder)
    nc_files = glob(data_folder + '*nc')
    for nc_file in nc_files:
        #mb=netCDF4.Dataset(nc_file,'r')
        cube = iris.load(nc_file)[0]
        cube = cube.collapsed(['time'], iris.analysis.MEAN)
        print(cube.var_name)
        print(cube.shape)
        try:
            if cube.ndim == 2:
                qplt.contourf(cube, cmap=plt.cm.RdBu_r)
                plt.gca().coastlines()
                stash = ''
                try:
                    stash = ukl.get_stash(cube)
                except Exception:
                    stash = 'L1'

                if isinstance(cube.var_name, str):