Example no. 1
def check_strattrop(infile):
    """Check whether the model output is from a StratTrop (CheST)
    or TropIsop (CheT) run.

    Method: crude -- checks for the following species in the file:
    ClO, N2O --> CheST; NOy (34004) --> CheT

    Returns True for StratTrop/CheST.
    """
    stash_codes_str = ['m01s34i042', 'm01s34i049']  # ClO, N2O
    stc_trop = 'm01s34i004'  # NO2/NOy

    # One STASH constraint per species
    fieldcons = [iris.AttributeConstraint(STASH=spc) for spc in stash_codes_str]

    try:
        # load_cubes raises ConstraintMismatchError unless every
        # constraint matches exactly one cube
        iris.load_cubes(infile, constraints=fieldcons)
        return True
    except iris.exceptions.ConstraintMismatchError:
        # Not StratTrop -- confirm it is Trop
        try:
            iris.load_cube(infile, iris.AttributeConstraint(STASH=stc_trop))
            return False
        except iris.exceptions.ConstraintMismatchError:
            # Something wrong, not all diagnostics in place
            print('CHK_STRATTROP: diagnostics missing - unable to determine')
            return True  # Default -- may cause failure in some scripts
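
A minimal usage sketch (the file name is hypothetical):

is_strattrop = check_strattrop('atmos_base.pm1988sep.pp')  # hypothetical UM output file
print('StratTrop (CheST)' if is_strattrop else 'TropIsop (CheT)')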
Example no. 2
def convec_2_precipflux(cube_list):
    '''
    Add together large scale rain and snow from a convection-permitting
    model to get the equivalent of 05216 (total precip flux) as output by
    a non convection resolving (driving) model.
    Input:
    cube_list = a list of cubes which contains 4203 and 4204 (large
    scale rain and snow)
    Output:
    cube_list = the same cube list but with the new precip field appended
    '''
    print('Downscaled model is convection permitting . . .')
    print('Correcting for stash difference between downscaled and driving model . . . ')
    print('large scale rainfall (m01s04i203) + large scale snowfall (m01s04i204) = total precipitation (m01s05i216)')
    ls_rain_constraint = iris.AttributeConstraint(STASH="m01s04i203")
    ls_rain_cube, = cube_list.extract(ls_rain_constraint)
    ls_snow_constraint = iris.AttributeConstraint(STASH="m01s04i204")
    ls_snow_cube, = cube_list.extract(ls_snow_constraint)
    var_cube = ls_rain_cube.copy()  # template cube with the right grid and coords
    var_cube.data = ls_snow_cube.data + ls_rain_cube.data  # add snow and rain to get total precip
    precip_stash = iris.fileformats.pp.STASH(1, 5, 216)  # create instance of stash
    var_cube.attributes['STASH'] = precip_stash  # set stash that will be used for driving model
    var_cube.rename('precipitation_flux')
    cube_list.append(var_cube)  # add calculated precip flux to cube list
    cube_list.remove(ls_rain_cube)  # remove large scale rain from cube list
    cube_list.remove(ls_snow_cube)  # remove large scale snow from cube list
    return cube_list
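
A short usage sketch, assuming 'downscaled.pp' (a hypothetical file) holds the two large scale diagnostics:

import iris

cubes = iris.load('downscaled.pp')  # hypothetical convection-permitting output
cubes = convec_2_precipflux(cubes)
total_precip, = cubes.extract(iris.AttributeConstraint(STASH='m01s05i216'))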
Example no. 3
def toa_cloud_radiative_effect(cubelist, kind):
    """
    Calculate domain-average TOA cloud radiative effect (CRE).

    Parameters
    ----------
    cubelist: iris.cube.CubeList
        Input list of cubes
    kind: str
        Shortwave ('sw'), longwave ('lw'), or 'total' CRE

    Returns
    -------
    iris.cube.Cube
        Cube of CRE with reduced dimensions.
    """
    name = f"toa_cloud_radiative_effect_{kind}"
    if kind == "sw":
        all_sky = "m01s01i208"
        clr_sky = "m01s01i209"
    elif kind == "lw":
        all_sky = "m01s02i205"
        clr_sky = "m01s02i206"
    elif kind == "total":
        sw = toa_cloud_radiative_effect(cubelist, "sw")
        lw = toa_cloud_radiative_effect(cubelist, "lw")
        cre = sw + lw
        cre.rename(name)
        return cre

    cube_clr = spatial(cubelist.extract_strict(iris.AttributeConstraint(STASH=clr_sky)), "mean")
    cube_all = spatial(cubelist.extract_strict(iris.AttributeConstraint(STASH=all_sky)), "mean")
    cre = cube_clr - cube_all
    cre.rename(name)
    return cre
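
A usage sketch, assuming the module's spatial() helper is importable and 'radiation.pp' (hypothetical) contains the four TOA flux diagnostics:

cubes = iris.load('radiation.pp')  # hypothetical file with 01208/01209 and 02205/02206
cre_total = toa_cloud_radiative_effect(cubes, 'total')
print(cre_total.data)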
Example no. 4
def load_ts(basedir,
            stashcode,
            start_year,
            stop_year,
            months,
            overwrite=False):
    constraint = iris.AttributeConstraint(STASH=stashcode)
    if stashcode == 'm01s05i216':
        time_constraint = iris.AttributeConstraint(lbtim='122')
    ts_file = basedir + '/' + stashcode + '_' + str(start_year) + '-' + str(
        stop_year) + '_' + months[0] + '-' + months[-1] + '.nc'
    if os.path.exists(ts_file) and not overwrite:
        ts_cube = iris.load_cube(ts_file, constraint)
        # raise Exception("File " + ts_file + " exists and will not overwrite.")
    else:
        ts_cubelist = iris.cube.CubeList()
        for year in range(start_year, stop_year + 1):
            for month in months:
                if stashcode == 'm01s05i216':
                    month_cube = iris.load_cube(
                        basedir + '/*.p?' + str(year) + month + '.nc',
                        constraint & time_constraint)
                else:
                    month_cube = iris.load_cube(
                        basedir + '/*.p?' + str(year) + month + '.nc',
                        constraint)
                ts_cubelist.append(month_cube)
                print(year, month)
        print(ts_cubelist)
        equalise_attributes(ts_cubelist)
        unify_time_units(ts_cubelist)
        ts_cube = ts_cubelist.concatenate_cube()
        iris.save(ts_cube, ts_file)
    return ts_cube
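
A usage sketch (directory, years, and month tokens are hypothetical; the month strings must match the *.p?YYYYmmm.nc file naming):

ts = load_ts('/data/u-ab123/netcdf',   # hypothetical base directory
             'm01s05i216', 1990, 1999,
             ['jun', 'jul', 'aug'])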
Example no. 5
 def test_pp_with_stash_constraints(self):
     filenames = [tests.get_data_path(("PP", "globClim1", "dec_subset.pp"))]
     stcon1 = iris.AttributeConstraint(STASH="m01s00i004")
     stcon2 = iris.AttributeConstraint(STASH="m01s00i010")
     pp_constraints = pp._convert_constraints([stcon1, stcon2])
     pp_loader = iris.fileformats.rules.Loader(pp.load, {}, convert)
     cubes = list(load_cubes(filenames, None, pp_loader, pp_constraints))
     self.assertEqual(len(cubes), 76)
Example no. 6
 def loadData(self):
     slpConstraint = iris.AttributeConstraint(STASH=self.slpStash)
     uConstraint = iris.AttributeConstraint(STASH=self.uStash)
     vConstraint = iris.AttributeConstraint(STASH=self.vStash)
     hrdtConstraint = iris.Constraint(forecast_period=lambda cell: cell % 3 == 0)
     self._getBasinConstraint(self.initLat, self.initLon)
     attrConstraint = self.basinConstraint & hrdtConstraint
     self.slpCube = loadIrisCube(self.dataloc,
                                 attributeConstraint=slpConstraint,
                                 dataConstraints=attrConstraint)
Example no. 7
 def test_double_stash(self):
     stcube236 = mock.Mock(stash=STASH.from_msi('m01s03i236'))
     stcube4 = mock.Mock(stash=STASH.from_msi('m01s00i004'))
     stcube7 = mock.Mock(stash=STASH.from_msi('m01s00i007'))
     constraints = [iris.AttributeConstraint(STASH='m01s03i236'),
                    iris.AttributeConstraint(STASH='m01s00i004')]
     pp_filter = _convert_constraints(constraints)
     self.assertTrue(pp_filter(stcube236))
     self.assertTrue(pp_filter(stcube4))
     self.assertFalse(pp_filter(stcube7))
Example no. 8
def loadPP(args, reqd):
    """Loads a pp file with a callback"""
    if args.stash:
        constr = [
            iris.AttributeConstraint(STASH=args.stash[0]),
            iris.AttributeConstraint(STASH=get_pressure_stash(args.pgrid))
        ]
    else:
        constr = None
    cubes = iris.load(args.files, constraints=constr, callback=reqd.callback)
    return cubes
Example no. 9
 def test_callable_stash(self):
     stcube236 = mock.Mock(stash=STASH.from_msi('m01s03i236'))
     stcube4 = mock.Mock(stash=STASH.from_msi('m01s00i004'))
     stcube7 = mock.Mock(stash=STASH.from_msi('m01s00i007'))
     con1 = iris.AttributeConstraint(STASH=lambda s: s.endswith("004"))
     con2 = iris.AttributeConstraint(STASH=lambda s: s == "m01s00i007")
     constraints = [con1, con2]
     pp_filter = _convert_constraints(constraints)
     self.assertFalse(pp_filter(stcube236))
     self.assertTrue(pp_filter(stcube4))
     self.assertTrue(pp_filter(stcube7))
Example no. 10
 def stash_vars(self):
     """stash_vars
     Description
     Args:
     Returns
     """
     stash_df = pd.read_csv(self.stashfile + '.csv')
     u_stash = stash_df.u_stash[0]
     v_stash = stash_df.v_stash[0]
     u_constraint = iris.AttributeConstraint(STASH=u_stash)
     v_constraint = iris.AttributeConstraint(STASH=v_stash)
     p_constraint = iris.Constraint(pressure=self.plev)
     return u_constraint, v_constraint, p_constraint
Example no. 11
    def _load_cubes(self, filename):
        # Pass one constraint per STASH code; a single AttributeConstraint
        # with a list value would never match a cube's STASH attribute
        cubes = iris.load(
            self.root + filename,
            [iris.AttributeConstraint(STASH=p['stash']) for p in self.params])
        hr_dict = {}
        lr_dict = {}

        for p in self.params:
            c_hr = cubes.extract(iris.AttributeConstraint(STASH=p['stash']))[0]
            c_lr = self._bilinear_downscale(self._upscale(c_hr), target=c_hr)
            hr_dict[p['name']] = c_hr
            lr_dict[p['name']] = c_lr

        self.cubes_hr[filename] = hr_dict
        self.cubes_lr[filename] = lr_dict
Example no. 12
 def test_no_stash(self):
     constraints = [
         iris.Constraint('air_potential_temperature'),
         iris.AttributeConstraint(source='asource')
     ]
     pp_filter = _convert_constraints(constraints)
     self.assertIsNone(pp_filter)
Example no. 13
 def test_multiple_with_stash(self):
     constraints = [
         iris.Constraint('air_potential_temperature'),
         iris.AttributeConstraint(STASH='m01s00i004')
     ]
     pp_filter = _convert_constraints(constraints)
     self.assertIsNone(pp_filter)
Example no. 14
def _get_slice_at_hour_at_timestep(variable,
                                   validity_time,
                                   methods=None,
                                   model='global'):
    """Get the cube with the data, given that the specified time
       matches a data timestep."""
    file_name = _get_file_name(variable,
                               validity_time,
                               model=model,
                               methods=methods)
    if not os.path.isfile(file_name):
        raise Exception(
            ("Data for %s not available," +
             " might need oper.fetch") % validity_time.strftime("%Y-%m-%d"))
    if variable == 'prate_a':
        tp = _get_fcst_period(variable,
                              validity_time,
                              model=model,
                              methods=methods)
        tp[0] = tp[0].days * 24 + tp[0].seconds // 3600
        tp[1] = tp[1].days * 24 + tp[1].seconds // 3600
        ftco = iris.Constraint(forecast_period=lambda t: t.bound is not None
                               and t.bound[0] == tp[0] and t.bound[1] == tp[1])
    else:
        ftco = iris.Constraint(forecast_period=_get_fcst_period(
            variable, validity_time, model=model, methods=methods).seconds //
                               3600)
    ftfr = iris.Constraint(forecast_reference_time=_get_fcst_reference_time(
        variable, validity_time, model=model, methods=methods))
    stco = iris.AttributeConstraint(
        STASH=_stash_from_variable_names(variable, model=model))
    hslice = iris.load_cube(file_name, stco & ftco & ftfr)
    return hslice
Example no. 15
    def _find_required_cubes_using_metadata(cubes, input_dict):
        """
        Extract the cube that matches the information within the input
        dictionary.

        Args:
            cubes (iris.cube.CubeList):
                The cubes that will be checked for matches against the
                metadata specified in the input_dict.
            input_dict (dict):
                A dictionary containing the metadata that will be used to
                identify the desired cubes.

        Returns:
            iris.cube.CubeList:
                CubeList containing cubes that match the metadata supplied
                within the input dictionary.

        Raises:
            ValueError:
                The metadata supplied resulted in no matching cubes.

        """
        constr = iris.AttributeConstraint(**input_dict["attributes"])
        cubelist = cubes.extract(constr)
        if not cubelist:
            msg = ("The metadata to identify the desired historic forecast or "
                   "truth has found nothing matching the metadata information "
                   "supplied: {}".format(input_dict))
            raise ValueError(msg)
        return cubelist
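
A usage sketch (the attribute name and value are hypothetical):

input_dict = {"attributes": {"mosg__model_configuration": "uk_ens"}}  # hypothetical metadata
matching_cubes = _find_required_cubes_using_metadata(cubes, input_dict)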
Example no. 16
def load_stash_for_time_and_region(file, stash, region):
    valid_time_in_hours = file_valid_time(file).timestamp() / (60 * 60)
    
    return iris.load(file, 
                     iris.AttributeConstraint(STASH=stash) &
                     region &
                     iris.Constraint(time=lambda t: valid_time_in_hours - 0.1 < t < valid_time_in_hours + 0.1 ))
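
A usage sketch, assuming file_valid_time() is defined; the file name and lat/lon box are hypothetical:

region = (iris.Constraint(latitude=lambda y: 50 <= y <= 60) &
          iris.Constraint(longitude=lambda x: 340 <= x <= 360))
cubes = load_stash_for_time_and_region('fcst_000.pp', 'm01s00i004', region)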
Example no. 17
 def test_pp_with_stash_constraint(self):
     filenames = [tests.get_data_path(('PP', 'globClim1', 'dec_subset.pp'))]
     stcon = iris.AttributeConstraint(STASH='m01s00i004')
     pp_constraints = pp._convert_constraints(stcon)
     pp_loader = iris.fileformats.rules.Loader(pp.load, {}, convert)
     cubes = list(load_cubes(filenames, None, pp_loader, pp_constraints))
     self.assertEqual(len(cubes), 38)
Example no. 18
def pp_to_cube(suite_name, stream, suite_rootdir, stash_code, path_style,
               years):
    """Find and convert many pp files to a single netCDF (no CMORization!)"""
    # set the file pattern
    stream_part_2 = stream[1:]
    cube_list = []
    for yr in years:
        for mo in nr_months.keys():
            fileDescriptor = suite_name + '.' + stream_part_2 \
                             + yr + nr_months[mo] + '.pp'
            if path_style == 'jasmin':
                basedir = os.path.join(suite_rootdir, yr + mo + tz_root)
            elif path_style == 'moose':
                # rstrip only the trailing separator, not both ends
                basedir = os.path.join(suite_rootdir.rstrip(suite_rootdir[-1]),
                                       stream + '.pp')
            elif path_style == 'mangled':
                basedir = suite_rootdir
            result = []
            for path, _, files in os.walk(basedir, followlinks=True):
                files = fnmatch.filter(files, fileDescriptor)
                if files:
                    result.extend(os.path.join(path, f) for f in files)
            if result and os.path.isfile(result[0]):
                stash_cons = iris.AttributeConstraint(STASH=stash_code)
                cb = iris.load(result[0], stash_cons)[0]
                cbr = recube(cb)
                cube_list.append(cbr)

    try:
        cube = iris.cube.CubeList(cube_list).concatenate_cube()
        return cube
    except iris.exceptions.ConcatenateError as ex:
        print('Can not concatenate cubes:', ex)
        print('Differences: %s' % (ex.differences,))
        raise
Example no. 19
def PlumeandMet(workdir, metDir):

    times = ['201303060600']

    for time in times:
        filenames = glob.glob(workdir + '*' + time + '.txt')
        filename = filenames[0]
        attConstraint = iris.AttributeConstraint(Name='TotalAC')
        conc = iris.load_cube(filename, attConstraint)

        filename = metDir + '*' + time + '.txt'
        precip = iris.load_cube(filename)

        colorscale = ('#ffffff', '#b4dcff', '#04fdff', '#00ff00', '#fdff00',
                      '#ffbd02', '#ff6a00', '#fe0000', '#0000FF', '#800080',
                      '#008000')

        # Set up axes
        ax = plt.axes(projection=ccrs.PlateCarree())
        ax.set_extent([-23, -13, 63, 67])

        # Set up country outlines
        countries = cfeature.NaturalEarthFeature(category='cultural',
                                                 name='admin_0_countries',
                                                 scale='10m',
                                                 facecolor='none')
        ax.add_feature(countries, edgecolor='black', zorder=2)

        # Set-up the gridlines
        gl = ax.gridlines(draw_labels=True, linewidth=0.8, alpha=0.9)

        gl.xlabels_top = False
        gl.ylabels_right = False
        gl.xlocator = mticker.FixedLocator(
            [-23, -22, -21, -20, -19, -18, -17, -16, -15, -14])
        gl.ylocator = mticker.FixedLocator([63, 64, 65, 66, 67])
        gl.xformatter = LONGITUDE_FORMATTER
        gl.yformatter = LATITUDE_FORMATTER

        # Plot
        cf1 = iplt.contourf(precip,
                            levels=[0.0, 0.01, 0.1, 1.0, 10],
                            colors=colorscale)

        # contour levels must be increasing; half-decade steps
        cf = iplt.contour(conc,
                          levels=[
                              1e-9, 3.16e-9, 1e-8, 3.16e-8, 1e-7, 3.16e-7,
                              1e-6, 3.16e-6, 1e-5
                          ])

        cb = plt.colorbar(cf1, orientation='horizontal', shrink=0.9)
        cb.set_label(str(precip.units))
        plt.title(
            'Precipitation (coloured) and Air Concentration (contoured) \n' +
            time,
            fontsize=12)

        plt.show()
Example no. 20
 def stash_vars(self):
     """stash_vars
     Description:
         Uses the stash file to create iris cube constraints (variables)
     Args: none
     Returns:
         u_constraint (iris cube constraint): Uvel constraint
         v_constraint (iris cube constraint): Vvel constraint
         p_constraint (iris cube constraint): Pressure constraint
     """
     stash_df = pd.read_csv(self.stashfile + '.csv')
     u_stash = stash_df.u_stash[0]
     v_stash = stash_df.v_stash[0]
     u_constraint = iris.AttributeConstraint(STASH=u_stash)
     v_constraint = iris.AttributeConstraint(STASH=v_stash)
     p_constraint = iris.Constraint(pressure=self.plev)
     return u_constraint, v_constraint, p_constraint
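
A usage sketch, assuming an instance 'tc' whose stash CSV provides u_stash and v_stash columns and whose plev is set; the wind file is hypothetical:

u_con, v_con, p_con = tc.stash_vars()
u_cube = iris.load_cube('winds.pp', u_con & p_con)  # hypothetical file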
Example no. 21
 def load_cube(self, job):
     cubes_to_sum = []
     for stash in self.stash:
         constraint = iris.AttributeConstraint(STASH=stash)
         cubes_to_sum.append(
             iris.load_cube(self.job_files_dict[job], constraint))

     return sum(cubes_to_sum)
Example no. 22
    def load_cube(self, job):
        variable = iris.AttributeConstraint(STASH=self.stash)
        cube = iris.load_cube(self.job_files_dict[job], variable)
        cubes_to_sum = []
        for layer in self.cell_number:
            cubes_to_sum.append(cube.extract(
                iris.Constraint(soil_model_level_number=layer)))

        return sum(cubes_to_sum)
Example no. 23
def loadPP_cella(file, stash=None):
    """Loads a single cube from a pp file, optionally constrained by STASH"""
    if stash is not None:
        constr = [iris.AttributeConstraint(STASH=stash)]
    else:
        constr = None
    cube = iris.load_cube(file, constr)
    return cube
Example no. 24
 def test_stash_constraint(self):
     # Check that an attribute constraint functions correctly.
     # Note: this is a special case in "fileformats.pp".
     flds = self.fields(c_t='1122', phn='0101')
     file = self.save_fieldcubes(flds)
     airtemp_flds = [fld for fld in flds if fld.name() == 'air_temperature']
     stash_attribute = airtemp_flds[0].attributes['STASH']
     results = iris.load(file,
                         iris.AttributeConstraint(STASH=stash_attribute))
     expected = CubeList(airtemp_flds).merge()
     self.assertEqual(results, expected)
Example no. 25
 def __init__(self, cubelist):
     """Initialise CloudPlotter from `iris.cube.CubeList` containing cloud fractions."""
     self.factor = 10.0  # scaling factor
     self.cubes = {}
     for key, stash in self._stash_items.items():
         try:
             self.cubes[key] = cubelist.extract_strict(
                 iris.AttributeConstraint(STASH=stash))
         except iris.exceptions.ConstraintMismatchError:
             warnings.warn(
                 f"Warning!\n{key} ({stash}) is not found in\n\n{cubelist}")
Example no. 26
    def create_new_cubes(self, cubes):
        stashcube = cubes.extract(
            iris.AttributeConstraint(STASH=self.STASH))[0]
        # what are the dimensions of this cube
        if stashcube.ndim == 3:
            self.is3D = False
        elif stashcube.ndim == 4:
            self.is3D = True

        newcube = self.convert_pressure(stashcube, cubes)
        return self.add_metadata(newcube, stashcube)
Example no. 27
def load_stash(files, code, name, units=None):
    print(name)
    print(code)

    stash_constraint = iris.AttributeConstraint(STASH=code)

    cube = iris.load_cube(files, stash_constraint)
    cube.var_name = name
    cube.standard_name = None
    if units is not None:
        cube.units = units
    return cube
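
A usage sketch (the file pattern, STASH code, and units are hypothetical):

gpp = load_stash('run_output/*.pp', 'm01s03i261', 'gpp', units='kg m-2 s-1')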
Example no. 28
def cmorization(in_dir, out_dir, cfg, cfg_user):
    """Cmorization func call."""
    # run the cmorization
    # Pass on the workdir to the cfg dictionary
    cfg['work_dir'] = cfg_user['work_dir']
    # If it doesn't exist, create it
    if not os.path.isdir(cfg['work_dir']):
        logger.info("Creating working directory for regridding: %s",
                    cfg['work_dir'])
        os.mkdir(cfg['work_dir'])

    for short_name, var in cfg['variables'].items():
        var['short_name'] = short_name
        logger.info("Processing var %s", short_name)

        # Regridding
        logger.info("Start regridding to: %s",
                    cfg['custom']['regrid_resolution'])
        _regrid_dataset(in_dir, var, cfg)
        logger.info("Finished regridding")

        # File concatenation
        logger.info("Start setting time_bnds")
        cubelist = _set_time_bnds(cfg['work_dir'], var)

        attrs_to_remove = ['identifier', 'date_created']
        for cube in cubelist:
            for attr in attrs_to_remove:
                cube.attributes.pop(attr, None)

        # Loop over two different platform names
        for platformname in ['SPOT-4', 'SPOT-5']:
            # Now split the cubelist on the different platform
            logger.info("Start processing part of dataset: %s", platformname)
            cubelist_platform = cubelist.extract(
                iris.AttributeConstraint(platform=platformname))
            if cubelist_platform:
                assert _attrs_are_the_same(cubelist_platform)
                cube = cubelist_platform.concatenate_cube()
            else:
                logger.warning(
                    "No files found for platform %s (check input data)",
                    platformname)
                continue
            savename = os.path.join(cfg['work_dir'],
                                    var['short_name'] + platformname + '.nc')
            logger.info("Saving as: %s", savename)
            iris.save(cube, savename)
            logger.info("Finished file concatenation over time")
            in_file = savename
            logger.info("Start CMORization of file %s", in_file)
            _cmorize_dataset(in_file, var, cfg, out_dir)
            logger.info("Finished regridding and CMORizing %s", in_file)
Example no. 29
    def test_attribute_constraint(self):
        # there is no my_attribute attribute on the cube, so ensure it returns None
        cube = self.cube.extract(
            iris.AttributeConstraint(my_attribute='foobar'))
        self.assertIsNone(cube)

        orig_cube = self.cube
        # add an attribute to the cubes
        orig_cube.attributes['my_attribute'] = 'foobar'

        cube = orig_cube.extract(
            iris.AttributeConstraint(my_attribute='foobar'))
        self.assertCML(cube, ('constrained_load', 'attribute_constraint.cml'))

        cube = orig_cube.extract(
            iris.AttributeConstraint(my_attribute='not me'))
        self.assertIsNone(cube)

        cube = orig_cube.extract(
            iris.AttributeConstraint(
                my_attribute=lambda val: val.startswith('foo')))
        self.assertCML(cube, ('constrained_load', 'attribute_constraint.cml'))

        cube = orig_cube.extract(
            iris.AttributeConstraint(
                my_attribute=lambda val: not val.startswith('foo')))
        self.assertIsNone(cube)

        cube = orig_cube.extract(
            iris.AttributeConstraint(my_non_existant_attribute='hello world'))
        self.assertIsNone(cube)
Example no. 30
def main():
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('--start-date', help='ISO-formatted start date')
    parser.add_argument('--end-date', help='ISO-formatted end date')
    parser.add_argument('--output', '-o', help='Output file name', required=True)
    parser.add_argument('--target-mask', help='Target UM land mask', required=True)
    parser.add_argument('--frequency', choices=[6, 12, 24],
                        type=int, help='Update frequency (hours)', default=24)
    args = parser.parse_args()

    # Read in the source mask
    tos = xarray.open_mfdataset('/g/data1a/ub4/erai/netcdf/6hr/ocean/'
                                'oper_an_sfc/v01/tos/'
                                'tos_6hrs_ERAI_historical_an-sfc_2001*.nc',
                                coords='all')
    src_mask = tos.tos.isel(time=0)

    # Read in the target mask
    mask_iris = iris.load_cube(args.target_mask, iris.AttributeConstraint(STASH='m01s00i030'))
    mask_iris.coord('latitude').var_name = 'lat'
    mask_iris.coord('longitude').var_name = 'lon'
    tgt_mask = xarray.DataArray.from_iris(mask_iris).load()
    tgt_mask = tgt_mask.where(tgt_mask == 0)

    tgt_mask.lon.attrs['standard_name'] = 'longitude'
    tgt_mask.lat.attrs['standard_name'] = 'latitude'
    tgt_mask.lon.attrs['units'] = 'degrees_east'
    tgt_mask.lat.attrs['units'] = 'degrees_north'

    print(tgt_mask)

    weights = esmf_generate_weights(src_mask, tgt_mask, method='patch')

    with ProgressBar():

        # Read and slice the source data
        tos = xarray.open_mfdataset('/g/data1a/ub4/erai/netcdf/6hr/ocean/'
                                    'oper_an_sfc/v01/tos/'
                                    'tos_6hrs_ERAI_historical_an-sfc_2001*.nc',
                                    coords='all')
        sic = xarray.open_mfdataset('/g/data1a/ub4/erai/netcdf/6hr/seaIce/'
                                    'oper_an_sfc/v01/sic/'
                                    'sic_6hrs_ERAI_historical_an-sfc_2001*.nc',
                                    coords='all')
        ds = xarray.Dataset({'tos': tos.tos, 'sic': sic.sic})
        ds = ds.sel(time=slice(args.start_date, args.end_date))
        print(ds)

        newds = regrid(ds, weights=weights)

        newds['time'] = newds['time'].astype('i4')
        newds.to_netcdf(args.output)