Code example #1
    def datetimes(self):
        """Return the time axis of this variable as a list of datetime objects."""

        times = []
        axis_types = self.axis_types
        for i, axis_type in enumerate(axis_types):
            if axis_type == Axis.Time:
                val = self.file.variables[self.dim_names[i]]
                for t in range(len(val)):
                    # Convert the CF time value to epoch seconds, then to datetime
                    epochtime = int(
                        cfunits.Units.conform(
                            val[t], cfunits.Units(val.units),
                            cfunits.Units(
                                "seconds since 1970-01-01 00:00:00")))
                    times.append(datetime.utcfromtimestamp(epochtime))

        if len(times) == 0:
            surfex.util.info("No time found for " + self.var_name, level=2)
        return times
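The pattern above (CF time value to epoch seconds to datetime) also works standalone. A minimal sketch, assuming cfunits and its udunits2 backend are installed; the coordinate values and units below are hypothetical:

from datetime import datetime, timezone

import cfunits

# Hypothetical CF time coordinate: daily steps in hours since 2000-01-01
vals = [0.0, 24.0, 48.0]
units = "hours since 2000-01-01 00:00:00"

epoch = cfunits.Units("seconds since 1970-01-01 00:00:00")
for t in vals:
    seconds = int(cfunits.Units.conform(t, cfunits.Units(units), epoch))
    print(datetime.fromtimestamp(seconds, tz=timezone.utc))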
Code example #2
def read_first_guess_netcdf_file(input_file, var):

    fh = netCDF4.Dataset(input_file)
    lons = fh["longitude"][:]
    lats = fh["latitude"][:]

    # Assumes a single time value in the file
    validtime = int(cfunits.Units.conform(fh["time"][:], cfunits.Units(fh["time"].units),
                    cfunits.Units("seconds since 1970-01-01 00:00:00")))
    validtime = datetime.utcfromtimestamp(validtime)

    nx = lons.shape[1]
    ny = lons.shape[0]

    lons = np.array(np.reshape(lons, [nx * ny], order="F"))
    lats = np.array(np.reshape(lats, [nx * ny], order="F"))

    geo = surfex.Geo(nx * ny, nx, ny, lons, lats)

    if "land_area_fraction" not in fh.variables:
        raise Exception("No land area fraction found in first guess file")
    if "altitude" not in fh.variables:
        raise Exception("No altitude found in first guess file")

    # Reorder each (ny, nx) field to (nx, ny)
    background = np.transpose(np.array(np.reshape(fh[var][:], [ny, nx])))
    glafs = np.transpose(np.array(np.reshape(fh["land_area_fraction"][:], [ny, nx])))
    gelevs = np.transpose(np.array(np.reshape(fh["altitude"][:], [ny, nx])))

    fh.close()
    return geo, validtime, background, glafs, gelevs
Code example #3
def cf_time_to_datetime(times, time_units):
    # Time processing
    tmp = cfunits.Units(time_units)
    refy, refm, refd = (1950, 1, 1)                                           # Reference time for this routine
    tmp2 = cfunits.Units("seconds since %d-%d-%d 00:00:00" % (refy, refm, refd))  # Units from CF convention
    tmp3 = cfunits.Units.conform(times, tmp, tmp2)                            # Transform to the new unit (known to this routine)
    # Then calculate dt. Phew!
    mydt = [datetime.datetime(refy, refm, refd, 0, 0, 0) +
            datetime.timedelta(seconds=int(elem)) for elem in tmp3]
    return mydt
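A hypothetical call of the routine above, assuming the module-level datetime, numpy, and cfunits imports it relies on:

import numpy

# Hypothetical input: three daily steps counted in hours since 2010-01-01
times = numpy.array([0.0, 24.0, 48.0])
print(cf_time_to_datetime(times, "hours since 2010-01-01 00:00:00"))
# [datetime.datetime(2010, 1, 1, 0, 0), datetime.datetime(2010, 1, 2, 0, 0), ...]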
Code example #4
    def __init__(self,
                 name,
                 filenametemplate,
                 varname,
                 unit,
                 format,
                 accumulation_time=None,
                 rootPath=None,
                 coord_props=None):
        # Use None as default to avoid a shared mutable default argument
        if coord_props is None:
            coord_props = {}
        self._name = name  # Variable name known to this module
        self._filenametemplate = filenametemplate  # File known to this module
        self._varname = varname  # Variable name in file
        self._units = unit
        self._format = format

        if rootPath is not None:
            self._filenametemplate = self._filenametemplate.replace(
                "[rootPath]", rootPath)

        self._accumulation_time = accumulation_time
        if self._accumulation_time is not None:
            if self._accumulation_time[-1] == "h":
                self._accumulation_time = datetime.timedelta(
                    hours=int(self._accumulation_time[:-1]))
            else:
                raise AtmosphericForcingError(
                    "time step must be specified in hours (hours + letter 'h')"
                )

        if self._accumulation_time is not None:
            # Convert from accumulated to flux
            logger.info("Converting accumulated field (varname=%s) to flux" %
                        self._varname)
            self._units = self._units + " s**-1"
            tmp = self._accumulation_time
            if self._filenametemplate.find('/ERA5/') > 0:
                # The downloaded ERA5 data currently use hourly averaged
                # precipitation; in the future it could be 3-hourly
                self._accumulation_scale_factor = 1. / (tmp.days * 86400. +
                                                        tmp.seconds / 6)
            else:
                self._accumulation_scale_factor = 1. / (tmp.days * 86400. +
                                                        tmp.seconds)
        else:
            self._accumulation_scale_factor = 1.
            self._accumulation_time = datetime.timedelta(0)

        self._cfunit = cfunits.Units(units=self._units)
        self._fieldreader = FieldReader.get_field_reader(
            self._filenametemplate,
            format,
            coord_props=coord_props,
            time_offset=self._accumulation_time)
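The scale factor above is the reciprocal of the accumulation window in seconds (the ERA5 branch divides tmp.seconds further because that download is hourly averaged). A worked check of the non-ERA5 arithmetic, using a hypothetical 6-hour window:

import datetime

acc = datetime.timedelta(hours=6)   # hypothetical accumulation window
scale = 1. / (acc.days * 86400. + acc.seconds)
print(scale)                        # 1/21600, about 4.63e-05

accumulated = 0.0108                # hypothetical accumulated value, kg m-2
print(accumulated * scale)          # 5e-07 kg m-2 s-1, i.e. a flux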
Code example #5
File: _indata.py  Project: MostafaBakhoda/modeltools
    def get_timestep(self, dt, unit=None):

        outdt = dt

        # Unit conversion target: the imposed unit if given, otherwise this
        # field's own unit (a no-op conversion)
        if unit is not None:
            mycfunit = cfunits.Units(unit)
        else:
            mycfunit = self._cfunit

        logger.debug("Varname %s : imposed unit=%s, my unit=%s" %
                     (self._varname, str(mycfunit), self._units))

        tmp = numpy.squeeze(self._fieldreader.get_timestep(
            self._varname, dt)) * self._accumulation_scale_factor
        if not self._cfunit.equals(mycfunit):
            tmp = cfunits.Units.conform(tmp, self._cfunit, mycfunit)

#Approach 2: Calculate average at this time
# If this is an accumulated field, we need to get next field and interpolate
# TODO: Check if this is really necessary
        if self._accumulation_time <> datetime.timedelta(0):
            dt2 = dt + self._accumulation_time
            logger.info(
                "Computing interpolated value for accumulated field %s (Reading additional field at %s)"
                % (self._varname, str(dt2)))
            tmp2 = numpy.squeeze(
                self._fieldreader.get_timestep(
                    self._varname, dt2)) * self._accumulation_scale_factor
            if not self._cfunit.equals(mycfunit):
                #print "Unit conversion:",self.varname,"unit=",self._cfunit, "targetunit=", mycfunit
                #print "Unit conversion:max=",tmp.max()
                tmp2 = cfunits.Units.conform(tmp2, self._cfunit, mycfunit)
                #print "Unit conversion:max after=",tmp.max()
            tmp = 0.5 * (tmp + tmp2)
            outdt = dt

        #TODO - may be optimized out
        # Sets grid and coords explicitly upon read
        logger.debug(
            "NB: Implicit read of coordinates and grid when reading variable %10s at %s"
            % (self._varname, str(dt)))
        self.get_coords(dt)
        self.get_grid(dt)

        logger.info("Reading name %20s, varname=%20s" %
                    (self._name, self._varname))
        self._data = tmp
        self._time = outdt
Code example #6
def df_convert_index(dataFrame, start="1-1-1990"):

  if any([x in dataFrame.index[0] for x in "NPP HR".split(" ")]):
    from_units = cfunits.Units("kg C m-2 s-1")
    to_units = cfunits.Units("g C m-2 month-1")
  else:
    raise ValueError("No unit conversion defined for index %s" % dataFrame.index[0])

  months, ens_members = dataFrame.shape

  # Build a month-start date index covering the whole series
  dt_idx = pd.date_range(start=start, periods=months, freq="MS")

  dataFrame = pd.DataFrame(cfunits.Units.conform(dataFrame.values, from_units, to_units), index=dt_idx)

  return dataFrame
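A hypothetical invocation, assuming pandas, numpy, and cfunits are imported and the row index labels match one of the handled variables:

import numpy as np
import pandas as pd

# Hypothetical ensemble: two members over three months of NPP in kg C m-2 s-1
df = pd.DataFrame(np.full((3, 2), 1e-8), index=["NPP", "NPP", "NPP"])
monthly = df_convert_index(df, start="1-1-1990")
print(monthly)   # values now in g C m-2 month-1, indexed by month start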
Code example #7
File: unit.py  Project: leifdenby/eurec4a-environment
def convert_units(da, units):
    """Convert the data array to the new units

    Args:
        da (xarray.DataArray):
        units (str):

    Returns:
        xarray.DataArray: The input dataset converted to the new units
    """
    if "units" not in da.attrs:
        field_name = da.name
        raise KeyError(
            f"Units haven't been set on `{field_name}` field in dataset")

    if da.attrs["units"] == units:
        return da

    if not HAS_UDUNITS2:
        raise UDUNTS2MissingException(
            "To do correct unit conversion udunits2 is required, without"
            " it no unit conversion will be done. udunits2 can be installed"
            " with conda, `conda install -c conda-forge udunits2` or see"
            " https://stackoverflow.com/a/42387825 for general instructions")

    old_units = cfunits.Units(da.attrs["units"])
    new_units = cfunits.Units(units)
    if old_units == new_units:
        return da
    else:
        values_converted = cfunits.Units.conform(da.values, old_units,
                                                 new_units)
        attrs = dict(da.attrs)
        attrs["units"] = units
        da_converted = xr.DataArray(values_converted,
                                    coords=da.coords,
                                    dims=da.dims,
                                    attrs=attrs,
                                    name=da.name)
        return da_converted
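A hypothetical round trip through convert_units, assuming xarray and cfunits (with udunits2) are available:

import numpy as np
import xarray as xr

# Hypothetical temperature field stored in kelvin
da = xr.DataArray(np.array([273.15, 283.15]), dims=["x"],
                  attrs={"units": "K"}, name="ta")
da_c = convert_units(da, "degC")
print(da_c.values, da_c.attrs["units"])   # [ 0. 10.] degC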
Code example #8
def check_variable_attrs(
    variable_attrs: T.Mapping[T.Hashable, T.Any],
    definition: T.Dict[str, str],
    dtype: T.Optional[str] = None,
    log: structlog.BoundLogger = LOGGER,
) -> None:
    attrs = sanitise_mapping(variable_attrs, log)

    if "long_name" not in attrs:
        log.warning("missing recommended attribute 'long_name'")
    if "units" not in attrs:
        if dtype not in TIME_DTYPE_NAMES:
            log.warning("missing recommended attribute 'units'")
    else:
        units = attrs.get("units")
        expected_units = definition.get("units")
        if expected_units is not None:
            log = log.bind(expected_units=expected_units)
            cf_units = cfunits.Units(units)
            if not cf_units.isvalid:
                log.warning("'units' attribute not valid", units=units)
            else:
                expected_cf_units = cfunits.Units(expected_units)
                log = log.bind(units=units, expected_units=expected_units)
                if not cf_units.equivalent(expected_cf_units):
                    log.warning(
                        "'units' attribute not equivalent to the expected")
                elif not cf_units.equals(expected_cf_units):
                    log.warning("'units' attribute not equal to the expected")

    standard_name = attrs.get("standard_name")
    expected_standard_name = definition.get("standard_name")
    if expected_standard_name is not None:
        log = log.bind(expected_standard_name=expected_standard_name)
        if standard_name is None:
            log.warning("missing expected attribute 'standard_name'")
        elif standard_name != expected_standard_name:
            log.warning("'standard_name' attribute not valid",
                        standard_name=standard_name)
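A hypothetical call, assuming the module's sanitise_mapping helper and default LOGGER; any problems are emitted as warnings on the bound structlog logger:

attrs = {"long_name": "2 metre temperature",
         "units": "K",
         "standard_name": "air_temperature"}
definition = {"units": "K", "standard_name": "air_temperature"}
check_variable_attrs(attrs, definition)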
Code example #9
File: _indata.py  Project: MostafaBakhoda/modeltools
    def __init__(self, new_name, instance, unit):
        self._name = new_name  # Variable names known to this module
        self._filenametemplate = None  # File known to this module
        self._varname = None  # Variable name in file
        self._format = None
        self._fieldreader = None
        self._units = unit
        self._cfunit = cfunits.Units(units=self._units)

        # TODO: use ref or copy?
        self._gridx, self._gridy = instance.grid
        self._coordx, self._coordy = instance.coords
        self._time = instance.time
        self._data = instance.data
Code example #10
File: solve.py  Project: sunt05/atmosp
    def __new__(cls, name, parents, dct):
        if dct['_equation_module'] is not None:
            # Update the class docstring
            if '__doc__' in dct.keys():
                dct['__doc__'] = _fill_doc(
                    dct['__doc__'], dct['_equation_module'],
                    dct['default_assumptions'])
            dct['_ref_units'] = {}
            for quantity in dct['_equation_module'].quantities.keys():
                dct['_ref_units'][quantity] = \
                    cfunits.Units(dct['_equation_module'].quantities[
                        quantity]['units'])
            assumptions = set()
            for f in inspect.getmembers(equations):
                try:
                    assumptions.update(f[1].assumptions)
                except AttributeError:
                    pass
            dct['all_assumptions'] = tuple(assumptions)

        # we need to call type.__new__ to complete the initialization
        instance = super(SolverMeta, cls).__new__(cls, name, parents, dct)
        return instance
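For reference, the _ref_units mapping built above pairs each quantity name with a cfunits.Units instance. A minimal sketch with a hypothetical quantities table:

import cfunits

# Hypothetical quantities table of an equation module
quantities = {"T": {"units": "K"}, "p": {"units": "Pa"}}
ref_units = {name: cfunits.Units(q["units"]) for name, q in quantities.items()}
print(ref_units)   # e.g. {'T': <Units: K>, 'p': <Units: Pa>}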
Code example #11
def main(filemesh, grid2dfiles, first_j=0, mean_file=False):

    if mean_file:
        fnametemplate = "archm.%Y_%j_%H"
    else:
        fnametemplate = "archv.%Y_%j_%H"
    itest = 1
    jtest = 200
    logger.info("Mean file:%s" % str(mean_file))
    logger.info("Output file template:%s" % str(fnametemplate))

    # Write regional files
    nemo_mesh_to_hycom.main(filemesh, first_j=first_j)

    nemo_mesh = modeltools.nemo.NemoMesh(filemesh, first_j=first_j)

    #ncidmesh=netCDF4.Dataset(filemesh,"r")
    gdept = nemo_mesh["gdept_0"][0, :]  # Depth of t points
    gdepw = nemo_mesh["gdepw_0"][0, :]  # Depth of w points
    e3t_ps = nemo_mesh.sliced(
        nemo_mesh["e3t_ps"][0, :, :])  # Partial steps of t cell
    e3w_ps = nemo_mesh.sliced(
        nemo_mesh["e3w_ps"][0, :, :])  # Partial steps of w cell
    mbathy = nemo_mesh.sliced(nemo_mesh["mbathy"][0, :, :])  # bathy index
    hdepw = nemo_mesh.sliced(
        nemo_mesh["hdepw"][0, :, :])  # Total depth of w points
    mbathy = mbathy - 1  # python indexing starts from 0
    nlev = gdept.size

    mbathy_u, e3u_ps, depthu = nemo_mesh.depth_u_points()
    mbathy_v, e3v_ps, depthv = nemo_mesh.depth_v_points()
    #
    mbathy_u = nemo_mesh.sliced(nemo_mesh.u_to_hycom_u(mbathy_u))
    e3u_ps = nemo_mesh.sliced(nemo_mesh.u_to_hycom_u(e3u_ps))
    depthu = nemo_mesh.sliced(nemo_mesh.u_to_hycom_u(depthu))
    #
    mbathy_v = nemo_mesh.sliced(nemo_mesh.v_to_hycom_v(mbathy_v))
    e3v_ps = nemo_mesh.sliced(nemo_mesh.v_to_hycom_v(e3v_ps))
    depthv = nemo_mesh.sliced(nemo_mesh.v_to_hycom_v(depthv))

    # Thickness of t layers (NB: 1 less than gdepw dimension)
    dt = gdepw[1:] - gdepw[:-1]

    # Loop over input files. All must be in same directory
    for file2d in grid2dfiles:

        # See if actually a grid2D file
        dirname = os.path.dirname(file2d)
        m = re.match(r"(.*_)(grid2D)(_.*\.nc)", os.path.basename(file2d))
        if not m:
            msg = "File %s is not a grid2D file, aborting" % file2d
            logger.error(msg)
            raise ValueError(msg)

        # Construct remaining files
        filet = os.path.join(dirname, m.group(1) + "gridT" + m.group(3))
        files = os.path.join(dirname, m.group(1) + "gridS" + m.group(3))
        fileu = os.path.join(dirname, m.group(1) + "gridU" + m.group(3))
        filev = os.path.join(dirname, m.group(1) + "gridV" + m.group(3))
        filew = os.path.join(dirname, m.group(1) + "gridW" + m.group(3))
        fileice = os.path.join(dirname, m.group(1) + "icemod" + m.group(3))
        logger.info("grid2D file: %s" % file2d)

        # P-points
        logger.info("gridS  file: %s" % files)
        logger.info("gridT  file: %s" % filet)
        ncids = netCDF4.Dataset(files, "r")
        ncidt = netCDF4.Dataset(filet, "r")

        # time from gridT file.
        time = ncidt.variables["time_counter"][0]
        tunit = ncidt.variables["time_counter"].units
        tmp = cfunits.Units(tunit)
        refy, refm, refd = (1958, 1, 1)
        tmp2 = cfunits.Units("hours since %d-%d-%d 00:00:00" %
                             (refy, refm, refd))  # Units from CF convention
        tmp3 = cfunits.Units.conform(time, tmp,
                                     tmp2)  # Transform to the new unit
        tmp3 = int(numpy.round(tmp3))
        mydt = datetime.datetime(refy, refm,
                                 refd, 0, 0, 0) + datetime.timedelta(
                                     hours=tmp3)  # Then calculate dt. Phew!
        logger.info("Valid time from gridT file:%s" % str(mydt))

        # Read and calculate U in hycom U-points.
        logger.info("gridU  file: %s" % fileu)
        ncidu = netCDF4.Dataset(fileu, "r")
        u = numpy.zeros((nlev, mbathy.shape[0], mbathy.shape[1]))
        for k in range(nlev):
            u[k, :, :] = nemo_mesh.sliced(
                nemo_mesh.u_to_hycom_u(ncidu.variables["vozocrtx"][
                    0, k, :, :]))  # Costly, make more efficient if needed
        u = numpy.where(numpy.abs(u) < 1e10, u, 0.)

        #Calculate barotropic and baroclinic u
        usum = numpy.zeros(u.shape[-2:])
        dsum = numpy.zeros(u.shape[-2:])
        for k in range(u.shape[0] - 1):  # Don't include lowest layer
            # TODO: Mid-layer depths seem to be undefined - figure out why ...
            logger.debug(
                "k=%3d, u=%10.3g, mbathy_u[jtest,itest]=%3d,gdepw[k]=%8.2f, depthu[jtest,itest]=%8.2f"
                % (k, u[k, jtest, itest], mbathy_u[jtest, itest], gdepw[k],
                   depthu[jtest, itest]))
            J, I = numpy.where(mbathy_u > k)
            usum[J, I] = usum[J, I] + u[k, J, I] * dt[k]
            dsum[J, I] = dsum[J, I] + dt[k]
        J, I = numpy.where(mbathy >= 0)
        usum[J, I] = usum[J, I] + u[mbathy_u[J, I], J, I] * e3u_ps[J, I]
        dsum[J, I] = dsum[J, I] + e3u_ps[J, I]
        ubaro = numpy.where(dsum > 0.1, usum / dsum, 0.)

        # Read and calculate V in hycom V-points.
        logger.info("gridV  file: %s" % filev)
        ncidv = netCDF4.Dataset(filev, "r")
        v = numpy.zeros((nlev, mbathy.shape[0], mbathy.shape[1]))
        for k in range(nlev):
            v[k, :, :] = nemo_mesh.sliced(
                nemo_mesh.v_to_hycom_v(ncidv.variables["vomecrty"][
                    0, k, :, :]))  # Costly, make more efficient if needed
        v = numpy.where(numpy.abs(v) < 1e10, v, 0.)

        #Calculate barotropic and baroclinic v
        vsum = numpy.zeros(v.shape[-2:])
        dsum = numpy.zeros(v.shape[-2:])
        for k in range(v.shape[0] - 1):  # Don't include lowest layer
            logger.debug(
                "k=%3d, v=%10.3g, mbathy_v[jtest,itest]=%3d,gdepw[k]=%8.2f, depthv[jtest,itest]=%8.2f"
                % (k, v[k, jtest, itest], mbathy_v[jtest, itest], gdepw[k],
                   depthv[jtest, itest]))
            J, I = numpy.where(mbathy_v > k)
            vsum[J, I] = vsum[J, I] + v[k, J, I] * dt[k]
            dsum[J, I] = dsum[J, I] + dt[k]
        J, I = numpy.where(mbathy_v >= 0)
        vsum[J, I] = vsum[J, I] + v[mbathy_v[J, I], J, I] * e3v_ps[J, I]
        dsum[J, I] = dsum[J, I] + e3v_ps[J, I]
        vbaro = numpy.where(dsum > .1, vsum / dsum, 0.)

        # Masks (land:True)
        #print mbathy.min(),mbathy.max()
        ip = mbathy == -1
        iu = mbathy_u == -1
        iv = mbathy_v == -1
        #iu = nemo_mesh.periodic_i_shift_right(iu,1)   # u: nemo in cell i is hycom in cell i+1
        #iv = nemo_mesh.arctic_patch_shift_up(iu,1)    # v: nemo in cell j is hycom in cell j+1
        #ip = nemo_mesh.sliced(ip)
        #iu = nemo_mesh.sliced(iu)
        #iv = nemo_mesh.sliced(iv)
        #raise NameError,"test"

        # 2D data
        ncid2d = netCDF4.Dataset(file2d, "r")
        ssh = nemo_mesh.sliced(ncid2d.variables["sossheig"][0, :, :])
        ssh = numpy.where(ssh == ncid2d.variables["sossheig"]._FillValue, 0.,
                          ssh)
        ssh = numpy.where(ssh > 1e30, 0., ssh *
                          9.81)  # NB: HYCOM srfhgt is in geopotential ...
        #bar_height   = nemo_mesh.sliced(ncid2d.variables["sobarhei"][0,:,:] )
        #dyn_height   = nemo_mesh.sliced(ncid2d.variables["sodynhei"][0,:,:]
        #montg1       = ssh * 9.81  #* 1e-3  # Approx
        montg1 = numpy.zeros(ssh.shape)
        logger.warning("montg1 set to zero")
        logger.warning("srfhgt set to sossheigh*9.81 (Geopotential height)")

        # Write to abfile
        outfile = abfile.ABFileArchv(
            mydt.strftime(fnametemplate),
            "w",
            iexpt=10,
            iversn=22,
            yrflag=3,
        )
        logger.info("Writing 2D variables")
        outfile.write_field(montg1, ip, "montg1", 0, 0, 1, 0)
        outfile.write_field(ssh, ip, "srfhgt", 0, 0, 0, 0)
        outfile.write_field(numpy.zeros(ssh.shape), ip, "surflx", 0, 0, 0,
                            0)  # Not used
        outfile.write_field(numpy.zeros(ssh.shape), ip, "salflx", 0, 0, 0,
                            0)  # Not used
        outfile.write_field(numpy.zeros(ssh.shape), ip, "bl_dpth", 0, 0, 0,
                            0)  # Not used
        outfile.write_field(numpy.zeros(ssh.shape), ip, "mix_dpth", 0, 0, 0,
                            0)  # Not used
        outfile.write_field(ubaro, iu, "u_btrop", 0, 0, 0,
                            0)  # u: nemo in cell i is hycom in cell i+1
        outfile.write_field(vbaro, iv, "v_btrop", 0, 0, 0,
                            0)  # v: nemo in cell j is hycom in cell j+1
        #outfile.close() ; raise NameError,"test"
        for k in numpy.arange(u.shape[0]):
            if k % 10 == 0:
                logger.info("Writing 3D variables, level %d of %d" %
                            (k + 1, u.shape[0]))
            ul = numpy.squeeze(u[k, :, :]) - ubaro  # Baroclinic velocity
            vl = numpy.squeeze(v[k, :, :]) - vbaro  # Baroclinic velocity

            # Layer thickness
            dtl = numpy.zeros(ul.shape)
            if k < u.shape[0] - 1:
                J, I = numpy.where(mbathy > k)
                dtl[J, I] = dt[k]
                J, I = numpy.where(mbathy == k)
                dtl[J, I] = e3t_ps[J, I]
            else:
                J, I = numpy.where(mbathy == k)
                dtl[J, I] = e3t_ps[J, I]

            tmpfill = ncids.variables["vosaline"]._FillValue
            sl = nemo_mesh.sliced(ncids.variables["vosaline"][0, k, :, :])
            sl = numpy.where(
                numpy.abs(sl - tmpfill) <= 1e-4 * numpy.abs(tmpfill), 35., sl)

            tmpfill = ncidt.variables["votemper"]._FillValue
            tl = nemo_mesh.sliced(ncidt.variables["votemper"][0, k, :, :])
            tl = numpy.where(
                numpy.abs(tl - tmpfill) <= 1e-4 * numpy.abs(tmpfill), 15., tl)

            # Fill empty layers with values from above
            if k > 0:
                K = numpy.where(dtl < 1e-4)
                sl[K] = sl_above[K]
                tl[K] = tl_above[K]

            onem = 9806.
            outfile.write_field(ul, iu, "u-vel.", 0, 0, k + 1,
                                0)  # u: nemo in cell i is hycom in cell i+1
            outfile.write_field(vl, iv, "v-vel.", 0, 0, k + 1,
                                0)  # v: nemo in cell j is hycom in cell j+1
            outfile.write_field(dtl * onem, ip, "thknss", 0, 0, k + 1, 0)
            outfile.write_field(sl, ip, "salin", 0, 0, k + 1, 0)
            outfile.write_field(tl, ip, "temp", 0, 0, k + 1, 0)

            tl_above = numpy.copy(tl)
            sl_above = numpy.copy(sl)

        # TODO: Process ice data
        ncid2d.close()
        outfile.close()
        ncidt.close()
        ncids.close()
        ncidu.close()
        ncidv.close()

        logger.info("Finished writing %s.[ab] " % mydt.strftime(fnametemplate))
    nemo_mesh = []
Code example #12
def main(files):
    import netCDF4
    import datetime
    import numpy
    import matplotlib
    import cfunits
    matplotlib.use('Agg')

    from mpl_toolkits.basemap import Basemap
    import matplotlib.pyplot
    from matplotlib import cm

    # lo/la from first datafile
    ncid = netCDF4.Dataset(files[0])
    lo = ncid.variables["TLON"][:]
    la = ncid.variables["TLAT"][:]

    #m = Basemap(projection='stere',width=6000000,height=6000000,lon_0=0,lat_0=80,resolution="i")
    m = Basemap(projection='stere',
                width=8000000,
                height=7500000,
                lon_0=-45,
                lat_0=85,
                resolution="i")
    x, y = m(lo, la)

    ncid.close()

    icnt = 0
    for file in files:
        icnt += 1
        ncid = netCDF4.Dataset(file)
        t = ncid.variables["time"]

        t_unit = cfunits.Units(t.units)
        my_t_unit = cfunits.Units('days since 1900-1-1')
        newt = cfunits.Units.conform(t[:], t_unit, my_t_unit)
        newt = [int(elem * 86400.) for elem in newt]
        #print newt[0]
        newdt = [
            datetime.datetime(1900, 1, 1, 0, 0, 0) +
            datetime.timedelta(seconds=elem) for elem in newt
        ]
        print(newdt[0])

        sst = ncid.variables["sst"][0, :, :]
        cice = ncid.variables["hi"][0, :, :]
        cice = numpy.ma.masked_where(cice < .05, cice)

        fig = matplotlib.pyplot.figure(figsize=(16, 12))
        ax = fig.add_axes([0, 0, 1, 1])

        m.drawcoastlines()
        m.fillcontinents(color='.3', lake_color='aqua')
        m.drawparallels(numpy.arange(-80., 81., 20.))
        m.drawmeridians(numpy.arange(-180., 181., 20.))

        v = numpy.linspace(-2., 18, 60, endpoint=True)
        v2 = numpy.linspace(-2., 18, 10, endpoint=True)
        CF = m.contourf(x, y, sst, v, extend="both")
        CF.set_clim(0, 18)
        CB = matplotlib.pyplot.colorbar(ticks=v2)

        ax.text(0.02,
                0.02,
                str(newdt[0]),
                fontsize=36,
                transform=ax.transAxes,
                bbox={
                    'facecolor': '.7',
                    'alpha': 0.9,
                    'pad': 10
                })

        cmap = cm.get_cmap("YlGn")
        matplotlib.pyplot.hold(True)
        v3 = numpy.linspace(.1, 3, 6, endpoint=True)
        CF = m.contourf(x, y, cice, v3, cmap=cmap, extend="both")
        CF.set_clim(0, 3)
        CB2 = matplotlib.pyplot.colorbar(ticks=v3)

        matplotlib.pyplot.title(str(newdt[0]))
        matplotlib.pyplot.savefig("tst%04d.png" % icnt)

        matplotlib.pyplot.close()
Code example #13
def main(filemesh,grid2dfiles,first_j=0,mean_file=False,iexpt=10,iversn=22,yrflag=3,makegrid=None,bio_path=None) :

   if mean_file :
      fnametemplate="archm.%Y_%j_%H"
   else :
      fnametemplate="archv.%Y_%j_%H"
   itest=1
   jtest=200
   gdept,gdepw,e3t_ps,e3w_ps,mbathy,hdepw,depth,plon,plat=read_mesh(filemesh)
   if makegrid is not None: 
      logger.info("Making NEMO grid & bathy [ab] files ...")
      make_grid(filemesh)
   mbathy = mbathy -1                       # python indexing starts from 0
   nlev   = gdept.size

   mbathy_u,e3u_ps,depthu=depth_u_points(depth,mbathy,gdepw)
   mbathy_v,e3v_ps,depthv=depth_v_points(depth,mbathy,gdepw)
   #
   mbathy_u=sliced(u_to_hycom_u(mbathy_u))
   e3u_ps  =sliced(u_to_hycom_u(e3u_ps  ))
   depthu  =sliced(u_to_hycom_u(depthu  ))
   #
   mbathy_v=sliced(v_to_hycom_v(mbathy_v))
   e3v_ps  =sliced(v_to_hycom_v(e3v_ps  ))
   depthv  =sliced(v_to_hycom_v(depthv  ))

   dt = gdepw[1:] - gdepw[:-1]

   # Loop over input files. All must be in same directory
   for file2d in grid2dfiles : 

      # See if actually a grid2D file
      dirname=os.path.dirname(file2d)
      m=re.match(r"(.*_)(grid2D)(_.*\.nc)",os.path.basename(file2d))
      if not m :
         msg="File %s is not a grid2D file, aborting"%file2d
         logger.error(msg)  
         raise ValueError(msg)

      # Construct remaining files
      filet  =os.path.join(dirname,m.group(1) + "gridT" + m.group(3))
      files  =os.path.join(dirname,m.group(1) + "gridS" + m.group(3))
      fileu  =os.path.join(dirname,m.group(1) + "gridU" + m.group(3))
      filev  =os.path.join(dirname,m.group(1) + "gridV" + m.group(3))
      filew  =os.path.join(dirname,m.group(1) + "gridW" + m.group(3))
      fileice=os.path.join(dirname,m.group(1) + "icemod" + m.group(3))
      logger.info("grid2D file: %s"%file2d)

      # P-points
      logger.info("gridS  file: %s"%files)
      logger.info("gridT  file: %s"%filet)
      ncids=netCDF4.Dataset(files,"r")
      ncidt=netCDF4.Dataset(filet,"r")

      # time from gridT file. 
      time = ncidt.variables["time_counter"][0]
      tunit = ncidt.variables["time_counter"].units
      tmp=cfunits.Units(tunit)
      refy, refm, refd=(1958,1,1)
      tmp2=cfunits.Units("hours since %d-%d-%d 00:00:00"%(refy,refm,refd))            # Units from CF convention
      tmp3=cfunits.Units.conform(time,tmp,tmp2)                                       # Transform to the new unit
      tmp3=int(numpy.round(tmp3))
      mydt = datetime.datetime(refy,refm,refd,0,0,0) + datetime.timedelta(hours=tmp3) # Then calculate dt. Phew!
      if bio_path:
         jdm,idm=numpy.shape(plon)
         points = numpy.transpose(((plat.flatten(),plon.flatten())))
         delta = mydt.strftime( '%Y-%m-%d')
         # filename format MERCATOR-BIO-14-2013-01-05-00
         idx,biofname=search_biofile(bio_path,delta)
         if idx > 7:
            msg="No BIO file available within a week of the PHY time"
            logger.error(msg)
            raise ValueError(msg)
         logger.info("BIO file %s reading & interpolating to 1/12 deg grid cells ..."%biofname)
         ncidb=netCDF4.Dataset(biofname,"r")
         blon=ncidb.variables["longitude"][:];
         blat=ncidb.variables["latitude"][:]
         minblat=blat.min()
         no3=ncidb.variables["NO3"][0,:,:,:];
         no3[numpy.abs(no3)>1e+5]=numpy.nan
         po4=ncidb.variables["PO4"][0,:,:,:]
         si=ncidb.variables["Si"][0,:,:,:]
         po4[numpy.abs(po4)>1e+5]=numpy.nan
         si[numpy.abs(si)>1e+5]=numpy.nan
         # TODO: The following piece will be optimised and replaced soon. 
         nz,ny,nx=no3.shape
         dummy=numpy.zeros((nz,ny,nx+1))
         dummy[:,:,:nx]=no3;dummy[:,:,-1]=no3[:,:,-1]
         no3=dummy
         dummy=numpy.zeros((nz,ny,nx+1))
         dummy[:,:,:nx]=po4;dummy[:,:,-1]=po4[:,:,-1]
         po4=dummy
         dummy=numpy.zeros((nz,ny,nx+1))
         dummy[:,:,:nx]=si;dummy[:,:,-1]=si[:,:,-1]
         si=dummy
         dummy=numpy.zeros((nx+1))
         dummy[:nx]=blon
         blon=dummy
         blon[-1]=-blon[0]
# TODO:  Note that the coordinate files are for the global configuration, while
#        the data file is saved only for latitudes above 30. If you change your
#        data file coordinate configuration you need to modify the following lines
         bio_coordfile=bio_path[:-4]+"/GLOBAL_ANALYSIS_FORECAST_BIO_001_014_COORD/GLO-MFC_001_014_mask.nc"
         biocrd=netCDF4.Dataset(bio_coordfile,"r")
         blat2 = biocrd.variables['latitude'][:]
         index=numpy.where(blat2>=minblat)[0]
         depth_lev = biocrd.variables['deptho_lev'][index[0]:,:]
#
#
#
         dummy=numpy.zeros((ny,nx+1))
         dummy[:,:nx]=depth_lev;dummy[:,-1]=depth_lev[:,-1]
         depth_lev=dummy
         depth_lev[depth_lev>50]=0
         depth_lev=depth_lev.astype('i')
         dummy_no3=no3
         dummy_po4=po4
         dummy_si=si

         for j in range(ny):
            for i in range(nx):
               dummy_no3[depth_lev[j,i]:nz-2,j,i]=no3[depth_lev[j,i]-1,j,i]
               dummy_po4[depth_lev[j,i]:nz-2,j,i]=po4[depth_lev[j,i]-1,j,i]
               dummy_si[depth_lev[j,i]:nz-2,j,i]=si[depth_lev[j,i]-1,j,i]
         no3=dummy_no3
         po4=dummy_po4
         si=dummy_si
         po4 = po4 * 106.0 * 12.01
         si = si   * 6.625 * 12.01
         no3 = no3 * 6.625 * 12.01
      #   field_interpolator=FieldInterpolatorBilinear(blon,blat,plon.flatten(),plat.flatten())
      # Read and calculate U in hycom U-points.
      logger.info("gridU, gridV, gridT & gridS  file")
      ncidu=netCDF4.Dataset(fileu,"r")
      u=numpy.zeros((nlev,mbathy.shape[0],mbathy.shape[1]))
      ncidv=netCDF4.Dataset(filev,"r")
      v=numpy.zeros((nlev,mbathy.shape[0],mbathy.shape[1]))
      udummy=ncidu.variables["vozocrtx"][:,:,:,:] 
      vdummy=ncidv.variables["vomecrty"][:,:,:,:]
      tdummy=ncidt.variables["votemper"][:,:,:,:]
      tdummy_fill=ncidt.variables["votemper"]._FillValue
      sdummy=ncids.variables["vosaline"][:,:,:,:]
      sdummy_fill=ncids.variables["vosaline"]._FillValue

      for k in range(nlev) : 
         u[k,:,:] = sliced(u_to_hycom_u(udummy[0,k,:,:] ))   # Costly, make more efficient if needed
         v[k,:,:] = sliced(v_to_hycom_v(vdummy[0,k,:,:] ))   # Costly, make more efficient if needed

      u = numpy.where(numpy.abs(u)<1e10,u,0.)
      v = numpy.where(numpy.abs(v)<1e10,v,0.)
      logger.info("Calculate barotropic velocities ...")

      #Calculate barotropic and baroclinic u
      usum=numpy.zeros(u.shape[-2:])
      dsumu=numpy.zeros(u.shape[-2:])
      vsum=numpy.zeros(v.shape[-2:])
      dsumv=numpy.zeros(v.shape[-2:])

      for k in range(u.shape[0]-1) : # Don't include lowest layer
         J,I = numpy.where(mbathy_u>k) 
         usum[J,I] = usum[J,I] + u[k,J,I]*dt[k]
         dsumu[J,I] = dsumu[J,I] + dt[k]
         J,I = numpy.where(mbathy_v>k)
         vsum[J,I] = vsum[J,I] + v[k,J,I]*dt[k]
         dsumv[J,I] = dsumv[J,I] + dt[k]
      J,I = numpy.where(mbathy>=0)
      usum[J,I] = usum[J,I] + u[mbathy_u[J,I],J,I]*e3u_ps[J,I]
      dsumu[J,I] = dsumu[J,I] + e3u_ps[J,I]
      dsumu=numpy.where(abs(dsumu)<1e-2,0.05,dsumu)
      ubaro=numpy.where(dsumu>0.1,usum/dsumu,0.)
      J,I = numpy.where(mbathy_v>=0)
      vsum[J,I] = vsum[J,I] + v[mbathy_v[J,I],J,I]*e3v_ps[J,I]
      dsumv[J,I] = dsumv[J,I] + e3v_ps[J,I]
      dsumv=numpy.where(abs(dsumv)<1e-2,0.05,dsumv)
      vbaro=numpy.where(dsumv>.1,vsum/dsumv,0.)
      fnametemplate="archv.%Y_%j"
      deltat=datetime.datetime(refy,refm,refd,0,0,0)+datetime.timedelta(hours=tmp3)
      oname=deltat.strftime(fnametemplate)+"_00"

      # model day
      refy, refm, refd=(1900,12,31)
      model_day= deltat-datetime.datetime(refy,refm,refd,0,0,0)
      model_day=model_day.days
      logger.info("Model day in HYCOM is %s"%str(model_day))

      # Masks (land:True)
      if mask_method == 1 :
         ip = mbathy   == -1
         iu = mbathy_u == -1
         iv = mbathy_v == -1
      else :
         ip = depth   == 0
         iu = depthu  == 0
         iv = depthv  == 0

      # 2D data
      ncid2d=netCDF4.Dataset(file2d,"r")
      ssh          = sliced(ncid2d.variables["sossheig"][0,:,:])
      ssh = numpy.where(ssh==ncid2d.variables["sossheig"]._FillValue,0.,ssh)
      ssh = numpy.where(ssh>1e10,0.,ssh*9.81) # NB: HYCOM srfhgt is in geopotential ...
      montg1=numpy.zeros(ssh.shape)

      # Write to abfile
      outfile = abf.ABFileArchv("./data/"+oname,"w",iexpt=iexpt,iversn=iversn,yrflag=yrflag,)

      logger.info("Writing 2D variables")
      outfile.write_field(montg1,                ip,"montg1"  ,0,model_day,1,0)
      outfile.write_field(ssh,                   ip,"srfhgt"  ,0,model_day,0,0)
      outfile.write_field(numpy.zeros(ssh.shape),ip,"surflx"  ,0,model_day,0,0) # Not used
      outfile.write_field(numpy.zeros(ssh.shape),ip,"salflx"  ,0,model_day,0,0) # Not used
      outfile.write_field(numpy.zeros(ssh.shape),ip,"bl_dpth" ,0,model_day,0,0) # Not used
      outfile.write_field(numpy.zeros(ssh.shape),ip,"mix_dpth",0,model_day,0,0) # Not used
      outfile.write_field(ubaro                 ,iu,"u_btrop" ,0,model_day,0,0) # u: nemo in cell i is hycom in cell i+1
      outfile.write_field(vbaro                 ,iv,"v_btrop" ,0,model_day,0,0) # v: nemo in cell j is hycom in cell j+1
      ny=mbathy.shape[0];nx=mbathy.shape[1]
      error=numpy.zeros((ny,nx))
      for k in numpy.arange(u.shape[0]) :
         if bio_path:
            no3k=interpolate2d(blat, blon, no3[k,:,:], points).reshape((jdm,idm))
            no3k = maplev(no3k)
            po4k=interpolate2d(blat, blon, po4[k,:,:], points).reshape((jdm,idm))
            po4k = maplev(po4k)
            si_k=interpolate2d(blat, blon, si[k,:,:], points).reshape((jdm,idm))
            si_k = maplev(si_k)
            if k%10==0 : logger.info("Writing 3D variables including BIO, level %d of %d"%(k+1,u.shape[0]))
         else:
            if k%10==0 : logger.info("Writing 3D variables, level %d of %d"%(k+1,u.shape[0]))
         #
         ul = numpy.squeeze(u[k,:,:]) - ubaro # Baroclinic velocity
         vl = numpy.squeeze(v[k,:,:]) - vbaro # Baroclinic velocity

         # Layer thickness
         dtl=numpy.zeros(ul.shape)
         if k < u.shape[0]-1 :
            J,I = numpy.where(mbathy>k)
            dtl[J,I] = dt[k]
            J,I = numpy.where(mbathy==k)
            dtl[J,I] = e3t_ps[J,I]
         else:
            J,I = numpy.where(mbathy==k)
            dtl[J,I] = e3t_ps[J,I]

         tmpfill=sdummy_fill  # ncids.variables["vosaline"]._FillValue
         sl = sliced(sdummy[0,k,:,:])
         tmpfill=tdummy_fill  # ncidt.variables["votemper"]._FillValue
         tl = sliced(tdummy[0,k,:,:])
         sl = numpy.where(numpy.abs(sl)<1e2,sl,numpy.nan)
         sl = numpy.minimum(numpy.maximum(maplev(sl),25),80.)
         tl = numpy.where(numpy.abs(tl)<=5e2,tl,numpy.nan)
         tl = numpy.minimum(numpy.maximum(maplev(tl),-5.),50.)

         # Fill empty layers with values from above
         if k > 0 :
            K= numpy.where(dtl < 1e-4)

            tl[K] = tl_above[K]


         onem=9806.
         outfile.write_field(ul      ,iu,"u-vel.",0,model_day,k+1,0) # u: nemo in cell i is hycom in cell i+1
         outfile.write_field(vl      ,iv,"v-vel.",0,model_day,k+1,0) # v: nemo in cell j is hycom in cell j+1
         outfile.write_field(dtl*onem,ip,"thknss",0,model_day,k+1,0)
         outfile.write_field(tl      ,ip,"temp"  ,0,model_day,k+1,0)
         outfile.write_field(sl      ,ip,"salin" ,0,model_day,k+1,0)
         if bio_path :
            outfile.write_field(no3k      ,ip,"ECO_no3"  ,0,model_day,k+1,0)
            outfile.write_field(po4k      ,ip,"ECO_pho" ,0,model_day,k+1,0)
            outfile.write_field(si_k      ,ip,"ECO_sil" ,0,model_day,k+1,0)

         tl_above=numpy.copy(tl)
         sl_above=numpy.copy(sl)
         

      # TODO: Process ice data
      ncid2d.close()
      outfile.close()
      ncidt.close()
      ncids.close()
      ncidu.close()
      ncidv.close()
      if bio_path: ncidb.close()
   nemo_mesh = []
Code example #14
File: _indata.py  Project: MostafaBakhoda/modeltools
    def open(self):
        #self._nc = scipy.io.netcdf.netcdf_file(self._filename,"r")
        #print "open started"
        logger.info("Opening %s" % self._filename)
        self._nc = netCDF4.Dataset(self._filename, "r")

        # Read and map coordinate variables of all input vars
        self._coordvar = {}
        self._coordmap = {}
        self._coordrank = {}
        for varname, var in self._nc.variables.items():

            #print varname
            self._coordmap[varname] = {}
            self._coordrank[varname] = {}

            for inumber, i in enumerate(var.dimensions):
                coordvar = self._nc.variables[i]

                # Set coordinate attributes from coord_props if specified
                if "units" not in dir(coordvar):
                    if i in self._coord_props and "units" in self._coord_props[
                            i]:
                        logger.info(
                            "Setting units from explicit coordinate properties for variable %s"
                            % i)
                        unit_string = self._coord_props[i]["units"]
                    else:
                        raise FieldReaderError, "No units specified for variable %s" % i
                else:
                    if i in self._coord_props and "units" in self._coord_props[
                            i]:
                        logger.warning("Overriding units for variable %s" % i)
                        unit_string = self._coord_props[i]["units"]
                    else:
                        unit_string = coordvar.units

                unit = cfunits.Units(unit_string)

                if i not in self._coordvar.keys():
                    # Convert to datetime. use netcdftime as handling is better.
                    coordvals = numpy.array(self._nc.variables[i][:])
                    if unit.isreftime:

                        if "calendar" not in dir(coordvar):
                            if i in self._coord_props and "calendar" in self._coord_props[
                                    i]:
                                logger.info(
                                    "Setting calendar from explicit coordinate properties for variable %s"
                                    % i)
                                calendar = self._coord_props[i]["calendar"]
                            else:
                                raise FieldReaderError, "No calendar specified for variable %s" % i
                        else:
                            if i in self._coord_props and "calendar" in self._coord_props[
                                    i]:
                                logger.warning(
                                    "Overriding calendar for variable %s" % i)
                                calendar = self._coord_props[i]["calendar"]
                            else:
                                calendar = coordvar.calendar

                        tmp = netcdftime.utime(unit_string, calendar=calendar)
                        self._coordvar["time"] = tmp.num2date(coordvals)

                    elif unit.islongitude:
                        self._coordvar["lon"] = coordvar[:]
                    elif unit.islatitude:
                        self._coordvar["lat"] = coordvar[:]
                    else:
                        raise FieldReaderError, "Dont know how to handle coordinate variable %s" % i

                if unit.isreftime:
                    self._coordmap[varname]["time"] = self._coordvar["time"]
                    self._coordrank[varname]["time"] = inumber
                elif unit.islongitude:
                    self._coordmap[varname]["lon"] = self._coordvar["lon"]
                    self._coordrank[varname]["lon"] = inumber
                elif unit.islatitude:
                    self._coordmap[varname]["lat"] = self._coordvar["lat"]
                    self._coordrank[varname]["lat"] = inumber
                else:
                    raise FieldReaderError, "Dont know how to handle coordinate variable %s" % i
Code example #15
shrub04_ts_LAI =   pd.read_csv("PEcAn_2000001165/ensemble.ts.2000001223.LAI.1970.2029.Rdata.csv").transpose()
shrub04_ts_SoilC = pd.read_csv("PEcAn_2000001166/ensemble.ts.2000001226.SoilOrgC.1970.2029.Rdata.csv").transpose()

tussk05_ts_HR  =   pd.read_csv("PEcAn_2000001132/ensemble.ts.2000001182.HeteroResp.1970.2029.Rdata.csv").transpose()
tussk05_ts_NPP =   pd.read_csv("PEcAn_2000001154/ensemble.ts.2000001204.NPP.1970.2029.Rdata.csv").transpose()
tussk05_ts_LAI =   pd.read_csv("PEcAn_2000001168/ensemble.ts.2000001229.LAI.1970.2029.Rdata.csv").transpose()
tussk05_ts_SoilC = pd.read_csv("PEcAn_2000001169/ensemble.ts.2000001231.SoilOrgC.1970.2029.Rdata.csv").transpose()




# Build nice date time index
dt_idx = pd.date_range(start="1-1-1970", periods=60*12, freq="MS") # 60 years, start of month

# Convert to more manageable units
heath07_ts_HR_tu     = pd.DataFrame(cfunits.Units.conform(heath07_ts_HR.values, cfunits.Units("kg C m-2 s-1"), cfunits.Units("g C m-2 month-1")), index=dt_idx)
heath07_ts_NPP_tu    = pd.DataFrame(cfunits.Units.conform(heath07_ts_NPP.values, cfunits.Units("kg C m-2 s-1"), cfunits.Units("g C m-2 month-1")), index=dt_idx)
shrub04_ts_HR_tu     = pd.DataFrame(cfunits.Units.conform(shrub04_ts_HR.values, cfunits.Units("kg C m-2 s-1"), cfunits.Units("g C m-2 month-1")), index=dt_idx)
shrub04_ts_NPP_tu    = pd.DataFrame(cfunits.Units.conform(shrub04_ts_NPP.values, cfunits.Units("kg C m-2 s-1"), cfunits.Units("g C m-2 month-1")), index=dt_idx)
tussk05_ts_HR_tu     = pd.DataFrame(cfunits.Units.conform(tussk05_ts_HR.values, cfunits.Units("kg C m-2 s-1"), cfunits.Units("g C m-2 month-1")), index=dt_idx)
tussk05_ts_NPP_tu    = pd.DataFrame(cfunits.Units.conform(tussk05_ts_NPP.values, cfunits.Units("kg C m-2 s-1"), cfunits.Units("g C m-2 month-1")), index=dt_idx)

# No need to convert units on LAI and SoilOrgC


tab20c = plt.cm.get_cmap('tab20c')


# # MAKE THE TIMESERIES FIGURE(S)
# fig = plt.figure(figsize=(15,7))
Code example #16
def main(filemesh,
         grid2dfiles,
         first_j=0,
         mean_file=False,
         iexpt=10,
         iversn=22,
         yrflag=3,
         makegrid=None):

    if mean_file:
        fnametemplate = "archm.%Y_%j_%H"
    else:
        fnametemplate = "archv.%Y_%j_%H"
    itest = 1
    jtest = 200
    gdept, gdepw, e3t_ps, e3w_ps, mbathy, hdepw, depth = read_mesh(filemesh)
    if makegrid is not None:
        logger.info("Making NEMO grid & bathy [ab] files ...")
        make_grid(filemesh)

    mbathy = mbathy - 1  # python indexing starts from 0
    nlev = gdept.size

    mbathy_u, e3u_ps, depthu = depth_u_points(depth, mbathy, gdepw)
    mbathy_v, e3v_ps, depthv = depth_v_points(depth, mbathy, gdepw)
    #
    mbathy_u = sliced(u_to_hycom_u(mbathy_u))
    e3u_ps = sliced(u_to_hycom_u(e3u_ps))
    depthu = sliced(u_to_hycom_u(depthu))
    #
    mbathy_v = sliced(v_to_hycom_v(mbathy_v))
    e3v_ps = sliced(v_to_hycom_v(e3v_ps))
    depthv = sliced(v_to_hycom_v(depthv))

    # Thickness of t layers (NB: 1 less than gdepw dimension)
    dt = gdepw[1:] - gdepw[:-1]

    # Loop over input files. All must be in same directory
    for file2d in grid2dfiles:

        # See if actually a grid2D file
        dirname = os.path.dirname(file2d)
        m = re.match(r"(.*_)(grid2D)(_.*\.nc)", os.path.basename(file2d))
        if not m:
            msg = "File %s is not a grid2D file, aborting" % file2d
            logger.error(msg)
            raise ValueError(msg)

        # Construct remaining files
        filet = os.path.join(dirname, m.group(1) + "gridT" + m.group(3))
        files = os.path.join(dirname, m.group(1) + "gridS" + m.group(3))
        fileu = os.path.join(dirname, m.group(1) + "gridU" + m.group(3))
        filev = os.path.join(dirname, m.group(1) + "gridV" + m.group(3))
        filew = os.path.join(dirname, m.group(1) + "gridW" + m.group(3))
        fileice = os.path.join(dirname, m.group(1) + "icemod" + m.group(3))
        logger.info("grid2D file: %s" % file2d)

        # P-points
        logger.info("gridS  file: %s" % files)
        logger.info("gridT  file: %s" % filet)
        ncids = netCDF4.Dataset(files, "r")
        ncidt = netCDF4.Dataset(filet, "r")

        # time from gridT file.
        time = ncidt.variables["time_counter"][0]
        tunit = ncidt.variables["time_counter"].units
        tmp = cfunits.Units(tunit)
        refy, refm, refd = (1958, 1, 1)
        tmp2 = cfunits.Units("hours since %d-%d-%d 00:00:00" %
                             (refy, refm, refd))  # Units from CF convention
        tmp3 = cfunits.Units.conform(time, tmp,
                                     tmp2)  # Transform to the new unit
        tmp3 = int(numpy.round(tmp3))
        mydt = datetime.datetime(refy, refm,
                                 refd, 0, 0, 0) + datetime.timedelta(
                                     hours=tmp3)  # Then calculate dt. Phew!

        # Read and calculate U in hycom U-points.
        logger.info("gridU, gridV, gridT & gridS  file")
        ncidu = netCDF4.Dataset(fileu, "r")
        u = numpy.zeros((nlev, mbathy.shape[0], mbathy.shape[1]))
        ncidv = netCDF4.Dataset(filev, "r")
        v = numpy.zeros((nlev, mbathy.shape[0], mbathy.shape[1]))
        udummy = ncidu.variables["vozocrtx"][:, :, :, :]
        vdummy = ncidv.variables["vomecrty"][:, :, :, :]
        tdummy = ncidt.variables["votemper"][:, :, :, :]
        tdummy_fill = ncidt.variables["votemper"]._FillValue
        sdummy = ncids.variables["vosaline"][:, :, :, :]
        sdummy_fill = ncids.variables["vosaline"]._FillValue

        for k in range(nlev):
            u[k, :, :] = sliced(u_to_hycom_u(
                udummy[0, k, :, :]))  # Costly, make more efficient if needed
            v[k, :, :] = sliced(v_to_hycom_v(
                vdummy[0, k, :, :]))  # Costly, make more efficient if needed

        u = numpy.where(numpy.abs(u) < 1e10, u, 0.)
        v = numpy.where(numpy.abs(v) < 1e10, v, 0.)
        logger.info("Calculate barotropic velocities ...")

        #Calculate barotropic and baroclinic u
        usum = numpy.zeros(u.shape[-2:])
        dsumu = numpy.zeros(u.shape[-2:])
        vsum = numpy.zeros(v.shape[-2:])
        dsumv = numpy.zeros(v.shape[-2:])

        for k in range(u.shape[0] - 1):  # Don't include lowest layer
            J, I = numpy.where(mbathy_u > k)
            usum[J, I] = usum[J, I] + u[k, J, I] * dt[k]
            dsumu[J, I] = dsumu[J, I] + dt[k]
            J, I = numpy.where(mbathy_v > k)
            vsum[J, I] = vsum[J, I] + v[k, J, I] * dt[k]
            dsumv[J, I] = dsumv[J, I] + dt[k]
        J, I = numpy.where(mbathy >= 0)
        usum[J, I] = usum[J, I] + u[mbathy_u[J, I], J, I] * e3u_ps[J, I]
        dsumu[J, I] = dsumu[J, I] + e3u_ps[J, I]
        dsumu = numpy.where(abs(dsumu) < 1e-2, 0.05, dsumu)
        ubaro = numpy.where(dsumu > 0.1, usum / dsumu, 0.)
        J, I = numpy.where(mbathy_v >= 0)
        vsum[J, I] = vsum[J, I] + v[mbathy_v[J, I], J, I] * e3v_ps[J, I]
        dsumv[J, I] = dsumv[J, I] + e3v_ps[J, I]
        dsumv = numpy.where(abs(dsumv) < 1e-2, 0.05, dsumv)
        vbaro = numpy.where(dsumv > .1, vsum / dsumv, 0.)

        fnametemplate = "archv.%Y_%j"
        deltat = datetime.datetime(refy, refm, refd, 0, 0,
                                   0) + datetime.timedelta(hours=tmp3)
        oname = deltat.strftime(fnametemplate) + "_00"

        # model day
        refy, refm, refd = (1900, 12, 31)
        model_day = deltat - datetime.datetime(refy, refm, refd, 0, 0, 0)
        model_day = model_day.days
        logger.info("Model day in HYCOM is %s" % str(model_day))

        # Masks (land:True)
        if mask_method == 1:
            ip = mbathy == -1
            iu = mbathy_u == -1
            iv = mbathy_v == -1
        else:
            ip = depth == 0
            iu = depthu == 0
            iv = depthv == 0

        flnm = open('archvname.txt', 'w')
        flnm.write(oname)
        flnm.close()

        # 2D data
        ncid2d = netCDF4.Dataset(file2d, "r")
        ssh = sliced(ncid2d.variables["sossheig"][0, :, :])
        ssh = numpy.where(ssh == ncid2d.variables["sossheig"]._FillValue, 0.,
                          ssh)
        ssh = numpy.where(ssh > 1e10, 0., ssh *
                          9.81)  # NB: HYCOM srfhgt is in geopotential ...
        montg1 = numpy.zeros(ssh.shape)

        # Write to abfile
        outfile = abfile.ABFileArchv(
            "./data/" + oname,
            "w",
            iexpt=iexpt,
            iversn=iversn,
            yrflag=yrflag,
        )

        logger.info("Writing 2D variables")
        outfile.write_field(montg1, ip, "montg1", 0, model_day, 1, 0)
        outfile.write_field(ssh, ip, "srfhgt", 0, model_day, 0, 0)
        outfile.write_field(numpy.zeros(ssh.shape), ip, "surflx", 0, model_day,
                            0, 0)  # Not used
        outfile.write_field(numpy.zeros(ssh.shape), ip, "salflx", 0, model_day,
                            0, 0)  # Not used
        outfile.write_field(numpy.zeros(ssh.shape), ip, "bl_dpth", 0,
                            model_day, 0, 0)  # Not used
        outfile.write_field(numpy.zeros(ssh.shape), ip, "mix_dpth", 0,
                            model_day, 0, 0)  # Not used
        outfile.write_field(ubaro, iu, "u_btrop", 0, model_day, 0,
                            0)  # u: nemo in cell i is hycom in cell i+1
        outfile.write_field(vbaro, iv, "v_btrop", 0, model_day, 0,
                            0)  # v: nemo in cell j is hycom in cell j+1
        for k in numpy.arange(u.shape[0]):
            if k % 10 == 0:
                logger.info("Writing 3D variables, level %d of %d" %
                            (k + 1, u.shape[0]))
            ul = numpy.squeeze(u[k, :, :]) - ubaro  # Baroclinic velocity
            vl = numpy.squeeze(v[k, :, :]) - vbaro  # Baroclinic velocity

            # Layer thickness
            dtl = numpy.zeros(ul.shape)
            if k < u.shape[0] - 1:
                J, I = numpy.where(mbathy > k)
                dtl[J, I] = dt[k]
                J, I = numpy.where(mbathy == k)
                dtl[J, I] = e3t_ps[J, I]
            else:
                J, I = numpy.where(mbathy == k)
                dtl[J, I] = e3t_ps[J, I]

            tmpfill = sdummy_fill  #ncids.variables["vosaline"]._FillValue
            sl = sliced(sdummy[0, k, :, :])
            tmpfill = tdummy_fill  #ncidt.variables["votemper"]._FillValue
            tl = sliced(tdummy[0, k, :, :])
            sl = numpy.where(numpy.abs(sl) < 1e2, sl, numpy.nan)
            sl = numpy.minimum(numpy.maximum(maplev(sl), 25), 80.)
            tl = numpy.where(numpy.abs(tl) <= 5e2, tl, numpy.nan)
            tl = numpy.minimum(numpy.maximum(maplev(tl), -5.), 50.)

            # Fill empty layers with values from above
            if k > 0:
                K = numpy.where(dtl < 1e-4)

                tl[K] = tl_above[K]

            onem = 9806.
            outfile.write_field(ul, iu, "u-vel.", 0, model_day, k + 1,
                                0)  # u: nemo in cell i is hycom in cell i+1
            outfile.write_field(vl, iv, "v-vel.", 0, model_day, k + 1,
                                0)  # v: nemo in cell j is hycom in cell j+1
            outfile.write_field(dtl * onem, ip, "thknss", 0, model_day, k + 1,
                                0)
            outfile.write_field(tl, ip, "temp", 0, model_day, k + 1, 0)
            outfile.write_field(sl, ip, "salin", 0, model_day, k + 1, 0)

            tl_above = numpy.copy(tl)
            sl_above = numpy.copy(sl)

        # TODO: Process ice data
        ncid2d.close()
        outfile.close()
        ncidt.close()
        ncids.close()
        ncidu.close()
        ncidv.close()

        logger.info("Finished writing %s.[ab] " % mydt.strftime(fnametemplate))
    nemo_mesh = []
Code example #17
def main(startdate, enddate, first_j=0):

    soda_template = "/work/shared/nersc/msc/SODA/3.3.1/monthly/soda3.3.1_mn_ocean_reg_%Y.nc"

    # open blkdat.input. Get nesting frequency
    bp = modeltools.hycom.BlkdatParser("blkdat.input")
    nestfq = bp["nestfq"]
    bnstfq = bp["bnstfq"]

    # Read soda-grid and topo from first file
    fid = netCDF4.Dataset(startdate.strftime(soda_template), "r")
    depth = fid["depth"][:]
    soda_to_regional_grid(fid)

    # Get bathymetry. Set to 0 wherever salinity in the top layer is undefined.
    bathy = numpy.zeros(fid["salt"].shape[-2:])
    for k in range(depth.size):
        bathy[~numpy.squeeze(fid["salt"][0, k, :, :].mask)] = depth[k]
    abfile.write_bathymetry("SODA", 22, bathy, 0.)
    fid.close()

    # TODO:
    ip = bathy == 0.
    iu = bathy == 0.
    iv = bathy == 0.

    onem = 9806

    #if mean_file :
    #   fnametemplate_out="archm.%Y_%j_%H"
    #else :
    hycom_template = "archv.%Y_%j_%H"

    # Loop over nestfq, bnstfq.
    deltat = enddate - startdate
    dsec = deltat.days * 86400 + deltat.seconds
    baroclinic_nest_times = [
        startdate + datetime.timedelta(seconds=s)
        for s in numpy.arange(0, dsec, nestfq * 86400)
    ]
    barotropic_nest_times = [
        startdate + datetime.timedelta(seconds=s)
        for s in numpy.arange(0, dsec, bnstfq * 86400)
    ]
    tmp = sorted(set(barotropic_nest_times + baroclinic_nest_times))
    for dt in tmp:

        logger.info("Processing time %s" % str(dt))

        # Get "mid-month" dates
        if dt.day >= 15:
            nm = 1 + dt.month % 12
            ny = dt.year + dt.month // 12
            mm0 = datetime.datetime(dt.year, dt.month, 15, 0, 0, 0)
            mm1 = datetime.datetime(ny, nm, 15, 0, 0, 0)
        else:
            lm = 1 + (12 + dt.month - 2) % 12
            ly = dt.year - lm // 12
            print(dt.month, lm, ly)
            mm0 = datetime.datetime(ly, lm, 15, 0, 0, 0)
            mm1 = datetime.datetime(dt.year, dt.month, 15, 0, 0, 0)

        # Linear interpolation weights
        deltat = mm1 - mm0
        deltat = deltat.days + deltat.seconds / 86400.
        w1 = dt - mm0
        w1 = w1.days + w1.seconds / 86400.
        w1 = w1 / deltat
        w0 = 1. - w1
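        # Worked example (illustrative dates): for dt = 2013-01-20 we get
        # mm0 = 2013-01-15 and mm1 = 2013-02-15, so deltat = 31 days,
        # w1 = 5/31 ~ 0.161 and w0 ~ 0.839: the January mid-month file dominates.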
        flnm0 = mm0.strftime(soda_template)
        flnm1 = mm1.strftime(soda_template)
        logger.info("Time %s, file %s at %s(w=%.4f) , file %s at %s(w=%.4f)" %
                    (str(dt), flnm0, str(mm0), w0, flnm1, str(mm1), w1))

        # Open files
        # TODO: reuse pointers/fields
        fid0 = netCDF4.Dataset(flnm0, "r")
        fid1 = netCDF4.Dataset(flnm1, "r")

        # Calculate temperature, velocity
        temp = w0 * fid0["temp"][0, :, :, :] + w1 * fid1["temp"][0, :, :, :]
        salt = w0 * fid0["salt"][0, :, :, :] + w1 * fid1["salt"][0, :, :, :]
        utot = w0 * fid0["u"][0, :, :, :] + w1 * fid1["u"][0, :, :, :]
        vtot = w0 * fid0["v"][0, :, :, :] + w1 * fid1["v"][0, :, :, :]

        #NB: No checks for missing values yet !
        ubaro = numpy.sum(utot, 0)
        vbaro = numpy.sum(vtot, 0)
        u = utot - ubaro
        v = vtot - vbaro

        # 2D vars
        anompb = w0 * fid0["anompb"][0, :, :] + w1 * fid1["anompb"][0, :, :]
        ssh = w0 * fid0["ssh"][0, :, :] + w1 * fid1["ssh"][0, :, :]
        salflx = w0 * fid0["salt_flux_total"][
            0, :, :] + w1 * fid1["salt_flux_total"][0, :, :]
        surflx = w0 * fid0["net_heating"][
            0, :, :] + w1 * fid1["net_heating"][0, :, :]
        montg1 = numpy.zeros(ssh.shape)

        # Write to abfile
        outfile = abfile.ABFileArchv(dt.strftime(hycom_template),
                                     "w",
                                     iexpt=10,
                                     iversn=22,
                                     yrflag=3)
        logger.info("Writing 2D variables")
        outfile.write_field(montg1, ip, "montg1", 0, 0, 1, 0)
        outfile.write_field(ssh, ip, "srfhgt", 0, 0, 0, 0)
        outfile.write_field(surflx, ip, "surflx", 0, 0, 0, 0)
        outfile.write_field(salflx, ip, "salflx", 0, 0, 0, 0)
        outfile.write_field(numpy.zeros(ssh.shape), ip, "bl_dpth", 0, 0, 0, 0)
        outfile.write_field(numpy.zeros(ssh.shape), ip, "mix_dpth", 0, 0, 0, 0)
        outfile.write_field(ubaro, iu, "u_btrop", 0, 0, 0, 0)
        outfile.write_field(vbaro, iv, "v_btrop", 0, 0, 0, 0)
        #outfile.close() ; raise NameError,"test"
        for k in numpy.arange(u.shape[0]):
            if k % 10 == 0:
                logger.info("Writing 3D variables, level %d of %d" %
                            (k + 1, u.shape[0]))

            if k == 0:
                dtl = depth[0] * numpy.where(bathy >= depth[k], 1, 0)
            else:
                dtl = (depth[k] - depth[k - 1]) * numpy.where(
                    bathy >= depth[k], 1, 0)
            print(dtl.min(), dtl.max())

            templ = temp[k, :, :]
            saltl = salt[k, :, :]

            # Fill undefined cells with constant fallback values
            # (TODO: use the layer above via oldtempl/oldsaltl, which are
            # stored below but never applied)
            templ[dtl <= 0.] = 20.
            saltl[dtl <= 0.] = 35.

            outfile.write_field(u[k, :, :], iu, "u-vel.", 0, 0, k + 1, 0)
            outfile.write_field(v[k, :, :], iv, "v-vel.", 0, 0, k + 1, 0)
            outfile.write_field(dtl * onem, ip, "thknss", 0, 0, k + 1, 0)
            outfile.write_field(saltl, ip, "salin", 0, 0, k + 1, 0)
            outfile.write_field(templ, ip, "temp", 0, 0, k + 1, 0)

            oldsaltl = saltl
            oldtempl = templ

        # TODO: reuse pointers/fields
        outfile.close()
        fid0.close()
        fid1.close()
        raise NameError, "check vals"

    raise NameError, "test"

    itest = 1
    jtest = 200
    logger.info("Mean file:%s" % str(mean_file))
    logger.info("Output file template:%s" % str(fnametemplate))

    # Write regional files
    nemo_mesh_to_hycom.main(filemesh, first_j=first_j)

    nemo_mesh = modeltools.nemo.NemoMesh(filemesh, first_j=first_j)

    #ncidmesh=netCDF4.Dataset(filemesh,"r")
    gdept = nemo_mesh["gdept_0"][0, :]  # Depth of t points
    gdepw = nemo_mesh["gdepw_0"][0, :]  # Depth of w points
    e3t_ps = nemo_mesh.sliced(
        nemo_mesh["e3t_ps"][0, :, :])  # Partial steps of t cell
    e3w_ps = nemo_mesh.sliced(
        nemo_mesh["e3w_ps"][0, :, :])  # Partial steps of w cell
    mbathy = nemo_mesh.sliced(nemo_mesh["mbathy"][0, :, :])  # bathy index
    hdepw = nemo_mesh.sliced(
        nemo_mesh["hdepw"][0, :, :])  # Total depth of w points
    mbathy = mbathy - 1  # python indexing starts from 0
    nlev = gdept.size

    mbathy_u, e3u_ps, depthu = nemo_mesh.depth_u_points()
    mbathy_v, e3v_ps, depthv = nemo_mesh.depth_v_points()
    #
    mbathy_u = nemo_mesh.sliced(nemo_mesh.u_to_hycom_u(mbathy_u))
    e3u_ps = nemo_mesh.sliced(nemo_mesh.u_to_hycom_u(e3u_ps))
    depthu = nemo_mesh.sliced(nemo_mesh.u_to_hycom_u(depthu))
    #
    mbathy_v = nemo_mesh.sliced(nemo_mesh.v_to_hycom_v(mbathy_v))
    e3v_ps = nemo_mesh.sliced(nemo_mesh.v_to_hycom_v(e3v_ps))
    depthv = nemo_mesh.sliced(nemo_mesh.v_to_hycom_v(depthv))

    # Thickness of t layers (NB: 1 less than gdepw dimension)
    dt = gdepw[1:] - gdepw[:-1]

    # Loop over input files. All must be in same directory
    for file2d in grid2dfiles:

        # See if actually a grid2D file
        dirname = os.path.dirname(file2d)
        m = re.match(r"(.*_)(grid2D)(_.*\.nc)", os.path.basename(file2d))
        if not m:
            msg = "File %s is not a grid2D file, aborting" % file2d
            logger.error(msg)
            raise ValueError(msg)

        # Construct remaining files
        filet = os.path.join(dirname, m.group(1) + "gridT" + m.group(3))
        files = os.path.join(dirname, m.group(1) + "gridS" + m.group(3))
        fileu = os.path.join(dirname, m.group(1) + "gridU" + m.group(3))
        filev = os.path.join(dirname, m.group(1) + "gridV" + m.group(3))
        filew = os.path.join(dirname, m.group(1) + "gridW" + m.group(3))
        fileice = os.path.join(dirname, m.group(1) + "icemod" + m.group(3))
        logger.info("grid2D file: %s" % file2d)

        # P-points
        logger.info("gridS  file: %s" % files)
        logger.info("gridT  file: %s" % filet)
        ncids = netCDF4.Dataset(files, "r")
        s = numpy.zeros((nlev, mbathy.shape[0], mbathy.shape[1]))
        for k in range(nlev):  # all levels (fill values below the bottom are zeroed next)
            s[k, :, :] = nemo_mesh.sliced(ncids.variables["vosaline"][0,
                                                                      k, :, :])
        s = numpy.where(s < 1e30, s, 0.)
        s = numpy.where(s == ncids.variables["vosaline"]._FillValue, 0., s)
        ncidt = netCDF4.Dataset(filet, "r")
        t = numpy.zeros((nlev, mbathy.shape[0], mbathy.shape[1]))
        for k in range(nlev):  # all levels (fill values below the bottom are zeroed next)
            t[k, :, :] = nemo_mesh.sliced(ncidt.variables["votemper"][0,
                                                                      k, :, :])
        t = numpy.where(t == ncidt.variables["votemper"]._FillValue, 0., t)
        t = numpy.where(t < 1e30, t, 0.)

        # time from gridT file.
        time = ncidt.variables["time_counter"][0]
        tunit = ncidt.variables["time_counter"].units
        tmp = cfunits.Units(tunit)
        refy, refm, refd = (1958, 1, 1)
        tmp2 = cfunits.Units("seconds since %d-%d-%d 00:00:00" %
                             (refy, refm, refd))  # Units from CF convention
        tmp3 = cfunits.Units.conform(time, tmp,
                                     tmp2)  # Transform to the new unit
        mydt = datetime.datetime(refy, refm,
                                 refd, 0, 0, 0) + datetime.timedelta(
                                     seconds=tmp3)  # Then calculate dt. Phew!

        # Read and calculate U in hycom U-points.
        logger.info("gridU  file: %s" % fileu)
        ncidu = netCDF4.Dataset(fileu, "r")
        u = numpy.zeros((nlev, mbathy.shape[0], mbathy.shape[1]))
        for k in range(nlev):
            u[k, :, :] = nemo_mesh.sliced(
                nemo_mesh.u_to_hycom_u(ncidu.variables["vozocrtx"][
                    0, k, :, :]))  # Costly, make more efficient if needed
        u = numpy.where(numpy.abs(u) < 1e10, u, 0.)

        #Calculate barotropic and baroclinic u
        usum = numpy.zeros(u.shape[-2:])
        dsum = numpy.zeros(u.shape[-2:])
        for k in range(u.shape[0] - 1):  # Don't include the lowest layer
            # TODO: Mid-layer depths seem to be undefined - figure out why ...
            logger.debug(
                "k=%3d, u=%10.3g, mbathy_u[jtest,itest]=%3d,gdepw[k]=%8.2f, depthu[jtest,itest]=%8.2f"
                % (k, u[k, jtest, itest], mbathy_u[jtest, itest], gdepw[k],
                   depthu[jtest, itest]))
            J, I = numpy.where(mbathy_u > k)
            usum[J, I] = usum[J, I] + u[k, J, I] * dt[k]
            dsum[J, I] = dsum[J, I] + dt[k]
        J, I = numpy.where(mbathy_u >= 0)  # bottom (partial-step) contribution
        usum[J, I] = usum[J, I] + u[mbathy_u[J, I], J, I] * e3u_ps[J, I]
        dsum[J, I] = dsum[J, I] + e3u_ps[J, I]
        ubaro = numpy.where(dsum > 0.1, usum / dsum, 0.)
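        # ubaro is thus the depth-weighted vertical mean,
        #   ubaro = sum_k(u_k * dz_k) / sum_k(dz_k),
        # with full thicknesses dt[k] above the bottom cell and the partial
        # step e3u_ps in the bottom cell itself.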

        # Read and calculate V in hycom V-points.
        logger.info("gridV  file: %s" % filev)
        ncidv = netCDF4.Dataset(filev, "r")
        v = numpy.zeros((nlev, mbathy.shape[0], mbathy.shape[1]))
        for k in range(nlev):
            v[k, :, :] = nemo_mesh.sliced(
                nemo_mesh.v_to_hycom_v(ncidv.variables["vomecrty"][
                    0, k, :, :]))  # Costly, make more efficient if needed
        v = numpy.where(numpy.abs(v) < 1e10, v, 0.)

        #Calculate barotropic and baroclinic v
        vsum = numpy.zeros(v.shape[-2:])
        dsum = numpy.zeros(v.shape[-2:])
        for k in range(v.shape[0] - 1):  # Don't include the lowest layer
            logger.debug(
                "k=%3d, v=%10.3g, mbathy_v[jtest,itest]=%3d,gdepw[k]=%8.2f, depthv[jtest,itest]=%8.2f"
                % (k, v[k, jtest, itest], mbathy_v[jtest, itest], gdepw[k],
                   depthv[jtest, itest]))
            J, I = numpy.where(mbathy_v > k)
            vsum[J, I] = vsum[J, I] + v[k, J, I] * dt[k]
            dsum[J, I] = dsum[J, I] + dt[k]
        J, I = numpy.where(mbathy_v >= 0)  # bottom (partial-step) contribution
        vsum[J, I] = vsum[J, I] + v[mbathy_v[J, I], J, I] * e3v_ps[J, I]
        dsum[J, I] = dsum[J, I] + e3v_ps[J, I]
        vbaro = numpy.where(dsum > 0.1, vsum / dsum, 0.)

        # Masks (land:True)
        #print mbathy.min(),mbathy.max()
        ip = mbathy == -1
        iu = mbathy_u == -1
        iv = mbathy_v == -1
        #iu = nemo_mesh.periodic_i_shift_right(iu,1)   # u: nemo in cell i is hycom in cell i+1
        #iv = nemo_mesh.arctic_patch_shift_up(iu,1)    # v: nemo in cell j is hycom in cell j+1
        #ip = nemo_mesh.sliced(ip)
        #iu = nemo_mesh.sliced(iu)
        #iv = nemo_mesh.sliced(iv)
        #raise NameError,"test"

        # 2D data
        ncid2d = netCDF4.Dataset(file2d, "r")
        ssh = nemo_mesh.sliced(ncid2d.variables["sossheig"][0, :, :])
        ssh = numpy.where(ssh == ncid2d.variables["sossheig"]._FillValue, 0.,
                          ssh)
        ssh = numpy.where(ssh > 1e30, 0., ssh)  # guard against unmasked fill values
        #bar_height   = nemo_mesh.sliced(ncid2d.variables["sobarhei"][0,:,:] )
        #dyn_height   = nemo_mesh.sliced(ncid2d.variables["sodynhei"][0,:,:]
        montg1 = ssh * 9.81  #* 1e-3  # Approx
        logger.warning("TODO:montg pot calculation must be checked...")

        # Write to abfile
        outfile = abfile.ABFileArchv(
            mydt.strftime(fnametemplate),
            "w",
            iexpt=10,
            iversn=22,
            yrflag=3,
        )
        logger.info("Writing 2D variables")
        outfile.write_field(montg1, ip, "montg1", 0, 0, 1, 0)
        outfile.write_field(ssh, ip, "srfhgt", 0, 0, 0, 0)
        outfile.write_field(numpy.zeros(ssh.shape), ip, "surflx", 0, 0, 0,
                            0)  # Not used
        outfile.write_field(numpy.zeros(ssh.shape), ip, "salflx", 0, 0, 0,
                            0)  # Not used
        outfile.write_field(numpy.zeros(ssh.shape), ip, "bl_dpth", 0, 0, 0,
                            0)  # Not used
        outfile.write_field(numpy.zeros(ssh.shape), ip, "mix_dpth", 0, 0, 0,
                            0)  # Not used
        outfile.write_field(ubaro, iu, "u_btrop", 0, 0, 0,
                            0)  # u: nemo in cell i is hycom in cell i+1
        outfile.write_field(vbaro, iv, "v_btrop", 0, 0, 0,
                            0)  # v: nemo in cell j is hycom in cell j+1
        #outfile.close() ; raise NameError,"test"
        for k in numpy.arange(u.shape[0]):
            if k % 10 == 0:
                logger.info("Writing 3D variables, level %d of %d" %
                            (k + 1, u.shape[0]))
            ul = numpy.squeeze(u[k, :, :]) - ubaro  # Baroclinic velocity
            vl = numpy.squeeze(v[k, :, :]) - vbaro  # Baroclinic velocity
            sl = numpy.squeeze(s[k, :, :])
            tl = numpy.squeeze(t[k, :, :])

            # Layer thickness
            dtl = numpy.zeros(ul.shape)
            if k < u.shape[0] - 1:
                J, I = numpy.where(mbathy > k)
                dtl[J, I] = dt[k]
                J, I = numpy.where(mbathy == k)
                dtl[J, I] = e3t_ps[J, I]
            else:
                J, I = numpy.where(mbathy == k)
                dtl[J, I] = e3t_ps[J, I]

            onem = 9806.
            outfile.write_field(ul, iu, "u-vel.", 0, 0, k + 1,
                                0)  # u: nemo in cell i is hycom in cell i+1
            outfile.write_field(vl, iv, "v-vel.", 0, 0, k + 1,
                                0)  # v: nemo in cell j is hycom in cell j+1
            outfile.write_field(dtl * onem, ip, "thknss", 0, 0, k + 1, 0)
            outfile.write_field(sl, ip, "salin", 0, 0, k + 1, 0)
            outfile.write_field(tl, ip, "temp", 0, 0, k + 1, 0)

        # TODO: Process ice data
        ncid2d.close()
        ncids.close()
        ncidt.close()
        ncidu.close()
        ncidv.close()
        outfile.close()

        logger.info("Finished writing %s.[ab] " % mydt.strftime(fnametemplate))
    nemo_mesh = []
Code example #18
for output_var in ['NPP', 'SoilOrgC', 'LAI', 'HeteroResp']:

    dt_idx = pd.date_range(start='1970-01-01', freq='MS', periods=12 * 60)

    if output_var == 'NPP':
        shrub04 = pd.read_csv(
            "PEcAn_2000001137/ensemble.ts.2000001184.NPP.1970.2029.Rdata.csv"
        ).transpose()
        tussk05 = pd.read_csv(
            "PEcAn_2000001154/ensemble.ts.2000001204.NPP.1970.2029.Rdata.csv"
        ).transpose()
        heath07 = pd.read_csv(
            "PEcAn_2000001131/ensemble.ts.2000001180.NPP.1970.2029.Rdata.csv"
        ).transpose()
        tussk05 = pd.DataFrame(cfunits.Units.conform(
            tussk05.values, cfunits.Units("kg C m-2 s-1"),
            cfunits.Units("g C m-2 month-1")),
                               index=dt_idx)
        shrub04 = pd.DataFrame(cfunits.Units.conform(
            shrub04.values, cfunits.Units("kg C m-2 s-1"),
            cfunits.Units("g C m-2 month-1")),
                               index=dt_idx)
        heath07 = pd.DataFrame(cfunits.Units.conform(
            heath07.values, cfunits.Units("kg C m-2 s-1"),
            cfunits.Units("g C m-2 month-1")),
                               index=dt_idx)
        output_var_units = "g C m-2 month-1"
    elif output_var == 'SoilOrgC':
        tussk05 = pd.read_csv(
            "PEcAn_2000001169/ensemble.ts.2000001231.SoilOrgC.1970.2029.Rdata.csv"
        ).transpose()
Code example #19
File: units.py  Project: shinji-s/eccodes
def C(u):
    x = cfunits.Units(u).formatted()
    if x is None:
        return "~~ /" + u + "/"
    return x
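A minimal usage sketch of the helper above. The exact canonical strings depend on the installed cfunits/UDUNITS version, and the fallback branch assumes formatted() returns None for units it cannot express:

import cfunits

print(C("m/s"))    # a canonical form such as "m.s-1" (version dependent)
print(C("degC"))   # e.g. "degC" or an equivalent canonical spelling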
Code example #20
def main(meshfile, file, iexpt=10, iversn=22, yrflag=3, bio_path=None):

    #
    # Trim the input netcdf file name so it is suitable for reading
    #
    meshfile = str(meshfile)[2:-2]
    logger.info("Reading mesh information from %s." % (meshfile))
    #
    # Read mesh file containing grid and coordinate information.
    # Note that for now, we are using T-grid in vertical which may need
    # to be improved by utilizing W-point along the vertical axis.
    #
    hdept, gdept, mbathy, mbathy_u, mbathy_v, mask, e3t, plon, plat = read_grid(
        meshfile)
    logger.warning(
        "Reading grid information from regional.grid.[ab] (not completed)")
    #
    # Convert from P-point (i.e. NEMO grid) to U and V HYCOM grids
    #
    mask_u = p2u_2d(mask)
    mask_v = p2v_2d(mask)
    #
    # Read regional.grid.[ab]
    # Grid angle is not used for this product because all quantities are
    # on regular rectangular grid points.
    #
    angle = numpy.zeros(plon.shape)
    #
    # Number vertical layers in T-point.
    #
    nlev = gdept.size
    #
    # layer thickness in the absence of layer partial steps.
    #
    dt = gdept[1:] - gdept[:-1]
    #
    # Prepare/read input data file (in netcdf format). Reference time is 1950-01-01
    #
    logger.info("Reading data files.")
    file = str(file).strip()[2:-2]
    dirname = os.path.dirname(file)
    logger.debug("file name is {}".format(file))
    logger.debug("dirname is {}".format(dirname))
    logger.debug("basename is {}".format(os.path.basename(file)))
    file_pre = "MERCATOR-PHY-24-"  # file-name prefix, also used below to build the next day's file name
    m = re.match("(" + file_pre + r")(.*\.nc)", os.path.basename(file))
    logger.debug("file prefix is {}".format(file_pre))
    ###    m=re.match(file_pre,os.path.basename(file))
    if not m:
        msg = "File %s does not match the MERCATOR-PHY naming convention, aborting" % file
        logger.error(msg)
        raise ValueError(msg)

    #fileinput0=os.path.join(dirname+"/"+"MERCATOR-PHY-24-"+m.group(2))
    file_date = file[-16:-6]
    fileinput0 = file
    print(file_date, file)
    next_day = datetime.datetime.strptime(
        file_date, '%Y-%m-%d') + datetime.timedelta(days=1)
    fileinput1 = datetime.datetime.strftime(next_day, '%Y%m%d')
    fileinput1 = os.path.join(dirname + "/" + file_pre + fileinput1 + '.nc')

    logger.info("Reading from %s" % (fileinput0))
    ncid0 = netCDF4.Dataset(fileinput0, "r")
    if timeavg_method == 1 and os.path.isfile(fileinput1):

        logger.info("timeavg_method=1, Reading from %s" % (fileinput1))
        ncid1 = netCDF4.Dataset(fileinput1, "r")
        #
        # Calculate temporal averaged temperature, salinity, and velocity
        #
        uo = 0.5 * (ncid0.variables["uo"][0, :, :, :] +
                    ncid1.variables["uo"][0, :, :, :])
        vo = 0.5 * (ncid0.variables["vo"][0, :, :, :] +
                    ncid1.variables["vo"][0, :, :, :])
        salt = 0.5 * (ncid0.variables["so"][0, :, :, :] +
                      ncid1.variables["so"][0, :, :, :])
        temp = 0.5 * (ncid0.variables["thetao"][0, :, :, :] +
                      ncid1.variables["thetao"][0, :, :, :])
        ssh = numpy.squeeze(0.5 * (ncid0.variables["zos"][0, :, :] +
                                   ncid1.variables["zos"][0, :, :]))

    else:
        #
        # Set variables based on current file when timeavg_method ~=1 or the next netcdf file is not available
        logger.debug("time average method set to {}".format(timeavg_method))
        uo = ncid0.variables["uo"][0, :, :, :]
        vo = ncid0.variables["vo"][0, :, :, :]
        salt = ncid0.variables["so"][0, :, :, :]
        temp = ncid0.variables["thetao"][0, :, :, :]
        ssh = numpy.squeeze(ncid0.variables["zos"][0, :, :])
    #
    # These fill values are handled later; the current version accounts for missing values using a gap-filling methodology.
    #
    logger.debug("getting _FillValue")
    uofill = ncid0.variables["uo"]._FillValue
    vofill = ncid0.variables["vo"]._FillValue
    slfill = ncid0.variables["so"]._FillValue
    tlfill = ncid0.variables["thetao"]._FillValue
    shfill = ncid0.variables["zos"]._FillValue

    # Set time
    logger.info("Set time.")
    time = ncid0.variables["time"][0]
    unit = ncid0.variables["time"].units
    tmp = cfunits.Units(unit)
    refy, refm, refd = (1950, 1, 1)
    tmp2 = cfunits.Units("hours since %d-%d-%d 00:00:00" % (refy, refm, refd))
    tmp3 = int(numpy.round(cfunits.Units.conform(time, tmp, tmp2)))
    mydt = datetime.datetime(refy, refm, refd, 0, 0, 0) + datetime.timedelta(
        hours=tmp3)  # Then calculate dt. Phew!

    if timeavg_method == 1 and os.path.isfile(fileinput1):
        fnametemplate = "archv.%Y_%j_%H"
        deltat=datetime.datetime(refy,refm,refd,0,0,0) + \
              datetime.timedelta(hours=tmp3) + \
              datetime.timedelta(hours=12)
        oname = deltat.strftime(fnametemplate)
    else:
        #
        # I am assuming that daily mean can be set at 00 instead of 12
        # for cases that there is no information of next day.
        #
        fnametemplate = "archv.%Y_%j"
        deltat=datetime.datetime(refy,refm,refd,0,0,0) + \
              datetime.timedelta(hours=tmp3)
        oname = deltat.strftime(fnametemplate) + '_00'

    # model day
    refy, refm, refd = (1900, 12, 31)
    model_day = deltat - datetime.datetime(refy, refm, refd, 0, 0, 0)
    model_day = model_day.days
    logger.info("Model day in HYCOM is %s" % str(model_day))
    if bio_path:
        jdm, idm = numpy.shape(plon)
        points = numpy.transpose(((plat.flatten(), plon.flatten())))
        delta = mydt.strftime('%Y-%m-%d')
        # filename format MERCATOR-BIO-14-2013-01-05-00
        print(bio_path, delta)
        idx, biofname = search_biofile(bio_path, delta)
        if idx > 7:
            msg = "No available BIO file within a week difference with PHY"
            logger.error(msg)
            raise ValueError, msg
        logger.info(
            "BIO file %s reading & interpolating to 1/12 deg grid cells ..." %
            biofname)
        ncidb = netCDF4.Dataset(biofname, "r")
        blon = ncidb.variables["longitude"][:]
        blat = ncidb.variables["latitude"][:]
        minblat = blat.min()
        no3 = ncidb.variables["NO3"][0, :, :, :]
        no3[numpy.abs(no3) > 1e+10] = numpy.nan
        po4 = ncidb.variables["PO4"][0, :, :, :]
        si = ncidb.variables["Si"][0, :, :, :]
        po4[numpy.abs(po4) > 1e+10] = numpy.nan
        si[numpy.abs(si) > 1e+10] = numpy.nan
        # TODO: I need to improve this part
        nz, ny, nx = no3.shape
        dummy = numpy.zeros((nz, ny, nx + 1))
        dummy[:, :, :nx] = no3
        dummy[:, :, -1] = no3[:, :, -1]
        no3 = dummy
        dummy = numpy.zeros((nz, ny, nx + 1))
        dummy[:, :, :nx] = po4
        dummy[:, :, -1] = po4[:, :, -1]
        po4 = dummy
        dummy = numpy.zeros((nz, ny, nx + 1))
        dummy[:, :, :nx] = si
        dummy[:, :, -1] = si[:, :, -1]
        si = dummy
        dummy = numpy.zeros((nx + 1))
        dummy[:nx] = blon
        blon = dummy
        blon[-1] = -blon[0]
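        # The appended column closes the longitude axis: e.g. if blon[0] is -180
        # the new last value becomes 180, so interpolation near the dateline sees
        # a periodic, gap-free grid.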
        # TODO:  Note that the coordinate files are for the global configuration,
        #        while the data file is saved only for latitudes above 30. If you
        #        change the data file coordinate configuration, modify the lines below.
        bio_coordfile = bio_path[:-4] + "/GLOBAL_ANALYSIS_FORECAST_BIO_001_014_COORD/GLO-MFC_001_014_mask.nc"
        biocrd = netCDF4.Dataset(bio_coordfile, "r")
        blat2 = biocrd.variables['latitude'][:]
        index = numpy.where(blat2 >= minblat)[0]
        depth_lev = biocrd.variables['deptho_lev'][index[0]:, :]
        #
        #
        #
        dummy = numpy.zeros((ny, nx + 1))
        dummy[:, :nx] = depth_lev
        dummy[:, -1] = depth_lev[:, -1]
        depth_lev = dummy
        depth_lev[depth_lev > 50] = 0
        depth_lev = depth_lev.astype('i')
        dummy_no3 = no3
        dummy_po4 = po4
        dummy_si = si
        for j in range(ny):
            for i in range(nx):
                dummy_no3[depth_lev[j, i]:nz - 2, j,
                          i] = no3[depth_lev[j, i] - 1, j, i]
                dummy_po4[depth_lev[j, i]:nz - 2, j,
                          i] = po4[depth_lev[j, i] - 1, j, i]
                dummy_si[depth_lev[j, i]:nz - 2, j,
                         i] = si[depth_lev[j, i] - 1, j, i]
        no3 = dummy_no3
        po4 = dummy_po4
        si = dummy_si

        #
        po4 = po4 * 106.0 * 12.01
        si = si * 6.625 * 12.01
        no3 = no3 * 6.625 * 12.01
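        # These factors appear to convert mole-based nutrient concentrations to
        # mass of carbon: Redfield-type ratios C:N = 106:16 = 6.625 (used for NO3
        # and Si) and C:P = 106:1, times 12.01 g C per mole of carbon.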

    logger.info("Read, trim, rotate NEMO velocities.")
    u = numpy.zeros((nlev, mbathy.shape[0], mbathy.shape[1]))
    v = numpy.zeros((nlev, mbathy.shape[0], mbathy.shape[1]))
    utmp = numpy.zeros((mbathy.shape))
    vtmp = numpy.zeros((mbathy.shape))
    #
    # Masks to carefully detect the bottom at p-, u-, and v-grid points. Although
    # 3D mask data could be used, the following methods are good enough for now.
    #
    if mbathy_method == 1:
        ip = mbathy == -1
        iu = mbathy_u == -1
        iv = mbathy_v == -1
    else:
        ip = mask == 0
        iu = mask_u == 0
        iv = mask_v == 0
    #
    # Read 3D velocity field to calculate barotropic velocity
    #
    # Estimate barotropic velocities using partial steps along the vertical axis. Note that for the early version of this code,
    # I used dt = gdept[1:] - gdept[:-1] on NEMO t-grid. Furthermore, you may re-calculate this part on vertical grid cells for future.
    #
    logger.info("Calculate barotropic velocities.")
    ubaro, vbaro = calc_uvbaro(uo, vo, e3t, iu, iv)
    #
    # Save 2D fields (here only ubaro & vbaro)
    #
    zeros = numpy.zeros(mbathy.shape)
    #flnm = open(oname+'.txt', 'w')
    #flnm.write(oname)
    #flnm.close()
    ssh = numpy.where(numpy.abs(ssh) > 1000, 0.,
                      ssh * 9.81)  # NB: HYCOM srfhgt is in geopotential ...
    #
    outfile = abfile.ABFileArchv(
        "./data/" + oname,
        "w",
        iexpt=iexpt,
        iversn=iversn,
        yrflag=yrflag,
    )
    outfile.write_field(zeros, ip, "montg1", 0, model_day, 1, 0)
    outfile.write_field(ssh, ip, "srfhgt", 0, model_day, 0, 0)
    outfile.write_field(zeros, ip, "surflx", 0, model_day, 0, 0)  # Not used
    outfile.write_field(zeros, ip, "salflx", 0, model_day, 0, 0)  # Not used
    outfile.write_field(zeros, ip, "bl_dpth", 0, model_day, 0, 0)  # Not used
    outfile.write_field(zeros, ip, "mix_dpth", 0, model_day, 0, 0)  # Not used
    outfile.write_field(ubaro, iu, "u_btrop", 0, model_day, 0, 0)
    outfile.write_field(vbaro, iv, "v_btrop", 0, model_day, 0, 0)
    #
    if bio_path:
        logger.info(
            "Calculate baroclinic velocities, temperature, and salinity data as well as BIO field."
        )
    else:
        logger.info(
            "Calculate baroclinic velocities, temperature, and salinity data.")
    for k in numpy.arange(u.shape[0]):
        if bio_path:
            no3k = interpolate2d(blat, blon, no3[k, :, :], points).reshape(
                (jdm, idm))
            no3k = maplev(no3k)
            po4k = interpolate2d(blat, blon, po4[k, :, :], points).reshape(
                (jdm, idm))
            po4k = maplev(po4k)
            si_k = interpolate2d(blat, blon, si[k, :, :], points).reshape(
                (jdm, idm))
            si_k = maplev(si_k)
            if k % 10 == 0:
                logger.info(
                    "Writing 3D variables including BIO, level %d of %d" %
                    (k + 1, u.shape[0]))
        else:
            if k % 10 == 0:
                logger.info("Writing 3D variables, level %d of %d" %
                            (k + 1, u.shape[0]))
        #

        #
        uo[k, :, :] = numpy.where(numpy.abs(uo[k, :, :]) < 10, uo[k, :, :], 0)
        vo[k, :, :] = numpy.where(numpy.abs(vo[k, :, :]) < 10, vo[k, :, :], 0)

        # Baroclinic velocity (in HYCOM U- and V-grid)
        ul = p2u_2d(numpy.squeeze(uo[k, :, :])) - ubaro
        vl = p2v_2d(numpy.squeeze(vo[k, :, :])) - vbaro
        ul[iu] = spval
        vl[iv] = spval

        # Layer thickness

        dtl = numpy.zeros(mbathy.shape)
        # Use dt for the water column except the cell nearest the bottom
        if thickness_method == 1:
            if k < u.shape[0] - 1:
                J, I = numpy.where(mbathy > k)
                e3 = (e3t[k, :, :])
                dtl[J, I] = dt[k]
                J, I = numpy.where(mbathy == k)
                dtl[J, I] = e3[J, I]
            else:
                e3 = (e3t[k, :, :])
                J, I = numpy.where(mbathy == k)
                dtl[J, I] = e3[J, I]
        # Use partial cells for the whole water column.
        else:
            J, I = numpy.where(mbathy >= k)
            dtl[J, I] = e3t[k, J, I]

        # Salinity
        sl = salt[k, :, :]

        # Temperature
        tl = temp[k, :, :]
        # Treat carefully in order to minimize artifacts in the resulting [ab] files.
        if fillgap_method == 1:
            J, I = numpy.where(mbathy < k)
            sl = maplev(numpy.where(numpy.abs(sl) < 1e2, sl, numpy.nan))
            sl[J, I] = spval
            J, I = numpy.where(mbathy < k)
            tl = maplev(numpy.where(numpy.abs(tl) < 1e2, tl, numpy.nan))
            tl[J, I] = spval
        else:
            sl = numpy.where(numpy.abs(sl) < 1e2, sl, numpy.nan)
            sl = numpy.minimum(numpy.maximum(maplev(sl), 25), 80.)
            tl = numpy.where(numpy.abs(tl) <= 5e2, tl, numpy.nan)
            tl = numpy.minimum(numpy.maximum(maplev(tl), -5.), 50.)

        # Thickness
        dtl = maplev(dtl)
        if k > 0:
            with numpy.errstate(invalid='ignore'):
                K = numpy.where(dtl < 1e-4)
            sl[K] = sl_above[K]
            tl[K] = tl_above[K]
        #
        sl[ip] = spval
        tl[ip] = spval

        # Save 3D fields
        outfile.write_field(ul, iu, "u-vel.", 0, model_day, k + 1, 0)
        outfile.write_field(vl, iv, "v-vel.", 0, model_day, k + 1, 0)
        outfile.write_field(dtl * onem, ip, "thknss", 0, model_day, k + 1, 0)
        outfile.write_field(tl, ip, "temp", 0, model_day, k + 1, 0)
        outfile.write_field(sl, ip, "salin", 0, model_day, k + 1, 0)
        if bio_path:
            outfile.write_field(no3k, ip, "ECO_no3", 0, model_day, k + 1, 0)
            outfile.write_field(po4k, ip, "ECO_pho", 0, model_day, k + 1, 0)
            outfile.write_field(si_k, ip, "ECO_sil", 0, model_day, k + 1, 0)

        tl_above = numpy.copy(tl)
        sl_above = numpy.copy(sl)

    outfile.close()
    ncid0.close()
    if timeavg_method == 1 and os.path.isfile(fileinput1):
        ncid1.close()
    if bio_path:
        ncidb.close()
Code example #21
    def cfunit(self):
        return cfunits.Units(self._variable_unit)
Code example #22
def main(source_dir, version, instrument, changelog, dry_run):
    t_now = datetime.now()
    files = _find_source_files(source_dir=source_dir, instrument=instrument)
    print(f"Found {len(files)} files")

    for filepath in files:
        print(f"{filepath.name}:")
        r = parse.parse(DATAFILE_FORMAT[instrument], filepath.name)
        ds = xr.open_dataset(filepath, decode_times=False)

        nc_rev = ds.attrs['Revision']
        filename_rev = r['rev']
        version_id = f"v{version}"
        print(f"  exising revision info")
        print(f"    filename: {filename_rev}")
        print(f"    nc-attrs: {nc_rev}")
        print(f"  new version: {version_id}")

        if instrument == 'MASIN':
            # correct to follow the EUREC4A time reference
            time_units = ds.Time.attrs['units']
            nc_tref = cfunits.Units(time_units).reftime
            t_offset = EUREC4A_REF_TIME - nc_tref
            if dry_run:
                print(f"  would adjust reference time to 2020-01-01 00:00:00, by {t_offset} ({t_offset.total_seconds()}s)")
                print(f"  current time units: {time_units}")
            else:
                ds.Time.values -= int(t_offset.total_seconds())
                ds.Time.attrs['units'] = 'seconds since 2020-01-01 00:00:00 +0000 UTC'

            old_rev_info = f"filename: `{filename_rev}`, nc-attrs: `{nc_rev}`"
            history_s = f"version created by Leif Denby {t_now.isoformat()}, existing revision info: {old_rev_info} from file: `{filepath.name}`"
            if dry_run:
                print(f"  would set ds.attrs['version'] = {version_id}")
                print(f"  would set ds.attrs['history'] = {history_s}")
                print(f"  would delete ds.attrs['Revision']")
            else:
                ds.attrs['version'] = version_id
                ds.attrs['history'] = history_s
                ds.attrs['contact'] = "Tom Lachlan-Cope <*****@*****.**>"
                ds.attrs['acknowledgement'] = "DO NOT USE FOR PUBLICATION! EARLY-RELEASE DATA"
                del ds.attrs['Revision']

            # fixes for masin data
            for v in ds.data_vars:
                if isinstance(ds[v].attrs['units'], np.int8) and ds[v].attrs['units'] == 1:
                    # should be string, not a number
                    ds[v].attrs['units'] = "1"
            # make time the main coordinate
            ds = ds.swap_dims(dict(data_point='Time'))

            date_filename = datetime.strptime(r['date'], DATE_FORMAT[instrument])
            time_id = date_filename.strftime(DATE_FORMAT['EUREC4A'])
            platform_id = f"TO-{r['flight_num']}"
            instrument_id = f"{instrument}-{r['freq']}Hz"
            fn_new = EUREC4A_FILE_FORMAT.format(
                platform_id=platform_id,
                instrument_id=instrument_id,
                time_id=time_id,
                version_id=version_id,
            )
            p_out = Path(DATAFILE_PATH.format(instrument=instrument, flight_num=r['flight_num']))/fn_new
            if dry_run:
                print(f"  would write to {p_out}")
            else:
                ds.to_netcdf(p_out)
        print(flush=True)

    changelog_extra = f"""

# {t_now.date().isoformat()} {version_id}
{changelog}
"""
    if dry_run:
        print(f"would add to CHANGELOG: {changelog_extra}")
    else:
        with open("CHANGELOG.txt", "a") as fh:
            fh.write(changelog_extra)
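A worked sketch of the reference-time shift applied to the MASIN data above; the file reference time here is illustrative:

from datetime import datetime

EUREC4A_REF_TIME = datetime(2020, 1, 1)      # reference used above
nc_tref = datetime(2020, 1, 25)              # hypothetical file reference time
t_offset = EUREC4A_REF_TIME - nc_tref        # -24 days

# A raw value of 3600 (01:00 on 25 Jan relative to nc_tref) becomes
# 3600 + 24*86400 seconds since 2020-01-01: the same absolute instant,
# merely re-referenced.
print(3600 - int(t_offset.total_seconds()))  # 2077200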
Code example #23
    def slice(self, var_name, levels=None, members=None, times=None, xcoords=None, ycoords=None,
              deaccumulate=False, instantanious=0., units=None, lev_from_ind=False):
        """
        Assembles a 5D field in order lon,lat,time,height,ensemble

        Arguments:
            var_name (str): Name of field to retrieve
            levels (list): Height index. If None, return all.
            members (list): Ensemble index. If None, return all.
            times (list): Time index. If None, return all.
            xcoords: X-axis coordinates to subset
            ycoords: Y-axis coordinates to subset
            deaccumulate (bool): Deaccumulate field
            instantanious (float): Scaling factor to convert an accumulated value into an instantaneous one
            units (str): CF unit for the variable to be read
            lev_from_ind (bool): the levels list contains indices rather than values

        Returns:
         np.array: 5D array with values
        """

        var = NetCDFFileVariable(self.file, var_name)
        if xcoords is not None or ycoords is not None:
            raise Exception("Subsetting of the input dimensions not implemented yet!")

        surfex.util.info("Reading variable "+var.var_name, level=1)
        times_to_read = []
        prev_time_steps = []
        if times is None:
            for i in range(0, var.times.shape[0]):
                times_to_read.append(i)
                if i > 0:
                    prev_time_steps.append(i-1)
                else:
                    prev_time_steps.append(0)
        else:
            if not isinstance(times, (list, tuple)):
                raise Exception("Times must be a list!")
            if isinstance(times[0], date):
                surfex.util.info("Time provided in call as datetime objects", level=2)
                times_in_var = var.datetimes
                for i in range(0, len(times_in_var)):
                    print(i, times_in_var[i], times)
                    for j in range(0, len(times)):
                        # Time steps requested
                        print(times_in_var[i], times[j])
                        if times_in_var[i] == times[j]:
                            times_to_read.append(i)
                            if i > 0:
                                prev_time_steps.append(i-1)
                            else:
                                prev_time_steps.append(0)

            else:
                times_in_var = var.times
                for i in range(0, times_in_var.shape[0]):
                    for j in range(0, len(times)):
                        # Time steps requested
                        if i == times[j]:
                            times_to_read.append(times[j])
                            if i > 0:
                                prev_time_steps.append(i-1)
                            else:
                                prev_time_steps.append(0)

        print("times to read", times_to_read)
        levels_to_read = []
        if levels is None:
            for i in range(0, var.levels.shape[0]):
                levels_to_read.append(i)
        else:
            surfex.util.info("Level provided in call. lev_from_ind=" + str(lev_from_ind), level=2)
            if not isinstance(levels, (list, tuple)):
                raise Exception("Levels must be a list!")
            levels_in_var = var.levels
            for i in range(0, levels_in_var.shape[0]):
                for j in range(0, len(levels)):
                    # print lev_from_ind,i, j, levels_in_var[i], levels[j]
                    if lev_from_ind:
                        if i == levels[j]:
                            levels_to_read.append(i)
                    else:
                        # NB! Round number to avoid round off when matching
                        if round(float(levels_in_var[i]), 5) == round(float(levels[j]), 5):
                            levels_to_read.append(i)

        members_to_read = []
        if members is None:
            for i in range(0, var.members.shape[0]):
                members_to_read.append(i)
        else:
            if not isinstance(members, (list, tuple)):
                raise Exception("Members must be a list!")
            surfex.util.info("Ensemble members provided in call", level=2)
            members_in_var = var.members
            for i in range(0, members_in_var.shape[0]):
                for j in range(0, len(members)):
                    if members_in_var[i] == members[j]:
                        members_to_read.append(i)

            if len(members_to_read) == 0:
                raise Exception("No ensemble members found for " + var.var_name)

        lons = var.lons
        lats = var.lats

        # Dimensions of the "problem"
        dim_x = lons.shape[0]
        dim_y = lats.shape[1]

        geo = surfex.geo.Geo(dim_x * dim_y, dim_x, dim_y, lons, lats)

        dim_t = max(len(times_to_read), 1)
        dim_levels = max(len(levels_to_read), 1)
        dim_members = max(len(members_to_read), 1)

        surfex.util.info("Dimensions in output", level=3)
        surfex.util.info(str(dim_x) + " " + str(dim_y) + " " + str(dim_t) + " " + str(dim_levels) + " " +
                         str(dim_members), level=3)

        lon_ind = slice(0, dim_x, 1)
        lat_ind = slice(0, dim_y, 1)
        dims = []
        prev_dims = []
        types = var.axis_types
        mapping = {}  # Map axis to output axis
        for i in range(0, len(types)):
            if types[i] == Axis.GeoX or types[i] == Axis.Lon:
                dims.append(lon_ind)
                prev_dims.append(lon_ind)
                mapping[0] = i
            elif types[i] == Axis.GeoY or types[i] == Axis.Lat:
                dims.append(lat_ind)
                prev_dims.append(lat_ind)
                mapping[1] = i
            elif types[i] == Axis.Time:
                dims.append(times_to_read)
                prev_dims.append(prev_time_steps)
                mapping[2] = i
            elif var.is_level(types[i]):
                dims.append(levels_to_read)
                prev_dims.append(levels_to_read)
                mapping[3] = i
            elif types[i] == Axis.Realization:
                dims.append(members_to_read)
                prev_dims.append(members_to_read)
                mapping[4] = i
            else:
                raise Exception(str(types[i])+" is not defined!")

        surfex.util.info("Read " + var.var_name + " with dimensions: " + str(dims), level=2)
        if deaccumulate:
            surfex.util.info("Deaccumulate previous dimensions: " + str(prev_dims), level=2)

        print(var.var_name)
        print(dims)
        print(self.file[var.var_name])
        field = self.file[var.var_name][dims]
        if units is not None:
            field = cfunits.Units.conform(field, cfunits.Units(var.units), cfunits.Units(units))

        # Deaccumulation
        if deaccumulate:
            original_field = field
            previous_field = self.file[var.var_name][prev_dims]
            if units is not None:
                previous_field = cfunits.Units.conform(previous_field, cfunits.Units(var.units), cfunits.Units(units))
            field = np.subtract(original_field, previous_field)

        # Create instantaneous values
        if instantanious > 0:
            field = np.divide(field, instantanious)

        # Add extra dimensions
        i = 0
        reverse_mapping = []
        for d in range(0, 5):
            if d not in mapping:
                surfex.util.info("Adding dimension " + str(d), level=3)
                field = np.expand_dims(field, len(dims) + i)
                reverse_mapping.append(len(dims) + i)
                i = i + 1
            else:
                reverse_mapping.append(mapping[d])

        # Transpose to 5D array
        surfex.util.info("Transpose to 5D array", level=1)
        field = np.transpose(field, reverse_mapping)

        print("Read netcdf from ", self.filename, "times", times)
        surfex.util.info("Shape of output: "+str(field.shape), level=2)
        return field, geo
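A hypothetical call illustrating the contract of slice(). The variable name and times are made up, and nc_reader is assumed to be an instance of the reader class that defines the method:

from datetime import datetime

# nc_reader: instance of the class defining slice() above (construction omitted)
field, geo = nc_reader.slice(
    "air_temperature_2m",              # hypothetical CF variable name
    times=[datetime(2020, 1, 1, 6)],   # request a single time step
    units="celsius",                   # converted from the file units via cfunits
)
# field is 5D in the order lon, lat, time, height, ensemble
print(field.shape)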
Code example #24
File: solve.py  Project: sunt05/atmosp
 def __init__(self, **kwargs):
     if self._equation_module is None:
         raise NotImplementedError('Class needs _equation_module '
                                   'defined')
     if 'debug' in kwargs.keys():
         self._debug = kwargs.pop('debug')
     else:
         self._debug = False
     # make sure add and remove assumptions are tuples, not strings
     if ('add_assumptions' in kwargs.keys() and
             isinstance(kwargs['add_assumptions'], string_types)):
         kwargs['add_assumptions'] = (kwargs['add_assumptions'],)
     if ('remove_assumptions' in kwargs.keys() and
             isinstance(kwargs['remove_assumptions'], string_types)):
         kwargs['remove_assumptions'] = (kwargs['remove_assumptions'],)
     # See if an assumption set was given
     if 'assumptions' in kwargs.keys():
         # If it was, make sure it wasn't given with other ways of
         # setting assumptions (by modifying the default assumptions)
         if ('add_assumptions' in kwargs.keys() or
                 'remove_assumptions' in kwargs.keys()):
             raise ValueError('cannot give kwarg assumptions with '
                              'add_assumptions or remove_assumptions')
         assumptions = kwargs.pop('assumptions')
     else:
         # if it wasn't, modify the default assumptions
         assumptions = self.default_assumptions
         if 'add_assumptions' in kwargs.keys():
             if 'remove_assumptions' in kwargs.keys():
                 # make sure there is no overlap
                 if any([a in kwargs['remove_assumptions']
                         for a in kwargs['add_assumptions']]):
                     raise ValueError('assumption may not be present in '
                                      'both add_assumptions and '
                                      'remove_assumptions')
             # add assumptions, avoiding duplicates
             assumptions = assumptions + tuple(
                 [a for a in kwargs.pop('add_assumptions') if a not in
                  assumptions])
         if 'remove_assumptions' in kwargs.keys():
             # remove assumptions if present
             remove_assumptions = kwargs.pop('remove_assumptions')
             self._ensure_assumptions(*assumptions)
             assumptions = tuple([a for a in assumptions if a not in
                                  remove_assumptions])
     # Make sure all set assumptions are valid (not misspelt, for instance)
     self._ensure_assumptions(*assumptions)
     # now that we have our assumptions, use them to set the methods
     self.methods = self._get_methods(assumptions)
     self.assumptions = assumptions
     # take out any unit designations
     self.units = {}
     remove_kwargs = []
     for kwarg in kwargs:
         m = _unit_kwarg_prog.match(kwarg)
         if m is not None:
             # select whichever group is not None
             var = m.group(1) or m.group(2)
             self._ensure_quantities(var)
             if var in self.units:
                 raise ValueError(
                     'units for {} specified multiple times'.format(var))
             unit_str = kwargs[kwarg]
             remove_kwargs.append(kwarg)
             if not isinstance(unit_str, string_types):
                 raise TypeError('units must be strings')
             self.units[var] = cfunits.Units(unit_str)
     for kwarg in remove_kwargs:
         kwargs.pop(kwarg)
     # make sure the remaining variables are quantities
     self._ensure_quantities(*kwargs.keys())
     # convert quantities to reference units
     for kwarg in kwargs:
         if (kwarg in self.units and
                 self.units[kwarg] != self._ref_units[kwarg]):
             # special unit defined
             # convert to reference unit for calculations
             kwargs[kwarg] = cfunits.Units.conform(
                 kwargs[kwarg], self.units[kwarg], self._ref_units[kwarg])
     # also store the quantities
     self.vars = kwargs
Code example #25
def checkNc(fn, dict1, overwrite=0, allowOverwrite=1, vb=0):
  '''
return ok1:
0 -- good
1 -- dimension is not a var itself
2 -- dimension has no units
3 -- dimension units not recognized
4 -- ref time is 0000
5 -- cannot open file
6 -- make up dim as 'i'
11-- 2d dim
12-- dimension is not a var itself. same as 1?

'''
  ok1 = 0
  dict9 = copy.deepcopy(dict1)

  varList = []
  varDict = {}
  check1 = ''
  warning = ''

  if fn.find('*')>-1:
    fn2 = glob.glob(fn)
  else:
    fn2 = [fn,]

  if 0:
    temp2 = os.path.split(fn2[0])
    dict1['filename'] = temp2[1]
    dict1['filepath'] = fn2[0]

  dict1['nFile'] = len(fn2)

  # facets from fn
  #fn3 = fn2[0]
  fn3a = fn.lower()

  if 1:
    pp = '_'
    if fn3a.find('/mnt/')>-1:
      pp = 'staged'

    if fn3a.find('/home/svc/upload')>-1:
      pp = 'uploaded'

    if fn3a.find('http')>-1:
      pp = 'online'
    dict1['source'] = pp

  pp = '_'
  for prov in providers:
    if fn3a.find('cmip5/%s'%prov)>-1:
      pp = providers[prov]

  dict1['provider'] = pp
  
  pp = '_'
  for mod1 in models2:
    if fn3a.find(models2[mod1])>-1:
      pp = mod1
  dict1['model'] = pp

  pp = '_'
  for exp1 in experiments2:
    if fn3a.find(experiments2[exp1])>-1:
      pp = exp1
  dict1['experiment'] = pp

  pp = '_'
  for rr in runs2:
    if fn3a.find(runs2[rr])>-1:
      pp = rr
  dict1['run'] = pp


  try:
    if len(fn2)>1:
      nc = MFDataset(fn2)
      nc1 = Dataset(fn2[0])
      nc2 = Dataset(fn2[-1])

    else:
      if overwrite:
        nc = Dataset(fn2[0], 'r+')
      else:
        nc = Dataset(fn2[0])

  except Exception as e :
    dict1['message'] += "File on server is not found: %s "%(fn)
    dict1['success'] = False

    ok1 = 5
    print('ok1 = %d'%ok1)
    print("cannot open file: %s "%(fn2[0]))
    if len(fn2)>1:
      print("cannot open file: %s "%(fn2[-1]))

    print(traceback.format_exc()) 
    return ok1

  # loop_vars
  varListAll = nc.variables.keys()
  varListAll = [str(i) for i in varListAll]
  
  
  # gather global att
  title2 = ''
  try:
    title2 = nc.title
  except:
    pass

  summary2 = ''
  try:
    summary2 += nc.obs_project
  except:
    pass

  try:
    summary2 += nc.source
  except:
    pass

  try:
    summary2 += nc.history
  except:
    pass

  freq2 = ''
  try:
    if nc.frequency == 'mon':
      freq2 += 'monthly'
  except:
    pass

  # find dim
  str1 = ''
  dimList = []
  for var in varListAll:

    # find_units
    units1 = ''
    d1 = nc.variables[var]
    try:
      units1 = d1.units 
    except:
      temp1 = var.find('_bnds')
      if temp1==-1:
        check1 += var + ': need the units attribute.\n'

    # find_longname
    longName = '_'

    try:
      longName = d1.long_name 
    except: pass

    try:
      longName = d1.longname 
    except: pass
      
    # collect_dims

    # to remove u' (unicode thing)
    dim1 = list(d1.dimensions)
    for i in range(len(dim1)):
      dim1[i] = str(dim1[i])

    if var.find('_bnds')==-1:
      str1 += '%s: %s\n'%(var, str(dim1))
      dimList += list(dim1)

    varDict[var] = {'dim':  dim1, 
                    'units': units1,
                    'longName': longName,
                   }

  str1 += '\nDimension Variables\n'
  dimList = list(set(dimList))

  # only if the dim is a variable itself
  dimList2 = []
  for d in dimList:
    if d in varListAll:
      dimList2.append(d)
  dimList = dimList2
  dimList0 = dimList

  if vb==1: print('dimList0')
  if vb==1: print(dimList0)

  dimList = []

  # collect_vars, only they are not dim var

  dimList0a = [i.lower() for i in dimList0]

  varList = []
  varListLong = []
  for k in varListAll:
    k1 = k.lower()
    if k not in dimList0:
      if not k1.endswith('_bnds') \
          and not k1.endswith('err') \
          and not k1.endswith('nobs') \
          and not k1.endswith('stddev') \
          and k1 not in (
            'month', 
            'year',
            'height',
            'plev',
            'not_used',
            'model_lat',
            'model_lon'): 
        varList.append(k)
        varListLong.append(varDict[k]['longName'])

  if vb==1: print('varList:')
  if vb==1: print(varList)

  # from the varList, 
  # collect_the_real dim
  dimList = []
  for var in varList:
    d1 = nc.variables[var]
    dim1 = list(d1.dimensions)
    for i in range(len(dim1)):
      dim1[i] = str(dim1[i])

    if vb==1: print('dim1')
    if vb==1: print(dim1)
      
    str1 += '%s: %s\n'%(var, str(dim1))
    dimList += list(dim1)

  # this is the list of dims of the real vars
  dimList = list(set(dimList))

  if vb==1: print('dimList:')
  if vb==1: print(dimList)

  # check_dimList
  for dimVar in dimList:
    dimWhat = ''
    dimAsI = 0 

    try: 
      d2 = nc.variables[dimVar]

      if len(fn2)>1:
        d2a1 = nc1.variables[dimVar]
        d2a2 = nc2.variables[dimVar]

    except:
      dimAsI = 1 
      hasUnits = 0
      #ok1 = 1
      print('this dim is not a var: %s'%dimVar)
      print(traceback.format_exc()) 

    # test if 2d dim
    if not dimAsI:
      # check_var_dim
      for var1 in varList:
        for dimV in varDict[var1]['dim']:
          if dimV in varListAll:
            shape0 =  nc.variables[dimV].shape 
            if len(shape0)>1:
              #ok1 = 11
              return ok1

      try:
        units1 = str(d2.units)
        if len(fn2)>1:
          #xxxx should not do this. should do the dim of the real var.
          units1a1 = str(d2a1.units)
          units1a2 = str(d2a2.units)

        hasUnits = 1
        print('units1:', units1)

      except:
        hasUnits = 0
        #ok1 = 2
        print('this var has no units: %s'%dimVar)
        print(traceback.format_exc()) 
        return ok1
        #if overwrite==0 and allowOverwrite==1:
        #  return checkNc_w(nc, fn, dict9)

    # if_hasUnits
    if hasUnits:
      goodUnits = 1
      try:
        cfUnits = cf1.Units(units1)
      except:
        print(traceback.format_exc()) 
        goodUnits = 0

      # month since, and 0000 are ok with 360_day
      if not goodUnits:
        goodUnits = 1
        try:
          cfUnits = cf1.Units(units1, calendar='360_day')
        except:
          print(traceback.format_exc()) 
          goodUnits = 0

      if 0:
        if not goodUnits:
          goodUnits = 1
          try:
            cfUnits = cf1.Units(units1, calendar='365_day')
          except:
            print(traceback.format_exc()) 
            goodUnits = 0

      if not goodUnits:
        #ok1 = 3
        print('ok1=3')
        print('units not recognized: %s'%units1)
        print(traceback.format_exc()) 
        goodUnits = 0
        return ok1

      if goodUnits:
        if vb==1: print('cfUnits:')
        if vb==1: print(cfUnits)
   
        if cfUnits.islongitude:
          dimWhat = 'lon'
        elif cfUnits.islatitude:
          dimWhat = 'lat'
        elif cfUnits.isreftime:
          dimWhat = 'time'
        elif cfUnits.ispressure or units1=='hPa':
          dimWhat = 'z'

    #if hasUnits:
      # check_time_limits
      if dimWhat=='time':

        if len(fn2)>1:
          units9 = units1a1
        else:
          units9 = units1

        if vb==1: print('units9')
        if vb==1: print(units9)
        date1 = cmac.num2date(netCDF4, d2[0], units9)
        date2 = cmac.num2date(netCDF4, d2[-1], units9)
 
        if vb==1: print(date1)
        if vb==1: print(date2)

        if (date1 is None) or (date2 is None):
          #ok1 = 11      
          return ok1

        time1 = date1.timetuple()
        time2 = date2.timetuple()

        a1 = '%04d%02d%02d %02d:%02d:%02d'%(time1[0], time1[1], time1[2], time1[3], time1[4], time1[5])

        a2 = '%04d%02d%02d %02d:%02d:%02d'%(time2[0], time2[1], time2[2], time2[3], time2[4], time2[5])

        a1 = a1[:8]
        a2 = a2[:8]

      else:  # not time
        try:
          d2a = d2[:]
          a1 = str(d2a.min())
          a2 = str(d2a.max())      
        except:
          a1 = '0'
          a2 = '0'
      str1 += '%s: %s to %s (%s)\n'%(dimVar, a1, a2, units1)
      varDict[dimVar]['min'] = a1
      varDict[dimVar]['max'] = a2
      varDict[dimVar]['units'] = units1
      varDict[dimVar]['what'] = dimWhat

      # end
    #if hasUnits:
  if ok1 > 0:
    return ok1

  # construct_dim2
  if 1:
    for var1 in varList:
    #for var1 in varDict.keys():

      dim2 = []
      for i in varDict[var1]['dim']:
        try:
          dim2.append( varDict[i]['what'] )
        except:
          dim2.append( 'unknown' )
      varDict[var1]['dim2'] = dim2

  # construct_global_dim2
  dim22 = []
  for d in dimList:
    try:
      dim22.append( varDict[d]['what'] ) 
    except:
      dim22.append( 'unknown' )
  

  nc.close()
  check1 += '\nThe netCDF file has %d variables:\n%s'%(len(varList), str1) 

  dict1['varDict'] = varDict 
  dict1['varList'] = varList
  dict1['varListLong'] = varListLong
  dict1['dimList'] = dimList 
  dict1['dim2'] = dim22 
  dict1['check'] = check1
  dict1['warning'] = warning
  dict1['title'] = title2
  dict1['summary'] = summary2
  dict1['frequency'] = freq2
  dict1['ok'] = 0

  return ok1
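A hypothetical call sketch for checkNc. The file pattern is made up, and the dictionary must arrive with at least the 'message' and 'success' keys, since the function appends to and overwrites them on failure:

info = {'message': '', 'success': True}
status = checkNc("tas_Amon_somemodel_historical_r1i1p1_*.nc", info, vb=1)
if status == 0:
    print(info['varList'], info['dim2'])
else:
    print(info['message'])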
Code example #26
def main(myglobs):

    # Read cice files, plot volume
    reg = abfile.ABFileGrid("regional.grid", "r")
    plon = reg.read_field("plon")
    plat = reg.read_field("plat")
    scpx = reg.read_field("scpx")
    scpy = reg.read_field("scpy")

    figure1 = matplotlib.pyplot.figure(figsize=(8, 8))
    ax1 = figure1.add_subplot(111)
    ax1.set_title("Total Ice Area [ 1.000.000 km^2 ]")
    ax1.grid(True)

    figure2 = matplotlib.pyplot.figure(figsize=(8, 8))
    ax2 = figure2.add_subplot(111)
    ax2.set_title("Total Ice Volume [ km^3 ]")
    ax2.grid(True)

    for myglob in myglobs:
        print(myglob)
        files = glob.glob(myglob)

        l_area = []
        l_vol = []
        l_time = []

        for file in files:

            print(file)

            nc = netCDF4.Dataset(file, "r")
            aice = nc.variables["aice"][0, :, :]
            hice = nc.variables["hi"][0, :, :]
            newt = nc.variables["time"][0]

            vol = numpy.sum(aice * hice * scpx * scpy)
            area = numpy.sum(aice * scpx * scpy)

            t_unit = cfunits.Units(nc.variables["time"].units)
            my_t_unit = cfunits.Units('days since 1900-1-1')
            newt = cfunits.Units.conform(newt, t_unit, my_t_unit)
            newt = int(newt * 86400.)
            newdt = datetime.datetime(1900, 1, 1, 0, 0,
                                      0) + datetime.timedelta(seconds=newt)
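            # Equivalent, assuming the standard netCDF4 helper is acceptable:
            #   newdt = netCDF4.num2date(nc.variables["time"][0],
            #                            nc.variables["time"].units)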
            #print file, "%14.6gm**3    %14.6gm**2" % (vol,area)
            nc.close()

            l_area.append(area)
            l_vol.append(vol)
            l_time.append(newdt)

        #  Sort
        I = sorted(range(len(l_time)), key=lambda x: l_time[x])
        l_area = [l_area[i] for i in I]
        l_vol = [l_vol[i] for i in I]
        l_time = [l_time[i] for i in I]

        ax1.plot(l_time, numpy.array(l_area) * 1e-12, label=myglob, lw=3)  # m^2 -> 10^6 km^2
        ax2.plot(l_time, numpy.array(l_vol) * 1e-9, label=myglob, lw=3)  # m^3 -> km^3

    ax1.legend()
    figure1.canvas.print_figure("icearea.png")

    ax2.legend()
    figure2.canvas.print_figure("icevolume.png")