Code example #1
def compute_vel_mag(prefix, inGridName, inDir):
    config = MpasAnalysisConfigParser()
    config.read('mpas_analysis/config.default')

    outDescriptor = get_comparison_descriptor(config, 'antarctic')
    outGridName = outDescriptor.meshName
    description = 'Monthly velocity magnitude climatologies from ' \
                  '2005-2010 average of the Southern Ocean State ' \
                  'Estimate (SOSE)'
    botDescription = 'Monthly velocity magnitude climatologies at sea ' \
                     'floor from 2005-2010 average from SOSE'

    for gridName in [outGridName]:
        outFileName = '{}_vel_mag_{}.nc'.format(prefix, gridName)
        uFileName = '{}_zonal_vel_{}.nc'.format(prefix, gridName)
        vFileName = '{}_merid_vel_{}.nc'.format(prefix, gridName)
        if not os.path.exists(outFileName):
            with xarray.open_dataset(uFileName) as dsU:
                with xarray.open_dataset(vFileName) as dsV:
                    dsVelMag = dsU.drop_vars(['zonalVel', 'botZonalVel'])
                    # numpy ufuncs work directly on DataArrays
                    # (xarray.ufuncs was removed in newer xarray)
                    dsVelMag['velMag'] = numpy.sqrt(dsU.zonalVel**2 +
                                                    dsV.meridVel**2)
                    dsVelMag.velMag.attrs['units'] = 'm s$^{-1}$'
                    dsVelMag.velMag.attrs['description'] = description

                    dsVelMag['botVelMag'] = numpy.sqrt(
                        dsU.botZonalVel**2 + dsV.botMeridVel**2)
                    dsVelMag.botVelMag.attrs['units'] = 'm s$^{-1}$'
                    dsVelMag.botVelMag.attrs['description'] = botDescription

                    write_netcdf(dsVelMag, outFileName)
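The example above shows the caching idiom that recurs throughout these snippets: return early if the output file already exists, otherwise derive the new variables, attach units and a description, and hand the dataset to write_netcdf. A minimal, self-contained sketch of the same idiom (the file and variable names are hypothetical, and xarray's own to_netcdf stands in for the module's write_netcdf helper):

import os
import numpy
import xarray


def cache_derived_speed(inFileName, outFileName):
    # recompute only when the cached output is missing
    if os.path.exists(outFileName):
        return
    with xarray.open_dataset(inFileName) as ds:
        # numpy ufuncs operate directly on DataArrays
        ds['speed'] = numpy.sqrt(ds.u**2 + ds.v**2)
        ds.speed.attrs['units'] = 'm s$^{-1}$'
        ds.to_netcdf(outFileName)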
Code example #2
def _cache_individual_climatologies(ds, cacheInfo, printProgress,
                                    yearsPerCacheFile, monthValues,
                                    calendar):  # {{{
    '''
    Cache individual climatologies for later aggregation.
    '''
    # Authors
    # -------
    # Xylar Asay-Davis

    for cacheIndex, info in enumerate(cacheInfo):
        outputFileClimo, done, yearString = info
        if done:
            continue
        dsYear = ds.where(ds.cacheIndices == cacheIndex, drop=True)

        if printProgress:
            print('     {}'.format(yearString))

        totalDays = dsYear.daysInMonth.sum(dim='Time').values

        monthCount = dsYear.dims['Time']

        climatology = compute_climatology(dsYear, monthValues, calendar,
                                          maskVaries=False)

        climatology.attrs['totalDays'] = totalDays
        climatology.attrs['totalMonths'] = monthCount
        climatology.attrs['fingerprintClimo'] = fingerprint_generator()

        write_netcdf(climatology, outputFileClimo)
        climatology.close()
Code example #3
def compute_vel_mag(prefix, inGridName, inDir, outGridName):
    description = 'Monthly velocity magnitude climatologies from ' \
                  '2005-2010 average of the Southern Ocean State ' \
                  'Estimate (SOSE)'
    botDescription = 'Monthly velocity magnitude climatologies at sea ' \
                     'floor from 2005-2010 average from SOSE'

    for gridName in [outGridName]:
        outFileName = '{}_vel_mag_{}.nc'.format(prefix, gridName)
        uFileName = '{}_zonal_vel_{}.nc'.format(prefix, gridName)
        vFileName = '{}_merid_vel_{}.nc'.format(prefix, gridName)
        if not os.path.exists(outFileName):
            with xarray.open_dataset(uFileName) as dsU:
                with xarray.open_dataset(vFileName) as dsV:
                    dsVelMag = dsU.drop_vars(['zonalVel', 'botZonalVel'])
                    dsVelMag['velMag'] = numpy.sqrt(dsU.zonalVel**2 +
                                                    dsV.meridVel**2)
                    dsVelMag.velMag.attrs['units'] = 'm s$^{-1}$'
                    dsVelMag.velMag.attrs['description'] = description

                    dsVelMag['botVelMag'] = numpy.sqrt(dsU.botZonalVel**2 +
                                                       dsV.botMeridVel**2)
                    dsVelMag.botVelMag.attrs['units'] = 'm s$^{-1}$'
                    dsVelMag.botVelMag.attrs['description'] = botDescription

                    write_netcdf(dsVelMag, outFileName)
Code example #4
    def run_task(self):  # {{{
        """
        Combine the time series
        """
        # Authors
        # -------
        # Xylar Asay-Davis

        outputDirectory = '{}/transport/'.format(
            build_config_full_path(self.config, 'output',
                                   'timeseriesSubdirectory'))

        outFileName = '{}/transport_{:04d}-{:04d}.nc'.format(
            outputDirectory, self.startYears[0], self.endYears[-1])

        if not os.path.exists(outFileName):
            inFileNames = []
            for startYear, endYear in zip(self.startYears, self.endYears):
                inFileName = '{}/transport_{:04d}-{:04d}.nc'.format(
                    outputDirectory, startYear, endYear)
                inFileNames.append(inFileName)

            ds = xarray.open_mfdataset(inFileNames,
                                       combine='nested',
                                       concat_dim='Time',
                                       decode_times=False)
            ds.load()
            write_netcdf(ds, outFileName)
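The combine step above is a common pattern for stitching per-segment time series back together. A stand-alone sketch of just that step, assuming the yearly files share all variables (decode_times=False keeps MPAS's non-standard calendar from tripping up xarray's time decoder):

import xarray


def combine_time_series(inFileNames, outFileName):
    # nested concatenation along Time, without decoding the calendar
    ds = xarray.open_mfdataset(inFileNames, combine='nested',
                               concat_dim='Time', decode_times=False)
    ds.load()  # read everything into memory before writing
    ds.to_netcdf(outFileName)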
Code example #5
    def setup_and_check(self):  # {{{
        """
        Perform steps to set up the analysis and check for errors in the setup.
        """
        # Authors
        # -------
        # Xylar Asay-Davis

        # call setup_and_check from the base class (AnalysisTask),
        # which will perform some common setup, including storing:
        #     self.runDirectory , self.historyDirectory, self.plotsDirectory,
        #     self.namelist, self.runStreams, self.historyStreams,
        #     self.calendar
        super(RemapObservedClimatologySubtask, self).setup_and_check()

        # we set up the remappers here because ESMF_RegridWeightGen seems to
        # have trouble if it runs in another process (or in several at once)
        self._setup_remappers(self.fileName)

        # build the observational data set and write it out to a file, to
        # be read back in during the run_task() phase
        obsFileName = self.get_file_name(stage='original')
        if not os.path.exists(obsFileName):
            ds = self.build_observational_dataset(self.fileName)
            write_netcdf(ds, obsFileName)
Code example #6
    def run_task(self):  # {{{
        """
        Performs analysis of ocean heat content (OHC) from time-series output.
        """
        # Authors
        # -------
        # Xylar Asay-Davis, Milena Veneziani, Greg Streletz

        self.logger.info("\nComputing anomalies...")

        config = self.config

        ds = xarray.open_dataset(self.inFileName)

        dsStart = ds.isel(Time=slice(0, self.movingAverageMonths)).mean('Time')

        for variable in ds.data_vars:
            ds[variable] = ds[variable] - dsStart[variable]

        outFileName = self.outFileName
        if not os.path.isabs(outFileName):
            baseDirectory = build_config_full_path(config, 'output',
                                                   'timeSeriesSubdirectory')

            outFileName = '{}/{}'.format(baseDirectory, outFileName)

        write_netcdf(ds, outFileName)  # }}}
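The anomaly computation above amounts to: average the first movingAverageMonths time slices into a reference state, then subtract that reference from every variable. The same logic in isolation, as a sketch for any dataset with a Time dimension:

import xarray


def anomaly_from_start(ds, referenceMonths):
    # reference state: mean over the first referenceMonths time slices
    dsStart = ds.isel(Time=slice(0, referenceMonths)).mean('Time')
    # subtract the reference from each data variable
    for variable in ds.data_vars:
        ds[variable] = ds[variable] - dsStart[variable]
    return ds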
Code example #7
def compute_lon_lat_region_masks(geojsonFileName, lon, lat, maskFileName,
                                 featureList=None, logger=None, processCount=1,
                                 chunkSize=1000, showProgress=True,
                                 lonDim='lon', latDim='lat'):
    """
    Build a region mask file from the given lon, lat and geojson file defining
    a set of regions.
    """
    if os.path.exists(maskFileName):
        return

    nLon = len(lon)
    nLat = len(lat)

    # make sure -180 <= lon < 180
    lon = numpy.mod(lon + 180., 360.) - 180.

    if lonDim != latDim:
        lon, lat = numpy.meshgrid(lon, lat)

    # create shapely geometry for lonCell and latCell
    cellPoints = [shapely.geometry.Point(x, y) for x, y in
                  zip(lon.ravel(), lat.ravel())]

    regionNames, masks, properties, nChar = compute_region_masks(
        geojsonFileName, cellPoints, maskFileName, featureList, logger,
        processCount, chunkSize, showProgress)

    # create a new data array for masks and another for mask names
    if logger is not None:
        logger.info('  Creating and writing masks dataset...')
    nRegions = len(regionNames)
    dsMasks = xr.Dataset()
    if lonDim == latDim:
        dsMasks['regionCellMasks'] = (('nRegions', lonDim),
                                      numpy.zeros((nRegions, nLon),
                                                  dtype=bool))
    else:
        dsMasks['regionCellMasks'] = (('nRegions', latDim, lonDim),
                                      numpy.zeros((nRegions, nLat, nLon),
                                                  dtype=bool))

    dsMasks['regionNames'] = (('nRegions'),
                              numpy.zeros((nRegions),
                                          dtype='|S{}'.format(nChar)))

    for index in range(nRegions):
        regionName = regionNames[index]
        mask = masks[index]
        if lonDim == latDim:
            dsMasks['regionCellMasks'][index, :] = mask
        else:
            dsMasks['regionCellMasks'][index, :] = mask.reshape([nLat, nLon])
        dsMasks['regionNames'][index] = regionName

    for propertyName in properties:
        dsMasks['{}Regions'.format(propertyName)] = \
            (('nRegions'), properties[propertyName])

    write_netcdf(dsMasks, maskFileName)
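Behind compute_region_masks is a point-in-polygon test of every cell against every region. A minimal sketch of that test with shapely, using a hypothetical bounding-box region:

import numpy
import shapely.geometry


def points_in_region(lon, lat, polygon):
    # lon, lat: 1-D arrays of cell coordinates in degrees
    points = [shapely.geometry.Point(x, y) for x, y in zip(lon, lat)]
    return numpy.array([polygon.contains(point) for point in points])


# hypothetical region: everything south of 60S
southernOcean = shapely.geometry.box(-180., -90., 180., -60.)
mask = points_in_region(numpy.array([0., 90.]), numpy.array([-70., -30.]),
                        southernOcean)  # -> [True, False]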
Code example #8
    def _remap(self, inFileName, outFileName, remapper, comparisonGridName,
               season):  # {{{
        """
        Performs remapping either using ``ncremap`` or the native Python code,
        depending on the requested setting and the comparison grid

        Parameters
        ----------
        inFileName : str
            The name of the input file to be remapped.

        outFileName : str
            The name of the output file to which the remapped data set should
            be written.

        remapper : ``Remapper`` object
            A remapper that can be used to remap files or data sets to a
            comparison grid.

        comparisonGridName : {'latlon', 'antarctic'}
            The name of the comparison grid to use for remapping.

        season : str
            The name of the season to be remapped
        """
        # Authors
        # -------
        # Xylar Asay-Davis

        if remapper.mappingFileName is None:
            # no remapping is needed
            return

        renormalizationThreshold = self.config.getfloat(
            'climatology', 'renormalizationThreshold')

        # ncremap doesn't support grids other than lat/lon
        if self.useNcremap and comparisonGridName == 'latlon':
            remapper.remap_file(inFileName=inFileName,
                                outFileName=outFileName,
                                overwrite=True,
                                renormalize=renormalizationThreshold,
                                logger=self.logger)

            remappedClimatology = xr.open_dataset(outFileName)
            remappedClimatology.load()
            remappedClimatology.close()
        else:

            climatologyDataSet = xr.open_dataset(inFileName)

            remappedClimatology = remapper.remap(climatologyDataSet,
                                                 renormalizationThreshold)

        # customize (if this function has been overridden)
        remappedClimatology = self.customize_remapped_climatology(
                remappedClimatology, comparisonGridName, season)

        write_netcdf(remappedClimatology, outFileName)  # }}}
Code example #9
def sose_volume_to_nc(prefix, inGridName, inGridFileName, inDir):
    outFileName = '{}_volume_{}.nc'.format(prefix, inGridName)

    matGrid = loadmat(inGridFileName)
    # lat/lon is a tensor grid so we can use 1-D arrays
    lon = matGrid['XC'][:, 0]
    lat = matGrid['YC'][0, :]
    z = matGrid['RC'][:, 0]

    area = matGrid['RAC']
    dz = matGrid['DRF'][:, 0]
    cellFraction = matGrid['hFacC']

    volume = numpy.zeros(cellFraction.shape)

    for zIndex in range(len(dz)):
        volume[:, :, zIndex] = cellFraction[:, :, zIndex] * dz[zIndex] * area

    volume = volume.transpose(1, 0, 2)

    dictionary = {
        'dims': ['lon', 'lat', 'z'],
        'coords': {
            'lon': {
                'dims': ('lon'),
                'data': lon,
                'attrs': {
                    'units': 'degrees'
                }
            },
            'lat': {
                'dims': ('lat'),
                'data': lat,
                'attrs': {
                    'units': 'degrees'
                }
            },
            'z': {
                'dims': ('z'),
                'data': z,
                'attrs': {
                    'units': 'm'
                }
            }
        },
        'data_vars': {
            'volume': {
                'dims': ('lat', 'lon', 'z'),
                'data': volume,
                'attrs': {
                    'units': 'm$^3$',
                    'description': 'cell volumes'
                }
            }
        }
    }

    dsVolume = xarray.Dataset.from_dict(dictionary)
    write_netcdf(dsVolume, outFileName)
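xarray.Dataset.from_dict builds dimensions, coordinates, and data variables in one pass from nested dictionaries, which keeps attribute metadata next to the data it describes. A tiny sketch of the same construction with made-up values:

import numpy
import xarray

dictionary = {
    'dims': ['z'],
    'coords': {
        'z': {'dims': ('z',), 'data': numpy.array([0., -10.]),
              'attrs': {'units': 'm'}}
    },
    'data_vars': {
        'volume': {'dims': ('z',), 'data': numpy.array([1.e9, 2.e9]),
                   'attrs': {'units': 'm$^3$'}}
    }
}
ds = xarray.Dataset.from_dict(dictionary)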
Code example #10
    def _compute_area_vol(self):  # {{{
        '''
        Compute part of the time series of sea ice volume and area, given time
        indices to process.
        '''

        outFileNames = {}
        for hemisphere in ['NH', 'SH']:
            baseDirectory = build_config_full_path(self.config, 'output',
                                                   'timeSeriesSubdirectory')

            make_directories(baseDirectory)

            outFileName = '{}/seaIceAreaVol{}.nc'.format(
                baseDirectory, hemisphere)
            outFileNames[hemisphere] = outFileName

        dsTimeSeries = {}
        dsMesh = xr.open_dataset(self.restartFileName)
        dsMesh = subset_variables(dsMesh, variableList=['latCell', 'areaCell'])
        # Load data
        ds = open_mpas_dataset(fileName=self.inputFile,
                               calendar=self.calendar,
                               variableList=self.variableList,
                               startDate=self.startDate,
                               endDate=self.endDate)

        for hemisphere in ['NH', 'SH']:

            if hemisphere == 'NH':
                mask = dsMesh.latCell > 0
            else:
                mask = dsMesh.latCell < 0

            dsAreaSum = (ds.where(mask) * dsMesh.areaCell).sum('nCells')
            dsAreaSum = dsAreaSum.rename({
                'timeMonthly_avg_iceAreaCell':
                'iceArea',
                'timeMonthly_avg_iceVolumeCell':
                'iceVolume'
            })
            dsAreaSum['iceThickness'] = (dsAreaSum.iceVolume /
                                         dsMesh.areaCell.sum('nCells'))

            dsAreaSum['iceArea'].attrs['units'] = 'm$^2$'
            dsAreaSum['iceArea'].attrs['description'] = \
                'Total {} sea ice area'.format(hemisphere)
            dsAreaSum['iceVolume'].attrs['units'] = 'm$^3$'
            dsAreaSum['iceVolume'].attrs['description'] = \
                'Total {} sea ice volume'.format(hemisphere)
            dsAreaSum['iceThickness'].attrs['units'] = 'm'
            dsAreaSum['iceThickness'].attrs['description'] = \
                'Mean {} sea ice thickness'.format(hemisphere)

            dsTimeSeries[hemisphere] = dsAreaSum

            write_netcdf(dsAreaSum, outFileNames[hemisphere])

        return dsTimeSeries  # }}}
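The reduction at the heart of the loop above is an area-weighted sum over the cells of one hemisphere. Condensed into a sketch, assuming the arguments are xarray DataArrays on an nCells dimension:

def hemisphere_total(fieldCell, latCell, areaCell, hemisphere='NH'):
    # select cells in the requested hemisphere, then area-weight and sum
    mask = latCell > 0 if hemisphere == 'NH' else latCell < 0
    return (fieldCell.where(mask) * areaCell).sum('nCells')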
Code example #11
def compute_mpas_region_masks(geojsonFileName, meshFileName, maskFileName,
                              featureList=None, logger=None, processCount=1,
                              chunkSize=1000, showProgress=True,
                              useMpasMaskCreator=False, dir=None):
    """
    Build a region mask file from the given MPAS mesh and geojson file defining
    a set of regions.
    """
    if os.path.exists(maskFileName):
        return

    if useMpasMaskCreator:
        dsMesh = xr.open_dataset(meshFileName)
        fcMask = read_feature_collection(geojsonFileName)
        dsMasks = mpas_tools.conversion.mask(dsMesh=dsMesh, fcMask=fcMask,
                                             logger=logger, dir=dir)

    else:
        with xr.open_dataset(meshFileName) as dsMesh:
            dsMesh = dsMesh[['lonCell', 'latCell']]
            latCell = numpy.rad2deg(dsMesh.latCell.values)

            # transform longitudes to [-180, 180)
            lonCell = numpy.mod(numpy.rad2deg(dsMesh.lonCell.values) + 180.,
                                360.) - 180.

        # create shapely geometry for lonCell and latCell
        cellPoints = [shapely.geometry.Point(x, y) for x, y in
                      zip(lonCell, latCell)]

        regionNames, masks, properties, nChar = compute_region_masks(
            geojsonFileName, cellPoints, maskFileName, featureList, logger,
            processCount, chunkSize, showProgress)

        nCells = len(cellPoints)

        # create a new data array for masks and another for mask names
        if logger is not None:
            logger.info('  Creating and writing masks dataset...')
        nRegions = len(regionNames)
        dsMasks = xr.Dataset()
        dsMasks['regionCellMasks'] = (('nRegions', 'nCells'),
                                      numpy.zeros((nRegions, nCells), dtype=bool))
        dsMasks['regionNames'] = (('nRegions'),
                                  numpy.zeros((nRegions),
                                              dtype='|S{}'.format(nChar)))

        for index in range(nRegions):
            regionName = regionNames[index]
            mask = masks[index]
            dsMasks['regionCellMasks'][index, :] = mask
            dsMasks['regionNames'][index] = regionName

        for propertyName in properties:
            dsMasks[propertyName] = (('nRegions'), properties[propertyName])

    write_netcdf(dsMasks, maskFileName)
Code example #12
    def run_task(self):  # {{{
        """
        Performs remapping of observations to the comparison grid
        """
        # Authors
        # -------
        # Xylar Asay-Davis

        config = self.config

        obsFileName = self.get_file_name(stage='original')
        if not os.path.isfile(obsFileName):
            raise OSError('Obs file {} not found.'.format(obsFileName))

        for comparisonGridName in self.comparisonGridNames:
            for season in self.seasons:

                remappedFileName = self.get_file_name(
                    stage='remapped',
                    season=season,
                    comparisonGridName=comparisonGridName)

                if not os.path.exists(remappedFileName):

                    ds = xr.open_dataset(obsFileName)

                    climatologyFileName = self.get_file_name(
                        stage='climatology',
                        season=season,
                        comparisonGridName=comparisonGridName)
                    if 'month' in ds.variables.keys() and \
                            'year' in ds.variables.keys():
                        # this data set is not yet a climatology, so compute
                        # the climatology
                        monthValues = constants.monthDictionary[season]
                        seasonalClimatology = compute_climatology(
                            ds, monthValues, maskVaries=True)
                    else:
                        # We don't have month or year arrays to compute a
                        # climatology so assume this already is one
                        seasonalClimatology = ds

                    write_netcdf(seasonalClimatology, climatologyFileName)

                    remapper = self.remappers[comparisonGridName]

                    if remapper.mappingFileName is None:
                        # no need to remap because the observations are on the
                        # comparison grid already
                        os.symlink(climatologyFileName, remappedFileName)
                    else:
                        remap_and_write_climatology(config,
                                                    seasonalClimatology,
                                                    climatologyFileName,
                                                    remappedFileName,
                                                    remapper,
                                                    logger=self.logger)
Code example #13
    def _mask_climatologies(self, season, dsMask):  # {{{
        '''
        For each season, creates a masked version of the climatology

        Parameters
        ----------
        season : str
            The name of the season to be masked

        dsMask : ``xarray.Dataset`` object
            A data set (from the first input file) that can be used to
            determine the mask in MPAS output files.

        Author
        ------
        Xylar Asay-Davis
        '''

        climatologyFileName = self.mpasClimatologyTask.get_file_name(season)

        maskedClimatologyFileName = self.get_masked_file_name(season)

        if not os.path.exists(maskedClimatologyFileName):
            # slice and mask the data set
            climatology = xr.open_dataset(climatologyFileName)
            climatology = mpas_xarray.subset_variables(climatology,
                                                       self.variableList)
            iselValues = {}
            if 'Time' in climatology.dims:
                iselValues['Time'] = 0
            if self.iselValues is not None:
                iselValues.update(self.iselValues)
            # select only Time=0 and possibly only the desired vertical
            # slice
            if len(iselValues.keys()) > 0:
                climatology = climatology.isel(**iselValues)

            # add valid mask as a variable, useful for remapping later
            climatology['validMask'] = \
                xr.DataArray(numpy.ones(climatology.dims['nCells']),
                             dims=['nCells'])
            # mask the data set
            for variableName in self.variableList:
                climatology[variableName] = \
                    climatology[variableName].where(
                        dsMask[variableName] != self._fillValue)

            # customize (if this function has been overridden)
            climatology = self.customize_masked_climatology(
                climatology, season)

            write_netcdf(climatology, maskedClimatologyFileName)
Code example #14
def compute_pot_density(prefix, inGridName, inDir):
    config = MpasAnalysisConfigParser()
    config.read('mpas_analysis/config.default')

    outDescriptor = get_comparison_descriptor(config, 'antarctic')
    outGridName = outDescriptor.meshName
    description = 'Monthly potential density climatologies from ' \
                  '2005-2010 average of the Southern Ocean State ' \
                  'Estimate (SOSE)'
    botDescription = 'Monthly potential density climatologies at sea ' \
                     'floor from 2005-2010 average from SOSE'

    for gridName in [inGridName, outGridName]:
        outFileName = '{}_pot_den_{}.nc'.format(prefix, gridName)
        TFileName = '{}_pot_temp_{}.nc'.format(prefix, gridName)
        SFileName = '{}_salinity_{}.nc'.format(prefix, gridName)
        if not os.path.exists(outFileName):
            with xarray.open_dataset(TFileName) as dsT:
                with xarray.open_dataset(SFileName) as dsS:
                    dsPotDensity = dsT.drop_vars(['theta', 'botTheta'])

                    lat, lon, z = xarray.broadcast(dsS.lat, dsS.lon, dsS.z)
                    pressure = gsw.p_from_z(z.values, lat.values)
                    SA = gsw.SA_from_SP(dsS.salinity.values, pressure,
                                        lon.values, lat.values)
                    CT = gsw.CT_from_pt(SA, dsT.theta.values)
                    dsPotDensity['potentialDensity'] = (dsS.salinity.dims,
                                                        gsw.rho(SA, CT, 0.))
                    dsPotDensity.potentialDensity.attrs['units'] = \
                        'kg m$^{-3}$'
                    dsPotDensity.potentialDensity.attrs['description'] = \
                        description

                    lat, lon, z = xarray.broadcast(dsS.lat, dsS.lon, dsS.zBot)
                    pressure = gsw.p_from_z(z.values, lat.values)
                    SA = gsw.SA_from_SP(dsS.botSalinity.values, pressure,
                                        lon.values, lat.values)
                    CT = gsw.CT_from_pt(SA, dsT.botTheta.values)
                    dsPotDensity['botPotentialDensity'] = \
                        (dsS.botSalinity.dims, gsw.rho(SA, CT, 0.))
                    dsPotDensity.botPotentialDensity.attrs['units'] = \
                        'kg m$^{-3}$'
                    dsPotDensity.botPotentialDensity.attrs['description'] = \
                        botDescription

                    write_netcdf(dsPotDensity, outFileName)
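The TEOS-10 calls above follow the standard gsw chain: pressure from depth and latitude, absolute salinity from practical salinity, conservative temperature from potential temperature, then density at a reference pressure of 0 dbar (i.e. potential density). The chain on scalar values, which gsw broadcasts over arrays the same way:

import gsw

z, lat, lon = -500., -65., 0.    # depth (m, negative down) and degrees
SP, pt = 34.7, 1.0               # practical salinity, potential temperature

p = gsw.p_from_z(z, lat)              # pressure (dbar)
SA = gsw.SA_from_SP(SP, p, lon, lat)  # absolute salinity (g/kg)
CT = gsw.CT_from_pt(SA, pt)           # conservative temperature (deg C)
rho0 = gsw.rho(SA, CT, 0.)            # potential density (kg m^-3)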
Code example #15
def compute_mpas_transect_masks(geojsonFileName,
                                meshFileName,
                                maskFileName,
                                logger=None):
    """
    Build a transect mask file from the given MPAS mesh and geojson file \
    defining a set of transects.
    """
    if os.path.exists(maskFileName):
        return

    dsMesh = xr.open_dataset(meshFileName)
    fcMask = read_feature_collection(geojsonFileName)
    dsMask = mpas_tools.conversion.mask(dsMesh=dsMesh,
                                        fcMask=fcMask,
                                        logger=logger)

    write_netcdf(dsMask, maskFileName)
Code example #16
    def run_task(self):  # {{{
        """
        Performs analysis of ocean heat content (OHC) from time-series output.
        """
        # Authors
        # -------
        # Xylar Asay-Davis, Milena Veneziani, Greg Streletz

        self.logger.info("\nComputing anomalies...")

        config = self.config
        startDate = config.get('timeSeries', 'startDate')
        endDate = config.get('timeSeries', 'endDate')

        if config.has_option('timeSeries', 'anomalyRefYear'):
            anomalyYear = config.getint('timeSeries', 'anomalyRefYear')
            anomalyRefDate = '{:04d}-01-01_00:00:00'.format(anomalyYear)
            anomalyEndDate = '{:04d}-12-31_23:59:59'.format(anomalyYear)
        else:
            anomalyRefDate = get_simulation_start_time(self.runStreams)
            anomalyYear = int(anomalyRefDate[0:4])
            anomalyEndDate = '{:04d}-12-31_23:59:59'.format(anomalyYear)

        ds = compute_moving_avg_anomaly_from_start(
            timeSeriesFileName=self.inputFile,
            variableList=self.variableList,
            anomalyStartTime=anomalyRefDate,
            anomalyEndTime=anomalyEndDate,
            startDate=startDate,
            endDate=endDate,
            calendar=self.calendar,
            movingAveragePoints=self.movingAveragePoints,
            alter_dataset=self.alter_dataset)

        outFileName = self.outFileName
        if not os.path.isabs(outFileName):
            baseDirectory = build_config_full_path(
                config, 'output', 'timeSeriesSubdirectory')

            outFileName = '{}/{}'.format(baseDirectory,
                                         outFileName)

        write_netcdf(ds, outFileName)  # }}}
Code example #17
def remap_v(prefix, inGridName, inGridFileName, inDir, inVPrefix):
    cacheVFileName = '{}_merid_vel_{}.nc'.format(prefix, inGridName)

    config = MpasAnalysisConfigParser()
    config.read('mpas_analysis/config.default')

    matGrid = loadmat(inGridFileName)
    # lat/lon is a tensor grid so we can use 1-D arrays
    lon = matGrid['XC'][:, 0]
    lat = matGrid['YG'][0, :]
    z = matGrid['RC'][:, 0]
    cellFraction = matGrid['hFacS']

    botIndices = get_bottom_indices(cellFraction)

    with sose_v_to_nc('{}/{}'.format(inDir, inVPrefix),
                      cacheVFileName, lon, lat, z, cellFraction, botIndices) \
            as dsV:
        inDescriptor = LatLonGridDescriptor.read(cacheVFileName,
                                                 latVarName='lat',
                                                 lonVarName='lon')

        outDescriptor = get_comparison_descriptor(config, 'antarctic')
        outGridName = outDescriptor.meshName

        outVFileName = '{}_merid_vel_{}.nc'.format(prefix, outGridName)

        mappingFileName = '{}/map_V_{}_to_{}.nc'.format(
            inDir, inGridName, outGridName)

        remapper = Remapper(inDescriptor, outDescriptor, mappingFileName)

        remapper.build_mapping_file(method='bilinear')

        if not os.path.exists(outVFileName):
            print('Remapping meridional velocity...')
            with remapper.remap(dsV, renormalizationThreshold=0.01) \
                    as remappedV:
                print('Done.')
                remappedV.attrs['history'] = ' '.join(sys.argv)
                write_netcdf(remappedV, outVFileName)
Code example #18
def remap(ds, outDescriptor, mappingFileName, inDir, outFileName):

    tempFileName1 = '{}/temp_transpose.nc'.format(inDir)
    tempFileName2 = '{}/temp_remap.nc'.format(inDir)
    if 'z' in ds:
        print('  transposing and fixing periodicity...')
        ds = ds.chunk({'Time': 4})
        ds = ds.transpose('Time', 'z', 'lat', 'lon')
    ds = add_periodic_lon(ds)
    write_netcdf(ds, tempFileName1)
    ds.close()

    inDescriptor = LatLonGridDescriptor.read(tempFileName1,
                                             latVarName='lat',
                                             lonVarName='lon')

    remapper = Remapper(inDescriptor, outDescriptor, mappingFileName)

    remapper.build_mapping_file(method='bilinear')

    remapper.remap_file(inFileName=tempFileName1,
                        outFileName=tempFileName2,
                        overwrite=True,
                        renormalize=0.01)

    ds = xarray.open_dataset(tempFileName2)
    if 'z' in ds:
        print('  transposing back...')
        ds = ds.chunk({'Time': 4})
        ds = ds.transpose('Time', 'x', 'y', 'z', 'nvertices')
    ds.attrs['meshName'] = outDescriptor.meshName

    for coord in ['x', 'y']:
        ds.coords[coord] = xarray.DataArray.from_dict(
            outDescriptor.coords[coord])

    ds = ds.set_coords(names=['month', 'year'])

    write_netcdf(ds, outFileName)
    ds.close()
Code example #19
    def run_task(self):  # {{{
        """
        Combine the time series
        """
        # Authors
        # -------
        # Xylar Asay-Davis

        regionGroup = self.regionGroup
        timeSeriesName = regionGroup[0].lower() + \
            regionGroup[1:].replace(' ', '')

        outputDirectory = '{}/{}/'.format(
            build_config_full_path(self.config, 'output',
                                   'timeseriesSubdirectory'), timeSeriesName)

        outFileName = '{}/{}_{:04d}-{:04d}.nc'.format(outputDirectory,
                                                      timeSeriesName,
                                                      self.startYears[0],
                                                      self.endYears[-1])

        if not os.path.exists(outFileName):
            inFileNames = []
            for startYear, endYear in zip(self.startYears, self.endYears):
                inFileName = '{}/{}_{:04d}-{:04d}.nc'.format(
                    outputDirectory, timeSeriesName, startYear, endYear)
                inFileNames.append(inFileName)

            ds = xarray.open_mfdataset(inFileNames,
                                       combine='nested',
                                       concat_dim='Time',
                                       decode_times=False)

            ds.load()

            # a few variables have become time dependent and shouldn't be
            for var in ['totalArea', 'zbounds']:
                ds[var] = ds[var].isel(Time=0, drop=True)

            write_netcdf(ds, outFileName)
Code example #20
    def get_observations(self):
        # {{{
        '''
        Read in and set up the observations.

        Returns
        -------
        obsDatasets : OrderedDict
            The observational dataset
        '''
        # Authors
        # -------
        # Xylar Asay-Davis

        obsDatasets = OrderedDict()
        for name in self.obsFileNames:
            outFileName = self.get_out_file_name(name)
            if os.path.exists(outFileName):
                dsObs = xr.open_dataset(outFileName)
                dsObs.load()
            else:
                dsObs = self.build_observational_dataset(
                    self.obsFileNames[name], name)

                dsObs.load()
                # make sure lat and lon are coordinates
                for coord in ['lon', 'lat']:
                    dsObs.coords[coord] = dsObs[coord]

                if self.horizontalResolution == 'obs':
                    dsObs = self._add_distance(dsObs)
                else:
                    dsObs = self._subdivide_observations(dsObs)
                write_netcdf(dsObs, outFileName)
            obsDatasets[name] = dsObs

        return obsDatasets  # }}}
Code example #21
def remap(inDir, outDir):

    inGridName = 'SouthernOcean_0.25x0.125degree'
    inFileName = '{}/Schmidtko_et_al_2014_bottom_PT_S_PD_{}.nc'.format(
        inDir, inGridName)

    config = MpasAnalysisConfigParser()
    config.read('mpas_analysis/config.default')

    inDescriptor = LatLonGridDescriptor.read(inFileName,
                                             latVarName='lat',
                                             lonVarName='lon')

    outDescriptor = get_comparison_descriptor(config, 'antarctic')
    outGridName = outDescriptor.meshName

    outFileName = '{}/Schmidtko_et_al_2014_bottom_PT_S_PD_{}.nc'.format(
        outDir, outGridName)

    mappingFileName = '{}/map_{}_to_{}.nc'.format(inDir, inGridName,
                                                  outGridName)

    remapper = Remapper(inDescriptor, outDescriptor, mappingFileName)

    remapper.build_mapping_file(method='bilinear')

    if not os.path.exists(outFileName):
        print('Remapping...')
        with xarray.open_dataset(inFileName) as dsIn:
            with remapper.remap(dsIn, renormalizationThreshold=0.01) \
                    as remappedMLD:
                print('Done.')
                remappedMLD.attrs['history'] = ' '.join(sys.argv)
                write_netcdf(remappedMLD, outFileName)
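Code examples #17, #21, and #23 all follow the same three-step remap recipe: describe the source and target grids, build (or reuse) a mapping file, then apply it. Distilled into a sketch using the same classes these scripts import (the file names here are hypothetical):

inDescriptor = LatLonGridDescriptor.read('input_latlon.nc',
                                         latVarName='lat', lonVarName='lon')
outDescriptor = get_comparison_descriptor(config, 'antarctic')
remapper = Remapper(inDescriptor, outDescriptor, 'map_in_to_out.nc')
remapper.build_mapping_file(method='bilinear')  # builds weights if needed
with xarray.open_dataset('input_latlon.nc') as dsIn:
    dsOut = remapper.remap(dsIn, renormalizationThreshold=0.01)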
Code example #22
    def _write_obs_t_s(self, obsDict, zmin, zmax):  # {{{
        obsSection = '{}Observations'.format(self.componentName)
        climatologyDirectory = build_config_full_path(
            config=self.config,
            section='output',
            relativePathOption='climatologySubdirectory',
            relativePathSection=obsSection)

        outFileName = '{}/TS_{}_{}_{}.nc'.format(climatologyDirectory,
                                                 obsDict['suffix'],
                                                 self.prefix, self.season)

        if os.path.exists(outFileName):
            return

        with dask.config.set(scheduler='threads',
                             pool=ThreadPool(self.daskThreads)):

            chunk = {obsDict['latVar']: 400, obsDict['lonVar']: 400}

            regionMaskFileName = obsDict['maskTask'].maskFileName

            dsRegionMask = \
                xarray.open_dataset(regionMaskFileName).chunk(chunk).stack(
                        nCells=(obsDict['latVar'], obsDict['lonVar']))
            dsRegionMask = dsRegionMask.reset_index('nCells').drop_vars(
                [obsDict['latVar'], obsDict['lonVar']])

            maskRegionNames = decode_strings(dsRegionMask.regionNames)
            regionIndex = maskRegionNames.index(self.regionName)

            dsMask = dsRegionMask.isel(nRegions=regionIndex)

            cellMask = dsMask.regionCellMasks == 1

            TVarName = obsDict['TVar']
            SVarName = obsDict['SVar']
            zVarName = obsDict['zVar']
            volVarName = obsDict['volVar']

            obsFileName = obsDict['climatologyTask'][self.season].fileName
            ds = xarray.open_dataset(obsFileName, chunks=chunk)
            ds = ds.stack(nCells=(obsDict['latVar'], obsDict['lonVar']))
            ds = ds.reset_index('nCells').drop_vars(
                [obsDict['latVar'], obsDict['lonVar']])

            ds = ds.where(cellMask, drop=True)

            cellsChunk = 32768
            chunk = {'nCells': cellsChunk}

            ds = ds.chunk(chunk)

            depthMask = numpy.logical_and(ds[zVarName] >= zmin,
                                          ds[zVarName] <= zmax)
            ds = ds.where(depthMask)
            ds.compute()

            T = ds[TVarName].values.ravel()
            mask = numpy.isfinite(T)
            T = T[mask]

            S = ds[SVarName].values.ravel()[mask]
            z = ds['zBroadcast'].values.ravel()[mask]

            volume = ds[volVarName].values.ravel()[mask]

            dsOut = xarray.Dataset()
            dsOut['T'] = ('nPoints', T)
            dsOut['S'] = ('nPoints', S)
            dsOut['z'] = ('nPoints', z)
            dsOut['volume'] = ('nPoints', volume)
            dsOut['zbounds'] = ('nBounds', [zmin, zmax])
            write_netcdf(dsOut, outFileName)
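The stack/reset_index maneuver above flattens a 2-D latitude-longitude grid into a single nCells dimension so gridded observations can be masked with the same region masks used for unstructured MPAS cells. The maneuver in isolation:

import numpy
import xarray

ds = xarray.Dataset(
    {'T': (('lat', 'lon'), numpy.zeros((3, 4)))},
    coords={'lat': [-70., -65., -60.], 'lon': [0., 90., 180., 270.]})

# flatten (lat, lon) -> nCells, then drop the per-cell lat/lon labels
ds = ds.stack(nCells=('lat', 'lon'))
ds = ds.reset_index('nCells').drop_vars(['lat', 'lon'])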
Code example #23
def remap_pt_s(prefix, inGridName, inGridFileName, inDir, inTPrefix, inSPrefix,
               inGammaNPrefix):
    cacheTFileName = '{}_pot_temp_{}.nc'.format(prefix, inGridName)
    cacheSFileName = '{}_salinity_{}.nc'.format(prefix, inGridName)
    cacheGammaNFileName = '{}_neut_den_{}.nc'.format(prefix, inGridName)

    config = MpasAnalysisConfigParser()
    config.read('mpas_analysis/config.default')

    matGrid = loadmat(inGridFileName)
    # lat/lon is a tensor grid so we can use 1-D arrays
    lon = matGrid['XC'][:, 0]
    lat = matGrid['YC'][0, :]
    z = matGrid['RC'][:, 0]
    cellFraction = matGrid['hFacC']

    botIndices = get_bottom_indices(cellFraction)

    with sose_pt_to_nc('{}/{}'.format(inDir, inTPrefix),
                       cacheTFileName, lon, lat, z, cellFraction, botIndices) \
            as dsT:
        inDescriptor = LatLonGridDescriptor.read(cacheTFileName,
                                                 latVarName='lat',
                                                 lonVarName='lon')

        outDescriptor = get_comparison_descriptor(config, 'antarctic')
        outGridName = outDescriptor.meshName

        outTFileName = '{}_pot_temp_{}.nc'.format(prefix, outGridName)
        outSFileName = '{}_salinity_{}.nc'.format(prefix, outGridName)
        outGammaNFileName = '{}_neut_den_{}.nc'.format(prefix, outGridName)

        mappingFileName = '{}/map_C_{}_to_{}.nc'.format(
            inDir, inGridName, outGridName)

        remapper = Remapper(inDescriptor, outDescriptor, mappingFileName)

        remapper.build_mapping_file(method='bilinear')

        if not os.path.exists(outTFileName):
            dsT = dsT.reset_coords(names='zBot')
            print('Remapping potential temperature...')
            with remapper.remap(dsT, renormalizationThreshold=0.01) \
                    as remappedT:
                print('Done.')
                remappedT.attrs['history'] = ' '.join(sys.argv)
                remappedT = remappedT.set_coords(names='zBot')
                write_netcdf(remappedT, outTFileName)

    with sose_s_to_nc('{}/{}'.format(inDir, inSPrefix),
                      cacheSFileName, lon, lat, z, cellFraction, botIndices) \
            as dsS:
        if not os.path.exists(outSFileName):
            dsS = dsS.reset_coords(names='zBot')
            print('Remapping salinity...')
            with remapper.remap(dsS, renormalizationThreshold=0.01) \
                    as remappedS:
                print('Done.')
                remappedS.attrs['history'] = ' '.join(sys.argv)
                remappedS = remappedS.set_coords(names='zBot')
                write_netcdf(remappedS, outSFileName)

    with sose_gammaN_to_nc('{}/{}'.format(inDir, inGammaNPrefix),
                           cacheGammaNFileName, lon, lat, z, cellFraction,
                           botIndices) \
            as dsGammaN:
        if not os.path.exists(outGammaNFileName):
            dsGammaN = dsGammaN.reset_coords(names='zBot')
            print('Remapping neutral density...')
            with remapper.remap(dsGammaN, renormalizationThreshold=0.01) \
                    as remappedGammaN:
                print('Done.')
                remappedGammaN.attrs['history'] = ' '.join(sys.argv)
                remappedGammaN = remappedGammaN.set_coords(names='zBot')
                write_netcdf(remappedGammaN, outGammaNFileName)
Code example #24
def sose_v_to_nc(inPrefix, outFileName, lon, lat, z, cellFraction, botIndices):
    if os.path.exists(outFileName):
        dsV = xarray.open_dataset(outFileName)
    else:
        print('Building climatology of meridional velocity...')
        field, botField = get_monthly_average_3d(inPrefix, cellFraction,
                                                 botIndices)
        print('Done.')
        zBot = numpy.ma.masked_array(z[botIndices], mask=(botIndices == -1))
        zBot = zBot.transpose(1, 0)

        description = 'Monthly meridional velocity climatologies from ' \
                      '2005-2010 average of the Southern Ocean State ' \
                      'Estimate (SOSE)'
        botDescription = 'Monthly meridional velocity climatologies at sea ' \
                         'floor from 2005-2010 average from SOSE'
        dictionary = {
            'dims': ['Time', 'lon', 'lat', 'z'],
            'coords': {
                'month': {
                    'dims': ('Time'),
                    'data': range(1, 13),
                    'attrs': {
                        'units': 'months'
                    }
                },
                'year': {
                    'dims': ('Time'),
                    'data': numpy.ones(12),
                    'attrs': {
                        'units': 'years'
                    }
                },
                'lon': {
                    'dims': ('lon'),
                    'data': lon,
                    'attrs': {
                        'units': 'degrees'
                    }
                },
                'lat': {
                    'dims': ('lat'),
                    'data': lat,
                    'attrs': {
                        'units': 'degrees'
                    }
                },
                'z': {
                    'dims': ('z'),
                    'data': z,
                    'attrs': {
                        'units': 'm'
                    }
                },
                'zBot': {
                    'dims': ('lat', 'lon'),
                    'data': zBot,
                    'attrs': {
                        'units': 'm'
                    }
                }
            },
            'data_vars': {
                'meridVel': {
                    'dims': ('Time', 'lat', 'lon', 'z'),
                    'data': field,
                    'attrs': {
                        'units': 'm s$^{-1}$',
                        'description': description
                    }
                },
                'botMeridVel': {
                    'dims': ('Time', 'lat', 'lon'),
                    'data': botField,
                    'attrs': {
                        'units': 'm s$^{-1}$',
                        'description': botDescription
                    }
                }
            }
        }

        dsV = xarray.Dataset.from_dict(dictionary)
        write_netcdf(dsV, outFileName)

    return dsV
Code example #25
def sose_mld_to_nc(inPrefix, outFileName, lon, lat, botIndices):
    if os.path.exists(outFileName):
        dsMLD = xarray.open_dataset(outFileName)
    else:
        print('Building climatology of mixed layer depth...')
        field = get_monthly_average_2d(inPrefix, botIndices)
        # make MLD positive
        field = -field
        print('Done.')

        description = 'Monthly mixed layer depth climatologies from ' \
                      '2005-2010 average of the Southern Ocean State ' \
                      'Estimate (SOSE)'
        dictionary = {
            'dims': ['Time', 'lon', 'lat'],
            'coords': {
                'month': {
                    'dims': ('Time'),
                    'data': range(1, 13),
                    'attrs': {
                        'units': 'months'
                    }
                },
                'year': {
                    'dims': ('Time'),
                    'data': numpy.ones(12),
                    'attrs': {
                        'units': 'years'
                    }
                },
                'lon': {
                    'dims': ('lon'),
                    'data': lon,
                    'attrs': {
                        'units': 'degrees'
                    }
                },
                'lat': {
                    'dims': ('lat'),
                    'data': lat,
                    'attrs': {
                        'units': 'degrees'
                    }
                }
            },
            'data_vars': {
                'mld': {
                    'dims': ('Time', 'lat', 'lon'),
                    'data': field,
                    'attrs': {
                        'units': 'm',
                        'description': description
                    }
                }
            }
        }

        dsMLD = xarray.Dataset.from_dict(dictionary)
        write_netcdf(dsMLD, outFileName)

    return dsMLD
Code example #26
    def combine_observations(self):  # {{{
        '''
        Combine SOSE observations into a single file
        '''
        # Authors
        # -------
        # Xylar Asay-Davis

        config = self.config

        longitudes = sorted(
            config.getExpression('soseTransects',
                                 'longitudes',
                                 usenumpyfunc=True))

        observationsDirectory = build_obs_path(config, 'ocean',
                                               'soseSubdirectory')

        outObsDirectory = build_config_full_path(
            config=config,
            section='output',
            relativePathOption='climatologySubdirectory',
            relativePathSection='oceanObservations')

        make_directories(outObsDirectory)

        combinedFileName = '{}/{}.nc'.format(outObsDirectory,
                                             self.transectCollectionName)
        obsFileNames = OrderedDict()
        for lon in longitudes:
            transectName = 'lon_{}'.format(lon)
            obsFileNames[transectName] = combinedFileName

        self.obsFileNames = obsFileNames

        if os.path.exists(combinedFileName):
            return

        print('Preprocessing SOSE transect data...')

        minLat = config.getfloat('soseTransects', 'minLat')
        maxLat = config.getfloat('soseTransects', 'maxLat')

        dsObs = None
        for field in self.fields:
            prefix = field['obsFilePrefix']
            fieldName = field['obsFieldName']
            if prefix is None:
                continue
            print('  {}'.format(field['prefix']))

            fileName = '{}/SOSE_2005-2010_monthly_{}_SouthernOcean' \
                       '_0.167x0.167degree_20180710.nc'.format(
                           observationsDirectory, prefix)

            dsLocal = xr.open_dataset(fileName)

            lat = dsLocal.lat.values
            mask = numpy.logical_and(lat >= minLat, lat <= maxLat)
            indices = numpy.argwhere(mask)
            dsLocal = dsLocal.isel(lat=slice(indices[0][0], indices[-1][0]))
            dsLocal.load()

            if fieldName == 'zonalVel':
                # need to average in longitude
                nLon = dsLocal.sizes['lon']
                lonIndicesP1 = numpy.mod(numpy.arange(nLon) + 1, nLon)
                dsLocal = 0.5 * (dsLocal + dsLocal.isel(lon=lonIndicesP1))

            if fieldName == 'meridVel':
                # need to average in latitude
                nLat = dsLocal.sizes['lat']
                latIndicesP1 = numpy.mod(numpy.arange(nLat) + 1, nLat)
                dsLocal = 0.5 * (dsLocal + dsLocal.isel(lat=latIndicesP1))

            dsLocal = dsLocal.sel(lon=longitudes, method='nearest')

            if dsObs is None:
                dsObs = dsLocal
            else:
                dsLocal['lon'] = dsObs.lon
                dsLocal['lat'] = dsObs.lat
                dsObs[fieldName] = dsLocal[fieldName]
                dsLocal.close()

        if 'zonalVel' in dsObs and 'meridVel' in dsObs:
            # compute the velocity magnitude
            print('  velMag')
            description = 'Monthly velocity magnitude climatologies ' \
                          'from 2005-2010 average of the Southern Ocean ' \
                          'State Estimate (SOSE)'
            dsObs['velMag'] = numpy.sqrt(dsObs.zonalVel**2 + dsObs.meridVel**2)
            dsObs.velMag.attrs['units'] = 'm s$^{-1}$'
            dsObs.velMag.attrs['description'] = description

        write_netcdf(dsObs, combinedFileName)

        print('  Done.')
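The staggered-grid averaging above pairs each velocity point with its neighbor one index ahead, and numpy.mod wraps the final index back to zero so the periodic longitude dimension closes on itself. The index trick by itself:

import numpy

nLon = 6
lonIndicesP1 = numpy.mod(numpy.arange(nLon) + 1, nLon)
# -> [1, 2, 3, 4, 5, 0]: neighbor indices with periodic wraparound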
Code example #27
def text_to_netcdf(inDir, outDir):
    inFileName = '{}/Antarctic_shelf_data.txt'.format(inDir)
    outFileName = '{}/Schmidtko_et_al_2014_bottom_PT_S_PD_' \
                  'SouthernOcean_0.25x0.125degree.nc'.format(outDir)

    if os.path.exists(outFileName):
        return

    # 1/4 x 1/8 degree grid cells
    cellsPerLon = 4
    cellsPerLat = 8

    obsFile = pandas.read_csv(inFileName, sep=r'\s+')

    inLon = numpy.array(obsFile.iloc[:, 0])
    inLat = numpy.array(obsFile.iloc[:, 1])

    inZ = numpy.array(obsFile.iloc[:, 2])

    inCT = numpy.array(obsFile.iloc[:, 3])
    inCT_std = numpy.array(obsFile.iloc[:, 4])

    inSA = numpy.array(obsFile.iloc[:, 5])
    inSA_std = numpy.array(obsFile.iloc[:, 6])

    pressure = gsw.p_from_z(inZ, inLat)
    inS = gsw.SP_from_SA(inSA, pressure, inLon, inLat)
    inPT = gsw.pt_from_CT(inSA, inCT)
    inPD = gsw.rho(inSA, inCT, 0.)

    minLat = int(numpy.amin(inLat) * cellsPerLat) / cellsPerLat
    maxLat = int(numpy.amax(inLat) * cellsPerLat) / cellsPerLat
    deltaLat = 1. / cellsPerLat
    outLat = numpy.arange(minLat - deltaLat, maxLat + 2 * deltaLat, deltaLat)

    deltaLon = 1. / cellsPerLon
    outLon = numpy.arange(0., 360., deltaLon)

    xIndices = numpy.array(cellsPerLon * inLon + 0.5, int)
    yIndices = numpy.array(cellsPerLat * (inLat - outLat[0]) + 0.5, int)

    Lon, Lat = numpy.meshgrid(outLon, outLat)

    ds = xarray.Dataset()
    ds['lon'] = (('lon', ), outLon)
    ds.lon.attrs['units'] = 'degrees'
    ds.lon.attrs['description'] = 'longitude'

    ds['lat'] = (('lat', ), outLat)
    ds.lat.attrs['units'] = 'degrees'
    ds.lat.attrs['description'] = 'latitude'

    z = numpy.ma.masked_all(Lon.shape)
    z[yIndices, xIndices] = inZ
    ds['z'] = (('lat', 'lon'), z)
    ds.z.attrs['units'] = 'meters'
    ds.z.attrs['description'] = 'depth of the seafloor (positive up)'

    PT = numpy.ma.masked_all(Lon.shape)
    PT[yIndices, xIndices] = inPT
    ds['botTheta'] = (('lat', 'lon'), PT)
    ds.botTheta.attrs['units'] = r'$\degree$C'
    ds.botTheta.attrs['description'] = \
        'potential temperature at sea floor'

    PT_std = numpy.ma.masked_all(Lon.shape)
    # neglect difference between std of PT and CT
    PT_std[yIndices, xIndices] = inCT_std
    ds['botThetaStd'] = (('lat', 'lon'), PT_std)
    ds.botThetaStd.attrs['units'] = r'$\degree$C'
    ds.botThetaStd.attrs['description'] = \
        'standard deviation in potential temperature at sea floor'

    S = numpy.ma.masked_all(Lon.shape)
    S[yIndices, xIndices] = inS
    ds['botSalinity'] = (('lat', 'lon'), S)
    ds.botSalinity.attrs['units'] = 'PSU'
    ds.botSalinity.attrs['description'] = \
        'salinity at sea floor'

    S_std = numpy.ma.masked_all(Lon.shape)
    # neglect difference between std of S and SA
    S_std[yIndices, xIndices] = inSA_std
    ds['botSalinityStd'] = (('lat', 'lon'), S_std)
    ds.botSalinityStd.attrs['units'] = 'PSU'
    ds.botSalinityStd.attrs['description'] = \
        'standard deviation in salinity at sea floor'

    PD = numpy.ma.masked_all(Lon.shape)
    PD[yIndices, xIndices] = inPD
    ds['botPotentialDensity'] = (('lat', 'lon'), PD)
    ds.botPotentialDensity.attrs['units'] = 'kg m$^{-3}$'
    ds.botPotentialDensity.attrs['description'] = \
        'potential density at sea floor'

    write_netcdf(ds, outFileName)
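The gridding above is nearest-cell binning: each scattered observation gets integer (x, y) cell indices, and values are written into a masked array so unsampled cells stay masked rather than defaulting to zero. The same idea reduced to one dimension, with made-up values:

import numpy

cellsPerLon = 4                          # 1/4-degree bins
obsLon = numpy.array([0.3, 10.6])        # scattered observation longitudes
obsValues = numpy.array([1.5, 2.5])

binned = numpy.ma.masked_all((360 * cellsPerLon,))
xIndices = numpy.array(cellsPerLon * obsLon + 0.5, int)  # round to nearest
binned[xIndices] = obsValues             # unsampled bins remain masked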
Code example #28
    def _compute_climatologies_with_xarray(self, inDirectory, outDirectory):
        # {{{
        '''
        Uses xarray to compute seasonal and/or annual climatologies.

        Parameters
        ----------
        inDirectory : str
            The run directory containing timeSeriesStatsMonthly output

        outDirectory : str
            The output directory where climatologies will be written
        '''

        # Authors
        # -------
        # Xylar Asay-Davis

        def _preprocess(ds):
            # drop unused variables during preprocessing because only the
            # variables we want are guaranteed to be in all the files
            return ds[variableList]

        season = self.season
        parentTask = self.parentTask
        variableList = parentTask.variableList[season]

        chunkSize = self.config.getint('input', 'maxChunkSize')

        if season in constants.abrevMonthNames:
            # this is an individual month, so create a climatology from
            # timeSeriesStatsMonthlyOutput

            fileNames = sorted(parentTask.inputFiles)
            years, months = get_files_year_month(
                fileNames, self.historyStreams, 'timeSeriesStatsMonthlyOutput')

            with xarray.open_mfdataset(parentTask.inputFiles,
                                       combine='nested',
                                       concat_dim='Time',
                                       chunks={'nCells': chunkSize},
                                       decode_cf=False,
                                       decode_times=False,
                                       preprocess=_preprocess) as ds:

                ds.coords['year'] = ('Time', years)
                ds.coords['month'] = ('Time', months)
                month = constants.abrevMonthNames.index(season) + 1
                climatologyFileName = parentTask.get_file_name(season)
                self.logger.info('computing climatology {}'.format(
                    os.path.basename(climatologyFileName)))

                ds = ds.where(ds.month == month, drop=True)
                ds = ds.mean(dim='Time')
                ds.compute(num_workers=self.subprocessCount)
                write_netcdf(ds, climatologyFileName)
        else:
            outFileName = parentTask.get_file_name(season=season)
            self.logger.info('computing climatology {}'.format(
                os.path.basename(outFileName)))
            fileNames = []
            weights = []
            for month in constants.monthDictionary[season]:
                monthName = constants.abrevMonthNames[month - 1]
                fileNames.append(parentTask.get_file_name(season=monthName))
                weights.append(constants.daysInMonth[month - 1])

            with xarray.open_mfdataset(fileNames,
                                       concat_dim='weight',
                                       combine='nested',
                                       chunks={'nCells': chunkSize},
                                       decode_cf=False,
                                       decode_times=False,
                                       preprocess=_preprocess) as ds:
                ds.coords['weight'] = ('weight', weights)
                ds = ((ds.weight * ds).sum(dim='weight') /
                      ds.weight.sum(dim='weight'))
                ds.compute(num_workers=self.subprocessCount)
                write_netcdf(ds, outFileName)
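The seasonal branch forms a days-weighted average of the monthly climatologies along a synthetic weight dimension. The arithmetic in isolation, assuming three monthly means and their day counts:

import numpy
import xarray

ds = xarray.Dataset(
    {'T': ('weight', numpy.array([1., 2., 4.]))})  # monthly means
ds.coords['weight'] = ('weight', [31., 28., 31.])  # days in each month

seasonal = (ds.weight * ds).sum(dim='weight') / ds.weight.sum(dim='weight')
# -> (31*1 + 28*2 + 31*4) / 90 = 2.34...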
Code example #29
    def _compute_ice_shelf_fluxes(self):  # {{{
        """
        Reads melt flux time series and computes regional total melt flux and
        mean melt rate.
        """
        # Authors
        # -------
        # Xylar Asay-Davis, Stephen Price

        mpasTimeSeriesTask = self.mpasTimeSeriesTask
        config = self.config

        baseDirectory = build_config_full_path(config, 'output',
                                               'timeSeriesSubdirectory')

        outFileName = '{}/{}'.format(baseDirectory, self.outFileName)

        # Load data:
        inputFile = mpasTimeSeriesTask.outputFile
        dsIn = open_mpas_dataset(fileName=inputFile,
                                 calendar=self.calendar,
                                 variableList=self.variableList,
                                 startDate=self.startDate,
                                 endDate=self.endDate)
        try:
            if os.path.exists(outFileName):
                # The file already exists so load it
                dsOut = xarray.open_dataset(outFileName)
                if numpy.all(dsOut.Time.values == dsIn.Time.values):
                    return dsOut.totalMeltFlux, dsOut.meltRates
                else:
                    self.logger.warning('File {} is incomplete. Deleting '
                                        'it.'.format(outFileName))
                    os.remove(outFileName)
        except OSError:
            # something is potentially wrong with the file, so let's delete
            # it and try again
            self.logger.warning('Problems reading file {}. Deleting '
                                'it.'.format(outFileName))
            os.remove(outFileName)

        # work on data from simulations
        freshwaterFlux = dsIn.timeMonthly_avg_landIceFreshwaterFlux

        restartFileName = \
            mpasTimeSeriesTask.runStreams.readpath('restart')[0]

        dsRestart = xarray.open_dataset(restartFileName)
        # cell area weighted by the fraction of each cell covered by land ice
        areaCell = dsRestart.landIceFraction.isel(Time=0) * dsRestart.areaCell

        mpasMeshName = config.get('input', 'mpasMeshName')
        regionMaskDirectory = config.get('regions', 'regionMaskDirectory')

        regionMaskFileName = '{}/{}_iceShelfMasks.nc'.format(
            regionMaskDirectory, mpasMeshName)

        dsRegionMask = xarray.open_dataset(regionMaskFileName)

        # select only those regions we want to plot
        dsRegionMask = dsRegionMask.isel(nRegions=self.regionIndices)
        cellMasks = dsRegionMask.regionCellMasks

        # convert from kg/s to kg/yr
        totalMeltFlux = constants.sec_per_year * \
            (cellMasks*areaCell*freshwaterFlux).sum(dim='nCells')

        totalArea = (cellMasks * areaCell).sum(dim='nCells')

        # from kg/m^2/yr to m/yr
        meltRates = (1. / constants.rho_fw) * (totalMeltFlux / totalArea)

        # convert from kg/yr to GT/yr
        totalMeltFlux /= constants.kg_per_GT

        # reuse outFileName computed above so the cached-file check and the
        # write target stay consistent

        dsOut = xarray.Dataset()
        dsOut['totalMeltFlux'] = totalMeltFlux
        dsOut.totalMeltFlux.attrs['units'] = 'GT a$^{-1}$'
        dsOut.totalMeltFlux.attrs['description'] = \
            'Total melt flux summed over each ice shelf or region'
        dsOut['meltRates'] = meltRates
        dsOut.meltRates.attrs['units'] = 'm a$^{-1}$'
        dsOut.meltRates.attrs['description'] = \
            'Melt rate averaged over each ice shelf or region'

        write_netcdf(dsOut, outFileName)

        return totalMeltFlux, meltRates  # }}}
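The conversions above rely on three constants from the project's constants
module: seconds per year, the density of fresh water and kilograms per
gigatonne. Here is a minimal sketch of the same arithmetic with explicit,
assumed stand-in values; the regional flux and area are made up for
illustration (in the code above they come from the masked sums over 'nCells').

# assumed stand-ins for the values in the constants module
sec_per_year = 365. * 24. * 3600.  # ~3.15e7 s/yr, ignoring calendar details
rho_fw = 1000.                     # density of fresh water in kg/m^3
kg_per_GT = 1e12                   # 1 Gt = 10^12 kg

# hypothetical regional sums: melt flux in kg/s, ice-shelf area in m^2
freshwaterFluxSum = 5.0e6
totalArea = 4.0e11

totalMeltFlux = sec_per_year * freshwaterFluxSum     # kg/s -> kg/yr
meltRate = totalMeltFlux / (rho_fw * totalArea)      # kg/yr -> m/yr
totalMeltFlux /= kg_per_GT                           # kg/yr -> Gt/yr

print('{:.1f} Gt/yr, {:.2f} m/yr'.format(totalMeltFlux, meltRate))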
Code example #30
    def _write_mpas_t_s(self, config):  # {{{
        """
        Extract T, S, depth and volume for the analysis region, cache them to
        a NetCDF file, and return the depth bounds (zmin, zmax)
        """

        climatologyName = 'TS_{}_{}'.format(self.prefix, self.season)
        outFileName = get_masked_mpas_climatology_file_name(config,
                                                            self.season,
                                                            self.componentName,
                                                            climatologyName,
                                                            op='avg')

        if os.path.exists(outFileName):
            ds = xarray.open_dataset(outFileName)
            zmin, zmax = ds.zbounds.values
            return zmin, zmax

        with dask.config.set(scheduler='threads',
                             pool=ThreadPool(self.daskThreads)):

            self.logger.info('  Extracting T and S in the region...')

            sectionName = self.sectionName

            cellsChunk = 32768
            chunk = {'nCells': cellsChunk}

            try:
                restartFileName = self.runStreams.readpath('restart')[0]
            except ValueError:
                raise IOError('No MPAS-O restart file found: need at least one'
                              ' restart file to plot T-S diagrams')
            dsRestart = xarray.open_dataset(restartFileName)
            dsRestart = dsRestart.isel(Time=0).chunk(chunk)

            regionMaskFileName = self.mpasMasksSubtask.maskFileName

            dsRegionMask = xarray.open_dataset(regionMaskFileName)

            maskRegionNames = decode_strings(dsRegionMask.regionNames)
            regionIndex = maskRegionNames.index(self.regionName)

            dsMask = dsRegionMask.isel(nRegions=regionIndex).chunk(chunk)

            cellMask = dsMask.regionCellMasks == 1
            if 'landIceMask' in dsRestart:
                # only the region outside of ice-shelf cavities
                cellMask = numpy.logical_and(cellMask,
                                             dsRestart.landIceMask == 0)

            if config.has_option(sectionName, 'zmin'):
                zmin = config.getfloat(sectionName, 'zmin')
            else:
                if 'zminRegions' in dsMask:
                    zmin = dsMask.zminRegions.values
                else:
                    # the old naming convention, used in some pre-generated
                    # mask files
                    zmin = dsMask.zmin.values

            if config.has_option(sectionName, 'zmax'):
                zmax = config.getfloat(sectionName, 'zmax')
            else:
                if 'zmaxRegions' in dsMask:
                    zmax = dsMask.zmaxRegions.values
                else:
                    # the old naming convention, used in some pre-generated
                    # mask files
                    zmax = dsMask.zmax.values

            inFileName = get_unmasked_mpas_climatology_file_name(
                config, self.season, self.componentName, op='avg')

            ds = xarray.open_dataset(inFileName)

            variableList = [
                'timeMonthly_avg_activeTracers_temperature',
                'timeMonthly_avg_activeTracers_salinity',
                'timeMonthly_avg_layerThickness'
            ]
            ds = ds[variableList]

            ds['zMid'] = compute_zmid(dsRestart.bottomDepth,
                                      dsRestart.maxLevelCell,
                                      dsRestart.layerThickness)

            ds['volume'] = (dsRestart.areaCell *
                            ds['timeMonthly_avg_layerThickness'])

            ds = ds.where(cellMask, drop=True)

            self.logger.info("Don't worry about the following dask "
                             "warnings.")
            depthMask = numpy.logical_and(ds.zMid >= zmin, ds.zMid <= zmax)
            # compute() returns a new DataArray; assign it back so the mask
            # is only evaluated once
            depthMask = depthMask.compute()
            self.logger.info('Dask warnings should be over now.')
            ds['depthMask'] = depthMask

            for var in variableList:
                ds[var] = ds[var].where(depthMask)

            T = ds['timeMonthly_avg_activeTracers_temperature'].values.ravel()
            mask = numpy.isfinite(T)
            T = T[mask]

            S = ds['timeMonthly_avg_activeTracers_salinity'].values.ravel()
            S = S[mask]

            zMid = ds['zMid'].values.ravel()[mask]

            volume = ds['volume'].values.ravel()[mask]

            dsOut = xarray.Dataset()
            dsOut['T'] = ('nPoints', T)
            dsOut['S'] = ('nPoints', S)
            dsOut['z'] = ('nPoints', zMid)
            dsOut['volume'] = ('nPoints', volume)
            dsOut['zbounds'] = ('nBounds', [zmin, zmax])
            write_netcdf(dsOut, outFileName)

        return zmin, zmax  # }}}
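The final block above flattens the masked 2-D (cell by level) fields into 1-D
point clouds for the T-S diagram, deriving a single finite-value mask from
temperature and reusing it so T, S, depth and volume stay aligned point for
point. A minimal sketch of that pattern, with hypothetical 2 x 3 arrays in
which NaN marks points excluded by the region or depth masks:

import numpy
import xarray

# hypothetical masked fields on 2 cells x 3 vertical levels
T2d = numpy.array([[1.0, 0.5, numpy.nan],
                   [2.0, numpy.nan, numpy.nan]])
S2d = numpy.array([[34.0, 34.2, numpy.nan],
                   [34.5, numpy.nan, numpy.nan]])

T = T2d.ravel()
mask = numpy.isfinite(T)  # one mask, derived from T...
T = T[mask]
S = S2d.ravel()[mask]     # ...and reused so the fields stay aligned

dsOut = xarray.Dataset()
dsOut['T'] = ('nPoints', T)
dsOut['S'] = ('nPoints', S)
print(dsOut['T'].values, dsOut['S'].values)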