def setup_and_check(self):  # {{{
        """
        Perform steps to set up the analysis and check for errors in the setup.
        """
        # Authors
        # -------
        # Xylar Asay-Davis, Greg Streletz

        # first, call setup_and_check from the base class (AnalysisTask),
        # which will perform some common setup, including storing:
        #     self.runDirectory, self.historyDirectory, self.plotsDirectory,
        #     self.namelist, self.runStreams, self.historyStreams,
        #     self.calendar
        super(PlotDepthIntegratedTimeSeriesSubtask, self).setup_and_check()

        config = self.config

        if self.controlConfig is not None:
            # the file we need to read from the control run must be found
            # relative to the control run's output directory, so an absolute
            # path won't work
            assert (not os.path.isabs(self.inFileName))

            baseDirectory = build_config_full_path(self.controlConfig,
                                                   'output',
                                                   'timeSeriesSubdirectory')

            self.refFileName = '{}/{}'.format(baseDirectory, self.inFileName)

        preprocessedReferenceRunName = config.get(
            'runs', 'preprocessedReferenceRunName')
        if preprocessedReferenceRunName != 'None':

            assert (not os.path.isabs(self.inFileName))

            baseDirectory = build_config_full_path(config, 'output',
                                                   'timeSeriesSubdirectory')

            make_directories('{}/preprocessed'.format(baseDirectory))

            self.preprocessedIntermediateFileName = \
                '{}/preprocessed/intermediate_{}'.format(baseDirectory,
                                                         self.inFileName)
            self.preprocessedFileName = '{}/preprocessed/{}'.format(
                baseDirectory, self.inFileName)

        if not os.path.isabs(self.inFileName):
            baseDirectory = build_config_full_path(config, 'output',
                                                   'timeSeriesSubdirectory')

            self.inFileName = '{}/{}'.format(baseDirectory, self.inFileName)

        mainRunName = self.config.get('runs', 'mainRunName')

        self.filePrefix = '{}_{}_{}'.format(self.outFileLabel, self.regionName,
                                            mainRunName)
        self.xmlFileNames = [
            '{}/{}.xml'.format(self.plotsDirectory, self.filePrefix)
        ]

        return  # }}}
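Several of these snippets build output paths with the helper build_config_full_path. As a rough mental model only (a minimal sketch, assuming a configparser-style config; the real MPAS-Analysis helper takes additional options such as relativePathSection, seen in later snippets), it behaves roughly like:

import os

def build_config_full_path_sketch(config, section, relativePathOption):
    # hypothetical sketch, not the actual MPAS-Analysis implementation
    subdirectory = config.get(section, relativePathOption)
    if os.path.isabs(subdirectory):
        return subdirectory
    # otherwise, resolve relative to the configured base output directory
    return os.path.join(config.get(section, 'baseDirectory'), subdirectory)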
def get_unmasked_mpas_climatology_directory(config, op='avg'):  # {{{
    """
    Get the directory for an unmasked MPAS climatology produced by ncclimo,
    making the directory if it doesn't already exist

    Parameters
    ----------
    config :  ``MpasAnalysisConfigParser``
        configuration options

    op : {'avg', 'min', 'max'}
         operator for monthly stats
    """
    # Authors
    # -------
    # Xylar Asay-Davis

    climatologyOpDirectory = get_climatology_op_directory(config, op)

    mpasMeshName = config.get('input', 'mpasMeshName')

    directory = '{}/unmasked_{}'.format(climatologyOpDirectory,
                                        mpasMeshName)

    make_directories(directory)
    return directory  # }}}
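A usage sketch with hypothetical values: if get_climatology_op_directory resolved to 'analysis_output/clim/mpas/avg' and mpasMeshName were 'EC30to60E2r2', then

# directory = get_unmasked_mpas_climatology_directory(config, op='avg')
# -> 'analysis_output/clim/mpas/avg/unmasked_EC30to60E2r2' (created if absent)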
Example #3
    def _compute_area_vol(self):  # {{{
        '''
        Compute part of the time series of sea ice volume and area, given time
        indices to process.
        '''

        outFileNames = {}
        for hemisphere in ['NH', 'SH']:
            baseDirectory = build_config_full_path(self.config, 'output',
                                                   'timeSeriesSubdirectory')

            make_directories(baseDirectory)

            outFileName = '{}/seaIceAreaVol{}.nc'.format(
                baseDirectory, hemisphere)
            outFileNames[hemisphere] = outFileName

        dsTimeSeries = {}
        dsMesh = xr.open_dataset(self.restartFileName)
        dsMesh = subset_variables(dsMesh, variableList=['latCell', 'areaCell'])
        # Load data
        ds = open_mpas_dataset(fileName=self.inputFile,
                               calendar=self.calendar,
                               variableList=self.variableList,
                               startDate=self.startDate,
                               endDate=self.endDate)

        for hemisphere in ['NH', 'SH']:

            if hemisphere == 'NH':
                mask = dsMesh.latCell > 0
            else:
                mask = dsMesh.latCell < 0

            dsAreaSum = (ds.where(mask) * dsMesh.areaCell).sum('nCells')
            dsAreaSum = dsAreaSum.rename({
                'timeMonthly_avg_iceAreaCell':
                'iceArea',
                'timeMonthly_avg_iceVolumeCell':
                'iceVolume'
            })
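            # note that the next line divides by the total area of all
            # cells, not only the cells selected by the hemisphere mask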
            dsAreaSum['iceThickness'] = (dsAreaSum.iceVolume /
                                         dsMesh.areaCell.sum('nCells'))

            dsAreaSum['iceArea'].attrs['units'] = 'm$^2$'
            dsAreaSum['iceArea'].attrs['description'] = \
                'Total {} sea ice area'.format(hemisphere)
            dsAreaSum['iceVolume'].attrs['units'] = 'm$^3$'
            dsAreaSum['iceVolume'].attrs['description'] = \
                'Total {} sea ice volume'.format(hemisphere)
            dsAreaSum['iceThickness'].attrs['units'] = 'm'
            dsAreaSum['iceThickness'].attrs['description'] = \
                'Mean {} sea ice thickness'.format(hemisphere)

            dsTimeSeries[hemisphere] = dsAreaSum

            write_netcdf(dsAreaSum, outFileNames[hemisphere])

        return dsTimeSeries  # }}}
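A self-contained illustration of the hemisphere-masking pattern used above, with synthetic values in place of MPAS output:

import numpy as np
import xarray as xr

latCell = xr.DataArray(np.array([-60., -30., 20., 70.]), dims='nCells')
areaCell = xr.DataArray(np.ones(4), dims='nCells')
iceConc = xr.DataArray(np.array([[0.9, 0.2, 0.0, 0.8]]),
                       dims=('Time', 'nCells'))

mask = latCell > 0  # northern hemisphere
# area-weighted sum over cells; masked-out cells contribute nothing
nhIceArea = (iceConc.where(mask) * areaCell).sum('nCells')
print(float(nhIceArea[0]))  # 0.8 with these synthetic values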
Example #4
def get_masked_mpas_climatology_file_name(config, season, componentName,
                                          climatologyName):  # {{{
    """
    Get the file name for a masked MPAS climatology

    Parameters
    ----------
    config :  ``MpasAnalysisConfigParser``
        Configuration options

    season : str
        One of the seasons in ``constants.monthDictionary``

    componentName : {'ocean', 'seaIce'}
        The MPAS component for which the climatology is being computed

    climatologyName : str
        The name of the climatology (typically the name of a field to mask
        and later remap)
    """
    # Authors
    # -------
    # Xylar Asay-Davis

    startYear = config.getint('climatology', 'startYear')
    endYear = config.getint('climatology', 'endYear')
    mpasMeshName = config.get('input', 'mpasMeshName')

    if componentName == 'ocean':
        ncclimoModel = 'mpaso'
    elif componentName == 'seaIce':
        ncclimoModel = 'mpascice'
    else:
        raise ValueError('component {} is not supported by ncclimo.\n'
                         'Check with Charlie Zender and Xylar Asay-Davis\n'
                         'about getting it added'.format(componentName))

    climatologyBaseDirectory = build_config_full_path(
        config, 'output', 'mpasClimatologySubdirectory')

    stageDirectory = '{}/masked'.format(climatologyBaseDirectory)

    directory = '{}/{}_{}'.format(stageDirectory, climatologyName,
                                  mpasMeshName)

    make_directories(directory)

    monthValues = sorted(constants.monthDictionary[season])
    startMonth = monthValues[0]
    endMonth = monthValues[-1]

    suffix = '{:04d}{:02d}_{:04d}{:02d}_climo'.format(startYear, startMonth,
                                                      endYear, endMonth)

    if season in constants.abrevMonthNames:
        season = '{:02d}'.format(monthValues[0])
    fileName = '{}/{}_{}_{}.nc'.format(directory, ncclimoModel, season, suffix)

    return fileName  # }}}
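With hypothetical inputs, the resulting path looks like:

# e.g. season='JFM' (months 1-3), componentName='ocean',
# climatologyName='sst', mpasMeshName='EC30to60E2r2', years 1-20:
#   <climatologyBaseDirectory>/masked/sst_EC30to60E2r2/
#       mpaso_JFM_000101_002003_climo.nc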
Example #5
    def setup_and_check(self):  # {{{
        '''
        Perform steps to set up the analysis and check for errors in the setup.

        Raises
        ------
        IOError :
            If a restart file is not available from which to read mesh
            information or if no history files are available from which to
            compute the climatology in the desired time range.
        '''
        # Authors
        # -------
        # Xylar Asay-Davis

        # first, call setup_and_check from the base class (AnalysisTask),
        # which will perform some common setup, including storing:
        #     self.runDirectory, self.historyDirectory, self.plotsDirectory,
        #     self.namelist, self.runStreams, self.historyStreams,
        #     self.calendar
        super(ComputeRegionMasksSubtask, self).setup_and_check()

        self.useMpasMesh = self.obsFileName is None
        if self.useMpasMesh:
            try:
                self.obsFileName = self.runStreams.readpath('restart')[0]
            except ValueError:
                raise IOError('No MPAS restart file found: need at least one '
                              'restart file to perform region masking.')

        maskSubdirectory = build_config_full_path(self.config, 'output',
                                                  'maskSubdirectory')
        make_directories(maskSubdirectory)

        if self.meshName is None:
            self.meshName = self.config.get('input', 'mpasMeshName')

        # first, see if we have cached a mask file name in the region masks
        # directory

        self.maskFileName = get_region_mask(
            self.config, '{}_{}.nc'.format(self.meshName, self.outFileSuffix))

        if not os.path.exists(self.maskFileName):
            # no cached mask file, so let's see if there's already one in the
            # masks subfolder of the output directory

            maskSubdirectory = build_config_full_path(self.config, 'output',
                                                      'maskSubdirectory')
            self.maskFileName = '{}/{}_{}.nc'.format(maskSubdirectory,
                                                     self.meshName,
                                                     self.outFileSuffix)

        if os.path.exists(self.maskFileName):
            # nothing to do so don't block a bunch of other processes
            self.subprocessCount = 1
Example #6
def get_unmasked_mpas_climatology_file_name(config,
                                            season,
                                            componentName,
                                            op='avg'):
    # {{{
    """
    Get the file name for an unmasked MPAS climatology produced by ncclimo

    Parameters
    ----------
    config :  ``MpasAnalysisConfigParser``
        configuration options

    season : str
        One of the seasons in ``constants.monthDictionary``

    componentName : {'ocean', 'seaIce'}
        The MPAS component for which the climatology is being computed

    op : {'avg', 'min', 'max'}
         operator for monthly stats
    """
    # Authors
    # -------
    # Xylar Asay-Davis

    startYear = config.getint('climatology', 'startYear')
    endYear = config.getint('climatology', 'endYear')

    if componentName == 'ocean':
        ncclimoModel = 'mpaso'
    elif componentName == 'seaIce':
        ncclimoModel = 'mpascice'
    else:
        raise ValueError('component {} is not supported by ncclimo.\n'
                         'Check with Charlie Zender and Xylar Asay-Davis\n'
                         'about getting it added'.format(componentName))

    directory = get_unmasked_mpas_climatology_directory(config, op)

    make_directories(directory)
    monthValues = sorted(constants.monthDictionary[season])
    startMonth = monthValues[0]
    endMonth = monthValues[-1]

    suffix = '{:04d}{:02d}_{:04d}{:02d}_climo'.format(startYear, startMonth,
                                                      endYear, endMonth)

    if season in constants.abrevMonthNames:
        season = '{:02d}'.format(monthValues[0])
    fileName = '{}/{}_{}_{}.nc'.format(directory, ncclimoModel, season, suffix)
    return fileName  # }}}
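Note the final branch: for a single-month "season" the label is replaced by the two-digit month number. With hypothetical years:

# season='Jan' with years 1-1:
#   <directory>/mpaso_01_000101_000101_climo.nc
# season='JJA' (months 6-8) with years 2-5:
#   <directory>/mpaso_JJA_000206_000508_climo.nc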
Example #7
    def get_out_file_name(self,
                          transectName,
                          verticalComparisonGridName='obs'):  # {{{
        '''
        Given the name of a transect and (optionally) the name of a vertical
        comparison grid, returns the full path of the remapped climatology
        file.

        Parameters
        ----------
        transectName : str
            The name of the transect

        verticalComparisonGridName : {'obs', 'mpas'} or any str, optional
            The vertical grid name on which to compare MPAS data with
            observations. 'obs' indicates the locations of the original
            observations; 'mpas' is the vertical locations of MPAS points,
            remapped to the observation latitude/longitude. If any other,
            string, verticalComparisonGrid should be a 1D numpy array and this
            name should be a useful (and unique) description of that grid.

        Returns
        -------
        fileName : str
            The path to the remapped climatology file for the transect.
        '''
        # Authors
        # -------
        # Xylar Asay-Davis

        config = self.config

        remappedDirectory = build_config_full_path(
            config=config,
            section='output',
            relativePathOption='remappedClimSubdirectory',
            relativePathSection='oceanObservations')

        make_directories(remappedDirectory)

        if verticalComparisonGridName == 'obs':
            fileName = '{}/{}_{}.nc'.format(remappedDirectory,
                                            self.transectCollectionName,
                                            transectName)
        else:
            fileName = '{}/{}_{}_{}.nc'.format(remappedDirectory,
                                               self.transectCollectionName,
                                               transectName,
                                               verticalComparisonGridName)
        return fileName  # }}}
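A usage sketch, assuming a hypothetical transectCollectionName of 'sose_transects':

# get_out_file_name('lon_0')
#   -> '<remappedDirectory>/sose_transects_lon_0.nc'
# get_out_file_name('lon_0', verticalComparisonGridName='uniform_10m')
#   -> '<remappedDirectory>/sose_transects_lon_0_uniform_10m.nc'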
Example #8
    def test_run_analysis(self):
        mpasClimatologyTask = self.setup_task()
        self.add_variables(mpasClimatologyTask)

        config = mpasClimatologyTask.config
        logsDirectory = build_config_full_path(config, 'output',
                                               'logsSubdirectory')
        make_directories(logsDirectory)
        make_directories('{}/configs/'.format(logsDirectory))

        mpasClimatologyTask.run(writeLogFile=False)

        for season in mpasClimatologyTask.seasons:
            fileName = mpasClimatologyTask.get_file_name(season=season)
            assert (os.path.exists(fileName))
    def _get_file_name(self, obsDict, suffix=''):
        obsSection = '{}Observations'.format(self.componentName)
        climatologyDirectory = build_config_full_path(
            config=self.config,
            section='output',
            relativePathOption='climatologySubdirectory',
            relativePathSection=obsSection)

        make_directories(climatologyDirectory)

        fileName = '{}/{}_{}_{}{}.nc'.format(climatologyDirectory,
                                             'TS_{}'.format(obsDict['suffix']),
                                             obsDict['gridName'], self.season,
                                             suffix)
        return fileName
    def run_task(self):  # {{{
        """
        Compute the requested climatologies
        """
        # Authors
        # -------
        # Xylar Asay-Davis

        if os.path.exists(self.maskFileName):
            return

        # make the geojson file if it doesn't exist
        self.make_region_mask()

        if self.featureList is None:
            # get a list of features for use by other tasks (e.g. to determine
            # plot names)
            self.featureList = get_feature_list(self.geojsonFileName)

        if self.useMpasMesh:

            maskSubdirectory = build_config_full_path(self.config, 'output',
                                                      'maskSubdirectory')
            make_directories(maskSubdirectory)

            compute_mpas_region_masks(
                self.geojsonFileName, self.obsFileName, self.maskFileName,
                self.featureList, self.logger, self.subprocessCount,
                showProgress=False, useMpasMaskCreator=self.useMpasMaskCreator,
                dir=maskSubdirectory)
        else:

            dsGrid = xr.open_dataset(self.obsFileName)
            latVar = dsGrid[self.latVar]
            lonVar = dsGrid[self.lonVar]
            if len(latVar.dims) > 1 or len(lonVar.dims) > 1:
                raise ValueError('Masking does not support multidimensional '
                                 'lat/lon with dims {}'.format(latVar.dims))

            latDim = latVar.dims[0]
            lonDim = lonVar.dims[0]
            lat = latVar.values
            lon = lonVar.values

            compute_lon_lat_region_masks(
                self.geojsonFileName, lon, lat, self.maskFileName,
                self.featureList, self.logger, self.subprocessCount,
                showProgress=False, lonDim=lonDim, latDim=latDim)
Example #11
    def link_dir(section, option):
        destDirectory = build_config_full_path(config=config, section='output',
                                               relativePathOption=option,
                                               relativePathSection=section)
        if not os.path.exists(destDirectory):

            sourceDirectory = build_config_full_path(
                config=mainConfig, section='output',
                relativePathOption=option, relativePathSection=section)

            if os.path.exists(sourceDirectory):

                destBase, _ = os.path.split(destDirectory)

                make_directories(destBase)

                os.symlink(sourceDirectory, destDirectory)
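A hypothetical call, using config options that appear elsewhere in these snippets:

# reuse the main run's climatologies rather than recomputing them:
# link_dir(section='oceanObservations', option='climatologySubdirectory')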
Example #12
    def test_subtask_run_analysis(self):
        remapSubtask = self.setup_subtask()

        config = remapSubtask.config
        logsDirectory = build_config_full_path(config, 'output',
                                               'logsSubdirectory')
        make_directories(logsDirectory)
        make_directories('{}/configs/'.format(logsDirectory))

        remapSubtask.run(writeLogFile=False)

        for comparisonGridName in remapSubtask.comparisonGridNames:
            for season in remapSubtask.seasons:
                for stage in ['original', 'climatology', 'remapped']:
                    fileName = remapSubtask.get_file_name(
                            season=season, stage=stage,
                            comparisonGridName=comparisonGridName)
                    assert(os.path.exists(fileName))
Example #13
    def test_subtask_run_analysis(self):
        mpasClimatologyTask = self.setup_task()
        self.add_variables(mpasClimatologyTask)
        remapSubtask = self.setup_subtask(mpasClimatologyTask)

        config = mpasClimatologyTask.config
        logsDirectory = build_config_full_path(config, 'output',
                                               'logsSubdirectory')
        make_directories(logsDirectory)
        make_directories('{}/configs/'.format(logsDirectory))

        mpasClimatologyTask.run(writeLogFile=False)
        remapSubtask.run(writeLogFile=False)

        for season in remapSubtask.seasons:
            for stage in ['masked', 'remapped']:
                fileName = remapSubtask.get_file_name(
                    season=season, stage=stage, comparisonGridName='latlon')
                assert (os.path.exists(fileName))
    def setup_and_check(self):  # {{{
        '''
        Perform steps to set up the analysis and check for errors in the setup.

        Raises
        ------
        IOError :
            If a restart file is not available from which to read mesh
            information or if no history files are available from which to
            compute the climatology in the desired time range.
        '''
        # Authors
        # -------
        # Xylar Asay-Davis

        # first, call setup_and_check from the base class (AnalysisTask),
        # which will perform some common setup, including storing:
        #     self.runDirectory, self.historyDirectory, self.plotsDirectory,
        #     self.namelist, self.runStreams, self.historyStreams,
        #     self.calendar
        super(RemapMpasClimatologySubtask, self).setup_and_check()

        try:
            self.restartFileName = self.runStreams.readpath('restart')[0]
        except ValueError:
            raise IOError('No MPAS restart file found: need at least one '
                          'restart file to perform remapping of '
                          'climatologies.')

        # we set up the remapper here because ESMF_RegridWeightGen seems to
        # have trouble if it runs in another process (or in several at once)
        self._setup_remappers()

        # don't add the variables and seasons to mpasClimatologyTask until
        # we're sure this subtask is supposed to run
        self.mpasClimatologyTask.add_variables(self.variableList, self.seasons)

        # make the mapping directory, because doing so within each process
        # seems to be giving ESMF_RegridWeightGen some trouble
        mappingSubdirectory = build_config_full_path(self.config, 'output',
                                                     'mappingSubdirectory')
        make_directories(mappingSubdirectory)
    def _create_symlinks(self):  # {{{
        """
        Create symlinks to monthly mean files so they have the expected file
        naming convention for ncclimo.

        Returns
        -------
        symlinkDirectory : str
            The path to the symlinks created for each timeSeriesStatsMonthly
            input file
        """
        # Authors
        # -------
        # Xylar Asay-Davis

        config = self.config

        fileNames = sorted(self.inputFiles)
        years, months = get_files_year_month(fileNames, self.historyStreams,
                                             self.streamName)

        climatologyOpDirectory = get_climatology_op_directory(config, self.op)

        symlinkDirectory = '{}/source_symlinks'.format(climatologyOpDirectory)

        make_directories(symlinkDirectory)

        for inFileName, year, month in zip(fileNames, years, months):
            outFileName = '{}/{}.hist.am.timeSeriesStatsMonthly.{:04d}-' \
                '{:02d}-01.nc'.format(symlinkDirectory, self.ncclimoModel,
                                      year, month)

            try:
                os.symlink(inFileName, outFileName)
            except OSError:
                pass

        return symlinkDirectory
    def _create_symlinks(self):  # {{{
        """
        Create symlinks to monthly mean files so they have the expected file
        naming convention for ncclimo.

        Returns
        -------
        symlinkDirectory : str
            The path to the symlinks created for each timeSeriesStatsMonthly
            input file
        """
        # Authors
        # -------
        # Xylar Asay-Davis

        config = self.config

        fileNames = sorted(self.inputFiles)
        years, months = get_files_year_month(fileNames, self.historyStreams,
                                             'timeSeriesStatsMonthlyOutput')

        climatologyBaseDirectory = build_config_full_path(
            config, 'output', 'mpasClimatologySubdirectory')

        symlinkDirectory = '{}/source_symlinks'.format(
            climatologyBaseDirectory)

        make_directories(symlinkDirectory)

        for inFileName, year, month in zip(fileNames, years, months):
            outFileName = '{}/{}.hist.am.timeSeriesStatsMonthly.{:04d}-' \
                '{:02d}-01.nc'.format(symlinkDirectory, self.ncclimoModel,
                                      year, month)

            if not os.path.exists(outFileName):
                os.symlink(inFileName, outFileName)

        return symlinkDirectory
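The two _create_symlinks variants differ in collision handling: the first attempts the link and swallows every OSError (EAFP), while the second checks os.path.exists first (LBYL), which can race when several processes run concurrently. A middle-ground sketch (not part of MPAS-Analysis) that tolerates only "already exists" errors:

import errno
import os

def safe_symlink(source, target):
    try:
        os.symlink(source, target)
    except OSError as error:
        # ignore an existing link, but surface any other failure
        if error.errno != errno.EEXIST:
            raise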
Example #17
def get_unmasked_mpas_climatology_directory(config):  # {{{
    """
    Get the directory for an unmasked MPAS climatology produced by ncclimo,
    making the directory if it doesn't already exist

    Parameters
    ----------
    config :  ``MpasAnalysisConfigParser``
        configuration options
    """
    # Authors
    # -------
    # Xylar Asay-Davis

    climatologyBaseDirectory = build_config_full_path(
        config, 'output', 'mpasClimatologySubdirectory')

    mpasMeshName = config.get('input', 'mpasMeshName')

    directory = '{}/unmasked_{}'.format(climatologyBaseDirectory, mpasMeshName)

    make_directories(directory)
    return directory  # }}}
    def _compute_moc_climo_analysismember(self):  # {{{
        '''Compute the mean MOC streamfunction from the analysis member'''

        config = self.config

        self.regionNames = config.getExpression(self.sectionName,
                                                'regionNames')
        self.regionNames.append('Global')

        # Read in depth and bin latitudes
        try:
            restartFileName = self.runStreams.readpath('restart')[0]
        except ValueError:
            raise IOError('No MPAS-O restart file found: need at least '
                          'one for the MOC calculation')

        with xr.open_dataset(restartFileName) as dsRestart:
            refBottomDepth = dsRestart.refBottomDepth.values

        nVertLevels = len(refBottomDepth)
        refLayerThickness = np.zeros(nVertLevels)
        refLayerThickness[0] = refBottomDepth[0]
        refLayerThickness[1:nVertLevels] = \
            refBottomDepth[1:nVertLevels] - refBottomDepth[0:nVertLevels-1]

        refZMid = refBottomDepth - 0.5 * refLayerThickness

        binBoundaryMocStreamfunction = None
        # first try timeSeriesStatsMonthly for bin boundaries, then try
        # mocStreamfunctionOutput stream as a backup option
        for streamName in [
                'timeSeriesStatsMonthlyOutput', 'mocStreamfunctionOutput'
        ]:
            try:
                inputFile = self.historyStreams.readpath(streamName)[0]
            except ValueError:
                # this stream has no files; fall back to the next one
                continue

            with xr.open_dataset(inputFile) as ds:
                if 'binBoundaryMocStreamfunction' in ds.data_vars:
                    binBoundaryMocStreamfunction = \
                        ds.binBoundaryMocStreamfunction.values
                    break

        if binBoundaryMocStreamfunction is None:
            raise ValueError('Could not find binBoundaryMocStreamfunction in '
                             'either timeSeriesStatsMonthlyOutput or '
                             'mocStreamfunctionOutput streams')

        binBoundaryMocStreamfunction = np.rad2deg(binBoundaryMocStreamfunction)

        # Compute and plot annual climatology of MOC streamfunction
        self.logger.info('\n  Compute and/or plot post-processed MOC '
                         'climatological streamfunction...')
        outputDirectory = build_config_full_path(
            config, 'output', 'mpasClimatologySubdirectory')

        make_directories(outputDirectory)

        outputFileClimo = '{}/mocStreamfunction_years{:04d}-{:04d}.nc'.format(
            outputDirectory, self.startYearClimo, self.endYearClimo)
        if not os.path.exists(outputFileClimo):
            self.logger.info('   Load data...')

            climatologyFileName = self.mpasClimatologyTask.get_file_name(
                season='ANN')
            annualClimatology = xr.open_dataset(climatologyFileName)
            annualClimatology = annualClimatology.isel(Time=0)

            # rename some variables for convenience
            annualClimatology = annualClimatology.rename({
                'timeMonthly_avg_mocStreamvalLatAndDepth':
                'avgMocStreamfunGlobal',
                'timeMonthly_avg_mocStreamvalLatAndDepthRegion':
                'avgMocStreamfunRegional'
            })

            # Create dictionary for MOC climatology (NB: need this form
            # in order to convert it to xarray dataset later in the script)
            self.depth = refZMid
            self.lat = {}
            self.moc = {}
            for region in self.regionNames:
                self.logger.info('   Compute {} MOC...'.format(region))
                if region == 'Global':
                    mocTop = annualClimatology.avgMocStreamfunGlobal.values
                else:
                    # hard-wire region=0 (Atlantic) for now
                    indRegion = 0
                    mocTop = annualClimatology.avgMocStreamfunRegional[
                        indRegion, :, :].values
                # Store computed MOC to dictionary
                self.lat[region] = binBoundaryMocStreamfunction
                self.moc[region] = mocTop

            # Save to file
            self.logger.info('   Save global and regional MOC to file...')
            ncFile = netCDF4.Dataset(outputFileClimo, mode='w')
            # create dimensions
            ncFile.createDimension('nz', nVertLevels)
            for region in self.regionNames:
                latBins = self.lat[region]
                mocTop = self.moc[region]
                ncFile.createDimension('nx{}'.format(region), len(latBins))
                # create variables
                x = ncFile.createVariable('lat{}'.format(region), 'f4',
                                          ('nx{}'.format(region), ))
                x.description = 'latitude bins for MOC {}'\
                                ' streamfunction'.format(region)
                x.units = 'degrees (-90 to 90)'
                y = ncFile.createVariable('moc{}'.format(region), 'f4',
                                          ('nz', 'nx{}'.format(region)))
                y.description = 'MOC {} streamfunction, annual'\
                                ' climatology'.format(region)
                y.units = 'Sv (10^6 m^3/s)'
                # save variables
                x[:] = latBins
                y[:, :] = mocTop
            depth = ncFile.createVariable('depth', 'f4', ('nz', ))
            depth.description = 'depth'
            depth.units = 'meters'
            depth[:] = self.depth
            ncFile.close()
        else:
            # Read from file
            self.logger.info('   Read previously computed MOC streamfunction '
                             'from file...')
            ncFile = netCDF4.Dataset(outputFileClimo, mode='r')
            self.depth = ncFile.variables['depth'][:]
            self.lat = {}
            self.moc = {}
            for region in self.regionNames:
                self.lat[region] = ncFile.variables['lat{}'.format(region)][:]
                self.moc[region] = \
                    ncFile.variables['moc{}'.format(region)][:, :]
            ncFile.close()
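The reference layer thicknesses computed above are just first differences of the bottom depths with the surface prepended; a compact equivalent using synthetic depths:

import numpy as np

refBottomDepth = np.array([10., 25., 45.])  # hypothetical depths in meters
refLayerThickness = np.diff(refBottomDepth, prepend=0.)  # [10., 15., 20.]
refZMid = refBottomDepth - 0.5 * refLayerThickness  # [5., 17.5, 35.]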
Example #19
def main():
    """
    Entry point for the main script ``mpas_analysis``
    """

    parser = argparse.ArgumentParser(
        description=__doc__, formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument('-v',
                        '--version',
                        action='version',
                        version='mpas_analysis {}'.format(
                            mpas_analysis.__version__),
                        help="Show version number and exit")
    parser.add_argument("--setup_only",
                        dest="setup_only",
                        action='store_true',
                        help="If only the setup phase, not the run or HTML "
                        "generation phases, should be executed.")
    parser.add_argument("--html_only",
                        dest="html_only",
                        action='store_true',
                        help="If only the setup and HTML generation phases, "
                        "not the run phase, should be executed.")
    parser.add_argument("-g",
                        "--generate",
                        dest="generate",
                        help="A list of analysis modules to generate "
                        "(nearly identical generate option in config file).",
                        metavar="ANALYSIS1[,ANALYSIS2,ANALYSIS3,...]")
    parser.add_argument("-l",
                        "--list",
                        dest="list",
                        action='store_true',
                        help="List the available analysis tasks")
    parser.add_argument("-p",
                        "--purge",
                        dest="purge",
                        action='store_true',
                        help="Purge the analysis by deleting the output"
                        "directory before running")
    parser.add_argument('configFiles',
                        metavar='CONFIG',
                        type=str,
                        nargs='*',
                        help='config file')
    parser.add_argument("--plot_colormaps",
                        dest="plot_colormaps",
                        action='store_true',
                        help="Make a plot displaying all available colormaps")
    parser.add_argument("--verbose",
                        dest="verbose",
                        action='store_true',
                        help="Verbose error reporting during setup-and-check "
                        "phase")
    args = parser.parse_args()

    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(0)

    for configFile in args.configFiles:
        if not os.path.exists(configFile):
            raise OSError('Config file {} not found.'.format(configFile))

    # add config.default to cover defaults not included in the config files
    # provided on the command line
    if pkg_resources.resource_exists('mpas_analysis', 'config.default'):
        defaultConfig = pkg_resources.resource_filename(
            'mpas_analysis', 'config.default')
        configFiles = [defaultConfig] + args.configFiles
    else:
        print('WARNING: Did not find config.default.  Assuming other config '
              'file(s) contain a\n'
              'full set of configuration options.')
        defaultConfig = None
        configFiles = args.configFiles

    config = MpasAnalysisConfigParser()
    config.read(configFiles)

    if args.list:
        analyses = build_analysis_list(config, controlConfig=None)
        for analysisTask in analyses:
            print('task: {}'.format(analysisTask.taskName))
            print('    component: {}'.format(analysisTask.componentName))
            print('    tags: {}'.format(', '.join(analysisTask.tags)))
        sys.exit(0)

    if args.plot_colormaps:
        _register_custom_colormaps()
        _plot_color_gradients()
        sys.exit(0)

    if config.has_option('runs', 'controlRunConfigFile'):
        controlConfigFile = config.get('runs', 'controlRunConfigFile')
        if not os.path.exists(controlConfigFile):
            raise OSError('A control config file {} was specified but the '
                          'file does not exist'.format(controlConfigFile))
        controlConfigFiles = [controlConfigFile]
        if defaultConfig is not None:
            controlConfigFiles = [defaultConfig] + controlConfigFiles
        controlConfig = MpasAnalysisConfigParser()
        controlConfig.read(controlConfigFiles)

        # replace the log directory so log files get written to this run's
        # log directory, not the control run's
        logsDirectory = build_config_full_path(config, 'output',
                                               'logsSubdirectory')

        controlConfig.set('output', 'logsSubdirectory', logsDirectory)

        print('Comparing to control run {} rather than observations. \n'
              'Make sure that MPAS-Analysis has been run previously with the '
              'control config file.'.format(
                  controlConfig.get('runs', 'mainRunName')))
    else:
        controlConfig = None

    if args.purge:
        purge_output(config)

    if config.has_option('runs', 'mainRunConfigFile'):
        symlink_main_run(config, defaultConfig)

    if args.generate:
        update_generate(config, args.generate)

    if controlConfig is not None:
        # we want to use the "generate" option from the current run, not
        # the control config file
        controlConfig.set('output', 'generate',
                          config.get('output', 'generate'))

    logsDirectory = build_config_full_path(config, 'output',
                                           'logsSubdirectory')
    make_directories(logsDirectory)

    update_time_bounds_in_config(config)

    file_cache_maxsize = config.getint('input', 'file_cache_maxsize')
    try:
        xarray.set_options(file_cache_maxsize=file_cache_maxsize)
    except ValueError:
        # xarray version doesn't support file_cache_maxsize yet...
        pass

    startTime = time.time()

    analyses = build_analysis_list(config, controlConfig)
    analyses = determine_analyses_to_generate(analyses, args.verbose)

    setupDuration = time.time() - startTime

    if not args.setup_only and not args.html_only:
        run_analysis(config, analyses)
        runDuration = time.time() - startTime
        m, s = divmod(setupDuration, 60)
        h, m = divmod(int(m), 60)
        print('Total setup time: {}:{:02d}:{:05.2f}'.format(h, m, s))
        m, s = divmod(runDuration, 60)
        h, m = divmod(int(m), 60)
        print('Total run time: {}:{:02d}:{:05.2f}'.format(h, m, s))

    if not args.setup_only:
        generate_html(config, analyses, controlConfig)
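The elapsed-time reporting in main() splits seconds into hours, minutes and seconds with nested divmod calls; for example:

m, s = divmod(3725.5, 60)  # 62.0 minutes, 5.5 seconds
h, m = divmod(int(m), 60)  # 1 hour, 2 minutes
print('{}:{:02d}:{:05.2f}'.format(h, m, s))  # prints '1:02:05.50'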
Example #20
    def combine_observations(self):  # {{{
        '''
        Combine SOSE observations into a single file
        '''
        # Authors
        # -------
        # Xylar Asay-Davis

        config = self.config

        longitudes = sorted(
            config.getExpression('soseTransects',
                                 'longitudes',
                                 usenumpyfunc=True))

        observationsDirectory = build_obs_path(config, 'ocean',
                                               'soseSubdirectory')

        outObsDirectory = build_config_full_path(
            config=config,
            section='output',
            relativePathOption='climatologySubdirectory',
            relativePathSection='oceanObservations')

        make_directories(outObsDirectory)

        combinedFileName = '{}/{}.nc'.format(outObsDirectory,
                                             self.transectCollectionName)
        obsFileNames = OrderedDict()
        for lon in longitudes:
            transectName = 'lon_{}'.format(lon)
            obsFileNames[transectName] = combinedFileName

        self.obsFileNames = obsFileNames

        if os.path.exists(combinedFileName):
            return

        print('Preprocessing SOSE transect data...')

        minLat = config.getfloat('soseTransects', 'minLat')
        maxLat = config.getfloat('soseTransects', 'maxLat')

        dsObs = None
        for field in self.fields:
            prefix = field['obsFilePrefix']
            fieldName = field['obsFieldName']
            if prefix is None:
                continue
            print('  {}'.format(field['prefix']))

            fileName = '{}/SOSE_2005-2010_monthly_{}_SouthernOcean' \
                       '_0.167x0.167degree_20180710.nc'.format(
                           observationsDirectory, prefix)

            dsLocal = xr.open_dataset(fileName)

            lat = dsLocal.lat.values
            mask = numpy.logical_and(lat >= minLat, lat <= maxLat)
            indices = numpy.argwhere(mask)
            dsLocal = dsLocal.isel(lat=slice(indices[0][0], indices[-1][0]))
            dsLocal.load()

            if fieldName == 'zonalVel':
                # need to average in longitude
                nLon = dsLocal.sizes['lon']
                lonIndicesP1 = numpy.mod(numpy.arange(nLon) + 1, nLon)
                dsLocal = 0.5 * (dsLocal + dsLocal.isel(lon=lonIndicesP1))

            if fieldName == 'meridVel':
                # need to average in latitude
                nLat = dsLocal.sizes['lat']
                latIndicesP1 = numpy.mod(numpy.arange(nLat) + 1, nLat)
                dsLocal = 0.5 * (dsLocal + dsLocal.isel(lat=latIndicesP1))

            dsLocal = dsLocal.sel(lon=longitudes, method='nearest')

            if dsObs is None:
                dsObs = dsLocal
            else:
                dsLocal['lon'] = dsObs.lon
                dsLocal['lat'] = dsObs.lat
                dsObs[fieldName] = dsLocal[fieldName]
                dsLocal.close()

        if 'zonalVel' in dsObs and 'meridVel' in dsObs:
            # compute the velocity magnitude
            print('  velMag')
            description = 'Monthly velocity magnitude climatologies ' \
                          'from 2005-2010 average of the Southern Ocean ' \
                          'State Estimate (SOSE)'
            dsObs['velMag'] = numpy.sqrt(dsObs.zonalVel**2 + dsObs.meridVel**2)
            dsObs.velMag.attrs['units'] = 'm s$^{-1}$'
            dsObs.velMag.attrs['description'] = description

        write_netcdf(dsObs, combinedFileName)

        print('  Done.')
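The velocity averaging above pairs each grid point with its cyclic neighbor via modular indexing; a standalone illustration with synthetic values:

import numpy as np

nLon = 4
lonIndicesP1 = np.mod(np.arange(nLon) + 1, nLon)  # [1, 2, 3, 0]
values = np.array([0., 1., 2., 3.])
# average each point with its eastern neighbor, wrapping around the globe
centered = 0.5 * (values + values[lonIndicesP1])  # [0.5, 1.5, 2.5, 1.5]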
    def _setup_file_names(self):  # {{{
        """
        Create a dictionary of file names and directories for this climatology
        """
        # Authors
        # -------
        # Xylar Asay-Davis

        config = self.config
        climatologyBaseDirectory = build_config_full_path(
            config, 'output', 'mpasClimatologySubdirectory')

        mpasMeshName = config.get('input', 'mpasMeshName')

        comparisonFullMeshNames = {}
        for comparisonGridName in self.comparisonDescriptors:
            comparisonDescriptor = \
                self.comparisonDescriptors[comparisonGridName]
            comparisonFullMeshNames[comparisonGridName] = \
                comparisonDescriptor.meshName

        keys = []
        for season in self.seasons:
            stage = 'masked'
            keys.append((season, stage))
            stage = 'remapped'
            for comparisonGridName in self.comparisonDescriptors:
                keys.append((season, stage, comparisonGridName))

        self._outputDirs = {}
        self._outputFiles = {}

        for key in keys:
            season = key[0]
            stage = key[1]
            if stage == 'remapped':
                comparisonGridName = key[2]

            stageDirectory = '{}/{}'.format(climatologyBaseDirectory, stage)

            if stage == 'masked':
                directory = '{}/{}_{}'.format(
                        stageDirectory, self.climatologyName,
                        mpasMeshName)
            elif stage == 'remapped':
                directory = '{}/{}_{}_to_{}'.format(
                        stageDirectory,
                        self.climatologyName,
                        mpasMeshName,
                        comparisonFullMeshNames[comparisonGridName])

            make_directories(directory)

            monthValues = sorted(constants.monthDictionary[season])
            startMonth = monthValues[0]
            endMonth = monthValues[-1]

            suffix = '{:04d}{:02d}_{:04d}{:02d}_climo'.format(
                    self.mpasClimatologyTask.startYear, startMonth,
                    self.mpasClimatologyTask.endYear, endMonth)

            if season in constants.abrevMonthNames:
                season = '{:02d}'.format(monthValues[0])
            fileName = '{}/{}_{}_{}.nc'.format(
                    directory, self.mpasClimatologyTask.ncclimoModel, season,
                    suffix)

            self._outputDirs[key] = directory
            self._outputFiles[key] = fileName
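The resulting dictionaries are keyed by tuples, e.g. (hypothetical season and grid):

# self._outputFiles[('JFM', 'masked')]             -> masked climatology file
# self._outputFiles[('JFM', 'remapped', 'latlon')] -> remapped climatology file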
Example #22
    def run_task(self):  # {{{
        """
        Performs analysis of time series of sea-ice properties.
        """
        # Authors
        # -------
        # Xylar Asay-Davis, Milena Veneziani

        self.logger.info("\nPlotting sea-ice area and volume time series...")

        config = self.config
        calendar = self.calendar

        sectionName = self.taskName

        plotTitles = {'iceArea': 'Sea-ice area',
                      'iceVolume': 'Sea-ice volume',
                      'iceThickness': 'Sea-ice mean thickness'}

        units = {'iceArea': '[km$^2$]',
                 'iceVolume': '[10$^3$ km$^3$]',
                 'iceThickness': '[m]'}

        obsFileNames = {
            'iceArea': {'NH': build_obs_path(
                config, 'seaIce',
                relativePathOption='areaNH',
                relativePathSection=sectionName),
                'SH': build_obs_path(
                config, 'seaIce',
                relativePathOption='areaSH',
                relativePathSection=sectionName)},
            'iceVolume': {'NH': build_obs_path(
                config, 'seaIce',
                relativePathOption='volNH',
                relativePathSection=sectionName),
                'SH': build_obs_path(
                config, 'seaIce',
                relativePathOption='volSH',
                relativePathSection=sectionName)}}

        # Some plotting rules
        titleFontSize = config.get('timeSeriesSeaIceAreaVol', 'titleFontSize')

        mainRunName = config.get('runs', 'mainRunName')
        preprocessedReferenceRunName = \
            config.get('runs', 'preprocessedReferenceRunName')
        preprocessedReferenceDirectory = \
            config.get('seaIcePreprocessedReference', 'baseDirectory')

        compareWithObservations = config.getboolean('timeSeriesSeaIceAreaVol',
                                                    'compareWithObservations')

        movingAveragePoints = config.getint('timeSeriesSeaIceAreaVol',
                                            'movingAveragePoints')

        polarPlot = config.getboolean('timeSeriesSeaIceAreaVol', 'polarPlot')

        outputDirectory = build_config_full_path(config, 'output',
                                                 'timeSeriesSubdirectory')

        make_directories(outputDirectory)

        self.logger.info('  Load sea-ice data...')
        # Load mesh

        dsTimeSeries = self._compute_area_vol()

        yearStart = days_to_datetime(dsTimeSeries['NH'].Time.min(),
                                     calendar=calendar).year
        yearEnd = days_to_datetime(dsTimeSeries['NH'].Time.max(),
                                   calendar=calendar).year
        timeStart = date_to_days(year=yearStart, month=1, day=1,
                                 calendar=calendar)
        timeEnd = date_to_days(year=yearEnd, month=12, day=31,
                               calendar=calendar)

        if preprocessedReferenceRunName != 'None':
            # determine if we're beyond the end of the preprocessed data
            # (and go ahead and cache the data set while we're checking)
            outFolder = '{}/preprocessed'.format(outputDirectory)
            make_directories(outFolder)
            inFilesPreprocessed = '{}/icevol.{}.year*.nc'.format(
                preprocessedReferenceDirectory, preprocessedReferenceRunName)
            outFileName = '{}/iceVolume.nc'.format(outFolder)

            combine_time_series_with_ncrcat(inFilesPreprocessed,
                                            outFileName,
                                            logger=self.logger)
            dsPreprocessed = open_mpas_dataset(fileName=outFileName,
                                               calendar=calendar,
                                               timeVariableNames='xtime')
            preprocessedYearEnd = days_to_datetime(dsPreprocessed.Time.max(),
                                                   calendar=calendar).year
            if yearStart <= preprocessedYearEnd:
                dsPreprocessedTimeSlice = \
                    dsPreprocessed.sel(Time=slice(timeStart, timeEnd))
            else:
                self.logger.warning('Preprocessed time series ends before the '
                                    'timeSeries startYear and will not be '
                                    'plotted.')
                preprocessedReferenceRunName = 'None'

        if self.controlConfig is not None:

            dsTimeSeriesRef = {}
            baseDirectory = build_config_full_path(
                self.controlConfig, 'output', 'timeSeriesSubdirectory')

            controlRunName = self.controlConfig.get('runs', 'mainRunName')

            for hemisphere in ['NH', 'SH']:
                inFileName = '{}/seaIceAreaVol{}.nc'.format(baseDirectory,
                                                            hemisphere)

                dsTimeSeriesRef[hemisphere] = xr.open_dataset(inFileName)

        norm = {'iceArea': 1e-6,  # m^2 to km^2
                'iceVolume': 1e-12,  # m^3 to 10^3 km^3
                'iceThickness': 1.}

        xLabel = 'Time [years]'

        galleryGroup = 'Time Series'
        groupLink = 'timeseries'

        obs = {}
        preprocessed = {}
        figureNameStd = {}
        figureNamePolar = {}
        title = {}
        plotVars = {}
        obsLegend = {}
        plotVarsRef = {}

        for hemisphere in ['NH', 'SH']:

            self.logger.info('  Make {} plots...'.format(hemisphere))

            for variableName in ['iceArea', 'iceVolume']:
                key = (hemisphere, variableName)

                # apply the norm to each variable
                plotVars[key] = (norm[variableName] *
                                 dsTimeSeries[hemisphere][variableName])

                if self.controlConfig is not None:
                    plotVarsRef[key] = norm[variableName] * \
                        dsTimeSeriesRef[hemisphere][variableName]

                prefix = '{}/{}{}_{}'.format(self.plotsDirectory,
                                             variableName,
                                             hemisphere,
                                             mainRunName)

                figureNameStd[key] = '{}.png'.format(prefix)
                figureNamePolar[key] = '{}_polar.png'.format(prefix)

                title[key] = '{} ({})'.format(plotTitles[variableName],
                                              hemisphere)

            if compareWithObservations:
                key = (hemisphere, 'iceArea')
                obsLegend[key] = 'SSM/I observations, annual cycle'
                if hemisphere == 'NH':
                    key = (hemisphere, 'iceVolume')
                    obsLegend[key] = 'PIOMAS, annual cycle (blue)'

            if preprocessedReferenceRunName != 'None':
                for variableName in ['iceArea', 'iceVolume']:
                    key = (hemisphere, variableName)

            if compareWithObservations:

                outFolder = '{}/obs'.format(outputDirectory)
                make_directories(outFolder)
                outFileName = '{}/iceArea{}.nc'.format(outFolder, hemisphere)

                combine_time_series_with_ncrcat(
                    obsFileNames['iceArea'][hemisphere],
                    outFileName, logger=self.logger)
                dsObs = open_mpas_dataset(fileName=outFileName,
                                          calendar=calendar,
                                          timeVariableNames='xtime')
                key = (hemisphere, 'iceArea')
                obs[key] = self._replicate_cycle(plotVars[key], dsObs.IceArea,
                                                 calendar)

                key = (hemisphere, 'iceVolume')
                if hemisphere == 'NH':
                    outFileName = '{}/iceVolume{}.nc'.format(outFolder,
                                                             hemisphere)
                    combine_time_series_with_ncrcat(
                        obsFileNames['iceVolume'][hemisphere],
                        outFileName, logger=self.logger)
                    dsObs = open_mpas_dataset(fileName=outFileName,
                                              calendar=calendar,
                                              timeVariableNames='xtime')
                    obs[key] = self._replicate_cycle(plotVars[key],
                                                     dsObs.IceVol,
                                                     calendar)
                else:
                    obs[key] = None

            if preprocessedReferenceRunName != 'None':
                outFolder = '{}/preprocessed'.format(outputDirectory)
                inFilesPreprocessed = '{}/icearea.{}.year*.nc'.format(
                    preprocessedReferenceDirectory,
                    preprocessedReferenceRunName)

                outFileName = '{}/iceArea.nc'.format(outFolder)

                combine_time_series_with_ncrcat(inFilesPreprocessed,
                                                outFileName,
                                                logger=self.logger)
                dsPreprocessed = open_mpas_dataset(fileName=outFileName,
                                                   calendar=calendar,
                                                   timeVariableNames='xtime')
                dsPreprocessedTimeSlice = dsPreprocessed.sel(
                    Time=slice(timeStart, timeEnd))
                key = (hemisphere, 'iceArea')
                preprocessed[key] = dsPreprocessedTimeSlice[
                    'icearea_{}'.format(hemisphere.lower())]

                inFilesPreprocessed = '{}/icevol.{}.year*.nc'.format(
                    preprocessedReferenceDirectory,
                    preprocessedReferenceRunName)
                outFileName = '{}/iceVolume.nc'.format(outFolder)

                combine_time_series_with_ncrcat(inFilesPreprocessed,
                                                outFileName,
                                                logger=self.logger)
                dsPreprocessed = open_mpas_dataset(fileName=outFileName,
                                                   calendar=calendar,
                                                   timeVariableNames='xtime')
                dsPreprocessedTimeSlice = dsPreprocessed.sel(
                    Time=slice(timeStart, timeEnd))
                key = (hemisphere, 'iceVolume')
                preprocessed[key] = dsPreprocessedTimeSlice[
                    'icevolume_{}'.format(hemisphere.lower())]

            for variableName in ['iceArea', 'iceVolume']:
                key = (hemisphere, variableName)
                dsvalues = [plotVars[key]]
                legendText = [mainRunName]
                lineColors = ['k']
                lineWidths = [3]
                if compareWithObservations and key in obsLegend.keys():
                    dsvalues.append(obs[key])
                    legendText.append(obsLegend[key])
                    lineColors.append('b')
                    lineWidths.append(1.2)
                if preprocessedReferenceRunName != 'None':
                    dsvalues.append(preprocessed[key])
                    legendText.append(preprocessedReferenceRunName)
                    lineColors.append('purple')
                    lineWidths.append(1.2)

                if self.controlConfig is not None:
                    dsvalues.append(plotVarsRef[key])
                    legendText.append(controlRunName)
                    lineColors.append('r')
                    lineWidths.append(1.2)

                if config.has_option(sectionName, 'firstYearXTicks'):
                    firstYearXTicks = config.getint(sectionName,
                                                    'firstYearXTicks')
                else:
                    firstYearXTicks = None

                if config.has_option(sectionName, 'yearStrideXTicks'):
                    yearStrideXTicks = config.getint(sectionName,
                                                     'yearStrideXTicks')
                else:
                    yearStrideXTicks = None

                # separate plots for northern and southern hemispheres
                timeseries_analysis_plot(config, dsvalues,
                                         movingAveragePoints,
                                         title[key], xLabel,
                                         units[variableName],
                                         calendar=calendar,
                                         lineColors=lineColors,
                                         lineWidths=lineWidths,
                                         legendText=legendText,
                                         titleFontSize=titleFontSize,
                                         firstYearXTicks=firstYearXTicks,
                                         yearStrideXTicks=yearStrideXTicks)

                savefig(figureNameStd[key])

                filePrefix = '{}{}_{}'.format(variableName,
                                              hemisphere,
                                              mainRunName)
                thumbnailDescription = '{} {}'.format(
                    hemisphere, plotTitles[variableName])
                caption = 'Running mean of {}'.format(
                    thumbnailDescription)
                write_image_xml(
                    config,
                    filePrefix,
                    componentName='Sea Ice',
                    componentSubdirectory='sea_ice',
                    galleryGroup=galleryGroup,
                    groupLink=groupLink,
                    thumbnailDescription=thumbnailDescription,
                    imageDescription=caption,
                    imageCaption=caption)

                if polarPlot:
                    timeseries_analysis_plot_polar(
                        config,
                        dsvalues,
                        movingAveragePoints,
                        title[key],
                        lineColors=lineColors,
                        lineWidths=lineWidths,
                        legendText=legendText,
                        titleFontSize=titleFontSize)

                    savefig(figureNamePolar[key])

                    filePrefix = '{}{}_{}_polar'.format(variableName,
                                                        hemisphere,
                                                        mainRunName)
                    write_image_xml(
                        config,
                        filePrefix,
                        componentName='Sea Ice',
                        componentSubdirectory='sea_ice',
                        galleryGroup=galleryGroup,
                        groupLink=groupLink,
                        thumbnailDescription=thumbnailDescription,
                        imageDescription=caption,
                        imageCaption=caption)
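The four per-curve lists assembled above (data sets, legend entries, line colors, line widths) must stay the same length for the plotting call to work. A small helper like the hedged sketch below (purely illustrative, not part of MPAS-Analysis) keeps each curve's attributes together so the lists cannot drift out of sync:

def add_curve(dsvalues, legendText, lineColors, lineWidths,
              values, label, color, width):
    # append one curve's data and style atomically so the parallel
    # lists passed to timeseries_analysis_plot stay aligned
    dsvalues.append(values)
    legendText.append(label)
    lineColors.append(color)
    lineWidths.append(width)

# usage mirroring the preprocessed-reference branch above:
# add_curve(dsvalues, legendText, lineColors, lineWidths,
#           preprocessed[key], preprocessedReferenceRunName, 'purple', 1.2)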
Example #23
    def setup_and_check(self):  # {{{
        '''
        Perform steps to set up the analysis and check for errors in the setup.
        '''
        # Authors
        # -------
        # Xylar Asay-Davis

        # first, call setup_and_check from the base class (AnalysisTask),
        # which will perform some common setup, including storing:
        #     self.runDirectory , self.historyDirectory, self.plotsDirectory,
        #     self.namelist, self.runStreams, self.historyStreams,
        #     self.calendar
        super(MpasTimeSeriesTask, self).setup_and_check()

        config = self.config
        baseDirectory = build_config_full_path(
            config, 'output', 'timeSeriesSubdirectory')

        make_directories(baseDirectory)

        self.outputFile = '{}/{}.nc'.format(baseDirectory,
                                            self.fullTaskName)

        self.check_analysis_enabled(
            analysisOptionName='config_am_timeseriesstatsmonthly_enable',
            raiseException=True)

        # get a list of timeSeriesStats output files from the streams file,
        # reading only those that are between the start and end dates
        startDate = config.get(self.section, 'startDate')
        endDate = config.get(self.section, 'endDate')
        streamName = 'timeSeriesStatsMonthlyOutput'
        self.inputFiles = self.historyStreams.readpath(
            streamName, startDate=startDate, endDate=endDate,
            calendar=self.calendar)

        if len(self.inputFiles) == 0:
            raise IOError('No files were found in stream {} between {} and '
                          '{}.'.format(streamName, startDate, endDate))

        self.runMessage = '\nComputing MPAS time series from first year ' \
                          'plus files:\n' \
                          '    {} through\n    {}'.format(
                              os.path.basename(self.inputFiles[0]),
                              os.path.basename(self.inputFiles[-1]))

        # Make sure first year of data is included for computing anomalies
        if config.has_option('timeSeries', 'anomalyRefYear'):
            anomalyYear = config.getint('timeSeries', 'anomalyRefYear')
            anomalyStartDate = '{:04d}-01-01_00:00:00'.format(anomalyYear)
        else:
            anomalyStartDate = get_simulation_start_time(self.runStreams)
            anomalyYear = int(anomalyStartDate[0:4])

        anomalyEndDate = '{:04d}-12-31_23:59:59'.format(anomalyYear)
        firstYearInputFiles = self.historyStreams.readpath(
            streamName, startDate=anomalyStartDate,
            endDate=anomalyEndDate,
            calendar=self.calendar)
        for fileName in firstYearInputFiles:
            if fileName not in self.inputFiles:
                self.inputFiles.append(fileName)

        self.inputFiles = sorted(self.inputFiles)

        with xr.open_dataset(self.inputFiles[0]) as ds:
            self.allVariables = list(ds.data_vars.keys())
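The anomaly window above is built from fixed-width MPAS date stamps. A quick, standalone check of the formatting (the year value is just an example):

anomalyYear = 5
anomalyStartDate = '{:04d}-01-01_00:00:00'.format(anomalyYear)
anomalyEndDate = '{:04d}-12-31_23:59:59'.format(anomalyYear)
print(anomalyStartDate)  # 0005-01-01_00:00:00
print(anomalyEndDate)    # 0005-12-31_23:59:59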
Example #24
def get_remapped_mpas_climatology_file_name(config, season, componentName,
                                            climatologyName,
                                            comparisonGridName,
                                            op='avg'):  # {{{
    """
    Get the file name for a masked MPAS climatology

    Parameters
    ----------
    config :  ``MpasAnalysisConfigParser``
        Configuration options

    season : str
        One of the seasons in ``constants.monthDictionary``

    componentName : {'ocean', 'seaIce'}
        The MPAS component for which the climatology is being computed

    climatologyName : str
        The name of the climatology (typically the name of a field to mask
        and later remap)

    comparisonGridName : str
        The name of the comparison grid to use for remapping.  If it is one
        of the default comparison grid names ``{'latlon', 'antarctic',
        'arctic'}``, the full grid name is looked up via
        get_comparison_descriptor

    op : {'avg', 'min', 'max'}
         operator for monthly stats
    """
    # Authors
    # -------
    # Xylar Asay-Davis

    startYear = config.getint('climatology', 'startYear')
    endYear = config.getint('climatology', 'endYear')
    mpasMeshName = config.get('input', 'mpasMeshName')

    if componentName == 'ocean':
        ncclimoModel = 'mpaso'
    elif componentName == 'seaIce':
        ncclimoModel = 'mpascice'
    else:
        raise ValueError('component {} is not supported by ncclimo.\n'
                         'Check with Charlie Zender and Xylar Asay-Davis\n'
                         'about getting it added'.format(componentName))

    climatologyOpDirectory = get_climatology_op_directory(config, op)

    if comparisonGridName in ['latlon', 'antarctic', 'arctic']:
        comparisonDescriptor = get_comparison_descriptor(config,
                                                         comparisonGridName)
        comparisonFullMeshName = comparisonDescriptor.meshName
    else:
        comparisonFullMeshName = comparisonGridName

    stageDirectory = '{}/remapped'.format(climatologyOpDirectory)

    directory = '{}/{}_{}_to_{}'.format(stageDirectory, climatologyName,
                                        mpasMeshName, comparisonFullMeshName)

    make_directories(directory)

    monthValues = sorted(constants.monthDictionary[season])
    startMonth = monthValues[0]
    endMonth = monthValues[-1]

    suffix = '{:04d}{:02d}_{:04d}{:02d}_climo'.format(
        startYear, startMonth, endYear, endMonth)

    if season in constants.abrevMonthNames:
        season = '{:02d}'.format(monthValues[0])
    fileName = '{}/{}_{}_{}.nc'.format(
        directory, ncclimoModel, season, suffix)

    return fileName  # }}}
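As a concrete illustration of the suffix logic above, consider a hypothetical JFM climatology spanning years 1 through 20 (the JFM month values are assumed here rather than looked up in constants.monthDictionary):

monthValues = sorted([1, 2, 3])  # JFM
startYear, endYear = 1, 20
suffix = '{:04d}{:02d}_{:04d}{:02d}_climo'.format(
    startYear, monthValues[0], endYear, monthValues[-1])
print(suffix)  # 000101_002003_climo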
Example #25
def get_remapper(config, sourceDescriptor, comparisonDescriptor,
                 mappingFilePrefix, method, logger=None):  # {{{
    """
    Given config options and descriptions of the source and comparison grids,
    returns a ``pyremap.Remapper`` object that can be used to remap from source
    files or data sets to corresponding data sets on the comparison grid.

    If necessary, creates the mapping file containing weights and indices
    needed to perform remapping.

    Parameters
    ----------
    config :  instance of ``MpasAnalysisConfigParser``
        Contains configuration options

    sourceDescriptor : ``MeshDescriptor`` subclass object
        A description of the source mesh or grid

    comparisonDescriptor : ``MeshDescriptor`` subclass object
        A description of the comparison grid

    mappingFilePrefix : str
        A prefix to be prepended to the mapping file name

    method : {'bilinear', 'neareststod', 'conserve'}
        The method of interpolation used.

    logger : ``logging.Logger``, optional
        A logger to which ncclimo output should be redirected

    Returns
    -------
    remapper : ``pyremap.Remapper`` object
        A remapper that can be used to remap files or data sets from the source
        grid or mesh to the comparison grid.
    """
    # Authors
    # -------
    # Xylar Asay-Davis

    mappingFileName = None

    if not _matches_comparison(sourceDescriptor, comparisonDescriptor):
        # we need to remap because the grids don't match

        mappingBaseName = '{}_{}_to_{}_{}.nc'.format(
            mappingFilePrefix,
            sourceDescriptor.meshName,
            comparisonDescriptor.meshName,
            method)

        tryCustom = config.get('diagnostics', 'customDirectory') != 'none'
        if tryCustom:
            # first see if mapping files are in the custom directory
            mappingSubdirectory = build_config_full_path(
                config, 'diagnostics', 'mappingSubdirectory',
                baseDirectoryOption='customDirectory')

            mappingFileName = '{}/{}'.format(mappingSubdirectory,
                                             mappingBaseName)
        if not tryCustom or not os.path.exists(mappingFileName):
            # second see if mapping files are in the base directory

            mappingSubdirectory = build_config_full_path(
                config, 'diagnostics', 'mappingSubdirectory',
                baseDirectoryOption='baseDirectory')

            mappingFileName = '{}/{}'.format(mappingSubdirectory,
                                             mappingBaseName)

        if not os.path.exists(mappingFileName):
            # we don't have a mapping file yet, so get ready to create one
            # in the output subfolder if needed
            mappingSubdirectory = \
                build_config_full_path(config, 'output',
                                       'mappingSubdirectory')
            make_directories(mappingSubdirectory)
            mappingFileName = '{}/{}'.format(mappingSubdirectory,
                                             mappingBaseName)

    remapper = Remapper(sourceDescriptor, comparisonDescriptor,
                        mappingFileName)

    remapper.build_mapping_file(method=method, logger=logger)

    return remapper  # }}}
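The three-way lookup above (custom diagnostics directory, then base diagnostics directory, then a writable path in the output tree) can be distilled into a generic helper. This is a hedged sketch, not MPAS-Analysis code; the paths in the usage comment are placeholders:

import os

def resolve_mapping_file(candidates, creationPath):
    # return the first existing candidate, or a path where a new
    # mapping file should be created (flagging that creation is needed)
    for path in candidates:
        if path is not None and os.path.exists(path):
            return path, False
    return creationPath, True

# fileName, mustCreate = resolve_mapping_file(
#     ['/custom/mapping/map.nc', '/base/mapping/map.nc'],
#     '/output/mapping/map.nc')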
Example #26
    def _compute_moc_climo_postprocess(self):  # {{{
        '''compute mean MOC streamfunction as a post-process'''

        config = self.config

        dvEdge, areaCell, refBottomDepth, latCell, nVertLevels, \
            refTopDepth, refLayerThickness = self._load_mesh()

        self.regionNames = config.getExpression(self.sectionName,
                                                'regionNames')

        # Load basin region related variables and save them to dictionary
        mpasMeshName = config.get('input', 'mpasMeshName')
        regionMaskDirectory = config.get('regions', 'regionMaskDirectory')

        regionMaskFile = '{}/{}_SingleRegionAtlanticWTransportTransects_' \
                         'masks.nc'.format(regionMaskDirectory, mpasMeshName)

        if not os.path.exists(regionMaskFile):
            raise IOError('Regional masking file {} for MOC calculation '
                          'does not exist'.format(regionMaskFile))
        iRegion = 0
        self.dictRegion = {}
        for region in self.regionNames:
            self.logger.info('\n  Reading region and transect mask for '
                             '{}...'.format(region))
            ncFileRegional = netCDF4.Dataset(regionMaskFile, mode='r')
            maxEdgesInTransect = \
                ncFileRegional.dimensions['maxEdgesInTransect'].size
            transectEdgeMaskSigns = \
                ncFileRegional.variables['transectEdgeMaskSigns'][:, iRegion]
            transectEdgeGlobalIDs = \
                ncFileRegional.variables['transectEdgeGlobalIDs'][iRegion, :]
            regionCellMask = \
                ncFileRegional.variables['regionCellMasks'][:, iRegion]
            ncFileRegional.close()
            iRegion += 1

            indRegion = np.where(regionCellMask == 1)
            self.dictRegion[region] = {
                'indices': indRegion,
                'cellMask': regionCellMask,
                'maxEdgesInTransect': maxEdgesInTransect,
                'transectEdgeMaskSigns': transectEdgeMaskSigns,
                'transectEdgeGlobalIDs': transectEdgeGlobalIDs
            }
        # Add a Global entry with regionCellMask=1 everywhere so the
        # global MOC can use the same algorithm as the regional MOC
        self.dictRegion['Global'] = {'cellMask': np.ones(np.size(latCell))}
        self.regionNames.append('Global')

        # Compute and plot annual climatology of MOC streamfunction
        self.logger.info('\n  Compute and/or plot post-processed MOC '
                         'climatological streamfunction...')
        outputDirectory = build_config_full_path(
            config, 'output', 'mpasClimatologySubdirectory')

        make_directories(outputDirectory)

        outputFileClimo = '{}/mocStreamfunction_years{:04d}-{:04d}.nc'.format(
            outputDirectory, self.startYearClimo, self.endYearClimo)
        if not os.path.exists(outputFileClimo):
            self.logger.info('   Load data...')

            climatologyFileName = self.mpasClimatologyTask.get_file_name(
                season='ANN')
            annualClimatology = xr.open_dataset(climatologyFileName)
            annualClimatology = annualClimatology.isel(Time=0)

            if self.includeBolus:
                annualClimatology['avgNormalVelocity'] = \
                    annualClimatology['timeMonthly_avg_normalVelocity'] + \
                    annualClimatology['timeMonthly_avg_normalGMBolusVelocity']

                annualClimatology['avgVertVelocityTop'] = \
                    annualClimatology['timeMonthly_avg_vertVelocityTop'] + \
                    annualClimatology['timeMonthly_avg_vertGMBolusVelocityTop']
            else:
                # rename some variables for convenience
                annualClimatology = annualClimatology.rename({
                    'timeMonthly_avg_normalVelocity':
                    'avgNormalVelocity',
                    'timeMonthly_avg_vertVelocityTop':
                    'avgVertVelocityTop'
                })

            # Convert to numpy arrays
            # (can result in a memory error for large array size)
            horizontalVel = annualClimatology.avgNormalVelocity.values
            verticalVel = annualClimatology.avgVertVelocityTop.values
            velArea = verticalVel * areaCell[:, np.newaxis]

            # Create dictionary for MOC climatology (NB: need this form
            # in order to convert it to xarray dataset later in the script)
            self.depth = refTopDepth
            self.lat = {}
            self.moc = {}
            for region in self.regionNames:
                self.logger.info('   Compute {} MOC...'.format(region))
                self.logger.info('    Compute transport through region '
                                 'southern transect...')
                if region == 'Global':
                    transportZ = np.zeros(nVertLevels)
                else:
                    maxEdgesInTransect = \
                        self.dictRegion[region]['maxEdgesInTransect']
                    transectEdgeGlobalIDs = \
                        self.dictRegion[region]['transectEdgeGlobalIDs']
                    transectEdgeMaskSigns = \
                        self.dictRegion[region]['transectEdgeMaskSigns']
                    transportZ = self._compute_transport(
                        maxEdgesInTransect, transectEdgeGlobalIDs,
                        transectEdgeMaskSigns, nVertLevels, dvEdge,
                        refLayerThickness, horizontalVel)

                regionCellMask = self.dictRegion[region]['cellMask']
                latBinSize = \
                    config.getExpression(self.sectionName,
                                         'latBinSize{}'.format(region))
                if region == 'Global':
                    latBins = np.arange(-90.0, 90.1, latBinSize)
                else:
                    indRegion = self.dictRegion[region]['indices']
                    latBins = latCell[indRegion]
                    latBins = np.arange(np.amin(latBins),
                                        np.amax(latBins) + latBinSize,
                                        latBinSize)
                mocTop = self._compute_moc(latBins, nVertLevels, latCell,
                                           regionCellMask, transportZ, velArea)

                # Store computed MOC to dictionary
                self.lat[region] = latBins
                self.moc[region] = mocTop

            # Save to file
            self.logger.info('   Save global and regional MOC to file...')
            ncFile = netCDF4.Dataset(outputFileClimo, mode='w')
            # create dimensions
            ncFile.createDimension('nz', len(refTopDepth))
            for region in self.regionNames:
                latBins = self.lat[region]
                mocTop = self.moc[region]
                ncFile.createDimension('nx{}'.format(region), len(latBins))
                # create variables
                x = ncFile.createVariable('lat{}'.format(region), 'f4',
                                          ('nx{}'.format(region), ))
                x.description = 'latitude bins for MOC {}'\
                                ' streamfunction'.format(region)
                x.units = 'degrees (-90 to 90)'
                y = ncFile.createVariable('moc{}'.format(region), 'f4',
                                          ('nz', 'nx{}'.format(region)))
                y.description = 'MOC {} streamfunction, annual'\
                                ' climatology'.format(region)
                y.units = 'Sv (10^6 m^3/s)'
                # save variables
                x[:] = latBins
                y[:, :] = mocTop
            depth = ncFile.createVariable('depth', 'f4', ('nz', ))
            depth.description = 'depth'
            depth.units = 'meters'
            depth[:] = self.depth
            ncFile.close()
        else:
            # Read from file
            self.logger.info('   Read previously computed MOC streamfunction '
                             'from file...')
            ncFile = netCDF4.Dataset(outputFileClimo, mode='r')
            self.depth = ncFile.variables['depth'][:]
            self.lat = {}
            self.moc = {}
            for region in self.regionNames:
                self.lat[region] = ncFile.variables['lat{}'.format(region)][:]
                self.moc[region] = \
                    ncFile.variables['moc{}'.format(region)][:, :]
            ncFile.close()
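The helper _compute_moc is not shown in this excerpt. Below is a hedged sketch of the standard post-processing approach (cumulative vertical transport binned by latitude, seeded with the southern-transect transport); the array orientations and masking conventions here are assumptions, not the verbatim implementation:

import numpy as np

def compute_moc_sketch(latBins, latCell, regionCellMask, transportZ,
                       velArea):
    # velArea: vertical velocity times cell area, shape (nCells, nz + 1)
    # transportZ: transport through the southern transect, length nz
    nLat = len(latBins)
    nz = len(transportZ)
    mocTop = np.zeros((nLat, nz + 1))
    # seed the southernmost bin with the cumulative transect transport
    mocTop[0, 1:] = np.cumsum(transportZ)
    for iLat in range(1, nLat):
        inBin = ((regionCellMask == 1) &
                 (latCell >= latBins[iLat - 1]) &
                 (latCell < latBins[iLat]))
        mocTop[iLat, :] = (mocTop[iLat - 1, :] +
                           velArea[inBin, :].sum(axis=0))
    # convert m^3/s to Sv and transpose to (depth, latitude)
    return 1e-6 * mocTop.T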
Example #27
    def run_task(self):  # {{{
        """
        Combine the time series
        """
        # Authors
        # -------
        # Xylar Asay-Davis

        timeSeriesName = self.timeSeriesName

        outputDirectory = '{}/{}/'.format(
            build_config_full_path(self.config, 'output',
                                   'timeseriesSubdirectory'),
            timeSeriesName)

        outputFileName = '{}/regionalProfiles_{}_{:04d}-{:04d}.nc'.format(
            outputDirectory, timeSeriesName, self.startYears[0],
            self.endYears[-1])

        useExisting = False
        ds = None
        if os.path.exists(outputFileName):
            ds = xr.open_dataset(outputFileName, decode_times=False)
            if ds.sizes['Time'] > 0:
                useExisting = True
            else:
                ds.close()

        if not useExisting:

            inFileNames = []
            for startYear, endYear in zip(self.startYears, self.endYears):
                inFileName = '{}/regionalProfiles_{}_{:04d}-{:04d}.nc'.format(
                    outputDirectory, timeSeriesName, startYear, endYear)
                inFileNames.append(inFileName)

            ds = xr.open_mfdataset(inFileNames, combine='nested',
                                   concat_dim='Time', decode_times=False)

            ds.load()

            ds['totalArea'] = ds['totalArea'].isel(Time=0)

            write_netcdf(ds, outputFileName)

        regionNames = ds['regionNames']
        ds = ds.drop_vars('regionNames')

        profileMask = ds['totalArea'] > 0

        outputDirectory = build_config_full_path(self.config, 'output',
                                                 'profilesSubdirectory')

        make_directories(outputDirectory)

        for season in self.seasons:
            outputFileName = \
                '{}/{}_{}_{:04d}-{:04d}.nc'.format(
                    outputDirectory, timeSeriesName, season,
                    self.startYears[0], self.endYears[-1])
            if not os.path.exists(outputFileName):
                monthValues = constants.monthDictionary[season]
                dsSeason = compute_climatology(ds, monthValues,
                                               calendar=self.calendar,
                                               maskVaries=False)

                for field in self.fields:
                    prefix = field['prefix']

                    mean = dsSeason['{}_mean'.format(prefix)].where(
                        profileMask)
                    meanSquared = \
                        dsSeason['{}_meanSquared'.format(prefix)].where(
                            profileMask)
                    stdName = '{}_std'.format(prefix)

                    dsSeason[stdName] = np.sqrt(meanSquared - mean**2).where(
                        profileMask)
                    dsSeason['{}_mean'.format(prefix)] = mean

                dsSeason.coords['regionNames'] = regionNames
                write_netcdf(dsSeason, outputFileName)
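The standard deviation above relies on the identity var(x) = E[x^2] - (E[x])^2, which is why both a mean and a mean-squared field are carried in the time series. A tiny standalone check:

import numpy as np

x = np.array([1.0, 2.0, 4.0])
mean = x.mean()
meanSquared = (x ** 2).mean()
std = np.sqrt(meanSquared - mean ** 2)
print(np.isclose(std, x.std()))  # True (population standard deviation)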
Example #28
    def run_task(self):  # {{{
        """
        Plots time-series output of Antarctic sub-ice-shelf melt rates.
        """
        # Authors
        # -------
        # Xylar Asay-Davis, Stephen Price

        self.logger.info("\nPlotting Antarctic melt rate time series for "
                         "{}...".format(self.iceShelf))

        self.logger.info('  Load melt rate data...')

        config = self.config
        calendar = self.calendar

        iceShelfMasksFile = self.iceShelfMasksFile

        fcAll = read_feature_collection(iceShelfMasksFile)

        fc = FeatureCollection()
        for feature in fcAll.features:
            if feature['properties']['name'] == self.iceShelf:
                fc.add_feature(feature)
                break

        totalMeltFlux, meltRates = self._load_ice_shelf_fluxes(config)

        plotControl = self.controlConfig is not None
        if plotControl:
            controlRunName = self.controlConfig.get('runs', 'mainRunName')

            refTotalMeltFlux, refMeltRates = \
                self._load_ice_shelf_fluxes(self.controlConfig)

        # Load observations from multiple files and put in dictionary based
        # on shelf keyname
        observationsDirectory = build_obs_path(config, 'ocean',
                                               'meltSubdirectory')
        obsFileNameDict = {'Rignot et al. (2013)':
                           'Rignot_2013_melt_rates_20200623.csv',
                           'Rignot et al. (2013) SS':
                           'Rignot_2013_melt_rates_SS_20200623.csv'}

        obsDict = {}  # dict for storing dict of obs data
        for obsName in obsFileNameDict:
            obsFileName = '{}/{}'.format(observationsDirectory,
                                         obsFileNameDict[obsName])
            obsDict[obsName] = {}
            obsFile = csv.reader(open(obsFileName, newline=''))
            next(obsFile, None)  # skip the header line
            for line in obsFile:  # some later useful values commented out
                shelfName = line[0]
                if shelfName != self.iceShelf:
                    continue

                # surveyArea = line[1]
                meltFlux = float(line[2])
                meltFluxUncertainty = float(line[3])
                meltRate = float(line[4])
                meltRateUncertainty = float(line[5])
                # actualArea = float( line[6] )  # actual area here is in sq km

                # build dict of obs. keyed to filename description
                # (which will be used for plotting)
                obsDict[obsName] = {
                    'meltFlux': meltFlux,
                    'meltFluxUncertainty': meltFluxUncertainty,
                    'meltRate': meltRate,
                    'meltRateUncertainty': meltRateUncertainty}
                break

        # If areas from the obs file are used, they need to be converted
        # from sq km to sq m

        mainRunName = config.get('runs', 'mainRunName')
        movingAverageMonths = config.getint('timeSeriesAntarcticMelt',
                                            'movingAverageMonths')

        outputDirectory = build_config_full_path(config, 'output',
                                                 'timeseriesSubdirectory')

        make_directories(outputDirectory)

        self.logger.info('  Make plots...')

        # get obs melt flux and unc. for shelf (similar for rates)
        obsMeltFlux = []
        obsMeltFluxUnc = []
        obsMeltRate = []
        obsMeltRateUnc = []
        for obsName in obsDict:
            if len(obsDict[obsName]) > 0:
                obsMeltFlux.append(
                    obsDict[obsName]['meltFlux'])
                obsMeltFluxUnc.append(
                    obsDict[obsName]['meltFluxUncertainty'])
                obsMeltRate.append(
                    obsDict[obsName]['meltRate'])
                obsMeltRateUnc.append(
                    obsDict[obsName]['meltRateUncertainty'])
            else:
                # append None so this particular obs won't plot
                self.logger.warning('{} observations not available for '
                                    '{}'.format(obsName, self.iceShelf))
                obsMeltFlux.append(None)
                obsMeltFluxUnc.append(None)
                obsMeltRate.append(None)
                obsMeltRateUnc.append(None)

        title = self.iceShelf.replace('_', ' ')

        xLabel = 'Time (yr)'
        yLabel = 'Melt Flux (GT/yr)'

        timeSeries = totalMeltFlux.isel(nRegions=self.regionIndex)

        filePrefix = 'melt_flux_{}'.format(self.iceShelf.replace(' ', '_'))
        outFileName = '{}/{}.png'.format(self.plotsDirectory, filePrefix)

        fields = [timeSeries]
        lineColors = ['k']
        lineWidths = [2.5]
        legendText = [mainRunName]
        if plotControl:
            fields.append(refTotalMeltFlux.isel(nRegions=self.regionIndex))
            lineColors.append('r')
            lineWidths.append(1.2)
            legendText.append(controlRunName)

        fig = timeseries_analysis_plot(config, fields, calendar=calendar,
                                       title=title, xlabel=xLabel,
                                       ylabel=yLabel,
                                       movingAveragePoints=movingAverageMonths,
                                       lineColors=lineColors,
                                       lineWidths=lineWidths,
                                       legendText=legendText,
                                       obsMean=obsMeltFlux,
                                       obsUncertainty=obsMeltFluxUnc,
                                       obsLegend=list(obsDict.keys()))

        # do this before the inset because otherwise it moves the inset
        # and cartopy doesn't play too well with tight_layout anyway
        plt.tight_layout()

        add_inset(fig, fc, width=2.0, height=2.0)

        savefig(outFileName)

        caption = 'Running Mean of Total Melt Flux under Ice ' \
                  'Shelves in the {} Region'.format(title)
        write_image_xml(
            config=config,
            filePrefix=filePrefix,
            componentName='Ocean',
            componentSubdirectory='ocean',
            galleryGroup='Antarctic Melt Time Series',
            groupLink='antmelttime',
            gallery='Total Melt Flux',
            thumbnailDescription=title,
            imageDescription=caption,
            imageCaption=caption)

        xLabel = 'Time (yr)'
        yLabel = 'Melt Rate (m/yr)'

        timeSeries = meltRates.isel(nRegions=self.regionIndex)

        filePrefix = 'melt_rate_{}'.format(self.iceShelf.replace(' ', '_'))
        outFileName = '{}/{}.png'.format(self.plotsDirectory, filePrefix)

        fields = [timeSeries]
        lineColors = ['k']
        lineWidths = [2.5]
        legendText = [mainRunName]
        if plotControl:
            fields.append(refMeltRates.isel(nRegions=self.regionIndex))
            lineColors.append('r')
            lineWidths.append(1.2)
            legendText.append(controlRunName)

        if config.has_option(self.taskName, 'firstYearXTicks'):
            firstYearXTicks = config.getint(self.taskName,
                                            'firstYearXTicks')
        else:
            firstYearXTicks = None

        if config.has_option(self.taskName, 'yearStrideXTicks'):
            yearStrideXTicks = config.getint(self.taskName,
                                             'yearStrideXTicks')
        else:
            yearStrideXTicks = None

        fig = timeseries_analysis_plot(config, fields, calendar=calendar,
                                       title=title, xlabel=xLabel,
                                       ylabel=yLabel,
                                       movingAveragePoints=movingAverageMonths,
                                       lineColors=lineColors,
                                       lineWidths=lineWidths,
                                       legendText=legendText,
                                       firstYearXTicks=firstYearXTicks,
                                       yearStrideXTicks=yearStrideXTicks,
                                       obsMean=obsMeltRate,
                                       obsUncertainty=obsMeltRateUnc,
                                       obsLegend=list(obsDict.keys()))

        # do this before the inset because otherwise it moves the inset
        # and cartopy doesn't play too well with tight_layout anyway
        plt.tight_layout()

        add_inset(fig, fc, width=2.0, height=2.0)

        savefig(outFileName)

        caption = 'Running Mean of Area-averaged Melt Rate under Ice ' \
                  'Shelves in the {} Region'.format(title)
        write_image_xml(
            config=config,
            filePrefix=filePrefix,
            componentName='Ocean',
            componentSubdirectory='ocean',
            galleryGroup='Antarctic Melt Time Series',
            groupLink='antmelttime',
            gallery='Area-averaged Melt Rate',
            thumbnailDescription=title,
            imageDescription=caption,
            imageCaption=caption)
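The observation files read above are plain CSV with one header row and fixed column positions. A hedged sketch of pulling one shelf's melt numbers with the csv module (the shelf name is a placeholder, and the column layout follows the commented annotations above):

import csv

with open('Rignot_2013_melt_rates_20200623.csv', newline='') as f:
    reader = csv.reader(f)
    next(reader, None)  # skip the header line
    for line in reader:
        if line[0] == 'Ross':  # hypothetical shelf name in column 0
            meltFlux = float(line[2])
            meltFluxUncertainty = float(line[3])
            meltRate = float(line[4])
            meltRateUncertainty = float(line[5])
            break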
Example #29
    def get_file_name(self, stage, season=None, comparisonGridName=None):
        # {{{
        """
        Given config options, the name of a field and a string identifying the
        months in a seasonal climatology, returns the full path for MPAS
        climatology files before and after remapping.

        Parameters
        ----------
        stage : {'original', 'climatology', 'remapped'}
            The stage of the masking and remapping process

        season : str, optional
            One of the seasons in ``constants.monthDictionary``

        comparisonGridName : {'latlon', 'antarctic', 'arctic'}, optional
            The name of the comparison grid to use for remapping.

        Returns
        -------
        fileName : str
            The path to the climatology file for the specified season.
        """
        # Authors
        # -------
        # Xylar Asay-Davis

        config = self.config
        obsSection = '{}Observations'.format(self.componentName)
        if comparisonGridName is None:
            # just needed for getting the obs. grid name, so doesn't matter
            # which comparison grid
            remapper = self.remappers[self.comparisonGridNames[0]]
        else:
            remapper = self.remappers[comparisonGridName]

        obsGridName = remapper.sourceDescriptor.meshName

        outFilePrefix = self.outFilePrefix

        if stage in ['original', 'climatology']:
            climatologyDirectory = build_config_full_path(
                config=config,
                section='output',
                relativePathOption='climatologySubdirectory',
                relativePathSection=obsSection)

            make_directories(climatologyDirectory)

            if stage == 'original':
                fileName = '{}/{}_{}.nc'.format(climatologyDirectory,
                                                outFilePrefix, obsGridName)
            else:
                fileName = '{}/{}_{}_{}.nc'.format(climatologyDirectory,
                                                   outFilePrefix, obsGridName,
                                                   season)

        elif stage == 'remapped':
            remappedDirectory = build_config_full_path(
                config=config,
                section='output',
                relativePathOption='remappedClimSubdirectory',
                relativePathSection=obsSection)

            make_directories(remappedDirectory)

            comparisonGridName = remapper.destinationDescriptor.meshName
            fileName = '{}/{}_{}_to_{}_{}.nc'.format(remappedDirectory,
                                                     outFilePrefix,
                                                     obsGridName,
                                                     comparisonGridName,
                                                     season)

        else:
            raise ValueError('Unknown stage {}'.format(stage))

        return fileName  # }}}
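A hedged usage sketch of the three stages handled by get_file_name ('task' stands for an instance of the class that defines it; the season and grid names are examples):

originalFileName = task.get_file_name(stage='original')
climoFileName = task.get_file_name(stage='climatology', season='JFM')
remappedFileName = task.get_file_name(stage='remapped', season='JFM',
                                      comparisonGridName='latlon')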
Example #30
    def run_task(self):  # {{{
        """
        Performs analysis of the time-series output of Antarctic sub-ice-shelf
        melt rates.
        """
        # Authors
        # -------
        # Xylar Asay-Davis, Stephen Price

        self.logger.info("\nPlotting Antarctic melt rate time series...")

        self.logger.info('  Load melt rate data...')

        config = self.config
        calendar = self.calendar

        totalMeltFlux, meltRates = self._compute_ice_shelf_fluxes()

        plotRef = self.refConfig is not None
        if plotRef:
            refRunName = self.refConfig.get('runs', 'mainRunName')

            refTotalMeltFlux, refMeltRates = \
                self._load_ice_shelf_fluxes(self.refConfig)

        # Load observations from multiple files and put in dictionary based
        # on shelf keyname
        observationsDirectory = build_config_full_path(config,
                                                       'oceanObservations',
                                                       'meltSubdirectory')
        obsFileNameDict = {
            'Rignot et al. (2013)': 'Rignot_2013_melt_rates.csv',
            'Rignot et al. (2013) SS': 'Rignot_2013_melt_rates_SS.csv'
        }

        obsDict = {}  # dict for storing dict of obs data
        for obsName in obsFileNameDict:
            obsFileName = '{}/{}'.format(observationsDirectory,
                                         obsFileNameDict[obsName])
            obsDict[obsName] = {}
            obsFile = csv.reader(open(obsFileName, newline=''))
            next(obsFile, None)  # skip the header line
            for line in obsFile:  # some later useful values commented out
                shelfName = line[0]
                # surveyArea = line[1]
                meltFlux = float(line[2])
                meltFluxUncertainty = float(line[3])
                meltRate = float(line[4])
                meltRateUncertainty = float(line[5])
                # actualArea = float( line[6] )  # actual area here is in sq km

                # build dict of obs. keyed to filename description
                # (which will be used for plotting)
                obsDict[obsName][shelfName] = {
                    'meltFlux': meltFlux,
                    'meltFluxUncertainty': meltFluxUncertainty,
                    'meltRate': meltRate,
                    'meltRateUncertainty': meltRateUncertainty
                }

        # If areas from the obs file are used, they need to be converted
        # from sq km to sq m

        mainRunName = config.get('runs', 'mainRunName')
        movingAverageMonths = config.getint('timeSeriesAntarcticMelt',
                                            'movingAverageMonths')

        nRegions = totalMeltFlux.sizes['nRegions']

        outputDirectory = build_config_full_path(config, 'output',
                                                 'timeseriesSubdirectory')

        make_directories(outputDirectory)

        self.logger.info('  Make plots...')
        for iRegion in range(nRegions):

            regionName = self.iceShelvesToPlot[iRegion]

            # get obs melt flux and unc. for shelf (similar for rates)
            obsMeltFlux = []
            obsMeltFluxUnc = []
            obsMeltRate = []
            obsMeltRateUnc = []
            for obsName in obsDict:
                if regionName in obsDict[obsName]:
                    obsMeltFlux.append(
                        obsDict[obsName][regionName]['meltFlux'])
                    obsMeltFluxUnc.append(
                        obsDict[obsName][regionName]['meltFluxUncertainty'])
                    obsMeltRate.append(
                        obsDict[obsName][regionName]['meltRate'])
                    obsMeltRateUnc.append(
                        obsDict[obsName][regionName]['meltRateUncertainty'])
                else:
                    # append None so this particular obs won't plot
                    self.logger.warning('{} observations not available for '
                                        '{}'.format(obsName, regionName))
                    obsMeltFlux.append(None)
                    obsMeltFluxUnc.append(None)
                    obsMeltRate.append(None)
                    obsMeltRateUnc.append(None)

            title = regionName.replace('_', ' ')

            regionName = regionName.replace(' ', '_')

            xLabel = 'Time (yr)'
            yLabel = 'Melt Flux (GT/yr)'

            timeSeries = totalMeltFlux.isel(nRegions=iRegion)

            filePrefix = 'melt_flux_{}'.format(regionName)
            figureName = '{}/{}.png'.format(self.plotsDirectory, filePrefix)

            fields = [timeSeries]
            lineColors = ['k']
            lineWidths = [2.5]
            legendText = [mainRunName]
            if plotRef:
                fields.append(refTotalMeltFlux.isel(nRegions=iRegion))
                lineColors.append('r')
                lineWidths.append(1.2)
                legendText.append(refRunName)

            timeseries_analysis_plot(config,
                                     fields,
                                     movingAverageMonths,
                                     title,
                                     xLabel,
                                     yLabel,
                                     figureName,
                                     calendar=calendar,
                                     lineColors=lineColors,
                                     lineWidths=lineWidths,
                                     legendText=legendText,
                                     obsMean=obsMeltFlux,
                                     obsUncertainty=obsMeltFluxUnc,
                                     obsLegend=list(obsDict.keys()))

            caption = 'Running Mean of Total Melt Flux under Ice ' \
                      'Shelves in the {} Region'.format(title)
            write_image_xml(config=config,
                            filePrefix=filePrefix,
                            componentName='Ocean',
                            componentSubdirectory='ocean',
                            galleryGroup='Antarctic Melt Time Series',
                            groupLink='antmelttime',
                            gallery='Total Melt Flux',
                            thumbnailDescription=title,
                            imageDescription=caption,
                            imageCaption=caption)

            xLabel = 'Time (yr)'
            yLabel = 'Melt Rate (m/yr)'

            timeSeries = meltRates.isel(nRegions=iRegion)

            filePrefix = 'melt_rate_{}'.format(regionName)
            figureName = '{}/{}.png'.format(self.plotsDirectory, filePrefix)

            fields = [timeSeries]
            lineColors = ['k']
            lineWidths = [2.5]
            legendText = [mainRunName]
            if plotRef:
                fields.append(refMeltRates.isel(nRegions=iRegion))
                lineColors.append('r')
                lineWidths.append(1.2)
                legendText.append(refRunName)

            if config.has_option(self.taskName, 'firstYearXTicks'):
                firstYearXTicks = config.getint(self.taskName,
                                                'firstYearXTicks')
            else:
                firstYearXTicks = None

            if config.has_option(self.taskName, 'yearStrideXTicks'):
                yearStrideXTicks = config.getint(self.taskName,
                                                 'yearStrideXTicks')
            else:
                yearStrideXTicks = None

            timeseries_analysis_plot(config,
                                     fields,
                                     movingAverageMonths,
                                     title,
                                     xLabel,
                                     yLabel,
                                     figureName,
                                     calendar=calendar,
                                     lineColors=lineColors,
                                     lineWidths=lineWidths,
                                     legendText=legendText,
                                     obsMean=obsMeltRate,
                                     obsUncertainty=obsMeltRateUnc,
                                     obsLegend=list(obsDict.keys()),
                                     firstYearXTicks=firstYearXTicks,
                                     yearStrideXTicks=yearStrideXTicks)

            caption = 'Running Mean of Area-averaged Melt Rate under Ice ' \
                      'Shelves in the {} Region'.format(title)
            write_image_xml(config=config,
                            filePrefix=filePrefix,
                            componentName='Ocean',
                            componentSubdirectory='ocean',
                            galleryGroup='Antarctic Melt Time Series',
                            groupLink='antmelttime',
                            gallery='Area-averaged Melt Rate',
                            thumbnailDescription=title,
                            imageDescription=caption,
                            imageCaption=caption)
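The has_option/getint fallback for firstYearXTicks and yearStrideXTicks appears in several tasks above. A hedged helper that distills the pattern (purely illustrative, not part of MPAS-Analysis):

def get_optional_int(config, section, option, default=None):
    # return the integer config option if present, else the default
    if config.has_option(section, option):
        return config.getint(section, option)
    return default

# firstYearXTicks = get_optional_int(config, self.taskName,
#                                    'firstYearXTicks')
# yearStrideXTicks = get_optional_int(config, self.taskName,
#                                     'yearStrideXTicks')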