    def test_start_end(self):
        fileName = str(self.datadir.join('example_jan_feb.nc'))
        timestr = ['xtime_start', 'xtime_end']
        variableList = \
            ['time_avg_avgValueWithinOceanRegion_avgSurfaceTemperature']

        for calendar in ['gregorian', 'gregorian_noleap']:
            # all dates
            ds = open_mpas_dataset(fileName=fileName,
                                   calendar=calendar,
                                   timeVariableNames=timestr,
                                   variableList=variableList,
                                   startDate='0001-01-01',
                                   endDate='9999-12-31')
            self.assertEqual(len(ds.Time), 2)

            # just the first date
            ds = open_mpas_dataset(fileName=fileName,
                                   calendar=calendar,
                                   timeVariableNames=timestr,
                                   variableList=variableList,
                                   startDate='0005-01-01',
                                   endDate='0005-02-01')
            self.assertEqual(len(ds.Time), 1)

            # just the second date
            ds = open_mpas_dataset(fileName=fileName,
                                   calendar=calendar,
                                   timeVariableNames=timestr,
                                   variableList=variableList,
                                   startDate='0005-02-01',
                                   endDate='0005-03-01')
            self.assertEqual(len(ds.Time), 1)
    def test_open_process_climatology(self):
        fileName = str(self.datadir.join('timeSeries.nc'))
        calendar = 'gregorian_noleap'
        open_mpas_dataset(
            fileName=fileName,
            calendar=calendar,
            timeVariableNames=['xtime_startMonthly', 'xtime_endMonthly'],
            variableList=['timeMonthly_avg_tThreshMLD'])
Example No. 3
    def _compute_area_vol(self):  # {{{
        '''
        Compute part of the time series of sea ice volume and area, given time
        indices to process.
        '''

        outFileNames = {}
        for hemisphere in ['NH', 'SH']:
            baseDirectory = build_config_full_path(self.config, 'output',
                                                   'timeSeriesSubdirectory')

            make_directories(baseDirectory)

            outFileName = '{}/seaIceAreaVol{}.nc'.format(
                baseDirectory, hemisphere)
            outFileNames[hemisphere] = outFileName

        dsTimeSeries = {}
        dsMesh = xr.open_dataset(self.restartFileName)
        dsMesh = subset_variables(dsMesh, variableList=['latCell', 'areaCell'])
        # Load data
        ds = open_mpas_dataset(fileName=self.inputFile,
                               calendar=self.calendar,
                               variableList=self.variableList,
                               startDate=self.startDate,
                               endDate=self.endDate)

        for hemisphere in ['NH', 'SH']:

            if hemisphere == 'NH':
                mask = dsMesh.latCell > 0
            else:
                mask = dsMesh.latCell < 0

            dsAreaSum = (ds.where(mask) * dsMesh.areaCell).sum('nCells')
            dsAreaSum = dsAreaSum.rename({
                'timeMonthly_avg_iceAreaCell':
                'iceArea',
                'timeMonthly_avg_iceVolumeCell':
                'iceVolume'
            })
            dsAreaSum['iceThickness'] = (dsAreaSum.iceVolume /
                                         dsMesh.areaCell.sum('nCells'))

            dsAreaSum['iceArea'].attrs['units'] = 'm$^2$'
            dsAreaSum['iceArea'].attrs['description'] = \
                'Total {} sea ice area'.format(hemisphere)
            dsAreaSum['iceVolume'].attrs['units'] = 'm$^3$'
            dsAreaSum['iceVolume'].attrs['description'] = \
                'Total {} sea ice volume'.format(hemisphere)
            dsAreaSum['iceThickness'].attrs['units'] = 'm'
            dsAreaSum['iceThickness'].attrs['description'] = \
                'Mean {} sea ice thickness'.format(hemisphere)

            dsTimeSeries[hemisphere] = dsAreaSum

            write_netcdf(dsAreaSum, outFileNames[hemisphere])

        return dsTimeSeries  # }}}
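The hemisphere masking and area-weighted aggregation above can be exercised on its own; here is a minimal sketch with synthetic data (the cell values and mesh below are made up for illustration):

import numpy as np
import xarray as xr

# synthetic mesh: two NH cells and two SH cells, 10^9 m^2 each
dsMesh = xr.Dataset({'latCell': ('nCells', np.array([45., 60., -45., -60.])),
                     'areaCell': ('nCells', np.full(4, 1.0e9))})
ds = xr.Dataset({'iceAreaCell': ('nCells', np.array([0.5, 0.8, 0.2, 0.9]))})

mask = dsMesh.latCell > 0  # northern hemisphere
# where() turns SH cells into NaN; sum() skips NaN by default
nhIceArea = (ds.iceAreaCell.where(mask) * dsMesh.areaCell).sum('nCells')
print(float(nhIceArea))  # 1.3e9 m^2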
    def test_open_dataset_fn(self):
        fileName = str(self.datadir.join('example_jan.nc'))
        timestr = ['xtime_start', 'xtime_end']
        variableList = \
            ['time_avg_avgValueWithinOceanRegion_avgSurfaceTemperature']

        for calendar in ['gregorian', 'gregorian_noleap']:
            ds = open_mpas_dataset(fileName=fileName,
                                   calendar=calendar,
                                   timeVariableNames=timestr,
                                   variableList=variableList)
            self.assertEqual(list(ds.data_vars.keys()), variableList)
    def run_task(self):  # {{{
        """
        Compute the regional-mean time series
        """
        # Authors
        # -------
        # Xylar Asay-Davis

        config = self.config

        self.logger.info("\nCompute time series of regional means...")

        startDate = '{:04d}-01-01_00:00:00'.format(self.startYear)
        endDate = '{:04d}-12-31_23:59:59'.format(self.endYear)

        regionGroup = self.regionGroup
        sectionSuffix = regionGroup[0].upper() + \
            regionGroup[1:].replace(' ', '')
        timeSeriesName = sectionSuffix[0].lower() + sectionSuffix[1:]
        sectionName = 'timeSeries{}'.format(sectionSuffix)

        outputDirectory = '{}/{}/'.format(
            build_config_full_path(config, 'output', 'timeseriesSubdirectory'),
            timeSeriesName)
        try:
            os.makedirs(outputDirectory)
        except OSError:
            pass

        outFileName = '{}/{}_{:04d}-{:04d}.nc'.format(outputDirectory,
                                                      timeSeriesName,
                                                      self.startYear,
                                                      self.endYear)

        inputFiles = sorted(
            self.historyStreams.readpath('timeSeriesStatsMonthlyOutput',
                                         startDate=startDate,
                                         endDate=endDate,
                                         calendar=self.calendar))

        years, months = get_files_year_month(inputFiles, self.historyStreams,
                                             'timeSeriesStatsMonthlyOutput')

        variables = config.getExpression(sectionName, 'variables')

        variableList = [var['mpas'] for var in variables] + \
            ['timeMonthly_avg_layerThickness']

        outputExists = os.path.exists(outFileName)
        outputValid = outputExists
        if outputExists:
            with open_mpas_dataset(fileName=outFileName,
                                   calendar=self.calendar,
                                   timeVariableNames=None,
                                   variableList=None,
                                   startDate=startDate,
                                   endDate=endDate) as dsOut:

                for inIndex in range(dsOut.dims['Time']):

                    mask = numpy.logical_and(
                        dsOut.year[inIndex].values == years,
                        dsOut.month[inIndex].values == months)
                    if numpy.count_nonzero(mask) == 0:
                        outputValid = False
                        break

        if outputValid:
            self.logger.info('  Time series exists -- Done.')
            return

        regionMaskFileName = '{}/depthMasks{}.nc'.format(
            outputDirectory, timeSeriesName)
        dsRegionMask = xarray.open_dataset(regionMaskFileName)
        nRegions = dsRegionMask.sizes['nRegions']
        areaCell = dsRegionMask.areaCell

        datasets = []
        nTime = len(inputFiles)
        for tIndex in range(nTime):
            self.logger.info('  {}/{}'.format(tIndex + 1, nTime))

            dsIn = open_mpas_dataset(fileName=inputFiles[tIndex],
                                     calendar=self.calendar,
                                     variableList=variableList,
                                     startDate=startDate,
                                     endDate=endDate).isel(Time=0)

            layerThickness = dsIn.timeMonthly_avg_layerThickness

            innerDatasets = []
            for regionIndex in range(nRegions):
                self.logger.info('    region: {}'.format(
                    self.regionNames[regionIndex]))
                dsRegion = dsRegionMask.isel(nRegions=regionIndex)
                cellMask = dsRegion.cellMask
                totalArea = dsRegion.totalArea
                depthMask = dsRegion.depthMask.where(cellMask, drop=True)
                localArea = areaCell.where(cellMask, drop=True)
                localThickness = layerThickness.where(cellMask, drop=True)

                volCell = (localArea * localThickness).where(depthMask)
                volCell = volCell.transpose('nCells', 'nVertLevels')
                totalVol = volCell.sum(dim='nVertLevels').sum(dim='nCells')
                self.logger.info('      totalVol (mil. km^3): {}'.format(
                    1e-15 * totalVol.values))

                dsOut = xarray.Dataset()
                dsOut['totalVol'] = totalVol
                dsOut.totalVol.attrs['units'] = 'm^3'

                for var in variables:
                    outName = var['name']
                    self.logger.info('      {}'.format(outName))
                    mpasVarName = var['mpas']
                    timeSeries = dsIn[mpasVarName].where(cellMask, drop=True)
                    units = timeSeries.units
                    description = timeSeries.long_name

                    is3d = 'nVertLevels' in timeSeries.dims
                    if is3d:
                        timeSeries = \
                            (volCell*timeSeries.where(depthMask)).sum(
                                dim='nVertLevels').sum(dim='nCells') / totalVol
                    else:
                        timeSeries = \
                            (localArea*timeSeries).sum(
                                dim='nCells') / totalArea

                    dsOut[outName] = timeSeries
                    dsOut[outName].attrs['units'] = units
                    dsOut[outName].attrs['description'] = description
                    dsOut[outName].attrs['is3d'] = str(is3d)

                innerDatasets.append(dsOut)

            datasets.append(innerDatasets)

        # combine data sets into a single data set
        dsOut = xarray.combine_nested(datasets, ['Time', 'nRegions'])

        dsOut['totalArea'] = dsRegionMask.totalArea
        dsOut.totalArea.attrs['units'] = 'm^2'
        dsOut['zbounds'] = dsRegionMask.zbounds
        dsOut.zbounds.attrs['units'] = 'm'
        dsOut.coords['regionNames'] = dsRegionMask.regionNames
        dsOut.coords['year'] = (('Time'), years)
        dsOut['year'].attrs['units'] = 'years'
        dsOut.coords['month'] = (('Time'), months)
        dsOut['month'].attrs['units'] = 'months'

        write_netcdf(dsOut, outFileName)  # }}}
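The nested list of per-time, per-region datasets is collapsed with xarray's combine_nested; a minimal sketch of the same [Time][nRegions] layout, using toy scalar datasets:

import xarray

# two time steps, two regions: same nested [Time][nRegions] layout as above
datasets = [[xarray.Dataset({'totalVol': float(10 * t + r)})
             for r in range(2)]
            for t in range(2)]
dsOut = xarray.combine_nested(datasets, concat_dim=['Time', 'nRegions'])
print(dsOut.totalVol.shape)  # (2, 2), with dims ('Time', 'nRegions')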
Example No. 6
    def run_task(self):  # {{{
        '''
        Computes NINO34 index and plots the time series and power spectrum with
        95 and 99% confidence bounds
        '''
        # Authors
        # -------
        # Luke Van Roekel, Xylar Asay-Davis

        config = self.config
        calendar = self.calendar

        regionToPlot = config.get('indexNino34', 'region')

        ninoIndexNumber = regionToPlot[4:]

        self.logger.info("\nPlotting El Nino {} Index time series and power "
                         "spectrum....".format(ninoIndexNumber))

        self.logger.info('  Load SST data...')
        fieldName = 'nino'

        startDate = self.config.get('index', 'startDate')
        endDate = self.config.get('index', 'endDate')

        startYear = self.config.getint('index', 'startYear')
        endYear = self.config.getint('index', 'endYear')

        dataSource = config.get('indexNino34', 'observationData')

        observationsDirectory = build_obs_path(
            config, 'ocean', '{}Subdirectory'.format(fieldName))

        # specify obsTitle based on data path
        # These are the only data sets supported
        if dataSource == 'HADIsst':
            dataPath = "{}/HADIsst_nino34_20180710.nc".format(
                observationsDirectory)
            obsTitle = 'HADSST'
            refDate = '1870-01-01'
        elif dataSource == 'ERS_SSTv4':
            dataPath = "{}/ERS_SSTv4_nino34_20180710.nc".format(
                observationsDirectory)
            obsTitle = 'ERS SSTv4'
            refDate = '1800-01-01'
        else:
            raise ValueError('Bad value for config option observationData {} '
                             'in [indexNino34] section.'.format(dataSource))

        mainRunName = config.get('runs', 'mainRunName')

        # regionIndex should correspond to NINO34 in the surface area
        # weighted averages analysis member (AM)
        regions = config.getExpression('regions', 'regions')
        regionToPlot = config.get('indexNino34', 'region')
        regionIndex = regions.index(regionToPlot)

        # Load data:
        ds = open_mpas_dataset(fileName=self.inputFile,
                               calendar=calendar,
                               variableList=self.variableList,
                               startDate=startDate,
                               endDate=endDate)

        # Observations have been processed to the nino34Index prior to reading
        dsObs = xr.open_dataset(dataPath, decode_cf=False, decode_times=False)
        # add the days between 0001-01-01 and the refDate so we have a new
        # reference date of 0001-01-01 (like for the model Time)
        dsObs["Time"] = dsObs.Time + \
            string_to_days_since_date(dateString=refDate, calendar=calendar)
        nino34Obs = dsObs.sst

        self.logger.info(
            '  Compute El Nino {} Index...'.format(ninoIndexNumber))
        varName = self.variableList[0]
        regionSST = ds[varName].isel(nOceanRegions=regionIndex)
        nino34Main = self._compute_nino34_index(regionSST, calendar)

        # Compute the observational index over the entire time range
        # nino34Obs = compute_nino34_index(dsObs.sst, calendar)

        self.logger.info(
            ' Computing El Nino {} power spectra...'.format(ninoIndexNumber))
        spectraMain = self._compute_nino34_spectra(nino34Main)

        # Compute the observational spectra over the whole record
        spectraObs = self._compute_nino34_spectra(nino34Obs)

        # Compute the observational spectra over the last 30 years for
        # comparison. Only saving the spectra
        subsetEndYear = 2016
        if self.controlConfig is None:
            subsetStartYear = 1976
        else:
            # make the subset the same length as the input data set
            subsetStartYear = subsetEndYear - (endYear - startYear)
        time_start = datetime_to_days(datetime.datetime(subsetStartYear, 1, 1),
                                      calendar=calendar)
        time_end = datetime_to_days(datetime.datetime(subsetEndYear, 12, 31),
                                    calendar=calendar)
        nino34Subset = nino34Obs.sel(Time=slice(time_start, time_end))
        spectraSubset = self._compute_nino34_spectra(nino34Subset)

        if self.controlConfig is None:
            nino34s = [nino34Obs[2:-3], nino34Subset, nino34Main[2:-3]]
            titles = [
                '{} (Full Record)'.format(obsTitle),
                '{} ({} - {})'.format(obsTitle, subsetStartYear,
                                      subsetEndYear), mainRunName
            ]
            spectra = [spectraObs, spectraSubset, spectraMain]
        else:
            baseDirectory = build_config_full_path(self.controlConfig,
                                                   'output',
                                                   'timeSeriesSubdirectory')

            refFileName = '{}/{}.nc'.format(
                baseDirectory, self.mpasTimeSeriesTask.fullTaskName)

            dsRef = open_mpas_dataset(fileName=refFileName,
                                      calendar=calendar,
                                      variableList=self.variableList)

            regionSSTRef = dsRef[varName].isel(nOceanRegions=regionIndex)
            nino34Ref = self._compute_nino34_index(regionSSTRef, calendar)

            nino34s = [nino34Subset, nino34Main[2:-3], nino34Ref[2:-3]]
            controlRunName = self.controlConfig.get('runs', 'mainRunName')

            spectraRef = self._compute_nino34_spectra(nino34Ref)

            titles = [
                '{} ({} - {})'.format(obsTitle, subsetStartYear,
                                      subsetEndYear), mainRunName,
                'Control: {}'.format(controlRunName)
            ]
            spectra = [spectraSubset, spectraMain, spectraRef]

        # Convert frequencies to period in years
        for s in spectra:
            s['period'] = \
                1.0 / (constants.eps + s['f'] * constants.sec_per_year)

        self.logger.info(
            ' Plot El Nino {} index and spectra...'.format(ninoIndexNumber))

        outFileName = '{}/nino{}_{}.png'.format(self.plotsDirectory,
                                                ninoIndexNumber, mainRunName)
        self._nino34_timeseries_plot(
            nino34s=nino34s,
            title=u'El Niño {} Index'.format(ninoIndexNumber),
            panelTitles=titles,
            outFileName=outFileName)

        self._write_xml(filePrefix='nino{}_{}'.format(ninoIndexNumber,
                                                      mainRunName),
                        plotType='Time Series',
                        ninoIndexNumber=ninoIndexNumber)

        outFileName = '{}/nino{}_spectra_{}.png'.format(
            self.plotsDirectory, ninoIndexNumber, mainRunName)
        self._nino34_spectra_plot(
            spectra=spectra,
            title=u'El Niño {} power spectrum'.format(ninoIndexNumber),
            panelTitles=titles,
            outFileName=outFileName)

        self._write_xml(filePrefix='nino{}_spectra_{}'.format(
            ninoIndexNumber, mainRunName),
                        plotType='Spectra',
                        ninoIndexNumber=ninoIndexNumber)
Example No. 7
def compute_moving_avg_anomaly_from_start(timeSeriesFileName,
                                          variableList,
                                          anomalyStartTime,
                                          anomalyEndTime,
                                          startDate,
                                          endDate,
                                          calendar,
                                          movingAveragePoints=12,
                                          alter_dataset=None):  # {{{
    '''
    Compute the rolling mean of the anomaly of a quantity from the beginning
    of the simulation (such that the rolling mean starts at zero by definition)

    Parameters
    ----------
    timeSeriesFileName :  str
        a file produced by ``MpasTimeSeriesTask`` containing variables, the
        anomaly and rolling mean of which is to be computed

    variableList :  list of str
        variable names to include in the resulting data set

    anomalyStartTime, anomalyEndTime :  str
        the start and end dates of the period used to compute the reference
        state from which anomalies are taken

    startDate, endDate :  str
        the start and end dates of the time series

    calendar : {'gregorian', 'gregorian_noleap'}
        The calendar used in the MPAS run

    movingAveragePoints : int, optional
        The number of points (months) over which to perform the rolling average
        of the data set

    alter_dataset : function, optional
        A function for manipulating the data set (e.g. computing new
        variables), taking an ``xarray.Dataset`` as input argument and
        returning an ``xarray.Dataset``

    Returns
    -------
    ds : ``xarray.Dataset``
        The anomaly of the rolling time mean from the start of the simulation
    '''
    # Authors
    # -------
    # Xylar Asay-Davis

    ds = open_mpas_dataset(fileName=timeSeriesFileName,
                           calendar=calendar,
                           variableList=variableList,
                           startDate=startDate,
                           endDate=endDate)

    if alter_dataset is not None:
        ds = alter_dataset(ds)

    dsStart = open_mpas_dataset(fileName=timeSeriesFileName,
                                calendar=calendar,
                                variableList=variableList,
                                startDate=anomalyStartTime,
                                endDate=anomalyEndTime)

    if alter_dataset is not None:
        dsStart = alter_dataset(dsStart)

    dsStart = dsStart.isel(Time=slice(0, movingAveragePoints)).mean('Time')

    for variable in ds.data_vars:
        ds[variable] = ds[variable] - dsStart[variable]

    ds = compute_moving_avg(ds)

    return ds
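A usage sketch (the file name, variable, and dates below are hypothetical placeholders):

ds = compute_moving_avg_anomaly_from_start(
    timeSeriesFileName='timeSeries.nc',
    variableList=['timeMonthly_avg_tThreshMLD'],
    anomalyStartTime='0001-01-01_00:00:00',
    anomalyEndTime='0001-12-31_23:59:59',
    startDate='0001-01-01_00:00:00',
    endDate='0010-12-31_23:59:59',
    calendar='gregorian_noleap')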
Example No. 8
    def run_task(self):  # {{{
        """
        Compute time series of regional profiles
        """
        # Authors
        # -------
        # Milena Veneziani, Mark Petersen, Phillip J. Wolfram, Xylar Asay-Davis

        self.logger.info("\nCompute time series of regional profiles...")

        startDate = '{:04d}-01-01_00:00:00'.format(self.startYear)
        endDate = '{:04d}-12-31_23:59:59'.format(self.endYear)

        timeSeriesName = self.masksSubtask.regionGroup.replace(' ', '')

        outputDirectory = '{}/{}/'.format(
            build_config_full_path(self.config, 'output',
                                   'timeseriesSubdirectory'),
            timeSeriesName)
        try:
            os.makedirs(outputDirectory)
        except OSError:
            pass

        outputFileName = '{}/regionalProfiles_{}_{:04d}-{:04d}.nc'.format(
            outputDirectory, timeSeriesName, self.startYear, self.endYear)

        inputFiles = sorted(self.historyStreams.readpath(
            'timeSeriesStatsMonthlyOutput', startDate=startDate,
            endDate=endDate, calendar=self.calendar))

        years, months = get_files_year_month(inputFiles,
                                             self.historyStreams,
                                             'timeSeriesStatsMonthlyOutput')

        variableList = [field['mpas'] for field in self.fields]

        outputExists = os.path.exists(outputFileName)
        outputValid = outputExists
        if outputExists:
            with open_mpas_dataset(fileName=outputFileName,
                                   calendar=self.calendar,
                                   timeVariableNames=None,
                                   variableList=None,
                                   startDate=startDate,
                                   endDate=endDate) as dsIn:

                for inIndex in range(dsIn.dims['Time']):

                    mask = np.logical_and(
                        dsIn.year[inIndex].values == years,
                        dsIn.month[inIndex].values == months)
                    if np.count_nonzero(mask) == 0:
                        outputValid = False
                        break

        if outputValid:
            self.logger.info('  Time series exists -- Done.')
            return

        # get areaCell
        restartFileName = \
            self.runStreams.readpath('restart')[0]

        dsRestart = xr.open_dataset(restartFileName)
        dsRestart = dsRestart.isel(Time=0)
        areaCell = dsRestart.areaCell

        nVertLevels = dsRestart.sizes['nVertLevels']

        vertIndex = \
            xr.DataArray.from_dict({'dims': ('nVertLevels',),
                                    'data': np.arange(nVertLevels)})

        vertMask = vertIndex < dsRestart.maxLevelCell

        # get region masks
        regionMaskFileName = self.masksSubtask.maskFileName
        dsRegionMask = xr.open_dataset(regionMaskFileName)

        # figure out the indices of the regions to plot
        regionNames = decode_strings(dsRegionMask.regionNames)

        regionIndices = []
        for regionToPlot in self.regionNames:
            for index, regionName in enumerate(regionNames):
                if regionToPlot == regionName:
                    regionIndices.append(index)
                    break

        # select only those regions we want to plot
        dsRegionMask = dsRegionMask.isel(nRegions=regionIndices)
        cellMasks = dsRegionMask.regionCellMasks
        regionNamesVar = dsRegionMask.regionNames

        totalArea = (cellMasks * areaCell * vertMask).sum('nCells')

        datasets = []
        for timeIndex, fileName in enumerate(inputFiles):

            dsLocal = open_mpas_dataset(
                fileName=fileName,
                calendar=self.calendar,
                variableList=variableList,
                startDate=startDate,
                endDate=endDate)
            dsLocal = dsLocal.isel(Time=0)
            time = dsLocal.Time.values
            date = days_to_datetime(time, calendar=self.calendar)

            self.logger.info('    date: {:04d}-{:02d}'.format(date.year,
                                                              date.month))

            # for each region and variable, compute area-weighted sum and
            # squared sum
            for field in self.fields:
                variableName = field['mpas']
                prefix = field['prefix']
                self.logger.info('      {}'.format(field['titleName']))

                var = dsLocal[variableName].where(vertMask)

                meanName = '{}_mean'.format(prefix)
                dsLocal[meanName] = \
                    (cellMasks * areaCell * var).sum('nCells') / totalArea

                meanSquaredName = '{}_meanSquared'.format(prefix)
                dsLocal[meanSquaredName] = \
                    (cellMasks * areaCell * var**2).sum('nCells') / totalArea

            # drop the original variables
            dsLocal = dsLocal.drop_vars(variableList)

            datasets.append(dsLocal)

        # combine data sets into a single data set
        dsOut = xr.concat(datasets, 'Time')

        dsOut.coords['regionNames'] = regionNamesVar
        dsOut['totalArea'] = totalArea
        dsOut.coords['year'] = (('Time',), years)
        dsOut['year'].attrs['units'] = 'years'
        dsOut.coords['month'] = (('Time',), months)
        dsOut['month'].attrs['units'] = 'months'

        # Note: we need a restart file, not a mesh file, because we need
        # refBottomDepth, which is not in a mesh file
        try:
            restartFile = self.runStreams.readpath('restart')[0]
        except ValueError:
            raise IOError('No MPAS-O restart file found: need at least one '
                          'restart file for plotting time series vs. depth')

        with xr.open_dataset(restartFile) as dsRestart:
            depths = dsRestart.refBottomDepth.values
            z = np.zeros(depths.shape)
            z[0] = -0.5 * depths[0]
            z[1:] = -0.5 * (depths[0:-1] + depths[1:])

        dsOut.coords['z'] = (('nVertLevels',), z)
        dsOut['z'].attrs['units'] = 'meters'

        write_netcdf(dsOut, outputFileName)
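Storing both the area-weighted mean and the mean of squares lets later analysis recover the spatial standard deviation without rereading the full fields, since Var[x] = E[x^2] - E[x]^2. A sketch, assuming a hypothetical field with prefix 'temperature' was written above and reopened as dsProfiles:

import numpy as np
import xarray as xr

# hypothetical: reopen the file written above
dsProfiles = xr.open_dataset(outputFileName)
mean = dsProfiles['temperature_mean']
meanSquared = dsProfiles['temperature_meanSquared']
# spatial standard deviation per time, region and depth level
std = np.sqrt(meanSquared - mean ** 2)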
Example No. 9
    def run_task(self):  # {{{
        """
        Computes time-series of Antarctic sub-ice-shelf melt rates.
        """
        # Authors
        # -------
        # Xylar Asay-Davis, Stephen Price

        self.logger.info("Computing Antarctic melt rate time series...")

        mpasTimeSeriesTask = self.mpasTimeSeriesTask
        config = self.config

        baseDirectory = build_config_full_path(
            config, 'output', 'timeSeriesSubdirectory')

        outFileName = '{}/iceShelfAggregatedFluxes.nc'.format(baseDirectory)

        # Load data:
        inputFile = mpasTimeSeriesTask.outputFile
        dsIn = open_mpas_dataset(fileName=inputFile,
                                 calendar=self.calendar,
                                 variableList=self.variableList,
                                 startDate=self.startDate,
                                 endDate=self.endDate)
        try:
            if os.path.exists(outFileName):
                # The file already exists so load it
                dsOut = xarray.open_dataset(outFileName)
                if numpy.all(dsOut.Time.values == dsIn.Time.values):
                    return
                else:
                    self.logger.warning('File {} is incomplete. Deleting '
                                        'it.'.format(outFileName))
                    os.remove(outFileName)
        except OSError:
            # something is potentially wrong with the file, so let's delete
            # it and try again
            self.logger.warning('Problems reading file {}. Deleting '
                                'it.'.format(outFileName))
            os.remove(outFileName)

        restartFileName = \
            mpasTimeSeriesTask.runStreams.readpath('restart')[0]

        dsRestart = xarray.open_dataset(restartFileName)
        areaCell = \
            dsRestart.landIceFraction.isel(Time=0) * dsRestart.areaCell

        regionMaskFileName = self.masksSubtask.maskFileName

        dsRegionMask = xarray.open_dataset(regionMaskFileName)

        # figure out the indices of the regions to plot
        regionNames = decode_strings(dsRegionMask.regionNames)

        regionIndices = []
        for iceShelf in self.iceShelvesToPlot:
            for index, regionName in enumerate(regionNames):
                if iceShelf == regionName:
                    regionIndices.append(index)
                    break

        # select only those regions we want to plot
        dsRegionMask = dsRegionMask.isel(nRegions=regionIndices)

        datasets = []
        nTime = dsIn.sizes['Time']
        for tIndex in range(nTime):
            self.logger.info('  {}/{}'.format(tIndex+1, nTime))

            freshwaterFlux = \
                dsIn.timeMonthly_avg_landIceFreshwaterFlux.isel(Time=tIndex)

            nRegions = dsRegionMask.sizes['nRegions']
            meltRates = numpy.zeros((nRegions,))
            totalMeltFluxes = numpy.zeros((nRegions,))

            for regionIndex in range(nRegions):
                cellMask = \
                    dsRegionMask.regionCellMasks.isel(nRegions=regionIndex)

                # convert from kg/s to kg/yr
                totalMeltFlux = constants.sec_per_year * \
                    (cellMask * areaCell * freshwaterFlux).sum(dim='nCells')

                totalArea = (cellMask * areaCell).sum(dim='nCells')

                # from kg/m^2/yr to m/yr
                meltRates[regionIndex] = ((1. / constants.rho_fw) *
                                          (totalMeltFlux / totalArea))

                # convert from kg/yr to GT/yr
                totalMeltFlux /= constants.kg_per_GT
                totalMeltFluxes[regionIndex] = totalMeltFlux

            dsOut = xarray.Dataset()
            dsOut.coords['Time'] = dsIn.Time.isel(Time=tIndex)
            dsOut['totalMeltFlux'] = (('nRegions'), totalMeltFluxes)
            dsOut['meltRates'] = (('nRegions'), meltRates)
            datasets.append(dsOut)

        dsOut = xarray.concat(objs=datasets, dim='Time')
        dsOut['regionNames'] = dsRegionMask.regionNames
        dsOut.totalMeltFlux.attrs['units'] = 'GT a$^{-1}$'
        dsOut.totalMeltFlux.attrs['description'] = \
            'Total melt flux summed over each ice shelf or region'
        dsOut.meltRates.attrs['units'] = 'm a$^{-1}$'
        dsOut.meltRates.attrs['description'] = \
            'Melt rate averaged over each ice shelf or region'

        write_netcdf(dsOut, outFileName)
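The unit conversions above are easy to verify by hand; a sketch with representative constants (the numeric values here are assumptions standing in for the mpas_analysis constants module):

# assumed stand-ins for mpas_analysis constants
sec_per_year = 365.0 * 24.0 * 3600.0  # noleap calendar, assumed
rho_fw = 1000.0                       # freshwater density [kg m^-3], assumed
kg_per_GT = 1.0e12                    # 1 gigatonne = 10^12 kg

fluxSum = 3.0e7                       # example area-integrated flux [kg s^-1]
totalArea = 1.0e11                    # example ice-shelf area [m^2]

totalMeltFlux = sec_per_year * fluxSum            # kg yr^-1
meltRate = totalMeltFlux / (rho_fw * totalArea)   # m yr^-1 of freshwater
totalMeltFlux /= kg_per_GT                        # GT yr^-1
print(meltRate, totalMeltFlux)                    # ~9.46 m/yr, ~946 GT/yr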
    def run_task(self):  # {{{
        """
        Performs analysis of the time-series output of sea-surface temperature
        (SST).
        """
        # Authors
        # -------
        # Xylar Asay-Davis, Milena Veneziani

        self.logger.info("\nPlotting SST time series...")

        self.logger.info('  Load SST data...')

        config = self.config
        calendar = self.calendar

        mainRunName = config.get('runs', 'mainRunName')
        preprocessedReferenceRunName = \
            config.get('runs', 'preprocessedReferenceRunName')
        preprocessedInputDirectory = config.get('oceanPreprocessedReference',
                                                'baseDirectory')

        movingAveragePoints = config.getint('timeSeriesSST',
                                            'movingAveragePoints')

        regions = config.getExpression('regions', 'regions')
        plotTitles = config.getExpression('regions', 'plotTitles')
        regionsToPlot = config.getExpression('timeSeriesSST', 'regions')

        regionIndicesToPlot = [
            regions.index(region) for region in regionsToPlot
        ]

        outputDirectory = build_config_full_path(config, 'output',
                                                 'timeseriesSubdirectory')

        make_directories(outputDirectory)

        dsSST = open_mpas_dataset(fileName=self.inputFile,
                                  calendar=calendar,
                                  variableList=self.variableList,
                                  startDate=self.startDate,
                                  endDate=self.endDate)

        yearStart = days_to_datetime(dsSST.Time.min(), calendar=calendar).year
        yearEnd = days_to_datetime(dsSST.Time.max(), calendar=calendar).year
        timeStart = date_to_days(year=yearStart,
                                 month=1,
                                 day=1,
                                 calendar=calendar)
        timeEnd = date_to_days(year=yearEnd,
                               month=12,
                               day=31,
                               calendar=calendar)

        if self.refConfig is not None:
            baseDirectory = build_config_full_path(self.refConfig, 'output',
                                                   'timeSeriesSubdirectory')

            refFileName = '{}/{}.nc'.format(
                baseDirectory, self.mpasTimeSeriesTask.fullTaskName)

            refStartYear = self.refConfig.getint('timeSeries', 'startYear')
            refEndYear = self.refConfig.getint('timeSeries', 'endYear')
            refStartDate = '{:04d}-01-01_00:00:00'.format(refStartYear)
            refEndDate = '{:04d}-12-31_23:59:59'.format(refEndYear)

            dsRefSST = open_mpas_dataset(fileName=refFileName,
                                         calendar=calendar,
                                         variableList=self.variableList,
                                         startDate=refStartDate,
                                         endDate=refEndDate)
        else:
            dsRefSST = None

        if preprocessedReferenceRunName != 'None':
            self.logger.info('  Load in SST for a preprocessed reference '
                             'run...')
            inFilesPreprocessed = '{}/SST.{}.year*.nc'.format(
                preprocessedInputDirectory, preprocessedReferenceRunName)

            outFolder = '{}/preprocessed'.format(outputDirectory)
            make_directories(outFolder)
            outFileName = '{}/sst.nc'.format(outFolder)

            combine_time_series_with_ncrcat(inFilesPreprocessed,
                                            outFileName,
                                            logger=self.logger)
            dsPreprocessed = open_mpas_dataset(fileName=outFileName,
                                               calendar=calendar,
                                               timeVariableNames='xtime')
            yearEndPreprocessed = days_to_datetime(dsPreprocessed.Time.max(),
                                                   calendar=calendar).year
            if yearStart <= yearEndPreprocessed:
                dsPreprocessedTimeSlice = \
                    dsPreprocessed.sel(Time=slice(timeStart, timeEnd))
            else:
                self.logger.warning('Preprocessed time series ends before the '
                                    'timeSeries startYear and will not be '
                                    'plotted.')
                preprocessedReferenceRunName = 'None'

        self.logger.info('  Make plots...')
        for regionIndex in regionIndicesToPlot:
            region = regions[regionIndex]

            title = '{} SST'.format(plotTitles[regionIndex])
            xLabel = 'Time [years]'
            yLabel = r'[$\degree$C]'

            varName = self.variableList[0]
            SST = dsSST[varName].isel(nOceanRegions=regionIndex)

            filePrefix = self.filePrefixes[region]

            figureName = '{}/{}.png'.format(self.plotsDirectory, filePrefix)

            lineColors = ['k']
            lineWidths = [3]

            fields = [SST]
            legendText = [mainRunName]

            if dsRefSST is not None:
                refSST = dsRefSST[varName].isel(nOceanRegions=regionIndex)
                fields.append(refSST)
                lineColors.append('r')
                lineWidths.append(1.5)
                refRunName = self.refConfig.get('runs', 'mainRunName')
                legendText.append(refRunName)

            if preprocessedReferenceRunName != 'None':
                SST_v0 = dsPreprocessedTimeSlice.SST
                fields.append(SST_v0)
                lineColors.append('purple')
                lineWidths.append(1.5)
                legendText.append(preprocessedReferenceRunName)

            if config.has_option(self.taskName, 'firstYearXTicks'):
                firstYearXTicks = config.getint(self.taskName,
                                                'firstYearXTicks')
            else:
                firstYearXTicks = None

            if config.has_option(self.taskName, 'yearStrideXTicks'):
                yearStrideXTicks = config.getint(self.taskName,
                                                 'yearStrideXTicks')
            else:
                yearStrideXTicks = None

            timeseries_analysis_plot(config,
                                     fields,
                                     movingAveragePoints,
                                     title,
                                     xLabel,
                                     yLabel,
                                     figureName,
                                     calendar=calendar,
                                     lineColors=lineColors,
                                     lineWidths=lineWidths,
                                     legendText=legendText,
                                     firstYearXTicks=firstYearXTicks,
                                     yearStrideXTicks=yearStrideXTicks)

            caption = 'Running Mean of {} Sea Surface Temperature'.format(
                region)
            write_image_xml(config=config,
                            filePrefix=filePrefix,
                            componentName='Ocean',
                            componentSubdirectory='ocean',
                            galleryGroup='Time Series',
                            groupLink='timeseries',
                            thumbnailDescription='{} SST'.format(region),
                            imageDescription=caption,
                            imageCaption=caption)
    def _compute_moc_time_series_postprocess(self):  # {{{
        '''compute MOC time series as a post-process'''

        # Compute and plot time series of Atlantic MOC at 26.5N (RAPID array)
        self.logger.info('\n  Compute and/or plot post-processed Atlantic MOC '
                         'time series...')
        self.logger.info('   Load data...')

        outputDirectory = build_config_full_path(self.config, 'output',
                                                 'timeseriesSubdirectory')
        try:
            os.makedirs(outputDirectory)
        except OSError:
            pass

        outputFileTseries = '{}/mocTimeSeries.nc'.format(outputDirectory)

        dvEdge, areaCell, refBottomDepth, latCell, nVertLevels, \
            refTopDepth, refLayerThickness = self._load_mesh()

        latAtlantic = self.lat['Atlantic']
        dLat = latAtlantic - 26.5
        indlat26 = np.where(np.abs(dLat) == np.amin(np.abs(dLat)))

        dictRegion = self.dictRegion['Atlantic']
        maxEdgesInTransect = dictRegion['maxEdgesInTransect']
        transectEdgeGlobalIDs = dictRegion['transectEdgeGlobalIDs']
        transectEdgeMaskSigns = dictRegion['transectEdgeMaskSigns']
        regionCellMask = dictRegion['cellMask']

        streamName = 'timeSeriesStatsMonthlyOutput'
        inputFilesTseries = sorted(
            self.historyStreams.readpath(streamName,
                                         startDate=self.startDateTseries,
                                         endDate=self.endDateTseries,
                                         calendar=self.calendar))

        years, months = get_files_year_month(inputFilesTseries,
                                             self.historyStreams,
                                             'timeSeriesStatsMonthlyOutput')

        mocRegion = np.zeros(len(inputFilesTseries))
        times = np.zeros(len(inputFilesTseries))
        computed = np.zeros(len(inputFilesTseries), bool)

        continueOutput = os.path.exists(outputFileTseries)
        if continueOutput:
            self.logger.info('   Read in previously computed MOC time series')
            with open_mpas_dataset(fileName=outputFileTseries,
                                   calendar=self.calendar,
                                   timeVariableNames=None,
                                   variableList=['mocAtlantic26'],
                                   startDate=self.startDateTseries,
                                   endDate=self.endDateTseries) as dsMOCIn:

                dsMOCIn.load()

                # first, copy all computed data
                for inIndex in range(dsMOCIn.dims['Time']):

                    mask = np.logical_and(
                        dsMOCIn.year[inIndex].values == years,
                        dsMOCIn.month[inIndex].values == months)

                    outIndex = np.where(mask)[0][0]

                    mocRegion[outIndex] = dsMOCIn.mocAtlantic26[inIndex]
                    times[outIndex] = dsMOCIn.Time[inIndex]
                    computed[outIndex] = True

                if np.all(computed):
                    # no need to waste time writing out the data set again
                    return dsMOCIn

        for timeIndex, fileName in enumerate(inputFilesTseries):
            if computed[timeIndex]:
                continue

            dsLocal = open_mpas_dataset(fileName=fileName,
                                        calendar=self.calendar,
                                        variableList=self.variableList,
                                        startDate=self.startDateTseries,
                                        endDate=self.endDateTseries)
            dsLocal = dsLocal.isel(Time=0)
            time = dsLocal.Time.values
            times[timeIndex] = time
            date = days_to_datetime(time, calendar=self.calendar)

            self.logger.info('     date: {:04d}-{:02d}'.format(
                date.year, date.month))

            if self.includeBolus:
                dsLocal['avgNormalVelocity'] = \
                    dsLocal['timeMonthly_avg_normalVelocity'] + \
                    dsLocal['timeMonthly_avg_normalGMBolusVelocity']

                dsLocal['avgVertVelocityTop'] = \
                    dsLocal['timeMonthly_avg_vertVelocityTop'] + \
                    dsLocal['timeMonthly_avg_vertGMBolusVelocityTop']
            else:
                # rename some variables for convenience
                dsLocal = dsLocal.rename({
                    'timeMonthly_avg_normalVelocity':
                    'avgNormalVelocity',
                    'timeMonthly_avg_vertVelocityTop':
                    'avgVertVelocityTop'
                })

            horizontalVel = dsLocal.avgNormalVelocity.values
            verticalVel = dsLocal.avgVertVelocityTop.values
            velArea = verticalVel * areaCell[:, np.newaxis]
            transportZ = self._compute_transport(maxEdgesInTransect,
                                                 transectEdgeGlobalIDs,
                                                 transectEdgeMaskSigns,
                                                 nVertLevels, dvEdge,
                                                 refLayerThickness,
                                                 horizontalVel)
            mocTop = self._compute_moc(latAtlantic, nVertLevels, latCell,
                                       regionCellMask, transportZ, velArea)
            mocRegion[timeIndex] = np.amax(mocTop[:, indlat26])

        description = 'Max MOC Atlantic streamfunction nearest to RAPID ' \
            'Array latitude (26.5N)'

        dictionary = {
            'dims': ['Time'],
            'coords': {
                'Time': {
                    'dims': ('Time'),
                    'data': times,
                    'attrs': {
                        'units': 'days since 0001-01-01'
                    }
                },
                'year': {
                    'dims': ('Time'),
                    'data': years,
                    'attrs': {
                        'units': 'year'
                    }
                },
                'month': {
                    'dims': ('Time'),
                    'data': months,
                    'attrs': {
                        'units': 'month'
                    }
                }
            },
            'data_vars': {
                'mocAtlantic26': {
                    'dims': ('Time'),
                    'data': mocRegion,
                    'attrs': {
                        'units': 'Sv (10^6 m^3/s)',
                        'description': description
                    }
                }
            }
        }
        dsMOCTimeSeries = xr.Dataset.from_dict(dictionary)
        write_netcdf(dsMOCTimeSeries, outputFileTseries)

        return dsMOCTimeSeries  # }}}
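The dictionary-to-Dataset round trip used above can be checked in miniature; a minimal sketch mirroring that structure:

import numpy as np
import xarray as xr

d = {'dims': ['Time'],
     'coords': {'Time': {'dims': ('Time',),
                         'data': np.array([31.0, 59.0]),
                         'attrs': {'units': 'days since 0001-01-01'}}},
     'data_vars': {'mocAtlantic26': {'dims': ('Time',),
                                     'data': np.array([17.2, 18.1]),
                                     'attrs': {'units': 'Sv (10^6 m^3/s)'}}}}
dsCheck = xr.Dataset.from_dict(d)
print(dsCheck.mocAtlantic26.values)  # [17.2 18.1]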
    def _compute_moc_time_series_analysismember(self):  # {{{
        '''compute MOC time series from analysis member'''

        # Compute and plot time series of Atlantic MOC at 26.5N (RAPID array)
        self.logger.info(
            '\n  Compute Atlantic MOC time series from analysis member...')
        self.logger.info('   Load data...')

        outputDirectory = build_config_full_path(self.config, 'output',
                                                 'timeseriesSubdirectory')
        try:
            os.makedirs(outputDirectory)
        except OSError:
            pass

        outputFileTseries = '{}/mocTimeSeries.nc'.format(outputDirectory)

        streamName = 'timeSeriesStatsMonthlyOutput'

        # Get bin latitudes and index of 26.5N
        binBoundaryMocStreamfunction = None
        # first try timeSeriesStatsMonthly for bin boundaries, then try
        # mocStreamfunctionOutput stream as a backup option
        for streamName in [
                'timeSeriesStatsMonthlyOutput', 'mocStreamfunctionOutput'
        ]:
            try:
                inputFile = self.historyStreams.readpath(streamName)[0]
            except ValueError:
                raise IOError('At least one file from stream {} is needed '
                              'to compute MOC'.format(streamName))

            with xr.open_dataset(inputFile) as ds:
                if 'binBoundaryMocStreamfunction' in ds.data_vars:
                    binBoundaryMocStreamfunction = \
                        ds.binBoundaryMocStreamfunction.values
                    break

        if binBoundaryMocStreamfunction is None:
            raise ValueError('Could not find binBoundaryMocStreamfunction in '
                             'either timeSeriesStatsMonthlyOutput or '
                             'mocStreamfunctionOutput streams')

        binBoundaryMocStreamfunction = np.rad2deg(binBoundaryMocStreamfunction)
        dLat = binBoundaryMocStreamfunction - 26.5
        indlat26 = np.where(np.abs(dLat) == np.amin(np.abs(dLat)))

        inputFilesTseries = sorted(
            self.historyStreams.readpath(streamName,
                                         startDate=self.startDateTseries,
                                         endDate=self.endDateTseries,
                                         calendar=self.calendar))

        years, months = get_files_year_month(inputFilesTseries,
                                             self.historyStreams,
                                             'timeSeriesStatsMonthlyOutput')

        mocRegion = np.zeros(len(inputFilesTseries))
        times = np.zeros(len(inputFilesTseries))
        computed = np.zeros(len(inputFilesTseries), bool)

        continueOutput = os.path.exists(outputFileTseries)
        if continueOutput:
            self.logger.info('   Read in previously computed MOC time series')
            with open_mpas_dataset(fileName=outputFileTseries,
                                   calendar=self.calendar,
                                   timeVariableNames=None,
                                   variableList=['mocAtlantic26'],
                                   startDate=self.startDateTseries,
                                   endDate=self.endDateTseries) as dsMOCIn:

                dsMOCIn.load()

                # first, copy all computed data
                for inIndex in range(dsMOCIn.dims['Time']):

                    mask = np.logical_and(
                        dsMOCIn.year[inIndex].values == years,
                        dsMOCIn.month[inIndex].values == months)

                    outIndex = np.where(mask)[0][0]

                    mocRegion[outIndex] = dsMOCIn.mocAtlantic26[inIndex]
                    times[outIndex] = dsMOCIn.Time[inIndex]
                    computed[outIndex] = True

                if np.all(computed):
                    # no need to waste time writing out the data set again
                    return dsMOCIn

        for timeIndex, fileName in enumerate(inputFilesTseries):
            if computed[timeIndex]:
                continue

            dsLocal = open_mpas_dataset(fileName=fileName,
                                        calendar=self.calendar,
                                        variableList=self.variableList,
                                        startDate=self.startDateTseries,
                                        endDate=self.endDateTseries)
            dsLocal = dsLocal.isel(Time=0)
            time = dsLocal.Time.values
            times[timeIndex] = time
            date = days_to_datetime(time, calendar=self.calendar)

            self.logger.info('     date: {:04d}-{:02d}'.format(
                date.year, date.month))

            # hard-wire region=0 (Atlantic) for now
            indRegion = 0
            mocTop = dsLocal.timeMonthly_avg_mocStreamvalLatAndDepthRegion[
                indRegion, :, :].values
            mocRegion[timeIndex] = np.amax(mocTop[:, indlat26])

        description = 'Max MOC Atlantic streamfunction nearest to RAPID ' \
            'Array latitude (26.5N)'

        dictionary = {
            'dims': ['Time'],
            'coords': {
                'Time': {
                    'dims': ('Time'),
                    'data': times,
                    'attrs': {
                        'units': 'days since 0001-01-01'
                    }
                },
                'year': {
                    'dims': ('Time'),
                    'data': years,
                    'attrs': {
                        'units': 'year'
                    }
                },
                'month': {
                    'dims': ('Time'),
                    'data': months,
                    'attrs': {
                        'units': 'month'
                    }
                }
            },
            'data_vars': {
                'mocAtlantic26': {
                    'dims': ('Time'),
                    'data': mocRegion,
                    'attrs': {
                        'units': 'Sv (10^6 m^3/s)',
                        'description': description
                    }
                }
            }
        }
        dsMOCTimeSeries = xr.Dataset.from_dict(dictionary)
        write_netcdf(dsMOCTimeSeries, outputFileTseries)

        return dsMOCTimeSeries  # }}}
    def run_task(self):  # {{{
        '''
        Process MOC analysis member data if available, or compute MOC at
        post-processing if not. Plots climatological streamfunction sections
        as well as time series of max Atlantic MOC at 26.5N (latitude of
        RAPID MOC Array).
        '''
        # Authors
        # -------
        # Milena Veneziani, Mark Petersen, Phillip J. Wolfram, Xylar Asay-Davis

        self.logger.info("\nPlotting streamfunction of Meridional Overturning "
                         "Circulation (MOC)...")

        config = self.config

        # **** Compute MOC ****
        if not self.usePostprocessing and self.mocAnalysisMemberEnabled:
            self._compute_moc_climo_analysismember()
            dsMOCTimeSeries = self._compute_moc_time_series_analysismember()
        else:
            self._compute_moc_climo_postprocess()
            dsMOCTimeSeries = self._compute_moc_time_series_postprocess()

        # **** Plot MOC ****
        # Define plotting variables
        mainRunName = config.get('runs', 'mainRunName')
        movingAveragePoints = config.getint(self.sectionName,
                                            'movingAveragePoints')
        movingAveragePointsClimatological = config.getint(
            self.sectionName, 'movingAveragePointsClimatological')
        colorbarLabel = '[Sv]'
        xLabel = 'latitude [deg]'
        yLabel = 'depth [m]'

        for region in self.regionNames:
            self.logger.info('   Plot climatological {} MOC...'.format(region))
            title = '{} MOC (ANN, years {:04d}-{:04d})\n {}'.format(
                region, self.startYearClimo, self.endYearClimo, mainRunName)
            filePrefix = self.filePrefixes[region]
            figureName = '{}/{}.png'.format(self.plotsDirectory, filePrefix)

            x = self.lat[region]
            y = self.depth
            z = self.moc[region]
            # Subset lat range
            minLat = config.getExpression(self.sectionName,
                                          'latBinMin{}'.format(region))
            maxLat = config.getExpression(self.sectionName,
                                          'latBinMax{}'.format(region))
            indLat = np.logical_and(x >= minLat, x <= maxLat)
            x = x[indLat]
            z = z[:, indLat]

            plot_vertical_section(config,
                                  x,
                                  y,
                                  z,
                                  self.sectionName,
                                  suffix=region,
                                  colorbarLabel=colorbarLabel,
                                  title=title,
                                  xlabel=xLabel,
                                  ylabel=yLabel,
                                  fileout=figureName,
                                  N=movingAveragePointsClimatological)

            caption = '{} Meridional Overturning Streamfunction'.format(region)
            write_image_xml(
                config=config,
                filePrefix=filePrefix,
                componentName='Ocean',
                componentSubdirectory='ocean',
                galleryGroup='Meridional Overturning Streamfunction',
                groupLink='moc',
                thumbnailDescription=region,
                imageDescription=caption,
                imageCaption=caption)

        # Plot time series
        self.logger.info('   Plot time series of max Atlantic MOC at 26.5N...')
        xLabel = 'Time [years]'
        yLabel = '[Sv]'
        title = 'Max Atlantic MOC at $26.5\\degree$N\n {}'.format(mainRunName)
        filePrefix = self.filePrefixes['timeSeries']

        figureName = '{}/{}.png'.format(self.plotsDirectory, filePrefix)

        if config.has_option(self.taskName, 'firstYearXTicks'):
            firstYearXTicks = config.getint(self.taskName, 'firstYearXTicks')
        else:
            firstYearXTicks = None

        if config.has_option(self.taskName, 'yearStrideXTicks'):
            yearStrideXTicks = config.getint(self.taskName, 'yearStrideXTicks')
        else:
            yearStrideXTicks = None

        fields = [dsMOCTimeSeries.mocAtlantic26]
        lineColors = ['k']
        lineWidths = [2]
        legendText = [mainRunName]

        if self.refConfig is not None:

            refDirectory = build_config_full_path(self.refConfig, 'output',
                                                  'timeseriesSubdirectory')

            refStartYear = self.refConfig.getint('timeSeries', 'startYear')
            refEndYear = self.refConfig.getint('timeSeries', 'endYear')
            refStartDate = '{:04d}-01-01_00:00:00'.format(refStartYear)
            refEndDate = '{:04d}-12-31_23:59:59'.format(refEndYear)

            refFileName = '{}/mocTimeSeries.nc'.format(refDirectory)
            self.logger.info('   Read in reference run MOC time series')
            dsRefMOC = open_mpas_dataset(fileName=refFileName,
                                         calendar=self.calendar,
                                         timeVariableNames=None,
                                         variableList=['mocAtlantic26'],
                                         startDate=refStartDate,
                                         endDate=refEndDate)
            fields.append(dsRefMOC.mocAtlantic26)
            lineColors.append('r')
            lineWidths.append(2)
            refRunName = self.refConfig.get('runs', 'mainRunName')
            legendText.append(refRunName)

        timeseries_analysis_plot(config,
                                 fields,
                                 movingAveragePoints,
                                 title,
                                 xLabel,
                                 yLabel,
                                 figureName,
                                 calendar=self.calendar,
                                 lineColors=lineColors,
                                 lineWidths=lineWidths,
                                 legendText=legendText,
                                 firstYearXTicks=firstYearXTicks,
                                 yearStrideXTicks=yearStrideXTicks)

        caption = u'Time Series of maximum Meridional Overturning ' \
                  u'Circulation at 26.5°N'
        write_image_xml(config=config,
                        filePrefix=filePrefix,
                        componentName='Ocean',
                        componentSubdirectory='ocean',
                        galleryGroup='Meridional Overturning Streamfunction',
                        groupLink='moc',
                        thumbnailDescription='Time Series',
                        imageDescription=caption,
                        imageCaption=caption)  # }}}
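
The has_option/getint fallback used above for firstYearXTicks and
yearStrideXTicks recurs in several of the examples below. A minimal sketch of
a hypothetical helper that factors the pattern out (get_int_option_or_none is
illustrative, not part of MPAS-Analysis):

def get_int_option_or_none(config, section, option):
    # Hypothetical helper: return an integer config option, or None if the
    # option is absent (mirrors the has_option/getint pattern above).
    if config.has_option(section, option):
        return config.getint(section, option)
    return None

# usage sketch:
# firstYearXTicks = get_int_option_or_none(config, taskName, 'firstYearXTicks')
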
Example 14
    if not os.path.exists(timeSeriesFile):
        print('\nComputing regional time series for year={}'.format(year))

        datasets = []
        for month in range(1, 13):
            inputFile = '{}/{}.mpaso.hist.am.timeSeriesStatsMonthly.{:04d}-{:02d}-01.nc'.format(
                modeldir, runName, year, month)
            #inputFile = '{}/mpaso.hist.am.timeSeriesStatsMonthly.{:04d}-{:02d}-01.nc'.format(
            #    modeldir, year, month)
            if not os.path.exists(inputFile):
                raise IOError('Input file: {} not found'.format(inputFile))

            dsTimeSlice = open_mpas_dataset(fileName=inputFile,
                                            calendar=calendar,
                                            variableList=variableList,
                                            startDate=startDate,
                                            endDate=endDate)
            datasets.append(dsTimeSlice)
        # combine data sets into a single data set
        dsIn = xarray.concat(datasets, 'Time')

        # Global depth-masked layer thickness and layer volume
        layerThickness = dsIn.timeMonthly_avg_layerThickness
        layerThickness = layerThickness.where(depthMask, drop=False)
        layerVol = areaCell * layerThickness

        datasets = []
        regionIndices = []
        for regionName in regionNames:
            print('    region: {}'.format(regionName))
Example 15
    def run_task(self):  # {{{
        """
        Performs analysis of time series of sea-ice properties.
        """
        # Authors
        # -------
        # Xylar Asay-Davis, Milena Veneziani

        self.logger.info("\nPlotting sea-ice area and volume time series...")

        config = self.config
        calendar = self.calendar

        sectionName = self.taskName

        plotTitles = {'iceArea': 'Sea-ice area',
                      'iceVolume': 'Sea-ice volume',
                      'iceThickness': 'Sea-ice mean thickness'}

        units = {'iceArea': '[km$^2$]',
                 'iceVolume': '[10$^3$ km$^3$]',
                 'iceThickness': '[m]'}

        obsFileNames = {
            'iceArea': {'NH': build_obs_path(
                config, 'seaIce',
                relativePathOption='areaNH',
                relativePathSection=sectionName),
                'SH': build_obs_path(
                config, 'seaIce',
                relativePathOption='areaSH',
                relativePathSection=sectionName)},
            'iceVolume': {'NH': build_obs_path(
                config, 'seaIce',
                relativePathOption='volNH',
                relativePathSection=sectionName),
                'SH': build_obs_path(
                config, 'seaIce',
                relativePathOption='volSH',
                relativePathSection=sectionName)}}

        # Some plotting rules
        titleFontSize = config.get('timeSeriesSeaIceAreaVol', 'titleFontSize')

        mainRunName = config.get('runs', 'mainRunName')
        preprocessedReferenceRunName = \
            config.get('runs', 'preprocessedReferenceRunName')
        preprocessedReferenceDirectory = \
            config.get('seaIcePreprocessedReference', 'baseDirectory')

        compareWithObservations = config.getboolean('timeSeriesSeaIceAreaVol',
                                                    'compareWithObservations')

        movingAveragePoints = config.getint('timeSeriesSeaIceAreaVol',
                                            'movingAveragePoints')

        polarPlot = config.getboolean('timeSeriesSeaIceAreaVol', 'polarPlot')

        outputDirectory = build_config_full_path(config, 'output',
                                                 'timeseriesSubdirectory')

        make_directories(outputDirectory)

        self.logger.info('  Load sea-ice data...')
        # Load mesh

        dsTimeSeries = self._compute_area_vol()

        yearStart = days_to_datetime(dsTimeSeries['NH'].Time.min(),
                                     calendar=calendar).year
        yearEnd = days_to_datetime(dsTimeSeries['NH'].Time.max(),
                                   calendar=calendar).year
        timeStart = date_to_days(year=yearStart, month=1, day=1,
                                 calendar=calendar)
        timeEnd = date_to_days(year=yearEnd, month=12, day=31,
                               calendar=calendar)

        if preprocessedReferenceRunName != 'None':
            # determine if we're beyond the end of the preprocessed data
            # (and go ahead and cache the data set while we're checking)
            outFolder = '{}/preprocessed'.format(outputDirectory)
            make_directories(outFolder)
            inFilesPreprocessed = '{}/icevol.{}.year*.nc'.format(
                preprocessedReferenceDirectory, preprocessedReferenceRunName)
            outFileName = '{}/iceVolume.nc'.format(outFolder)

            combine_time_series_with_ncrcat(inFilesPreprocessed,
                                            outFileName,
                                            logger=self.logger)
            dsPreprocessed = open_mpas_dataset(fileName=outFileName,
                                               calendar=calendar,
                                               timeVariableNames='xtime')
            preprocessedYearEnd = days_to_datetime(dsPreprocessed.Time.max(),
                                                   calendar=calendar).year
            if yearStart <= preprocessedYearEnd:
                dsPreprocessedTimeSlice = \
                    dsPreprocessed.sel(Time=slice(timeStart, timeEnd))
            else:
                self.logger.warning('Preprocessed time series ends before the '
                                    'timeSeries startYear and will not be '
                                    'plotted.')
                preprocessedReferenceRunName = 'None'

        if self.controlConfig is not None:

            dsTimeSeriesRef = {}
            baseDirectory = build_config_full_path(
                self.controlConfig, 'output', 'timeSeriesSubdirectory')

            controlRunName = self.controlConfig.get('runs', 'mainRunName')

            for hemisphere in ['NH', 'SH']:
                inFileName = '{}/seaIceAreaVol{}.nc'.format(baseDirectory,
                                                            hemisphere)

                dsTimeSeriesRef[hemisphere] = xr.open_dataset(inFileName)

        norm = {'iceArea': 1e-6,  # m^2 to km^2
                'iceVolume': 1e-12,  # m^3 to 10^3 km^3
                'iceThickness': 1.}

        xLabel = 'Time [years]'

        galleryGroup = 'Time Series'
        groupLink = 'timeseries'

        obs = {}
        preprocessed = {}
        figureNameStd = {}
        figureNamePolar = {}
        title = {}
        plotVars = {}
        obsLegend = {}
        plotVarsRef = {}

        for hemisphere in ['NH', 'SH']:

            self.logger.info('  Make {} plots...'.format(hemisphere))

            for variableName in ['iceArea', 'iceVolume']:
                key = (hemisphere, variableName)

                # apply the norm to each variable
                plotVars[key] = (norm[variableName] *
                                 dsTimeSeries[hemisphere][variableName])

                if self.controlConfig is not None:
                    plotVarsRef[key] = norm[variableName] * \
                        dsTimeSeriesRef[hemisphere][variableName]

                prefix = '{}/{}{}_{}'.format(self.plotsDirectory,
                                             variableName,
                                             hemisphere,
                                             mainRunName)

                figureNameStd[key] = '{}.png'.format(prefix)
                figureNamePolar[key] = '{}_polar.png'.format(prefix)

                title[key] = '{} ({})'.format(plotTitles[variableName],
                                              hemisphere)

            if compareWithObservations:
                key = (hemisphere, 'iceArea')
                obsLegend[key] = 'SSM/I observations, annual cycle (blue)'
                if hemisphere == 'NH':
                    key = (hemisphere, 'iceVolume')
                    obsLegend[key] = 'PIOMAS, annual cycle (blue)'

            if compareWithObservations:

                outFolder = '{}/obs'.format(outputDirectory)
                make_directories(outFolder)
                outFileName = '{}/iceArea{}.nc'.format(outFolder, hemisphere)

                combine_time_series_with_ncrcat(
                    obsFileNames['iceArea'][hemisphere],
                    outFileName, logger=self.logger)
                dsObs = open_mpas_dataset(fileName=outFileName,
                                          calendar=calendar,
                                          timeVariableNames='xtime')
                key = (hemisphere, 'iceArea')
                obs[key] = self._replicate_cycle(plotVars[key], dsObs.IceArea,
                                                 calendar)

                key = (hemisphere, 'iceVolume')
                if hemisphere == 'NH':
                    outFileName = '{}/iceVolume{}.nc'.format(outFolder,
                                                             hemisphere)
                    combine_time_series_with_ncrcat(
                        obsFileNames['iceVolume'][hemisphere],
                        outFileName, logger=self.logger)
                    dsObs = open_mpas_dataset(fileName=outFileName,
                                              calendar=calendar,
                                              timeVariableNames='xtime')
                    obs[key] = self._replicate_cycle(plotVars[key],
                                                     dsObs.IceVol,
                                                     calendar)
                else:
                    obs[key] = None

            if preprocessedReferenceRunName != 'None':
                outFolder = '{}/preprocessed'.format(outputDirectory)
                inFilesPreprocessed = '{}/icearea.{}.year*.nc'.format(
                    preprocessedReferenceDirectory,
                    preprocessedReferenceRunName)

                outFileName = '{}/iceArea.nc'.format(outFolder)

                combine_time_series_with_ncrcat(inFilesPreprocessed,
                                                outFileName,
                                                logger=self.logger)
                dsPreprocessed = open_mpas_dataset(fileName=outFileName,
                                                   calendar=calendar,
                                                   timeVariableNames='xtime')
                dsPreprocessedTimeSlice = dsPreprocessed.sel(
                    Time=slice(timeStart, timeEnd))
                key = (hemisphere, 'iceArea')
                preprocessed[key] = dsPreprocessedTimeSlice[
                    'icearea_{}'.format(hemisphere.lower())]

                inFilesPreprocessed = '{}/icevol.{}.year*.nc'.format(
                    preprocessedReferenceDirectory,
                    preprocessedReferenceRunName)
                outFileName = '{}/iceVolume.nc'.format(outFolder)

                combine_time_series_with_ncrcat(inFilesPreprocessed,
                                                outFileName,
                                                logger=self.logger)
                dsPreprocessed = open_mpas_dataset(fileName=outFileName,
                                                   calendar=calendar,
                                                   timeVariableNames='xtime')
                dsPreprocessedTimeSlice = dsPreprocessed.sel(
                    Time=slice(timeStart, timeEnd))
                key = (hemisphere, 'iceVolume')
                preprocessed[key] = dsPreprocessedTimeSlice[
                    'icevolume_{}'.format(hemisphere.lower())]

            for variableName in ['iceArea', 'iceVolume']:
                key = (hemisphere, variableName)
                dsvalues = [plotVars[key]]
                legendText = [mainRunName]
                lineColors = ['k']
                lineWidths = [3]
                if compareWithObservations and key in obsLegend.keys():
                    dsvalues.append(obs[key])
                    legendText.append(obsLegend[key])
                    lineColors.append('b')
                    lineWidths.append(1.2)
                if preprocessedReferenceRunName != 'None':
                    dsvalues.append(preprocessed[key])
                    legendText.append(preprocessedReferenceRunName)
                    lineColors.append('purple')
                    lineWidths.append(1.2)

                if self.controlConfig is not None:
                    dsvalues.append(plotVarsRef[key])
                    legendText.append(controlRunName)
                    lineColors.append('r')
                    lineWidths.append(1.2)

                if config.has_option(sectionName, 'firstYearXTicks'):
                    firstYearXTicks = config.getint(sectionName,
                                                    'firstYearXTicks')
                else:
                    firstYearXTicks = None

                if config.has_option(sectionName, 'yearStrideXTicks'):
                    yearStrideXTicks = config.getint(sectionName,
                                                     'yearStrideXTicks')
                else:
                    yearStrideXTicks = None

                # separate plots for northern and southern hemispheres
                timeseries_analysis_plot(config, dsvalues,
                                         movingAveragePoints,
                                         title[key], xLabel,
                                         units[variableName],
                                         calendar=calendar,
                                         lineColors=lineColors,
                                         lineWidths=lineWidths,
                                         legendText=legendText,
                                         titleFontSize=titleFontSize,
                                         firstYearXTicks=firstYearXTicks,
                                         yearStrideXTicks=yearStrideXTicks)

                savefig(figureNameStd[key])

                filePrefix = '{}{}_{}'.format(variableName,
                                              hemisphere,
                                              mainRunName)
                thumbnailDescription = '{} {}'.format(
                    hemisphere, plotTitles[variableName])
                caption = 'Running mean of {}'.format(
                    thumbnailDescription)
                write_image_xml(
                    config,
                    filePrefix,
                    componentName='Sea Ice',
                    componentSubdirectory='sea_ice',
                    galleryGroup=galleryGroup,
                    groupLink=groupLink,
                    thumbnailDescription=thumbnailDescription,
                    imageDescription=caption,
                    imageCaption=caption)

                if polarPlot:
                    timeseries_analysis_plot_polar(
                        config,
                        dsvalues,
                        movingAveragePoints,
                        title[key],
                        lineColors=lineColors,
                        lineWidths=lineWidths,
                        legendText=legendText,
                        titleFontSize=titleFontSize)

                    savefig(figureNamePolar[key])

                    filePrefix = '{}{}_{}_polar'.format(variableName,
                                                        hemisphere,
                                                        mainRunName)
                    write_image_xml(
                        config,
                        filePrefix,
                        componentName='Sea Ice',
                        componentSubdirectory='sea_ice',
                        galleryGroup=galleryGroup,
                        groupLink=groupLink,
                        thumbnailDescription=thumbnailDescription,
                        imageDescription=caption,
                        imageCaption=caption)
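
_replicate_cycle, called above, tiles an observed annual cycle along the model
time axis so that observations and model series can share a plot. A rough,
self-contained numpy sketch of the assumed behavior (not the actual
MPAS-Analysis routine, which also handles calendar alignment):

import numpy as np

def replicate_annual_cycle_sketch(monthlyCycle, nYears):
    # Tile a 12-value climatological cycle across nYears so it matches a
    # monthly model time series of length 12 * nYears.
    monthlyCycle = np.asarray(monthlyCycle)
    assert monthlyCycle.size == 12
    return np.tile(monthlyCycle, nYears)
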
Example 16
    def run_task(self):  # {{{
        """
        Computes time-series of transport through transects.
        """
        # Authors
        # -------
        # Xylar Asay-Davis, Stephen Price

        self.logger.info("Computing time series of transport through "
                         "transects...")

        config = self.config

        startDate = '{:04d}-01-01_00:00:00'.format(self.startYear)
        endDate = '{:04d}-12-31_23:59:59'.format(self.endYear)

        outputDirectory = '{}/transport/'.format(
            build_config_full_path(config, 'output', 'timeseriesSubdirectory'))
        try:
            os.makedirs(outputDirectory)
        except OSError:
            pass

        outFileName = '{}/transport_{:04d}-{:04d}.nc'.format(
            outputDirectory, self.startYear, self.endYear)

        inputFiles = sorted(
            self.historyStreams.readpath('timeSeriesStatsMonthlyOutput',
                                         startDate=startDate,
                                         endDate=endDate,
                                         calendar=self.calendar))

        years, months = get_files_year_month(inputFiles, self.historyStreams,
                                             'timeSeriesStatsMonthlyOutput')

        variableList = ['timeMonthly_avg_layerThickness']
        with open_mpas_dataset(fileName=inputFiles[0],
                               calendar=self.calendar,
                               startDate=startDate,
                               endDate=endDate) as dsIn:
            if 'timeMonthly_avg_normalTransportVelocity' in dsIn:
                variableList.append('timeMonthly_avg_normalTransportVelocity')
            elif 'timeMonthly_avg_normalGMBolusVelocity' in dsIn:
                variableList = variableList + \
                    ['timeMonthly_avg_normalVelocity',
                     'timeMonthly_avg_normalGMBolusVelocity']
            else:
                self.logger.warning('Cannot compute transport velocity. '
                                    'Using advection velocity.')
                variableList.append('timeMonthly_avg_normalVelocity')

        outputExists = os.path.exists(outFileName)
        outputValid = outputExists
        if outputExists:
            with open_mpas_dataset(fileName=outFileName,
                                   calendar=self.calendar,
                                   timeVariableNames=None,
                                   variableList=None,
                                   startDate=startDate,
                                   endDate=endDate) as dsOut:

                for inIndex in range(dsOut.dims['Time']):

                    mask = numpy.logical_and(
                        dsOut.year[inIndex].values == years,
                        dsOut.month[inIndex].values == months)
                    if numpy.count_nonzero(mask) == 0:
                        outputValid = False
                        break

        if outputValid:
            self.logger.info('  Time series exists -- Done.')
            return

        transectMaskFileName = self.masksSubtask.maskFileName

        dsTransectMask = xarray.open_dataset(transectMaskFileName)

        # figure out the indices of the transects to plot
        maskTransectNames = decode_strings(dsTransectMask.transectNames)

        dsMesh = xarray.open_dataset(self.restartFileName)
        dvEdge = dsMesh.dvEdge
        cellsOnEdge = dsMesh.cellsOnEdge - 1

        timeDatasets = []
        self.logger.info('  Computing transport...')
        for fileName in inputFiles:
            self.logger.info('    input file: {}'.format(fileName))
            dsTimeSlice = open_mpas_dataset(fileName=fileName,
                                            calendar=self.calendar,
                                            variableList=variableList,
                                            startDate=startDate,
                                            endDate=endDate)

            transectDatasets = []
            transectIndices = []
            for transect in self.transectsToPlot:
                self.logger.info('    transect: {}'.format(transect))
                try:
                    transectIndex = maskTransectNames.index(transect)
                except ValueError:
                    self.logger.warning('      Not found in masks. '
                                        'Skipping.')
                    continue
                transectIndices.append(transectIndex)

                # select the current transect
                dsMask = dsTransectMask.isel(nTransects=[transectIndex])
                edgeIndices = dsMask.transectEdgeGlobalIDs - 1
                edgeIndices = edgeIndices.where(edgeIndices >= 0,
                                                drop=True).astype(int)
                edgeSign = dsMask.transectEdgeMaskSigns.isel(
                    nEdges=edgeIndices)

                dsIn = dsTimeSlice.isel(nEdges=edgeIndices)

                dv = dvEdge.isel(nEdges=edgeIndices)
                coe = cellsOnEdge.isel(nEdges=edgeIndices)

                # work on data from simulations
                if 'timeMonthly_avg_normalTransportVelocity' in dsIn:
                    vel = dsIn.timeMonthly_avg_normalTransportVelocity
                elif 'timeMonthly_avg_normalGMBolusVelocity' in dsIn:
                    vel = (dsIn.timeMonthly_avg_normalVelocity +
                           dsIn.timeMonthly_avg_normalGMBolusVelocity)
                else:
                    vel = dsIn.timeMonthly_avg_normalVelocity

                # get layer thickness on edges by averaging adjacent cells
                h = 0.5 * dsIn.timeMonthly_avg_layerThickness.isel(
                    nCells=coe).sum(dim='TWO')

                edgeTransport = edgeSign * vel * h * dv

                # convert from m^3/s to Sv
                transport = (constants.m3ps_to_Sv * edgeTransport.sum(
                    dim=['maxEdgesInTransect', 'nVertLevels']))

                dsOut = xarray.Dataset()
                dsOut['transport'] = transport
                dsOut.transport.attrs['units'] = 'Sv'
                dsOut.transport.attrs['description'] = \
                    'Transport through transects'
                transectDatasets.append(dsOut)

            dsOut = xarray.concat(transectDatasets, 'nTransects')
            timeDatasets.append(dsOut)

        # combine data sets into a single data set
        dsOut = xarray.concat(timeDatasets, 'Time')
        dsOut.coords['transectNames'] = dsTransectMask.transectNames.isel(
            nTransects=transectIndices)
        dsOut.coords['year'] = (('Time'), years)
        dsOut['year'].attrs['units'] = 'years'
        dsOut.coords['month'] = (('Time'), months)
        dsOut['month'].attrs['units'] = 'months'
        write_netcdf(dsOut, outFileName)
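
The transport above reduces to sign * velocity * thickness * edge length,
summed over vertical levels and transect edges, then scaled to Sverdrups
(1 Sv = 10^6 m^3/s). A toy numpy version of that reduction, with assumed
array shapes:

import numpy as np

def transect_transport_sv(edgeSign, vel, h, dv):
    # edgeSign, dv: (nEdges,); vel, h: (nEdges, nVertLevels)
    edgeTransport = edgeSign[:, None] * vel * h * dv[:, None]  # m^3/s
    return 1e-6 * edgeTransport.sum()  # m^3/s to Sv
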
Example 17
    def run_task(self):  # {{{
        """
        Make the Hovmoller plot from the time series.
        """
        # Authors
        # -------
        # Xylar Asay-Davis, Milena Veneziani, Greg Streletz

        self.logger.info("\nPlotting {} trends vs. depth...".format(
            self.fieldNameInTitle))

        config = self.config

        mainRunName = config.get('runs', 'mainRunName')

        plotTitles = config.getExpression('regions', 'plotTitles')
        allRegionNames = config.getExpression('regions', 'regions')
        regionIndex = allRegionNames.index(self.regionName)
        regionNameInTitle = plotTitles[regionIndex]

        startDate = self.config.get('timeSeries', 'startDate')
        endDate = self.config.get('timeSeries', 'endDate')

        # Load data
        self.logger.info('  Load ocean data...')
        ds = open_mpas_dataset(fileName=self.inFileName,
                               calendar=self.calendar,
                               variableList=[self.mpasFieldName],
                               timeVariableNames=None,
                               startDate=startDate,
                               endDate=endDate)
        ds = ds.isel(nOceanRegionsTmp=regionIndex)

        # Note: we read a restart file rather than a mesh file because we
        # need refBottomDepth, which is not in a mesh file
        try:
            restartFile = self.runStreams.readpath('restart')[0]
        except ValueError:
            raise IOError('No MPAS-O restart file found: need at least one '
                          'restart file for OHC calculation')

        # Define/read in general variables
        self.logger.info('  Read in depth...')
        with xr.open_dataset(restartFile) as dsRestart:
            # reference depth [m]
            depth = dsRestart.refBottomDepth.values

        Time = ds.Time.values
        field = ds[self.mpasFieldName].values.transpose()

        xLabel = 'Time [years]'
        yLabel = 'Depth [m]'

        title = '{}, {} \n {}'.format(self.fieldNameInTitle, regionNameInTitle,
                                      mainRunName)

        figureName = '{}/{}.png'.format(self.plotsDirectory, self.filePrefix)

        if config.has_option(self.sectionName, 'firstYearXTicks'):
            firstYearXTicks = config.getint(self.sectionName,
                                            'firstYearXTicks')
        else:
            firstYearXTicks = None

        if config.has_option(self.sectionName, 'yearStrideXTicks'):
            yearStrideXTicks = config.getint(self.sectionName,
                                             'yearStrideXTicks')
        else:
            yearStrideXTicks = None

        plot_vertical_section(config,
                              Time,
                              depth,
                              field,
                              self.sectionName,
                              suffix='',
                              colorbarLabel=self.unitsLabel,
                              title=title,
                              xlabel=xLabel,
                              ylabel=yLabel,
                              fileout=figureName,
                              linewidths=1,
                              xArrayIsTime=True,
                              calendar=self.calendar,
                              firstYearXTicks=firstYearXTicks,
                              yearStrideXTicks=yearStrideXTicks)

        write_image_xml(config=config,
                        filePrefix=self.filePrefix,
                        componentName='Ocean',
                        componentSubdirectory='ocean',
                        galleryGroup=self.galleryGroup,
                        groupLink=self.groupLink,
                        galleryName=self.galleryName,
                        thumbnailDescription='{} {}'.format(
                            self.regionName, self.thumbnailSuffix),
                        imageDescription=self.imageCaption,
                        imageCaption=self.imageCaption)
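
plot_vertical_section above receives the field transposed so that depth is the
leading axis: z must have shape (len(y), len(x)). A quick, illustrative shape
check of that layout:

import numpy as np

nTime, nVertLevels = 120, 40
field = np.zeros((nTime, nVertLevels))  # as stored in the data set
z = field.transpose()                   # (nVertLevels, nTime) for plotting
assert z.shape == (nVertLevels, nTime)
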
    def run_task(self):  # {{{
        """
        Compute vertical aggregates of the data and plot the time series
        """
        # Authors
        # -------
        # Xylar Asay-Davis, Milena Veneziani, Greg Streletz

        self.logger.info("\nPlotting depth-integrated time series of "
                         "{}...".format(self.fieldNameInTitle))

        config = self.config
        calendar = self.calendar

        mainRunName = config.get('runs', 'mainRunName')

        plotTitles = config.getExpression('regions', 'plotTitles')
        allRegionNames = config.getExpression('regions', 'regions')
        regionIndex = allRegionNames.index(self.regionName)
        regionNameInTitle = plotTitles[regionIndex]

        startDate = config.get('timeSeries', 'startDate')
        endDate = config.get('timeSeries', 'endDate')

        # Load data
        self.logger.info('  Load ocean data...')
        ds = open_mpas_dataset(fileName=self.inFileName,
                               calendar=calendar,
                               variableList=[self.mpasFieldName, 'depth'],
                               timeVariableNames=None,
                               startDate=startDate,
                               endDate=endDate)
        ds = ds.isel(nOceanRegionsTmp=regionIndex)

        depths = ds.depth.values

        divisionDepths = config.getExpression(self.sectionName, 'depths')

        # for each depth interval to plot, determine the top and bottom depth
        topDepths = [0, 0] + divisionDepths
        bottomDepths = [depths[-1]] + divisionDepths + [depths[-1]]

        legends = []
        for top, bottom in zip(topDepths, bottomDepths):
            if bottom == depths[-1]:
                legends.append('{}m-bottom'.format(top))
            else:
                legends.append('{}m-{}m'.format(top, bottom))

        # more possible symbols than we typically use
        lines = ['-', '-', '--', None, None, None, None]
        markers = [None, None, None, '+', 'o', '^', 'v']
        widths = [5, 3, 3, 3, 3, 3, 3]
        points = [None, None, None, 300, 300, 300, 300]

        color = 'k'

        xLabel = 'Time [years]'
        yLabel = self.yAxisLabel

        title = '{}, {} \n {} (black)'.format(self.fieldNameInTitle,
                                              regionNameInTitle, mainRunName)

        outFileName = '{}/{}.png'.format(self.plotsDirectory, self.filePrefix)

        timeSeries = []
        lineColors = []
        lineStyles = []
        lineMarkers = []
        lineWidths = []
        maxPoints = []
        legendText = []

        for rangeIndex in range(len(topDepths)):
            top = topDepths[rangeIndex]
            bottom = bottomDepths[rangeIndex]
            field = ds[self.mpasFieldName].where(ds.depth > top)
            field = field.where(ds.depth <= bottom)
            timeSeries.append(field.sum('nVertLevels'))

            lineColors.append(color)
            lineStyles.append(lines[rangeIndex])
            lineMarkers.append(markers[rangeIndex])
            lineWidths.append(widths[rangeIndex])
            maxPoints.append(points[rangeIndex])
            legendText.append(legends[rangeIndex])

        preprocessedReferenceRunName = config.get(
            'runs', 'preprocessedReferenceRunName')
        if preprocessedReferenceRunName != 'None':
            preprocessedInputDirectory = config.get(
                'oceanPreprocessedReference', 'baseDirectory')

            self.logger.info('  Load in preprocessed reference data...')
            preprocessedFilePrefix = config.get(self.sectionName,
                                                'preprocessedFilePrefix')
            inFilesPreprocessed = '{}/{}.{}.year*.nc'.format(
                preprocessedInputDirectory, preprocessedFilePrefix,
                preprocessedReferenceRunName)

            combine_time_series_with_ncrcat(
                inFilesPreprocessed,
                self.preprocessedIntermediateFileName,
                logger=self.logger)
            dsPreprocessed = open_mpas_dataset(
                fileName=self.preprocessedIntermediateFileName,
                calendar=calendar,
                timeVariableNames='xtime')

            yearStart = days_to_datetime(ds.Time.min(), calendar=calendar).year
            yearEnd = days_to_datetime(ds.Time.max(), calendar=calendar).year
            timeStart = date_to_days(year=yearStart,
                                     month=1,
                                     day=1,
                                     calendar=calendar)
            timeEnd = date_to_days(year=yearEnd,
                                   month=12,
                                   day=31,
                                   calendar=calendar)

            yearEndPreprocessed = days_to_datetime(dsPreprocessed.Time.max(),
                                                   calendar=calendar).year
            if yearStart <= yearEndPreprocessed:
                dsPreprocessed = dsPreprocessed.sel(
                    Time=slice(timeStart, timeEnd))
            else:
                self.logger.warning('Warning: Preprocessed time series ends '
                                    'before the timeSeries startYear and will '
                                    'not be plotted.')
                preprocessedReferenceRunName = 'None'

            # rolling mean seems to have trouble with dask data sets so we
            # write out the data set and read it back as a single-file data set
            # (without dask)
            dsPreprocessed = dsPreprocessed.drop('xtime')
            write_netcdf(dsPreprocessed, self.preprocessedFileName)
            dsPreprocessed = xarray.open_dataset(self.preprocessedFileName)

        if preprocessedReferenceRunName != 'None':
            color = 'purple'
            title = '{} \n {} (purple)'.format(title,
                                               preprocessedReferenceRunName)

            preprocessedFieldPrefix = config.get(self.sectionName,
                                                 'preprocessedFieldPrefix')

            movingAveragePoints = config.getint(self.sectionName,
                                                'movingAveragePoints')

            suffixes = ['tot'] + \
                ['{}m'.format(depth) for depth in divisionDepths] + ['btm']

            # these preprocessed data are already anomalies
            dsPreprocessed = compute_moving_avg(dsPreprocessed,
                                                movingAveragePoints)
            for rangeIndex in range(len(suffixes)):
                variableName = '{}_{}'.format(preprocessedFieldPrefix,
                                              suffixes[rangeIndex])
                if variableName in list(dsPreprocessed.data_vars.keys()):
                    timeSeries.append(dsPreprocessed[variableName])
                else:
                    self.logger.warning(
                        'Warning: Preprocessed variable {} '
                        'not found. Skipping.'.format(variableName))
                    timeSeries.append(None)

                lineColors.append(color)
                lineStyles.append(lines[rangeIndex])
                lineMarkers.append(markers[rangeIndex])
                lineWidths.append(widths[rangeIndex])
                maxPoints.append(points[rangeIndex])
                legendText.append(None)

        if self.controlConfig is not None:

            controlRunName = self.controlConfig.get('runs', 'mainRunName')

            title = '{} \n {} (red)'.format(title, controlRunName)

            self.logger.info('  Load ocean data from control run...')
            controlStartYear = self.controlConfig.getint(
                'timeSeries', 'startYear')
            controlEndYear = self.controlConfig.getint('timeSeries', 'endYear')
            controlStartDate = '{:04d}-01-01_00:00:00'.format(controlStartYear)
            controlEndDate = '{:04d}-12-31_23:59:59'.format(controlEndYear)
            dsRef = open_mpas_dataset(
                fileName=self.refFileName,
                calendar=calendar,
                variableList=[self.mpasFieldName, 'depth'],
                timeVariableNames=None,
                startDate=controlStartDate,
                endDate=controlEndDate)
            dsRef = dsRef.isel(nOceanRegionsTmp=regionIndex)

            color = 'r'

            for rangeIndex in range(len(topDepths)):
                top = topDepths[rangeIndex]
                bottom = bottomDepths[rangeIndex]
                field = dsRef[self.mpasFieldName].where(dsRef.depth > top)
                field = field.where(dsRef.depth <= bottom)
                timeSeries.append(field.sum('nVertLevels'))

                lineColors.append(color)
                lineStyles.append(lines[rangeIndex])
                lineMarkers.append(markers[rangeIndex])
                lineWidths.append(widths[rangeIndex])
                maxPoints.append(points[rangeIndex])
                legendText.append(None)

        if config.has_option(self.taskName, 'firstYearXTicks'):
            firstYearXTicks = config.getint(self.taskName, 'firstYearXTicks')
        else:
            firstYearXTicks = None

        if config.has_option(self.taskName, 'yearStrideXTicks'):
            yearStrideXTicks = config.getint(self.taskName, 'yearStrideXTicks')
        else:
            yearStrideXTicks = None

        timeseries_analysis_plot(config=config,
                                 dsvalues=timeSeries,
                                 calendar=calendar,
                                 title=title,
                                 xlabel=xLabel,
                                 ylabel=yLabel,
                                 movingAveragePoints=None,
                                 lineColors=lineColors,
                                 lineStyles=lineStyles,
                                 markers=lineMarkers,
                                 lineWidths=lineWidths,
                                 legendText=legendText,
                                 maxPoints=maxPoints,
                                 firstYearXTicks=firstYearXTicks,
                                 yearStrideXTicks=yearStrideXTicks)

        savefig(outFileName)

        write_image_xml(config=config,
                        filePrefix=self.filePrefix,
                        componentName='Ocean',
                        componentSubdirectory='ocean',
                        galleryGroup=self.galleryGroup,
                        groupLink=self.groupLink,
                        galleryName=self.galleryName,
                        thumbnailDescription='{} {}'.format(
                            self.regionName, self.thumbnailSuffix),
                        imageDescription=self.imageCaption,
                        imageCaption=self.imageCaption)
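
Each depth range above is selected with a pair of where() calls before the
vertical sum. A minimal, self-contained xarray sketch of the same masking
(values and names are illustrative):

import numpy as np
import xarray as xr

depth = xr.DataArray(np.array([5., 50., 150., 700., 2000.]),
                     dims='nVertLevels')
field = xr.DataArray(np.ones((3, 5)), dims=('Time', 'nVertLevels'))
top, bottom = 0, 700
masked = field.where(depth > top).where(depth <= bottom)
print(masked.sum('nVertLevels').values)  # [4. 4. 4.]: 4 levels in range
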
    def _compute_ice_shelf_fluxes(self):  # {{{
        """
        Reads melt flux time series and computes regional total melt flux and
        mean melt rate.
        """
        # Authors
        # -------
        # Xylar Asay-Davis, Stephen Price

        mpasTimeSeriesTask = self.mpasTimeSeriesTask
        config = self.config

        baseDirectory = build_config_full_path(config, 'output',
                                               'timeSeriesSubdirectory')

        outFileName = '{}/{}'.format(baseDirectory, self.outFileName)

        # Load data:
        inputFile = mpasTimeSeriesTask.outputFile
        dsIn = open_mpas_dataset(fileName=inputFile,
                                 calendar=self.calendar,
                                 variableList=self.variableList,
                                 startDate=self.startDate,
                                 endDate=self.endDate)
        try:
            if os.path.exists(outFileName):
                # The file already exists so load it
                dsOut = xarray.open_dataset(outFileName)
                if numpy.all(dsOut.Time.values == dsIn.Time.values):
                    return dsOut.totalMeltFlux, dsOut.meltRates
                else:
                    self.logger.warning('File {} is incomplete. Deleting '
                                        'it.'.format(outFileName))
                    os.remove(outFileName)
        except OSError:
            # something is potentially wrong with the file, so let's delete
            # it and try again
            self.logger.warning('Problems reading file {}. Deleting '
                                'it.'.format(outFileName))
            os.remove(outFileName)

        # work on data from simulations
        freshwaterFlux = dsIn.timeMonthly_avg_landIceFreshwaterFlux

        restartFileName = \
            mpasTimeSeriesTask.runStreams.readpath('restart')[0]

        dsRestart = xarray.open_dataset(restartFileName)
        areaCell = dsRestart.landIceFraction.isel(Time=0) * dsRestart.areaCell

        mpasMeshName = config.get('input', 'mpasMeshName')
        regionMaskDirectory = config.get('regions', 'regionMaskDirectory')

        regionMaskFileName = '{}/{}_iceShelfMasks.nc'.format(
            regionMaskDirectory, mpasMeshName)

        dsRegionMask = xarray.open_dataset(regionMaskFileName)

        # select only those regions we want to plot
        dsRegionMask = dsRegionMask.isel(nRegions=self.regionIndices)
        cellMasks = dsRegionMask.regionCellMasks

        # convert from kg/s to kg/yr
        totalMeltFlux = constants.sec_per_year * \
            (cellMasks*areaCell*freshwaterFlux).sum(dim='nCells')

        totalArea = (cellMasks * areaCell).sum(dim='nCells')

        # from kg/m^2/yr to m/yr
        meltRates = (1. / constants.rho_fw) * (totalMeltFlux / totalArea)

        # convert from kg/yr to GT/yr
        totalMeltFlux /= constants.kg_per_GT

        baseDirectory = build_config_full_path(config, 'output',
                                               'timeSeriesSubdirectory')

        outFileName = '{}/iceShelfAggregatedFluxes.nc'.format(baseDirectory)

        dsOut = xarray.Dataset()
        dsOut['totalMeltFlux'] = totalMeltFlux
        dsOut.totalMeltFlux.attrs['units'] = 'GT a$^{-1}$'
        dsOut.totalMeltFlux.attrs['description'] = \
            'Total melt flux summed over each ice shelf or region'
        dsOut['meltRates'] = meltRates
        dsOut.meltRates.attrs['units'] = 'm a$^{-1}$'
        dsOut.meltRates.attrs['description'] = \
            'Melt rate averaged over each ice shelf or region'

        write_netcdf(dsOut, outFileName)

        return totalMeltFlux, meltRates  # }}}
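
The conversions above chain kg/s to GT/yr for the total flux and kg/m^2/yr to
m/yr for the melt rate. A worked example with assumed constants (the real
values come from MPAS-Analysis's constants module):

sec_per_year = 365. * 24. * 3600.  # ~3.15e7 s/yr (assumed; ignores leap days)
rho_fw = 1000.                     # kg/m^3, freshwater density (assumed)
kg_per_GT = 1e12                   # kg per gigatonne

fluxKgPerS = 3.0e7                 # hypothetical area-integrated melt flux
totalMeltFlux = sec_per_year * fluxKgPerS / kg_per_GT  # ~950 GT/yr

area = 5.0e11                      # hypothetical ice-shelf area in m^2
meltRate = (sec_per_year * fluxKgPerS) / (rho_fw * area)  # ~1.9 m/yr
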
    def run_task(self):  # {{{
        '''
        Compute the regional-mean time series
        '''
        # Authors
        # -------
        # Xylar Asay-Davis

        config = self.config

        self.logger.info("\nCompute time series of regional means...")

        startDate = '{:04d}-01-01_00:00:00'.format(self.startYear)
        endDate = '{:04d}-12-31_23:59:59'.format(self.endYear)

        regionGroup = self.regionGroup
        sectionSuffix = regionGroup[0].upper() + \
            regionGroup[1:].replace(' ', '')
        timeSeriesName = sectionSuffix[0].lower() + sectionSuffix[1:]
        sectionName = 'timeSeries{}'.format(sectionSuffix)

        outputDirectory = '{}/{}/'.format(
            build_config_full_path(config, 'output', 'timeseriesSubdirectory'),
            timeSeriesName)
        try:
            os.makedirs(outputDirectory)
        except OSError:
            pass

        outFileName = '{}/{}_{:04d}-{:04d}.nc'.format(outputDirectory,
                                                      timeSeriesName,
                                                      self.startYear,
                                                      self.endYear)

        inputFiles = sorted(
            self.historyStreams.readpath('timeSeriesStatsMonthlyOutput',
                                         startDate=startDate,
                                         endDate=endDate,
                                         calendar=self.calendar))

        years, months = get_files_year_month(inputFiles, self.historyStreams,
                                             'timeSeriesStatsMonthlyOutput')

        variables = config.getExpression(sectionName, 'variables')

        variableList = [var['mpas'] for var in variables] + \
            ['timeMonthly_avg_layerThickness']

        outputExists = os.path.exists(outFileName)
        outputValid = outputExists
        if outputExists:
            with open_mpas_dataset(fileName=outFileName,
                                   calendar=self.calendar,
                                   timeVariableNames=None,
                                   variableList=None,
                                   startDate=startDate,
                                   endDate=endDate) as dsOut:

                for inIndex in range(dsOut.dims['Time']):

                    mask = numpy.logical_and(
                        dsOut.year[inIndex].values == years,
                        dsOut.month[inIndex].values == months)
                    if numpy.count_nonzero(mask) == 0:
                        outputValid = False
                        break

        if outputValid:
            self.logger.info('  Time series exists -- Done.')
            return

        # Load mesh related variables
        try:
            restartFileName = self.runStreams.readpath('restart')[0]
        except ValueError:
            raise IOError('No MPAS-O restart file found: need at least one '
                          'restart file for ocean region time series')

        cellsChunk = 32768
        timeChunk = 1

        datasets = []
        for timeIndex, fileName in enumerate(inputFiles):

            dsTimeSlice = open_mpas_dataset(fileName=fileName,
                                            calendar=self.calendar,
                                            variableList=variableList,
                                            startDate=startDate,
                                            endDate=endDate)
            datasets.append(dsTimeSlice)

        chunk = {'Time': timeChunk, 'nCells': cellsChunk}

        if config.has_option(sectionName, 'zmin'):
            config_zmin = config.getfloat(sectionName, 'zmin')
        else:
            config_zmin = None

        if config.has_option(sectionName, 'zmax'):
            config_zmax = config.getfloat(sectionName, 'zmax')
        else:
            config_zmax = None

        with dask.config.set(scheduler='threads',
                             pool=ThreadPool(self.daskThreads)):
            # combine data sets into a single data set
            dsIn = xarray.concat(datasets, 'Time').chunk(chunk)

            chunk = {'nCells': cellsChunk}
            dsRestart = xarray.open_dataset(restartFileName)
            dsRestart = dsRestart.isel(Time=0).chunk(chunk)
            dsIn['areaCell'] = dsRestart.areaCell
            if 'landIceMask' in dsRestart:
                # only the region outside of ice-shelf cavities
                dsIn['openOceanMask'] = dsRestart.landIceMask == 0

            dsIn['zMid'] = compute_zmid(dsRestart.bottomDepth,
                                        dsRestart.maxLevelCell,
                                        dsRestart.layerThickness)

            regionMaskFileName = self.masksSubtask.maskFileName

            dsRegionMask = xarray.open_dataset(regionMaskFileName)

            maskRegionNames = decode_strings(dsRegionMask.regionNames)

            datasets = []
            regionIndices = []
            for regionName in self.regionNames:

                self.logger.info('    region: {}'.format(regionName))
                regionIndex = maskRegionNames.index(regionName)
                regionIndices.append(regionIndex)

                chunk = {'nCells': cellsChunk}
                dsMask = dsRegionMask.isel(nRegions=regionIndex).chunk(chunk)

                cellMask = dsMask.regionCellMasks == 1
                if 'openOceanMask' in dsIn:
                    cellMask = numpy.logical_and(cellMask, dsIn.openOceanMask)
                dsRegion = dsIn.where(cellMask, drop=True)

                totalArea = dsRegion['areaCell'].sum()
                self.logger.info('      totalArea: {} mil. km^2'.format(
                    1e-12 * totalArea.values))

                self.logger.info("Don't worry about the following dask "
                                 "warnings.")
                if config_zmin is None:
                    zmin = dsMask.zmin
                else:
                    zmin = config_zmin

                if config_zmax is None:
                    zmax = dsMask.zmax
                else:
                    zmax = config_zmax

                depthMask = numpy.logical_and(dsRegion.zMid >= zmin,
                                              dsRegion.zMid <= zmax)
                depthMask.compute()
                self.logger.info("Dask warnings should be done.")
                dsRegion['depthMask'] = depthMask

                layerThickness = dsRegion.timeMonthly_avg_layerThickness
                dsRegion['volCell'] = (dsRegion.areaCell *
                                       layerThickness).where(depthMask)
                totalVol = dsRegion.volCell.sum(dim='nVertLevels').sum(
                    dim='nCells')
                totalVol.compute()
                self.logger.info('      totalVol (mil. km^3): {}'.format(
                    1e-15 * totalVol.values))

                dsRegion = dsRegion.transpose('Time', 'nCells', 'nVertLevels')

                dsOut = xarray.Dataset()
                dsOut['totalVol'] = totalVol
                dsOut.totalVol.attrs['units'] = 'm^3'
                dsOut['totalArea'] = totalArea
                dsOut.totalArea.attrs['units'] = 'm^2'
                dsOut['zbounds'] = ('nbounds', [zmin, zmax])
                dsOut.zbounds.attrs['units'] = 'm'

                for var in variables:
                    outName = var['name']
                    self.logger.info('      {}'.format(outName))
                    mpasVarName = var['mpas']
                    timeSeries = dsRegion[mpasVarName]
                    units = timeSeries.units
                    description = timeSeries.long_name

                    is3d = 'nVertLevels' in timeSeries.dims
                    if is3d:
                        timeSeries = \
                            (dsRegion.volCell*timeSeries.where(depthMask)).sum(
                                dim='nVertLevels').sum(dim='nCells') / totalVol
                    else:
                        timeSeries = \
                            (dsRegion.areaCell*timeSeries).sum(
                                dim='nCells') / totalArea

                    timeSeries.compute()

                    dsOut[outName] = timeSeries
                    dsOut[outName].attrs['units'] = units
                    dsOut[outName].attrs['description'] = description
                    dsOut[outName].attrs['is3d'] = str(is3d)

                datasets.append(dsOut)

            # combine data sets into a single data set
            dsOut = xarray.concat(datasets, 'nRegions')

            dsOut.coords['regionNames'] = dsRegionMask.regionNames.isel(
                nRegions=regionIndices)
            dsOut.coords['year'] = (('Time'), years)
            dsOut['year'].attrs['units'] = 'years'
            dsOut.coords['month'] = (('Time'), months)
            dsOut['month'].attrs['units'] = 'months'

            write_netcdf(dsOut, outFileName)
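
The regional means above weight 3-D fields by cell volume (volCell) and 2-D
fields by cell area (areaCell). A compact xarray sketch of the 3-D case under
those assumptions (names and values are illustrative):

import numpy as np
import xarray as xr

area = xr.DataArray(np.array([1.0, 2.0, 1.0]), dims='nCells')
thickness = xr.DataArray(np.ones((3, 4)), dims=('nCells', 'nVertLevels'))
volCell = area * thickness
field = xr.DataArray(np.full((2, 3, 4), 5.0),
                     dims=('Time', 'nCells', 'nVertLevels'))

totalVol = volCell.sum(dim=['nCells', 'nVertLevels'])
mean3d = (volCell * field).sum(dim=['nCells', 'nVertLevels']) / totalVol
print(mean3d.values)  # [5. 5.]: a constant field recovers its value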