def run_task(self):  # {{{
        '''
        Compute climatologies of melt rates from E3SM/MPAS output

        This function has been overridden to compute ``zMid`` based on data
        from a restart file for later use in vertically interpolating to
        reference depths.
        '''
        # Authors
        # -------
        # Xylar Asay-Davis

        # first, compute zMid and load maxLevelCell from the restart file
        with xr.open_dataset(self.restartFileName) as ds:
            ds = mpas_xarray.subset_variables(ds, ['maxLevelCell',
                                                   'bottomDepth',
                                                   'layerThickness'])
            ds = ds.isel(Time=0)

            self.maxLevelCell = ds.maxLevelCell - 1

            zMid = compute_zmid(ds.bottomDepth, ds.maxLevelCell,
                                ds.layerThickness)

            self.zMid = \
                xr.DataArray.from_dict({'dims': ('nCells', 'nVertLevels'),
                                        'data': zMid})

        # then, call run from the base class (RemapMpasClimatologySubtask),
        # which will perform the horizontal remapping
        super(ComputeTransectsSubtask, self).run_task()

        obsDatasets = self.obsDatasets.get_observations()

        self.logger.info('Interpolating each transect vertically...')
        # finally, vertically interpolate and write out each transect
        for season in self.seasons:

            remappedFileName = self.get_remapped_file_name(
                    season, comparisonGridName=self.transectCollectionName)

            with xr.open_dataset(remappedFileName) as ds:
                transectNames = list(obsDatasets.keys())
                for transectIndex, transectName in enumerate(transectNames):
                    self.logger.info('  {}'.format(transectName))
                    dsObs = obsDatasets[transectName]
                    outFileName = self.get_remapped_file_name(
                            season, comparisonGridName=transectName)
                    outObsFileName = self.obsDatasets.get_out_file_name(
                            transectName, self.verticalComparisonGridName)
                    self._vertical_interp(ds, transectIndex, dsObs,
                                          outFileName, outObsFileName)

        for transectName in obsDatasets:
            obsDatasets[transectName].close()
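
# A minimal standalone sketch (toy sizes, unrelated to any real MPAS mesh) of
# the xr.DataArray.from_dict pattern used above to wrap the computed zMid:
import numpy as np
import xarray as xr

zMidData = -np.cumsum(np.full((2, 3), 10.), axis=1)
zMidFromDict = xr.DataArray.from_dict({'dims': ('nCells', 'nVertLevels'),
                                       'data': zMidData})
print(zMidFromDict.sizes)  # Frozen({'nCells': 2, 'nVertLevels': 3})
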
    def _compute_ohc(self, climatology):  # {{{
        """
        Compute the OHC from the temperature and layer thicknesses in a given
        climatology data sets.
        """
        dsRestart = xarray.open_dataset(self.restartFileName)
        dsRestart = dsRestart.isel(Time=0)

        # specific heat [J/(kg*degC)]
        cp = self.namelist.getfloat('config_specific_heat_sea_water')
        # [kg/m3]
        rho = self.namelist.getfloat('config_density0')

        # factor to convert the OHC from J m^-2 to GJ m^-2
        unitsScalefactor = 1e-9

        nVertLevels = dsRestart.sizes['nVertLevels']

        zMid = compute_zmid(dsRestart.bottomDepth, dsRestart.maxLevelCell,
                            dsRestart.layerThickness)

        vertIndex = xarray.DataArray.from_dict({
            'dims': ('nVertLevels', ),
            'data':
            numpy.arange(nVertLevels)
        })
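        # vertIndex broadcasts against the (nCells, nVertLevels) fields below,
        # so comparing it with maxLevelCell masks out levels below the sea
        # floor cell by cell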

        temperature = climatology['timeMonthly_avg_activeTracers_temperature']
        layerThickness = climatology['timeMonthly_avg_layerThickness']

        # zMid is negative below the sea surface, so the depth range keeps
        # levels with maxDepth <= zMid <= minDepth
        masks = [vertIndex < dsRestart.maxLevelCell,
                 zMid <= self.minDepth,
                 zMid >= self.maxDepth]
        for mask in masks:
            temperature = temperature.where(mask)
            layerThickness = layerThickness.where(mask)

        ohc = unitsScalefactor * rho * cp * layerThickness * temperature
        ohc = ohc.sum(dim='nVertLevels')
        return ohc  # }}}
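
# A toy illustration (made-up values, not E3SM output) of the masking used in
# _compute_ohc above: a per-level index compared against maxLevelCell hides
# levels below the sea floor before the vertical sum.
import numpy
import xarray

nCells, nVertLevels = 3, 4
vertIndexToy = xarray.DataArray(numpy.arange(nVertLevels), dims='nVertLevels')
maxLevelCellToy = xarray.DataArray([4, 2, 3], dims='nCells')
temperatureToy = xarray.DataArray(numpy.ones((nCells, nVertLevels)),
                                  dims=('nCells', 'nVertLevels'))
temperatureToy = temperatureToy.where(vertIndexToy < maxLevelCellToy)
print(temperatureToy.sum(dim='nVertLevels').values)  # [4. 2. 3.]
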
Example #3
    def run_task(self):  # {{{
        """
        Compute climatologies of T or S from ACME/MPAS output

        This function has been overridden to load ``maxLevelCell`` from a
        restart file for later use in indexing bottom T and S.
        ``verticalIndex`` is also computed for later indexing of
        the model level. It then simply calls the run function from
        ClimatologyMapOcean.
        """
        # Authors
        # -------
        # Xylar Asay-Davis

        # first, load the mesh variables needed to compute zMid from the
        # restart file
        ds = xr.open_dataset(self.restartFileName)
        ds = ds[['maxLevelCell', 'bottomDepth', 'layerThickness']]
        ds = ds.isel(Time=0)

        self.maxLevelCell = ds.maxLevelCell - 1

        depthNames = [str(depth) for depth in self.depths]

        zMid = compute_zmid(ds.bottomDepth, ds.maxLevelCell,
                            ds.layerThickness)

        nVertLevels = zMid.shape[1]
        zMid.coords['verticalIndex'] = \
            ('nVertLevels',
             np.arange(nVertLevels))

        zTop = zMid.isel(nVertLevels=0)
        # Each vertical layer has at most one non-NaN value so the "sum"
        # over the vertical is used to collapse the array in the vertical
        # dimension
        zBot = zMid.where(zMid.verticalIndex == self.maxLevelCell).sum(
            dim='nVertLevels')
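        # e.g. with maxLevelCell = 1 and zMid = [-5., -15., nan], only the
        # index-1 value survives the where(), so the sum returns -15.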

        verticalIndices = np.zeros((len(self.depths), ds.sizes['nCells']), int)

        mask = np.zeros(verticalIndices.shape, bool)

        for depthIndex, depth in enumerate(self.depths):
            if depth == 'top':
                # the top level has zero-based index 0
                verticalIndices[depthIndex, :] = 0
                mask[depthIndex, :] = self.maxLevelCell.values >= 0
            elif depth == 'bot':
                # maxLevelCell was converted to a zero-based index above
                verticalIndices[depthIndex, :] = self.maxLevelCell.values
                mask[depthIndex, :] = self.maxLevelCell.values >= 0
            else:
                verticalIndex = np.abs(zMid - depth).argmin(dim='nVertLevels')

                verticalIndices[depthIndex, :] = verticalIndex.values
                mask[depthIndex, :] = np.logical_and(depth <= zTop,
                                                     depth >= zBot).values

        self.verticalIndices = \
            xr.DataArray.from_dict({'dims': ('depthSlice', 'nCells'),
                                    'coords': {'depthSlice':
                                               {'dims': ('depthSlice',),
                                                'data': depthNames}},
                                    'data': verticalIndices})
        self.verticalIndexMask = \
            xr.DataArray.from_dict({'dims': ('depthSlice', 'nCells'),
                                    'coords': {'depthSlice':
                                               {'dims': ('depthSlice',),
                                                'data': depthNames}},
                                    'data': mask})

        # then, call run from the base class (RemapMpasClimatologySubtask),
        # which will perform the main function of the task
        super(RemapDepthSlicesSubtask, self).run_task()
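
# Sketch with toy numbers of the nearest-level lookup above: argmin over
# |zMid - depth| picks, for each cell, the vertical index whose layer
# mid-depth is closest to the requested comparison depth.
import numpy as np
import xarray as xr

zMidToy = xr.DataArray([[-5., -15., -30.], [-4., -12., -25.]],
                       dims=('nCells', 'nVertLevels'))
nearest = np.abs(zMidToy - (-14.)).argmin(dim='nVertLevels')
print(nearest.values)  # [1 1]
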
    def _write_mpas_t_s(self, config):  # {{{

        climatologyName = 'TS_{}_{}'.format(self.prefix, self.season)
        outFileName = get_masked_mpas_climatology_file_name(config,
                                                            self.season,
                                                            self.componentName,
                                                            climatologyName,
                                                            op='avg')

        if os.path.exists(outFileName):
            with xarray.open_dataset(outFileName) as ds:
                zmin, zmax = ds.zbounds.values
            return zmin, zmax

        with dask.config.set(scheduler='threads',
                             pool=ThreadPool(self.daskThreads)):

            self.logger.info('  Extracting T and S in the region...')

            sectionName = self.sectionName

            cellsChunk = 32768
            chunk = {'nCells': cellsChunk}

            try:
                restartFileName = self.runStreams.readpath('restart')[0]
            except ValueError:
                raise IOError('No MPAS-O restart file found: need at least one'
                              ' restart file to plot T-S diagrams')
            dsRestart = xarray.open_dataset(restartFileName)
            dsRestart = dsRestart.isel(Time=0).chunk(chunk)

            regionMaskFileName = self.mpasMasksSubtask.maskFileName

            dsRegionMask = xarray.open_dataset(regionMaskFileName)

            maskRegionNames = decode_strings(dsRegionMask.regionNames)
            regionIndex = maskRegionNames.index(self.regionName)

            dsMask = dsRegionMask.isel(nRegions=regionIndex).chunk(chunk)

            cellMask = dsMask.regionCellMasks == 1
            if 'landIceMask' in dsRestart:
                # only the region outside of ice-shelf cavities
                cellMask = numpy.logical_and(cellMask,
                                             dsRestart.landIceMask == 0)

            if config.has_option(sectionName, 'zmin'):
                zmin = config.getfloat(sectionName, 'zmin')
            else:
                if 'zminRegions' in dsMask:
                    zmin = dsMask.zminRegions.values
                else:
                    # the old naming convention, used in some pre-generated
                    # mask files
                    zmin = dsMask.zmin.values

            if config.has_option(sectionName, 'zmax'):
                zmax = config.getfloat(sectionName, 'zmax')
            else:
                if 'zmaxRegions' in dsMask:
                    zmax = dsMask.zmaxRegions.values
                else:
                    # the old naming convention, used in some pre-generated
                    # mask files
                    zmax = dsMask.zmax.values

            inFileName = get_unmasked_mpas_climatology_file_name(
                config, self.season, self.componentName, op='avg')

            ds = xarray.open_dataset(inFileName)

            variableList = [
                'timeMonthly_avg_activeTracers_temperature',
                'timeMonthly_avg_activeTracers_salinity',
                'timeMonthly_avg_layerThickness'
            ]
            ds = ds[variableList]

            ds['zMid'] = compute_zmid(dsRestart.bottomDepth,
                                      dsRestart.maxLevelCell,
                                      dsRestart.layerThickness)

            ds['volume'] = (dsRestart.areaCell *
                            ds['timeMonthly_avg_layerThickness'])

            ds = ds.where(cellMask, drop=True)

            self.logger.info("Don't worry about the following dask "
                             "warnings.")
            depthMask = numpy.logical_and(ds.zMid >= zmin, ds.zMid <= zmax)
            depthMask.compute()
            self.logger.info("Dask warnings should be done.")
            ds['depthMask'] = depthMask

            for var in variableList:
                ds[var] = ds[var].where(depthMask)

            T = ds['timeMonthly_avg_activeTracers_temperature'].values.ravel()
            mask = numpy.isfinite(T)
            T = T[mask]

            S = ds['timeMonthly_avg_activeTracers_salinity'].values.ravel()
            S = S[mask]

            zMid = ds['zMid'].values.ravel()[mask]

            volume = ds['volume'].values.ravel()[mask]

            dsOut = xarray.Dataset()
            dsOut['T'] = ('nPoints', T)
            dsOut['S'] = ('nPoints', S)
            dsOut['z'] = ('nPoints', zMid)
            dsOut['volume'] = ('nPoints', volume)
            dsOut['zbounds'] = ('nBounds', [zmin, zmax])
            write_netcdf(dsOut, outFileName)

        return zmin, zmax  # }}}
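
# The same flatten-and-mask pattern as in _write_mpas_t_s above, on synthetic
# arrays: ravel the 2-D fields, keep only points with finite temperature, and
# store them along a 1-D nPoints dimension.
import numpy
import xarray

T = numpy.array([[1., numpy.nan], [2., 3.]]).ravel()
S = numpy.array([[34., 35.], [34.5, 34.8]]).ravel()
mask = numpy.isfinite(T)
dsPoints = xarray.Dataset()
dsPoints['T'] = ('nPoints', T[mask])
dsPoints['S'] = ('nPoints', S[mask])
print(dsPoints.sizes)  # Frozen({'nPoints': 3})
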
    def run_task(self):  # {{{
        """
        Compute the regional-mean time series
        """
        # Authors
        # -------
        # Xylar Asay-Davis

        config = self.config

        self.logger.info("\nCompute depth mask for regional means...")

        regionGroup = self.regionGroup
        sectionSuffix = regionGroup[0].upper() + \
            regionGroup[1:].replace(' ', '')
        timeSeriesName = sectionSuffix[0].lower() + sectionSuffix[1:]
        sectionName = 'timeSeries{}'.format(sectionSuffix)

        outputDirectory = '{}/{}/'.format(
            build_config_full_path(config, 'output', 'timeseriesSubdirectory'),
            timeSeriesName)
        os.makedirs(outputDirectory, exist_ok=True)

        outFileName = '{}/depthMasks{}.nc'.format(outputDirectory,
                                                  timeSeriesName)

        if os.path.exists(outFileName):
            self.logger.info('  Mask file exists -- Done.')
            return

        # Load mesh related variables
        try:
            restartFileName = self.runStreams.readpath('restart')[0]
        except ValueError:
            raise IOError('No MPAS-O restart file found: need at least one '
                          'restart file for ocean region time series')

        if config.has_option(sectionName, 'zmin'):
            config_zmin = config.getfloat(sectionName, 'zmin')
        else:
            config_zmin = None

        if config.has_option(sectionName, 'zmax'):
            config_zmax = config.getfloat(sectionName, 'zmax')
        else:
            config_zmax = None

        dsRestart = xarray.open_dataset(restartFileName).isel(Time=0)
        zMid = compute_zmid(dsRestart.bottomDepth, dsRestart.maxLevelCell,
                            dsRestart.layerThickness)
        areaCell = dsRestart.areaCell
        if 'landIceMask' in dsRestart:
            # only the region outside of ice-shelf cavities
            openOceanMask = dsRestart.landIceMask == 0
        else:
            openOceanMask = None

        regionMaskFileName = self.masksSubtask.maskFileName
        dsRegionMask = xarray.open_dataset(regionMaskFileName)
        maskRegionNames = decode_strings(dsRegionMask.regionNames)

        regionIndices = []
        for regionName in self.regionNames:
            for index, otherName in enumerate(maskRegionNames):
                if regionName == otherName:
                    regionIndices.append(index)
                    break

        # select only those regions we want to plot
        dsRegionMask = dsRegionMask.isel(nRegions=regionIndices)

        nRegions = dsRegionMask.sizes['nRegions']

        datasets = []
        for regionIndex in range(nRegions):
            self.logger.info('    region: {}'.format(
                self.regionNames[regionIndex]))
            dsRegion = dsRegionMask.isel(nRegions=regionIndex)
            cellMask = dsRegion.regionCellMasks == 1

            if openOceanMask is not None:
                cellMask = numpy.logical_and(cellMask, openOceanMask)

            totalArea = areaCell.where(cellMask).sum()
            self.logger.info('      totalArea: {} mil. km^2'.format(
                1e-12 * totalArea.values))

            if config_zmin is None:
                if 'zminRegions' in dsRegion:
                    zmin = dsRegion.zminRegions
                else:
                    # the old naming convention, used in some pre-generated
                    # mask files
                    zmin = dsRegion.zmin
            else:
                zmin = config_zmin

            if config_zmax is None:
                if 'zmaxRegions' in dsRegion:
                    zmax = dsRegion.zmaxRegions
                else:
                    # the old naming convention, used in some pre-generated
                    # mask files
                    zmax = dsRegion.zmax
            else:
                zmax = config_zmax

            depthMask = numpy.logical_and(zMid >= zmin, zMid <= zmax)
            dsOut = xarray.Dataset()
            dsOut['zmin'] = zmin
            dsOut['zmax'] = zmax
            dsOut['totalArea'] = totalArea
            dsOut['cellMask'] = cellMask
            dsOut['depthMask'] = depthMask
            datasets.append(dsOut)

        dsOut = xarray.concat(objs=datasets, dim='nRegions')
        zbounds = numpy.zeros((nRegions, 2))
        zbounds[:, 0] = dsOut.zmin.values
        zbounds[:, 1] = dsOut.zmax.values
        dsOut['zbounds'] = (('nRegions', 'nbounds'), zbounds)
        dsOut['areaCell'] = areaCell
        dsOut['regionNames'] = dsRegionMask.regionNames
        write_netcdf(dsOut, outFileName)
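
# Minimal sketch of the per-region concat pattern used above: scalar zmin/zmax
# values (made-up depths here) become 1-D arrays along nRegions after
# xarray.concat.
import xarray

regionDatasets = []
for zminToy, zmaxToy in [(-700., 0.), (-2000., -700.)]:
    dsToy = xarray.Dataset()
    dsToy['zmin'] = zminToy
    dsToy['zmax'] = zmaxToy
    regionDatasets.append(dsToy)
dsToy = xarray.concat(regionDatasets, dim='nRegions')
print(dsToy.zmin.values)  # [ -700. -2000.]
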
    def run_task(self):  # {{{
        '''
        Compute the regional-mean time series
        '''
        # Authors
        # -------
        # Xylar Asay-Davis

        config = self.config

        self.logger.info("\nCompute time series of regional means...")

        startDate = '{:04d}-01-01_00:00:00'.format(self.startYear)
        endDate = '{:04d}-12-31_23:59:59'.format(self.endYear)

        regionGroup = self.regionGroup
        sectionSuffix = regionGroup[0].upper() + \
            regionGroup[1:].replace(' ', '')
        timeSeriesName = sectionSuffix[0].lower() + sectionSuffix[1:]
        sectionName = 'timeSeries{}'.format(sectionSuffix)

        outputDirectory = '{}/{}/'.format(
            build_config_full_path(config, 'output', 'timeseriesSubdirectory'),
            timeSeriesName)
        os.makedirs(outputDirectory, exist_ok=True)

        outFileName = '{}/{}_{:04d}-{:04d}.nc'.format(outputDirectory,
                                                      timeSeriesName,
                                                      self.startYear,
                                                      self.endYear)

        inputFiles = sorted(
            self.historyStreams.readpath('timeSeriesStatsMonthlyOutput',
                                         startDate=startDate,
                                         endDate=endDate,
                                         calendar=self.calendar))

        years, months = get_files_year_month(inputFiles, self.historyStreams,
                                             'timeSeriesStatsMonthlyOutput')

        variables = config.getExpression(sectionName, 'variables')

        variableList = [var['mpas'] for var in variables] + \
            ['timeMonthly_avg_layerThickness']

        outputExists = os.path.exists(outFileName)
        outputValid = outputExists
        if outputExists:
            with open_mpas_dataset(fileName=outFileName,
                                   calendar=self.calendar,
                                   timeVariableNames=None,
                                   variableList=None,
                                   startDate=startDate,
                                   endDate=endDate) as dsOut:

                for inIndex in range(dsOut.sizes['Time']):

                    mask = numpy.logical_and(
                        dsOut.year[inIndex].values == years,
                        dsOut.month[inIndex].values == months)
                    if numpy.count_nonzero(mask) == 0:
                        outputValid = False
                        break

        if outputValid:
            self.logger.info('  Time series exists -- Done.')
            return

        # Load mesh related variables
        try:
            restartFileName = self.runStreams.readpath('restart')[0]
        except ValueError:
            raise IOError('No MPAS-O restart file found: need at least one '
                          'restart file for ocean region time series')

        cellsChunk = 32768
        timeChunk = 1

        datasets = []
        for fileName in inputFiles:

            dsTimeSlice = open_mpas_dataset(fileName=fileName,
                                            calendar=self.calendar,
                                            variableList=variableList,
                                            startDate=startDate,
                                            endDate=endDate)
            datasets.append(dsTimeSlice)

        chunk = {'Time': timeChunk, 'nCells': cellsChunk}

        if config.has_option(sectionName, 'zmin'):
            config_zmin = config.getfloat(sectionName, 'zmin')
        else:
            config_zmin = None

        if config.has_option(sectionName, 'zmax'):
            config_zmax = config.getfloat(sectionName, 'zmax')
        else:
            config_zmax = None

        with dask.config.set(scheduler='threads',
                             pool=ThreadPool(self.daskThreads)):
            # combine data sets into a single data set
            dsIn = xarray.concat(datasets, 'Time').chunk(chunk)

            chunk = {'nCells': cellsChunk}
            dsRestart = xarray.open_dataset(restartFileName)
            dsRestart = dsRestart.isel(Time=0).chunk(chunk)
            dsIn['areaCell'] = dsRestart.areaCell
            if 'landIceMask' in dsRestart:
                # only the region outside of ice-shelf cavities
                dsIn['openOceanMask'] = dsRestart.landIceMask == 0

            dsIn['zMid'] = compute_zmid(dsRestart.bottomDepth,
                                        dsRestart.maxLevelCell,
                                        dsRestart.layerThickness)

            regionMaskFileName = self.masksSubtask.maskFileName

            dsRegionMask = xarray.open_dataset(regionMaskFileName)

            maskRegionNames = decode_strings(dsRegionMask.regionNames)

            datasets = []
            regionIndices = []
            for regionName in self.regionNames:

                self.logger.info('    region: {}'.format(regionName))
                regionIndex = maskRegionNames.index(regionName)
                regionIndices.append(regionIndex)

                chunk = {'nCells': cellsChunk}
                dsMask = dsRegionMask.isel(nRegions=regionIndex).chunk(chunk)

                cellMask = dsMask.regionCellMasks == 1
                if 'openOceanMask' in dsIn:
                    cellMask = numpy.logical_and(cellMask, dsIn.openOceanMask)
                dsRegion = dsIn.where(cellMask, drop=True)

                totalArea = dsRegion['areaCell'].sum()
                self.logger.info('      totalArea: {} mil. km^2'.format(
                    1e-12 * totalArea.values))

                self.logger.info("Don't worry about the following dask "
                                 "warnings.")
                if config_zmin is None:
                    zmin = dsMask.zmin
                else:
                    zmin = config_zmin

                if config_zmax is None:
                    zmax = dsMask.zmax
                else:
                    zmax = config_zmax

                depthMask = numpy.logical_and(dsRegion.zMid >= zmin,
                                              dsRegion.zMid <= zmax)
                depthMask.compute()
                self.logger.info("Dask warnings should be done.")
                dsRegion['depthMask'] = depthMask

                layerThickness = dsRegion.timeMonthly_avg_layerThickness
                dsRegion['volCell'] = (dsRegion.areaCell *
                                       layerThickness).where(depthMask)
                totalVol = dsRegion.volCell.sum(dim='nVertLevels').sum(
                    dim='nCells')
                totalVol.compute()
                self.logger.info('      totalVol (mil. km^3): {}'.format(
                    1e-15 * totalVol.values))

                dsRegion = dsRegion.transpose('Time', 'nCells', 'nVertLevels')

                dsOut = xarray.Dataset()
                dsOut['totalVol'] = totalVol
                dsOut.totalVol.attrs['units'] = 'm^3'
                dsOut['totalArea'] = totalArea
                dsOut.totalArea.attrs['units'] = 'm^2'
                dsOut['zbounds'] = ('nbounds', [zmin, zmax])
                dsOut.zbounds.attrs['units'] = 'm'

                for var in variables:
                    outName = var['name']
                    self.logger.info('      {}'.format(outName))
                    mpasVarName = var['mpas']
                    timeSeries = dsRegion[mpasVarName]
                    units = timeSeries.units
                    description = timeSeries.long_name

                    is3d = 'nVertLevels' in timeSeries.dims
                    if is3d:
                        timeSeries = \
                            (dsRegion.volCell*timeSeries.where(depthMask)).sum(
                                dim='nVertLevels').sum(dim='nCells') / totalVol
                    else:
                        timeSeries = \
                            (dsRegion.areaCell*timeSeries).sum(
                                dim='nCells') / totalArea

                    timeSeries.compute()

                    dsOut[outName] = timeSeries
                    dsOut[outName].attrs['units'] = units
                    dsOut[outName].attrs['description'] = description
                    dsOut[outName].attrs['is3d'] = str(is3d)

                datasets.append(dsOut)

            # combine data sets into a single data set
            dsOut = xarray.concat(datasets, 'nRegions')

            dsOut.coords['regionNames'] = dsRegionMask.regionNames.isel(
                nRegions=regionIndices)
            dsOut.coords['year'] = (('Time'), years)
            dsOut['year'].attrs['units'] = 'years'
            dsOut.coords['month'] = (('Time'), months)
            dsOut['month'].attrs['units'] = 'months'

            write_netcdf(dsOut, outFileName)
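
# Toy version (made-up numbers) of the volume-weighted regional mean computed
# above for 3-D fields: sum volCell * field over levels and cells, then
# divide by the total volume.
import xarray

volCellToy = xarray.DataArray([[2., 1.], [1., 1.]],
                              dims=('nCells', 'nVertLevels'))
fieldToy = xarray.DataArray([[10., 20.], [30., 40.]],
                            dims=('nCells', 'nVertLevels'))
totalVolToy = volCellToy.sum()
meanToy = (volCellToy * fieldToy).sum(dim='nVertLevels').sum(
    dim='nCells') / totalVolToy
print(float(meanToy))  # 22.0
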
Example #7
            inputFile = '{}/{}.mpaso.hist.am.timeSeriesStatsMonthly.{:04d}-{:02d}-01.nc'.format(
                rundir, runName, year, month)
            if not os.path.exists(inputFile):
                raise IOError('Input file: {} not found'.format(inputFile))

            dsTimeSlice = open_mpas_dataset(fileName=inputFile,
                                            calendar=calendar,
                                            variableList=variableList,
                                            startDate=startDate,
                                            endDate=endDate)
            datasets.append(dsTimeSlice)
        # combine data sets into a single data set
        dsIn = xarray.concat(datasets, 'Time')

        layerThickness = dsIn.timeMonthly_avg_layerThickness
        zMid = compute_zmid(refBottomDepth, maxLevelCell, layerThickness)

        # Compute regional averages one depth range at a time
        for k in range(len(zmins)):

            zmin = zmins[k]
            zmax = zmaxs[k]
            # Global depth-masked layer volume
            depthMask = np.logical_and(zMid >= zmin, zMid <= zmax)
            layerVol = areaCell * layerThickness.where(depthMask, drop=False)
            globalLayerVol = layerVol.sum(dim='nVertLevels').sum(dim='nCells')

            timeSeriesFile = '{}/{}_z{:04d}-{:04d}_year{:04d}.nc'.format(
                outdir, groupName, np.abs(int(zmax)), np.abs(int(zmin)),
                year)
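
# Toy depth-range mask like the one above: True where zMid lies between zmin
# and zmax (values here are invented for illustration).
import numpy as np
import xarray as xr

zMidDemo = xr.DataArray([[-5., -50., -500.]], dims=('nCells', 'nVertLevels'))
maskDemo = np.logical_and(zMidDemo >= -100., zMidDemo <= 0.)
print(maskDemo.values)  # [[ True  True False]]
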
Example #8
    # or one element longer than colorbarLevels list
    raise ValueError('length mismatch between indices and colorbarLevels')
colormap = cols.ListedColormap(colormap0(colorIndices))
colormap.set_under(underColor)
colormap.set_over(overColor)
cnorm = cols.BoundaryNorm(clevels, colormap.N)
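# BoundaryNorm maps each data value to the colormap entry for whichever
# interval between consecutive clevels contains it; set_under/set_over supply
# the colors for values outside the range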

tic = time.perf_counter()

mesh = xr.open_dataset(meshfile)
lat = mesh.latCell.values
lon = mesh.lonCell.values
weights = np.cos(lat)
lat = np.rad2deg(lat)
lon = np.rad2deg(lon)
zMid = compute_zmid(mesh.bottomDepth, mesh.maxLevelCell,
                    mesh.layerThickness).squeeze()
depthMask = np.logical_and(zMid >= zmin, zMid <= zmax)
depthMask.compute()

ds = xr.open_mfdataset(infiles, combine='nested', concat_dim='Time')
ds['depthMask'] = depthMask
layerThickness = ds.timeMonthly_avg_layerThickness.where(depthMask)
layerThicknessSum = layerThickness.sum(dim='nVertLevels')
ntime = ds.sizes['Time']

toc = time.perf_counter()
print('\nReading data done in {:0.4f} seconds'.format(toc - tic))

if plot_anomalies:
    figtitle0 = 'Anomaly'
else: