def run_task(self):  # {{{
    """
    Plots time-series output of properties in an ocean region.
    """
    # Authors
    # -------
    # Xylar Asay-Davis

    self.logger.info("\nPlotting time series of ocean properties of {}"
                     "...".format(self.regionName))

    self.logger.info(' Load time series...')

    config = self.config
    calendar = self.calendar

    regionMaskSuffix = config.getExpression(self.sectionName,
                                            'regionMaskSuffix')

    regionMaskFile = get_region_mask(config,
                                     '{}.geojson'.format(regionMaskSuffix))

    fcAll = read_feature_collection(regionMaskFile)

    fc = FeatureCollection()
    for feature in fcAll.features:
        if feature['properties']['name'] == self.regionName:
            fc.add_feature(feature)
            break

    baseDirectory = build_config_full_path(config, 'output',
                                           'timeSeriesSubdirectory')

    startYear = config.getint('timeSeries', 'startYear')
    endYear = config.getint('timeSeries', 'endYear')
    regionGroup = self.regionGroup
    timeSeriesName = regionGroup[0].lower() + \
        regionGroup[1:].replace(' ', '')
    inFileName = '{}/{}/{}_{:04d}-{:04d}.nc'.format(
        baseDirectory, timeSeriesName, timeSeriesName, startYear, endYear)

    dsIn = xarray.open_dataset(inFileName).isel(nRegions=self.regionIndex)

    zbounds = dsIn.zbounds.values

    controlConfig = self.controlConfig
    plotControl = controlConfig is not None
    if plotControl:
        controlRunName = controlConfig.get('runs', 'mainRunName')

        baseDirectory = build_config_full_path(controlConfig, 'output',
                                               'timeSeriesSubdirectory')

        startYear = controlConfig.getint('timeSeries', 'startYear')
        endYear = controlConfig.getint('timeSeries', 'endYear')

        inFileName = '{}/{}/{}_{:04d}-{:04d}.nc'.format(
            baseDirectory, timeSeriesName, timeSeriesName, startYear,
            endYear)
        dsRef = xarray.open_dataset(inFileName).isel(
            nRegions=self.regionIndex)

        zboundsRef = dsRef.zbounds.values

    mainRunName = config.get('runs', 'mainRunName')
    movingAverageMonths = 1

    self.logger.info(' Make plots...')

    groupLink = self.regionGroup[0].lower() + \
        self.regionGroup[1:].replace(' ', '')

    for var in self.variables:
        varName = var['name']
        mainArray = dsIn[varName]
        is3d = mainArray.attrs['is3d'] == 'True'
        if is3d:
            title = 'Volume-Mean {} in {}'.format(var['title'],
                                                  self.regionName)
        else:
            title = 'Area-Mean {} in {}'.format(var['title'],
                                                self.regionName)

        if plotControl:
            refArray = dsRef[varName]
        xLabel = 'Time (yr)'
        yLabel = '{} ({})'.format(var['title'], var['units'])

        filePrefix = '{}_{}'.format(self.prefix, varName)
        outFileName = '{}/{}.png'.format(self.plotsDirectory, filePrefix)

        fields = [mainArray]
        lineColors = ['k']
        lineWidths = [2.5]
        legendText = [mainRunName]
        if plotControl:
            fields.append(refArray)
            lineColors.append('r')
            lineWidths.append(1.2)
            legendText.append(controlRunName)

        if is3d:
            if not plotControl or numpy.all(zbounds == zboundsRef):
                title = '{} ({} < z < {} m)'.format(title, zbounds[0],
                                                    zbounds[1])
            else:
                legendText[0] = '{} ({} < z < {} m)'.format(
                    legendText[0], zbounds[0], zbounds[1])
                legendText[1] = '{} ({} < z < {} m)'.format(
                    legendText[1], zboundsRef[0], zboundsRef[1])

        fig = timeseries_analysis_plot(
            config, fields, calendar=calendar, title=title, xlabel=xLabel,
            ylabel=yLabel, movingAveragePoints=movingAverageMonths,
            lineColors=lineColors, lineWidths=lineWidths,
            legendText=legendText)

        # do this before the inset because otherwise it moves the inset
        # and cartopy doesn't play too well with tight_layout anyway
        plt.tight_layout()

        add_inset(fig, fc, width=2.0, height=2.0)

        savefig(outFileName, tight=False)

        caption = 'Regional mean of {}'.format(title)
        write_image_xml(
            config=config,
            filePrefix=filePrefix,
            componentName='Ocean',
            componentSubdirectory='ocean',
            galleryGroup='{} Time Series'.format(self.regionGroup),
            groupLink=groupLink,
            gallery=var['title'],
            thumbnailDescription=self.regionName,
            imageDescription=caption,
            imageCaption=caption)
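# Illustrative sketch (not part of the original task): the camel-case name
# used above for both the time-series subdirectory and the gallery group
# link is derived from the region group by lower-casing the first character
# and stripping spaces.  A hypothetical helper like the one below could
# replace the two duplicated expressions; the name `_camel_case_link` is an
# assumption, not an existing MPAS-Analysis function.
def _camel_case_link(regionGroup):
    """e.g. 'Antarctic Regions' -> 'antarcticRegions'"""
    return regionGroup[0].lower() + regionGroup[1:].replace(' ', '')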
def run_task(self):  # {{{
    """
    Plots time-series output of Antarctic sub-ice-shelf melt rates.
    """
    # Authors
    # -------
    # Xylar Asay-Davis, Stephen Price

    self.logger.info("\nPlotting Antarctic melt rate time series for "
                     "{}...".format(self.iceShelf))

    self.logger.info(' Load melt rate data...')

    config = self.config
    calendar = self.calendar

    iceShelfMasksFile = self.iceShelfMasksFile

    fcAll = read_feature_collection(iceShelfMasksFile)

    fc = FeatureCollection()
    for feature in fcAll.features:
        if feature['properties']['name'] == self.iceShelf:
            fc.add_feature(feature)
            break

    totalMeltFlux, meltRates = self._load_ice_shelf_fluxes(config)

    plotControl = self.controlConfig is not None
    if plotControl:
        controlRunName = self.controlConfig.get('runs', 'mainRunName')

        refTotalMeltFlux, refMeltRates = \
            self._load_ice_shelf_fluxes(self.controlConfig)

    # Load observations from multiple files and put in dictionary based
    # on shelf keyname
    observationsDirectory = build_obs_path(config, 'ocean',
                                           'meltSubdirectory')
    obsFileNameDict = {'Rignot et al. (2013)':
                       'Rignot_2013_melt_rates_20200623.csv',
                       'Rignot et al. (2013) SS':
                       'Rignot_2013_melt_rates_SS_20200623.csv'}

    obsDict = {}  # dict for storing dict of obs data
    for obsName in obsFileNameDict:
        obsFileName = '{}/{}'.format(observationsDirectory,
                                     obsFileNameDict[obsName])
        obsDict[obsName] = {}
        obsFile = csv.reader(open(obsFileName, 'r'))
        next(obsFile, None)  # skip the header line
        for line in obsFile:  # some later useful values commented out
            shelfName = line[0]
            if shelfName != self.iceShelf:
                continue

            # surveyArea = line[1]
            meltFlux = float(line[2])
            meltFluxUncertainty = float(line[3])
            meltRate = float(line[4])
            meltRateUncertainty = float(line[5])
            # actualArea = float(line[6])  # actual area here is in sq km

            # build dict of obs. keyed to filename description
            # (which will be used for plotting)
            obsDict[obsName] = {
                'meltFlux': meltFlux,
                'meltFluxUncertainty': meltFluxUncertainty,
                'meltRate': meltRate,
                'meltRateUncertainty': meltRateUncertainty}
            break
    # Note: if areas from the obs file are used, they need to be converted
    # from sq km to sq m

    mainRunName = config.get('runs', 'mainRunName')
    movingAverageMonths = config.getint('timeSeriesAntarcticMelt',
                                        'movingAverageMonths')

    outputDirectory = build_config_full_path(config, 'output',
                                             'timeseriesSubdirectory')

    make_directories(outputDirectory)

    self.logger.info(' Make plots...')

    # get obs melt flux and unc. for shelf (similar for rates)
    obsMeltFlux = []
    obsMeltFluxUnc = []
    obsMeltRate = []
    obsMeltRateUnc = []
    for obsName in obsDict:
        if len(obsDict[obsName]) > 0:
            obsMeltFlux.append(
                obsDict[obsName]['meltFlux'])
            obsMeltFluxUnc.append(
                obsDict[obsName]['meltFluxUncertainty'])
            obsMeltRate.append(
                obsDict[obsName]['meltRate'])
            obsMeltRateUnc.append(
                obsDict[obsName]['meltRateUncertainty'])
        else:
            # append NaN so this particular obs won't plot
            self.logger.warning('{} observations not available for '
                                '{}'.format(obsName, self.iceShelf))
            obsMeltFlux.append(None)
            obsMeltFluxUnc.append(None)
            obsMeltRate.append(None)
            obsMeltRateUnc.append(None)

    title = self.iceShelf.replace('_', ' ')

    xLabel = 'Time (yr)'
    yLabel = 'Melt Flux (GT/yr)'

    timeSeries = totalMeltFlux.isel(nRegions=self.regionIndex)

    filePrefix = 'melt_flux_{}'.format(self.iceShelf.replace(' ', '_'))
    outFileName = '{}/{}.png'.format(self.plotsDirectory, filePrefix)

    fields = [timeSeries]
    lineColors = ['k']
    lineWidths = [2.5]
    legendText = [mainRunName]
    if plotControl:
        fields.append(refTotalMeltFlux.isel(nRegions=self.regionIndex))
        lineColors.append('r')
        lineWidths.append(1.2)
        legendText.append(controlRunName)

    fig = timeseries_analysis_plot(config, fields, calendar=calendar,
                                   title=title, xlabel=xLabel,
                                   ylabel=yLabel,
                                   movingAveragePoints=movingAverageMonths,
                                   lineColors=lineColors,
                                   lineWidths=lineWidths,
                                   legendText=legendText,
                                   obsMean=obsMeltFlux,
                                   obsUncertainty=obsMeltFluxUnc,
                                   obsLegend=list(obsDict.keys()))

    # do this before the inset because otherwise it moves the inset
    # and cartopy doesn't play too well with tight_layout anyway
    plt.tight_layout()

    add_inset(fig, fc, width=2.0, height=2.0)

    savefig(outFileName)

    caption = 'Running Mean of Total Melt Flux under Ice ' \
        'Shelves in the {} Region'.format(title)
    write_image_xml(
        config=config,
        filePrefix=filePrefix,
        componentName='Ocean',
        componentSubdirectory='ocean',
        galleryGroup='Antarctic Melt Time Series',
        groupLink='antmelttime',
        gallery='Total Melt Flux',
        thumbnailDescription=title,
        imageDescription=caption,
        imageCaption=caption)

    xLabel = 'Time (yr)'
    yLabel = 'Melt Rate (m/yr)'

    timeSeries = meltRates.isel(nRegions=self.regionIndex)

    filePrefix = 'melt_rate_{}'.format(self.iceShelf.replace(' ', '_'))
    outFileName = '{}/{}.png'.format(self.plotsDirectory, filePrefix)

    fields = [timeSeries]
    lineColors = ['k']
    lineWidths = [2.5]
    legendText = [mainRunName]
    if plotControl:
        fields.append(refMeltRates.isel(nRegions=self.regionIndex))
        lineColors.append('r')
        lineWidths.append(1.2)
        legendText.append(controlRunName)

    if config.has_option(self.taskName, 'firstYearXTicks'):
        firstYearXTicks = config.getint(self.taskName, 'firstYearXTicks')
    else:
        firstYearXTicks = None

    if config.has_option(self.taskName, 'yearStrideXTicks'):
        yearStrideXTicks = config.getint(self.taskName, 'yearStrideXTicks')
    else:
        yearStrideXTicks = None

    fig = timeseries_analysis_plot(config, fields, calendar=calendar,
                                   title=title, xlabel=xLabel,
                                   ylabel=yLabel,
                                   movingAveragePoints=movingAverageMonths,
                                   lineColors=lineColors,
                                   lineWidths=lineWidths,
                                   legendText=legendText,
                                   firstYearXTicks=firstYearXTicks,
                                   yearStrideXTicks=yearStrideXTicks,
                                   obsMean=obsMeltRate,
                                   obsUncertainty=obsMeltRateUnc,
                                   obsLegend=list(obsDict.keys()))

    # do this before the inset because otherwise it moves the inset
    # and cartopy doesn't play too well with tight_layout anyway
    plt.tight_layout()

    add_inset(fig, fc, width=2.0, height=2.0)

    savefig(outFileName)

    caption = 'Running Mean of Area-averaged Melt Rate under Ice ' \
        'Shelves in the {} Region'.format(title)
    write_image_xml(
        config=config,
        filePrefix=filePrefix,
        componentName='Ocean',
        componentSubdirectory='ocean',
        galleryGroup='Antarctic Melt Time Series',
        groupLink='antmelttime',
        gallery='Area-averaged Melt Rate',
        thumbnailDescription=title,
        imageDescription=caption,
        imageCaption=caption)
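# Illustrative sketch (an assumption, not part of MPAS-Analysis): the Rignot
# melt-rate CSV parsed above has one row per ice shelf with columns for the
# shelf name, survey area, melt flux (Gt/yr), melt-flux uncertainty, melt
# rate (m/yr), melt-rate uncertainty and actual area (sq km).  A standalone
# reader for a single shelf might look like this; the helper name is
# hypothetical.
import csv


def read_rignot_row(fileName, shelfName):
    """Return the obs dict for one shelf, or an empty dict if absent."""
    with open(fileName, 'r', newline='') as csvFile:
        reader = csv.reader(csvFile)
        next(reader, None)  # skip the header line
        for row in reader:
            if row[0] != shelfName:
                continue
            return {'meltFlux': float(row[2]),
                    'meltFluxUncertainty': float(row[3]),
                    'meltRate': float(row[4]),
                    'meltRateUncertainty': float(row[5])}
    return {}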
def run_task(self):  # {{{
    """
    Compute vertical aggregates of the data and plot the time series
    """
    # Authors
    # -------
    # Xylar Asay-Davis, Milena Veneziani, Greg Streletz

    self.logger.info("\nPlotting depth-integrated time series of "
                     "{}...".format(self.fieldNameInTitle))

    config = self.config
    calendar = self.calendar

    mainRunName = config.get('runs', 'mainRunName')

    plotTitles = config.getExpression('regions', 'plotTitles')
    allRegionNames = config.getExpression('regions', 'regions')
    regionIndex = allRegionNames.index(self.regionName)
    regionNameInTitle = plotTitles[regionIndex]

    startDate = config.get('timeSeries', 'startDate')
    endDate = config.get('timeSeries', 'endDate')

    # Load data
    self.logger.info(' Load ocean data...')
    ds = open_mpas_dataset(fileName=self.inFileName,
                           calendar=calendar,
                           variableList=[self.mpasFieldName, 'depth'],
                           timeVariableNames=None,
                           startDate=startDate,
                           endDate=endDate)
    ds = ds.isel(nOceanRegionsTmp=regionIndex)

    depths = ds.depth.values

    divisionDepths = config.getExpression(self.sectionName, 'depths')

    # for each depth interval to plot, determine the top and bottom depth
    topDepths = [0, 0] + divisionDepths
    bottomDepths = [depths[-1]] + divisionDepths + [depths[-1]]

    legends = []
    for top, bottom in zip(topDepths, bottomDepths):
        if bottom == depths[-1]:
            legends.append('{}m-bottom'.format(top))
        else:
            legends.append('{}m-{}m'.format(top, bottom))

    # more possible symbols than we typically use
    lines = ['-', '-', '--', None, None, None, None]
    markers = [None, None, None, '+', 'o', '^', 'v']
    widths = [5, 3, 3, 3, 3, 3, 3]
    points = [None, None, None, 300, 300, 300, 300]

    color = 'k'

    xLabel = 'Time [years]'
    yLabel = self.yAxisLabel

    title = '{}, {} \n {} (black)'.format(self.fieldNameInTitle,
                                          regionNameInTitle, mainRunName)

    outFileName = '{}/{}.png'.format(self.plotsDirectory, self.filePrefix)

    timeSeries = []
    lineColors = []
    lineStyles = []
    lineMarkers = []
    lineWidths = []
    maxPoints = []
    legendText = []
    for rangeIndex in range(len(topDepths)):
        top = topDepths[rangeIndex]
        bottom = bottomDepths[rangeIndex]
        field = ds[self.mpasFieldName].where(ds.depth > top)
        field = field.where(ds.depth <= bottom)
        timeSeries.append(field.sum('nVertLevels'))

        lineColors.append(color)
        lineStyles.append(lines[rangeIndex])
        lineMarkers.append(markers[rangeIndex])
        lineWidths.append(widths[rangeIndex])
        maxPoints.append(points[rangeIndex])
        legendText.append(legends[rangeIndex])

    preprocessedReferenceRunName = config.get(
        'runs', 'preprocessedReferenceRunName')
    if preprocessedReferenceRunName != 'None':
        preprocessedInputDirectory = config.get(
            'oceanPreprocessedReference', 'baseDirectory')

        self.logger.info(' Load in preprocessed reference data...')
        preprocessedFilePrefix = config.get(self.sectionName,
                                            'preprocessedFilePrefix')
        inFilesPreprocessed = '{}/{}.{}.year*.nc'.format(
            preprocessedInputDirectory, preprocessedFilePrefix,
            preprocessedReferenceRunName)

        combine_time_series_with_ncrcat(
            inFilesPreprocessed, self.preprocessedIntermediateFileName,
            logger=self.logger)
        dsPreprocessed = open_mpas_dataset(
            fileName=self.preprocessedIntermediateFileName,
            calendar=calendar,
            timeVariableNames='xtime')

        yearStart = days_to_datetime(ds.Time.min(), calendar=calendar).year
        yearEnd = days_to_datetime(ds.Time.max(), calendar=calendar).year
        timeStart = date_to_days(year=yearStart, month=1, day=1,
                                 calendar=calendar)
        timeEnd = date_to_days(year=yearEnd, month=12, day=31,
                               calendar=calendar)

        yearEndPreprocessed = days_to_datetime(dsPreprocessed.Time.max(),
                                               calendar=calendar).year
        if yearStart <= yearEndPreprocessed:
            dsPreprocessed = dsPreprocessed.sel(
                Time=slice(timeStart, timeEnd))
        else:
            self.logger.warning('Warning: Preprocessed time series ends '
                                'before the timeSeries startYear and will '
                                'not be plotted.')
            preprocessedReferenceRunName = 'None'

        # rolling mean seems to have trouble with dask data sets so we
        # write out the data set and read it back as a single-file data set
        # (without dask)
        dsPreprocessed = dsPreprocessed.drop('xtime')
        write_netcdf(dsPreprocessed, self.preprocessedFileName)
        dsPreprocessed = xarray.open_dataset(self.preprocessedFileName)

    if preprocessedReferenceRunName != 'None':
        color = 'purple'
        title = '{} \n {} (purple)'.format(title,
                                           preprocessedReferenceRunName)
        preprocessedFieldPrefix = config.get(self.sectionName,
                                             'preprocessedFieldPrefix')

        movingAveragePoints = config.getint(self.sectionName,
                                            'movingAveragePoints')

        suffixes = ['tot'] + ['{}m'.format(depth) for depth in
                              divisionDepths] + ['btm']

        # these preprocessed data are already anomalies
        dsPreprocessed = compute_moving_avg(dsPreprocessed,
                                            movingAveragePoints)
        for rangeIndex in range(len(suffixes)):
            variableName = '{}_{}'.format(preprocessedFieldPrefix,
                                          suffixes[rangeIndex])
            if variableName in list(dsPreprocessed.data_vars.keys()):
                timeSeries.append(dsPreprocessed[variableName])
            else:
                self.logger.warning('Warning: Preprocessed variable {} '
                                    'not found. Skipping.'.format(
                                        variableName))
                timeSeries.append(None)

            lineColors.append(color)
            lineStyles.append(lines[rangeIndex])
            lineMarkers.append(markers[rangeIndex])
            lineWidths.append(widths[rangeIndex])
            maxPoints.append(points[rangeIndex])
            legendText.append(None)

    if self.controlConfig is not None:

        controlRunName = self.controlConfig.get('runs', 'mainRunName')

        title = '{} \n {} (red)'.format(title, controlRunName)

        self.logger.info(' Load ocean data from control run...')
        controlStartYear = self.controlConfig.getint('timeSeries',
                                                     'startYear')
        controlEndYear = self.controlConfig.getint('timeSeries', 'endYear')
        controlStartDate = '{:04d}-01-01_00:00:00'.format(controlStartYear)
        controlEndDate = '{:04d}-12-31_23:59:59'.format(controlEndYear)
        dsRef = open_mpas_dataset(
            fileName=self.refFileName,
            calendar=calendar,
            variableList=[self.mpasFieldName, 'depth'],
            timeVariableNames=None,
            startDate=controlStartDate,
            endDate=controlEndDate)
        dsRef = dsRef.isel(nOceanRegionsTmp=regionIndex)

        color = 'r'

        for rangeIndex in range(len(topDepths)):
            top = topDepths[rangeIndex]
            bottom = bottomDepths[rangeIndex]
            field = dsRef[self.mpasFieldName].where(dsRef.depth > top)
            field = field.where(dsRef.depth <= bottom)
            timeSeries.append(field.sum('nVertLevels'))

            lineColors.append(color)
            lineStyles.append(lines[rangeIndex])
            lineMarkers.append(markers[rangeIndex])
            lineWidths.append(widths[rangeIndex])
            maxPoints.append(points[rangeIndex])
            legendText.append(None)

    if config.has_option(self.taskName, 'firstYearXTicks'):
        firstYearXTicks = config.getint(self.taskName, 'firstYearXTicks')
    else:
        firstYearXTicks = None

    if config.has_option(self.taskName, 'yearStrideXTicks'):
        yearStrideXTicks = config.getint(self.taskName, 'yearStrideXTicks')
    else:
        yearStrideXTicks = None

    timeseries_analysis_plot(config=config, dsvalues=timeSeries,
                             calendar=calendar, title=title,
                             xlabel=xLabel, ylabel=yLabel,
                             movingAveragePoints=None,
                             lineColors=lineColors,
                             lineStyles=lineStyles,
                             markers=lineMarkers,
                             lineWidths=lineWidths,
                             legendText=legendText,
                             maxPoints=maxPoints,
                             firstYearXTicks=firstYearXTicks,
                             yearStrideXTicks=yearStrideXTicks)

    savefig(outFileName)

    write_image_xml(
        config=config,
        filePrefix=self.filePrefix,
        componentName='Ocean',
        componentSubdirectory='ocean',
        galleryGroup=self.galleryGroup,
        groupLink=self.groupLink,
        gallery=self.galleryName,
        thumbnailDescription='{} {}'.format(self.regionName,
                                            self.thumbnailSuffix),
        imageDescription=self.imageCaption,
        imageCaption=self.imageCaption)
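# Illustrative sketch (an assumption, not part of the original task): the
# depth-bin aggregation above masks the field to a depth range and then sums
# over the vertical dimension.  The tiny synthetic data set below shows the
# same where/sum pattern in isolation.
import numpy
import xarray

ds_demo = xarray.Dataset(
    {'field': (('Time', 'nVertLevels'), numpy.ones((3, 4))),
     'depth': (('nVertLevels',), numpy.array([5., 50., 150., 400.]))})
top, bottom = 0, 100
masked = ds_demo['field'].where(ds_demo.depth > top)
masked = masked.where(ds_demo.depth <= bottom)
# sums only the two levels with 0 < depth <= 100 m, giving 2.0 at each time
integrated = masked.sum('nVertLevels')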
def run_task(self):  # {{{
    """
    Performs analysis of time series of sea-ice properties.
    """
    # Authors
    # -------
    # Xylar Asay-Davis, Milena Veneziani

    self.logger.info("\nPlotting sea-ice area and volume time series...")

    config = self.config
    calendar = self.calendar

    sectionName = self.taskName

    plotTitles = {'iceArea': 'Sea-ice area',
                  'iceVolume': 'Sea-ice volume',
                  'iceThickness': 'Sea-ice mean thickness'}

    units = {'iceArea': '[km$^2$]',
             'iceVolume': '[10$^3$ km$^3$]',
             'iceThickness': '[m]'}

    obsFileNames = {
        'iceArea': {'NH': build_obs_path(
            config, 'seaIce', relativePathOption='areaNH',
            relativePathSection=sectionName),
            'SH': build_obs_path(
                config, 'seaIce', relativePathOption='areaSH',
                relativePathSection=sectionName)},
        'iceVolume': {'NH': build_obs_path(
            config, 'seaIce', relativePathOption='volNH',
            relativePathSection=sectionName),
            'SH': build_obs_path(
                config, 'seaIce', relativePathOption='volSH',
                relativePathSection=sectionName)}}

    # Some plotting rules
    titleFontSize = config.get('timeSeriesSeaIceAreaVol', 'titleFontSize')

    mainRunName = config.get('runs', 'mainRunName')
    preprocessedReferenceRunName = \
        config.get('runs', 'preprocessedReferenceRunName')
    preprocessedReferenceDirectory = \
        config.get('seaIcePreprocessedReference', 'baseDirectory')

    compareWithObservations = config.getboolean('timeSeriesSeaIceAreaVol',
                                                'compareWithObservations')

    movingAveragePoints = config.getint('timeSeriesSeaIceAreaVol',
                                        'movingAveragePoints')

    polarPlot = config.getboolean('timeSeriesSeaIceAreaVol', 'polarPlot')

    outputDirectory = build_config_full_path(config, 'output',
                                             'timeseriesSubdirectory')

    make_directories(outputDirectory)

    self.logger.info(' Load sea-ice data...')
    # Load mesh

    dsTimeSeries = self._compute_area_vol()

    yearStart = days_to_datetime(dsTimeSeries['NH'].Time.min(),
                                 calendar=calendar).year
    yearEnd = days_to_datetime(dsTimeSeries['NH'].Time.max(),
                               calendar=calendar).year
    timeStart = date_to_days(year=yearStart, month=1, day=1,
                             calendar=calendar)
    timeEnd = date_to_days(year=yearEnd, month=12, day=31,
                           calendar=calendar)

    if preprocessedReferenceRunName != 'None':
        # determine if we're beyond the end of the preprocessed data
        # (and go ahead and cache the data set while we're checking)
        outFolder = '{}/preprocessed'.format(outputDirectory)
        make_directories(outFolder)
        inFilesPreprocessed = '{}/icevol.{}.year*.nc'.format(
            preprocessedReferenceDirectory, preprocessedReferenceRunName)
        outFileName = '{}/iceVolume.nc'.format(outFolder)
        combine_time_series_with_ncrcat(inFilesPreprocessed,
                                        outFileName, logger=self.logger)
        dsPreprocessed = open_mpas_dataset(fileName=outFileName,
                                           calendar=calendar,
                                           timeVariableNames='xtime')
        preprocessedYearEnd = days_to_datetime(dsPreprocessed.Time.max(),
                                               calendar=calendar).year
        if yearStart <= preprocessedYearEnd:
            dsPreprocessedTimeSlice = \
                dsPreprocessed.sel(Time=slice(timeStart, timeEnd))
        else:
            self.logger.warning('Preprocessed time series ends before the '
                                'timeSeries startYear and will not be '
                                'plotted.')
            preprocessedReferenceRunName = 'None'

    if self.controlConfig is not None:

        dsTimeSeriesRef = {}
        baseDirectory = build_config_full_path(
            self.controlConfig, 'output', 'timeSeriesSubdirectory')

        controlRunName = self.controlConfig.get('runs', 'mainRunName')

        for hemisphere in ['NH', 'SH']:
            inFileName = '{}/seaIceAreaVol{}.nc'.format(baseDirectory,
                                                        hemisphere)

            dsTimeSeriesRef[hemisphere] = xr.open_dataset(inFileName)

    norm = {'iceArea': 1e-6,  # m^2 to km^2
            'iceVolume': 1e-12,  # m^3 to 10^3 km^3
            'iceThickness': 1.}

    xLabel = 'Time [years]'

    galleryGroup = 'Time Series'
    groupLink = 'timeseries'

    obs = {}
    preprocessed = {}
    figureNameStd = {}
    figureNamePolar = {}
    title = {}
    plotVars = {}
    obsLegend = {}
    plotVarsRef = {}

    for hemisphere in ['NH', 'SH']:

        self.logger.info(' Make {} plots...'.format(hemisphere))

        for variableName in ['iceArea', 'iceVolume']:
            key = (hemisphere, variableName)

            # apply the norm to each variable
            plotVars[key] = (norm[variableName] *
                             dsTimeSeries[hemisphere][variableName])

            if self.controlConfig is not None:
                plotVarsRef[key] = norm[variableName] * \
                    dsTimeSeriesRef[hemisphere][variableName]

            prefix = '{}/{}{}_{}'.format(self.plotsDirectory,
                                         variableName,
                                         hemisphere,
                                         mainRunName)

            figureNameStd[key] = '{}.png'.format(prefix)
            figureNamePolar[key] = '{}_polar.png'.format(prefix)

            title[key] = '{} ({})'.format(plotTitles[variableName],
                                          hemisphere)

        if compareWithObservations:
            key = (hemisphere, 'iceArea')
            obsLegend[key] = 'SSM/I observations, annual cycle '
            if hemisphere == 'NH':
                key = (hemisphere, 'iceVolume')
                obsLegend[key] = 'PIOMAS, annual cycle (blue)'

        if preprocessedReferenceRunName != 'None':
            for variableName in ['iceArea', 'iceVolume']:
                key = (hemisphere, variableName)

        if compareWithObservations:

            outFolder = '{}/obs'.format(outputDirectory)
            make_directories(outFolder)
            outFileName = '{}/iceArea{}.nc'.format(outFolder, hemisphere)

            combine_time_series_with_ncrcat(
                obsFileNames['iceArea'][hemisphere],
                outFileName, logger=self.logger)
            dsObs = open_mpas_dataset(fileName=outFileName,
                                      calendar=calendar,
                                      timeVariableNames='xtime')
            key = (hemisphere, 'iceArea')
            obs[key] = self._replicate_cycle(plotVars[key], dsObs.IceArea,
                                             calendar)

            key = (hemisphere, 'iceVolume')
            if hemisphere == 'NH':
                outFileName = '{}/iceVolume{}.nc'.format(outFolder,
                                                         hemisphere)
                combine_time_series_with_ncrcat(
                    obsFileNames['iceVolume'][hemisphere],
                    outFileName, logger=self.logger)
                dsObs = open_mpas_dataset(fileName=outFileName,
                                          calendar=calendar,
                                          timeVariableNames='xtime')
                obs[key] = self._replicate_cycle(plotVars[key],
                                                 dsObs.IceVol, calendar)
            else:
                obs[key] = None

        if preprocessedReferenceRunName != 'None':
            outFolder = '{}/preprocessed'.format(outputDirectory)
            inFilesPreprocessed = '{}/icearea.{}.year*.nc'.format(
                preprocessedReferenceDirectory,
                preprocessedReferenceRunName)

            outFileName = '{}/iceArea.nc'.format(outFolder)

            combine_time_series_with_ncrcat(inFilesPreprocessed,
                                            outFileName,
                                            logger=self.logger)
            dsPreprocessed = open_mpas_dataset(fileName=outFileName,
                                               calendar=calendar,
                                               timeVariableNames='xtime')
            dsPreprocessedTimeSlice = dsPreprocessed.sel(
                Time=slice(timeStart, timeEnd))
            key = (hemisphere, 'iceArea')
            preprocessed[key] = dsPreprocessedTimeSlice[
                'icearea_{}'.format(hemisphere.lower())]

            inFilesPreprocessed = '{}/icevol.{}.year*.nc'.format(
                preprocessedReferenceDirectory,
                preprocessedReferenceRunName)
            outFileName = '{}/iceVolume.nc'.format(outFolder)

            combine_time_series_with_ncrcat(inFilesPreprocessed,
                                            outFileName,
                                            logger=self.logger)
            dsPreprocessed = open_mpas_dataset(fileName=outFileName,
                                               calendar=calendar,
                                               timeVariableNames='xtime')
            dsPreprocessedTimeSlice = dsPreprocessed.sel(
                Time=slice(timeStart, timeEnd))
            key = (hemisphere, 'iceVolume')
            preprocessed[key] = dsPreprocessedTimeSlice[
                'icevolume_{}'.format(hemisphere.lower())]

        for variableName in ['iceArea', 'iceVolume']:
            key = (hemisphere, variableName)
            dsvalues = [plotVars[key]]
            legendText = [mainRunName]
            lineColors = ['k']
            lineWidths = [3]
            if compareWithObservations and key in obsLegend.keys():
                dsvalues.append(obs[key])
                legendText.append(obsLegend[key])
                lineColors.append('b')
                lineWidths.append(1.2)
            if preprocessedReferenceRunName != 'None':
                dsvalues.append(preprocessed[key])
                legendText.append(preprocessedReferenceRunName)
                lineColors.append('purple')
                lineWidths.append(1.2)

            if self.controlConfig is not None:
                dsvalues.append(plotVarsRef[key])
                legendText.append(controlRunName)
                lineColors.append('r')
                lineWidths.append(1.2)

            if config.has_option(sectionName, 'firstYearXTicks'):
                firstYearXTicks = config.getint(sectionName,
                                                'firstYearXTicks')
            else:
                firstYearXTicks = None

            if config.has_option(sectionName, 'yearStrideXTicks'):
                yearStrideXTicks = config.getint(sectionName,
                                                 'yearStrideXTicks')
            else:
                yearStrideXTicks = None

            # separate plots for northern and southern hemispheres
            timeseries_analysis_plot(config, dsvalues,
                                     movingAveragePoints,
                                     title[key], xLabel,
                                     units[variableName],
                                     calendar=calendar,
                                     lineColors=lineColors,
                                     lineWidths=lineWidths,
                                     legendText=legendText,
                                     titleFontSize=titleFontSize,
                                     firstYearXTicks=firstYearXTicks,
                                     yearStrideXTicks=yearStrideXTicks)

            savefig(figureNameStd[key])

            filePrefix = '{}{}_{}'.format(variableName,
                                          hemisphere,
                                          mainRunName)
            thumbnailDescription = '{} {}'.format(
                hemisphere, plotTitles[variableName])
            caption = 'Running mean of {}'.format(thumbnailDescription)
            write_image_xml(
                config,
                filePrefix,
                componentName='Sea Ice',
                componentSubdirectory='sea_ice',
                galleryGroup=galleryGroup,
                groupLink=groupLink,
                thumbnailDescription=thumbnailDescription,
                imageDescription=caption,
                imageCaption=caption)

            if polarPlot:
                timeseries_analysis_plot_polar(
                    config,
                    dsvalues,
                    movingAveragePoints,
                    title[key],
                    lineColors=lineColors,
                    lineWidths=lineWidths,
                    legendText=legendText,
                    titleFontSize=titleFontSize)

                savefig(figureNamePolar[key])

                filePrefix = '{}{}_{}_polar'.format(variableName,
                                                    hemisphere,
                                                    mainRunName)
                write_image_xml(
                    config,
                    filePrefix,
                    componentName='Sea Ice',
                    componentSubdirectory='sea_ice',
                    galleryGroup=galleryGroup,
                    groupLink=groupLink,
                    thumbnailDescription=thumbnailDescription,
                    imageDescription=caption,
                    imageCaption=caption)
def run_task(self):  # {{{
    """
    Plots time-series output of transport through transects.
    """
    # Authors
    # -------
    # Xylar Asay-Davis, Stephen Price

    self.logger.info("\nPlotting time series of transport through "
                     "{}...".format(self.transect))

    self.logger.info(' Load transport data...')

    obsDict = {'Drake Passage': [120, 175],
               'Tasmania-Ant': [147, 167],
               'Africa-Ant': None,
               'Antilles Inflow': [-23.1, -13.7],
               'Mona Passage': [-3.8, -1.4],
               'Windward Passage': [-7.2, -6.8],
               'Florida-Cuba': [30, 33],
               'Florida-Bahamas': [30, 33],
               'Indonesian Throughflow': [-21, -11],
               'Agulhas': [-90, -50],
               'Mozambique Channel': [-20, -8],
               'Bering Strait': [0.6, 1.0],
               'Lancaster Sound': [-1.0, -0.5],
               'Fram Strait': [-4.7, 0.7],
               'Davis Strait': [-1.6, -3.6],
               'Barents Sea Opening': [1.4, 2.6],
               'Nares Strait': [-1.8, 0.2]}

    config = self.config
    calendar = self.calendar

    fcAll = read_feature_collection(self.transportTransectFileName)

    fc = FeatureCollection()
    for feature in fcAll.features:
        if feature['properties']['name'] == self.transect:
            fc.add_feature(feature)
            break

    transport, trans_mean, trans_std = self._load_transport(config)

    if self.transect in obsDict:
        bounds = obsDict[self.transect]
    else:
        bounds = None

    plotControl = self.controlConfig is not None

    mainRunName = config.get('runs', 'mainRunName')
    movingAverageMonths = config.getint('timeSeriesTransport',
                                        'movingAverageMonths')

    self.logger.info(' Plotting...')

    transectName = self.transect.replace('_', ' ')
    title = transectName
    thumbnailDescription = transectName

    xLabel = 'Time (yr)'
    yLabel = 'Transport (Sv)'

    filePrefix = 'transport_{}'.format(self.transect.replace(' ', '_'))
    outFileName = '{}/{}.png'.format(self.plotsDirectory, filePrefix)

    fields = [transport]
    lineColors = ['k']
    lineWidths = [2.5]
    meanString = r'mean={:.2f} $\pm$ {:.2f}'.format(trans_mean, trans_std)
    if plotControl:
        controlRunName = self.controlConfig.get('runs', 'mainRunName')
        ref_transport, ref_mean, ref_std = \
            self._load_transport(self.controlConfig)
        refMeanString = r'mean={:.2f} $\pm$ {:.2f}'.format(ref_mean,
                                                           ref_std)
        fields.append(ref_transport)
        lineColors.append('r')
        lineWidths.append(1.2)
        legendText = ['{} ({})'.format(mainRunName, meanString),
                      '{} ({})'.format(controlRunName, refMeanString)]
    else:
        legendText = [mainRunName]
        title = '{} ({})'.format(title, meanString)

    if config.has_option(self.taskName, 'firstYearXTicks'):
        firstYearXTicks = config.getint(self.taskName, 'firstYearXTicks')
    else:
        firstYearXTicks = None

    if config.has_option(self.taskName, 'yearStrideXTicks'):
        yearStrideXTicks = config.getint(self.taskName, 'yearStrideXTicks')
    else:
        yearStrideXTicks = None

    fig = timeseries_analysis_plot(config, fields, calendar=calendar,
                                   title=title, xlabel=xLabel,
                                   ylabel=yLabel,
                                   movingAveragePoints=movingAverageMonths,
                                   lineColors=lineColors,
                                   lineWidths=lineWidths,
                                   legendText=legendText,
                                   firstYearXTicks=firstYearXTicks,
                                   yearStrideXTicks=yearStrideXTicks)

    if bounds is not None:
        t = transport.Time.values
        plt.gca().fill_between(t, bounds[0] * numpy.ones_like(t),
                               bounds[1] * numpy.ones_like(t), alpha=0.3,
                               label='observations')
        plt.legend(loc='lower left')

    # do this before the inset because otherwise it moves the inset
    # and cartopy doesn't play too well with tight_layout anyway
    plt.tight_layout()

    add_inset(fig, fc, width=2.0, height=2.0)

    savefig(outFileName)

    caption = 'Transport through the {} Transect'.format(transectName)
    write_image_xml(
        config=config,
        filePrefix=filePrefix,
        componentName='Ocean',
        componentSubdirectory='ocean',
        galleryGroup='Transport Time Series',
        groupLink='transporttime',
        thumbnailDescription=thumbnailDescription,
        imageDescription=caption,
        imageCaption=caption)
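# Illustrative sketch (an assumption): several of the tasks above repeat the
# same lookup of optional x-tick settings.  A small helper like this one
# could factor that out; `_get_xtick_options` is a hypothetical name, and
# `config` is assumed to provide the ConfigParser-style has_option()/getint()
# interface used throughout these tasks.
def _get_xtick_options(config, section):
    """Return (firstYearXTicks, yearStrideXTicks), each None if unset."""
    firstYearXTicks = None
    yearStrideXTicks = None
    if config.has_option(section, 'firstYearXTicks'):
        firstYearXTicks = config.getint(section, 'firstYearXTicks')
    if config.has_option(section, 'yearStrideXTicks'):
        yearStrideXTicks = config.getint(section, 'yearStrideXTicks')
    return firstYearXTicks, yearStrideXTicks

# usage sketch:
# firstYearXTicks, yearStrideXTicks = _get_xtick_options(config,
#                                                        self.taskName)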
def run_task(self):  # {{{
    """
    Performs analysis of the time-series output of sea-surface
    temperature (SST).
    """
    # Authors
    # -------
    # Xylar Asay-Davis, Milena Veneziani

    self.logger.info("\nPlotting SST time series...")

    self.logger.info(' Load SST data...')

    config = self.config
    calendar = self.calendar

    mainRunName = config.get('runs', 'mainRunName')
    preprocessedReferenceRunName = \
        config.get('runs', 'preprocessedReferenceRunName')
    preprocessedInputDirectory = config.get('oceanPreprocessedReference',
                                            'baseDirectory')

    movingAveragePoints = config.getint('timeSeriesSST',
                                        'movingAveragePoints')

    regions = config.getExpression('regions', 'regions')
    plotTitles = config.getExpression('regions', 'plotTitles')
    regionsToPlot = config.getExpression('timeSeriesSST', 'regions')

    regionIndicesToPlot = [regions.index(region) for region in
                           regionsToPlot]

    outputDirectory = build_config_full_path(config, 'output',
                                             'timeseriesSubdirectory')

    make_directories(outputDirectory)

    dsSST = open_mpas_dataset(fileName=self.inputFile,
                              calendar=calendar,
                              variableList=self.variableList,
                              startDate=self.startDate,
                              endDate=self.endDate)

    yearStart = days_to_datetime(dsSST.Time.min(), calendar=calendar).year
    yearEnd = days_to_datetime(dsSST.Time.max(), calendar=calendar).year
    timeStart = date_to_days(year=yearStart, month=1, day=1,
                             calendar=calendar)
    timeEnd = date_to_days(year=yearEnd, month=12, day=31,
                           calendar=calendar)

    if self.controlConfig is not None:
        baseDirectory = build_config_full_path(self.controlConfig,
                                               'output',
                                               'timeSeriesSubdirectory')

        controlFileName = '{}/{}.nc'.format(
            baseDirectory, self.mpasTimeSeriesTask.fullTaskName)

        controlStartYear = self.controlConfig.getint('timeSeries',
                                                     'startYear')
        controlEndYear = self.controlConfig.getint('timeSeries', 'endYear')
        controlStartDate = '{:04d}-01-01_00:00:00'.format(controlStartYear)
        controlEndDate = '{:04d}-12-31_23:59:59'.format(controlEndYear)

        dsRefSST = open_mpas_dataset(fileName=controlFileName,
                                     calendar=calendar,
                                     variableList=self.variableList,
                                     startDate=controlStartDate,
                                     endDate=controlEndDate)
    else:
        dsRefSST = None

    if preprocessedReferenceRunName != 'None':
        self.logger.info(' Load in SST for a preprocessed reference '
                         'run...')
        inFilesPreprocessed = '{}/SST.{}.year*.nc'.format(
            preprocessedInputDirectory, preprocessedReferenceRunName)

        outFolder = '{}/preprocessed'.format(outputDirectory)
        make_directories(outFolder)
        outFileName = '{}/sst.nc'.format(outFolder)

        combine_time_series_with_ncrcat(inFilesPreprocessed,
                                        outFileName, logger=self.logger)
        dsPreprocessed = open_mpas_dataset(fileName=outFileName,
                                           calendar=calendar,
                                           timeVariableNames='xtime')

        yearEndPreprocessed = days_to_datetime(dsPreprocessed.Time.max(),
                                               calendar=calendar).year
        if yearStart <= yearEndPreprocessed:
            dsPreprocessedTimeSlice = \
                dsPreprocessed.sel(Time=slice(timeStart, timeEnd))
        else:
            self.logger.warning('Preprocessed time series ends before the '
                                'timeSeries startYear and will not be '
                                'plotted.')
            preprocessedReferenceRunName = 'None'

    self.logger.info(' Make plots...')
    for regionIndex in regionIndicesToPlot:
        region = regions[regionIndex]

        title = '{} SST'.format(plotTitles[regionIndex])
        xLabel = 'Time [years]'
        yLabel = r'[$\degree$C]'

        varName = self.variableList[0]
        SST = dsSST[varName].isel(nOceanRegions=regionIndex)

        filePrefix = self.filePrefixes[region]

        outFileName = '{}/{}.png'.format(self.plotsDirectory, filePrefix)

        lineColors = ['k']
        lineWidths = [3]

        fields = [SST]
        legendText = [mainRunName]

        if dsRefSST is not None:
            refSST = dsRefSST[varName].isel(nOceanRegions=regionIndex)
            fields.append(refSST)
            lineColors.append('r')
            lineWidths.append(1.5)
            controlRunName = self.controlConfig.get('runs', 'mainRunName')
            legendText.append(controlRunName)

        if preprocessedReferenceRunName != 'None':
            SST_v0 = dsPreprocessedTimeSlice.SST
            fields.append(SST_v0)
            lineColors.append('purple')
            lineWidths.append(1.5)
            legendText.append(preprocessedReferenceRunName)

        if config.has_option(self.taskName, 'firstYearXTicks'):
            firstYearXTicks = config.getint(self.taskName,
                                            'firstYearXTicks')
        else:
            firstYearXTicks = None

        if config.has_option(self.taskName, 'yearStrideXTicks'):
            yearStrideXTicks = config.getint(self.taskName,
                                             'yearStrideXTicks')
        else:
            yearStrideXTicks = None

        timeseries_analysis_plot(config, fields, movingAveragePoints,
                                 title, xLabel, yLabel,
                                 calendar=calendar,
                                 lineColors=lineColors,
                                 lineWidths=lineWidths,
                                 legendText=legendText,
                                 firstYearXTicks=firstYearXTicks,
                                 yearStrideXTicks=yearStrideXTicks)

        savefig(outFileName)

        caption = 'Running Mean of {} Sea Surface Temperature'.format(
            region)
        write_image_xml(
            config=config,
            filePrefix=filePrefix,
            componentName='Ocean',
            componentSubdirectory='ocean',
            galleryGroup='Time Series',
            groupLink='timeseries',
            thumbnailDescription='{} SST'.format(region),
            imageDescription=caption,
            imageCaption=caption)