Example #1
""" IMPORTS """
import pickle
import sys

import matplotlib.pyplot as plt
import xarray as xr

from core import TRENDY_flux


def main():
    input_file = sys.argv[1]
    window_size = int(sys.argv[2])

    model_name = input_file.split("/")[-2]
    output_folder = ("./../../../../output/TRENDY/model_evaluation/"
                     f"{model_name}/")

    assert input_file.endswith("year.nc")

    ds = xr.open_dataset(input_file)
    df = TRENDY_flux.ModelEvaluation(ds)
    """ Plots."""
    TS_OUTPUT = f"{output_folder}regress_timeseries_to_GCP"
    REGRESS_OUTPUT = (f"{output_folder}"
                      f"regress_cascading_window{window_size}"
                      f"_trend_to_GCP")
    TREND_OUTPUT = f"{output_folder}compare_trend_to_GCP"

    df.regress_timeseries_to_GCP("both")
    plt.savefig(TS_OUTPUT + ".png")

    plt.clf()
    linreg = df.regress_cascading_window_trend_to_GCP(window_size, "time",
                                                      "both")
    plt.savefig(REGRESS_OUTPUT + "_time.png")
    with open(REGRESS_OUTPUT + "_time.pik", "wb") as fpik:
        pickle.dump(linreg, fpik)

    plt.clf()
    linreg = df.regress_cascading_window_trend_to_GCP(window_size, "CO2",
                                                      "both")
    plt.savefig(REGRESS_OUTPUT + "_CO2.png")
    with open(REGRESS_OUTPUT + "_CO2.pik", "wb") as fpik:
        pickle.dump(linreg, fpik)

    plt.clf()
    results = df.compare_trend_to_GCP()
    plt.savefig(TREND_OUTPUT + ".png")
    with open(TREND_OUTPUT + ".pik", "wb") as fpik:
        pickle.dump(results, fpik)
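
# Entry point and example invocation: this is an assumption about how the
# script is run (the guard is not shown in the original). It expects two
# positional arguments, the path to a "<model>/year.nc" file and an integer
# window size, e.g.
#   python <this_script>.py <path/to/model_dir/year.nc> <window_size>
if __name__ == "__main__":
    main()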
Example #2
""" IMPORTS """
import os
import sys
from core import TRENDY_flux as TRENDYf
from core import GCP_flux as GCPf

from importlib import reload
reload(TRENDYf)

import numpy as np
import xarray as xr
import pandas as pd
from scipy import stats
""" INPUTS """
MAIN_DIR = './../../../'
fname = (MAIN_DIR +
         "output/TRENDY/spatial/output_all/LPJ-GUESS_S3_nbp/year.nc")
""" EXECUTION """
ds = xr.open_dataset(fname)
df = TRENDYf.ModelEvaluation(ds)

df.regress_timeseries_to_GCP(plot="both")

df.regress_cascading_window_trend_to_GCP(25, plot=None, indep="time")

df.regress_cascading_window_trend_to_GCP(25, plot=None, indep="CO2")

df.compare_trend_to_GCP(True)

df.autocorrelation_plot('South_Land')
Example #3
"""
""" IMPORTS """
import xarray as xr
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt

import sys
from core import TRENDY_flux as TRENDYf

from importlib import reload

reload(TRENDYf)
""" INPUTS """
model = "CLASS-CTEM"
sim = "S1"
fname = f"./../../../data/TRENDY/models/{model}/{sim}/{model}_{sim}_nbp.nc"
""" FUNCTIONS """
""" EXECUTION """
df = TRENDYf.SpatialAgg(fname)

df.data

df.time_range(slice_obj=True)

res = df.regional_cut((30, 60), (100, 120), '1900', '1905')

df.latitudinal_splits(lat_split=40, start_time='1900', end_time='1908')

var = list(df.data.var())[0]
Example #4
""" Some TRENDY input datasets have cfdatetime as time.
This program contains the developments to convert them into np.datetime64 before
executing spatial output.
"""
""" IMPORTS """
import xarray as xr
import numpy as np
from datetime import datetime
import pandas as pd

import sys
from core import TRENDY_flux

from importlib import reload
reload(TRENDY_flux)
""" INPUTS """
model = "LPJ-GUESS"
sim = "S1"
fname = f"./../../../data/TRENDY/models/{model}/{sim}/{model}_{sim}_nbp.nc"
""" EXECUTION"""
df = xr.open_dataset(fname)
ds = TRENDY_flux.SpatialAgg(fname)

ds.latitudinal_splits()
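
# Minimal sketch of the conversion described in the module docstring. This is
# an assumption about the intended approach, not the implementation inside
# TRENDY_flux.SpatialAgg: it only relies on xarray decoding the time axis as a
# CFTimeIndex and on CFTimeIndex.to_datetimeindex() mapping the calendar onto
# np.datetime64 values.
raw = xr.open_dataset(fname, use_cftime=True)
if isinstance(raw.indexes["time"], xr.CFTimeIndex):
    raw["time"] = raw.indexes["time"].to_datetimeindex()
raw.time.dtype  # now datetime64[ns] rather than cftime objects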
Example #5
import xarray as xr
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd

import sys
from core import TRENDY_flux as TRENDYf

from importlib import reload
reload(TRENDYf)

fname = f"./../../../output/TRENDY/spatial/mean_all/S3/year.nc"

ds = xr.open_dataset(fname)

df = TRENDYf.Analysis(ds)

df.plot_timeseries("South_Land", slice("1770", "1792"))

df.cascading_window_trend(indep="CO2",
                          plot=True,
                          window_size=20,
                          include_pearson=False,
                          include_linreg=True)

df.psd("South_Land", fs=12, plot=True)

x = df.deseasonalise("Earth_Land")

plt.plot(df.data.Earth_Land.values)
plt.plot(x)
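
# Cosmetic follow-up (an assumption about how the comparison above is meant to
# be read): label the two curves so the raw Earth_Land flux can be told apart
# from the deseasonalised series returned by df.deseasonalise().
plt.legend(["Earth_Land (raw)", "Earth_Land (deseasonalised)"])
plt.title("Effect of deseasonalise on Earth_Land")
plt.show()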
Example #6
from core import inv_flux as invf
from core import TEMP
from core import TRENDY_flux as TRENDYf

from importlib import reload
reload(TEMP)
""" INPUTS """
TEMPdir = "./../../../data/temp/crudata/"
fCRUTEM = TEMPdir + "CRUTEM.4.6.0.0.anomalies.nc"
fHadSST = TEMPdir + "HadSST.3.1.1.0.median.nc"
fHadCRUT = TEMPdir + "HadCRUT.4.6.0.0.median.nc"
TEMPDF = TEMP.SpatialAve(fHadCRUT)

invfDF = invf.SpatialAgg(
    "./../../../data/inversions/fco2_Rayner-C13-2018_June2018-ext3_1992-2012_monthlymean_XYT.nc"
)
TRENDYfDF = TRENDYf.SpatialAgg(
    "./../../../data/TRENDY/models/CLASS-CTEM/S1/CLASS-CTEM_S1_nbp.nc")

fHadCRUT.split("/")[-1].split(".")[0]
""" DEVS """
dfHadSST = TEMP.SpatialAve(fHadSST)
dfHadCRUT = TEMP.SpatialAve(fHadCRUT)
dfCRUTEM = TEMP.SpatialAve(fCRUTEM)


def array_values(df, time="1959-12"):
    return df.data[df.var].sel(time=time).values.squeeze()


av_HadSST = array_values(dfHadSST)
av_HadCRUT = array_values(dfHadCRUT)
av_CRUTEM = array_values(dfCRUTEM)
Example #7
""" IMPORTS """
import numpy as np
import xarray as xr

from core import TRENDY_flux as TRENDYf
""" INPUTS """
CLASSfname = './../../../data/TRENDY/models/CLASS-CTEM/S1/CLASS-CTEM_S1_nbp.nc'
JSBACHfname = './../../../data/TRENDY/models/JSBACH/S1/JSBACH_S1_nbp.nc'
OCNfname = './../../../data/TRENDY/models/OCN/S1/OCN_S1_nbp.nc'
LPJfname = './../../../data/TRENDY/models/LPJ-GUESS/S1/LPJ-GUESS_S1_nbp.nc'
CABLEfname = './../../../data/TRENDY/models/CABLE-POP/S1/CABLE-POP_S1_nbp.nc'
""" EXECUTION """
df_CLASS = xr.open_dataset(CLASSfname)
df_JSBACH = xr.open_dataset(JSBACHfname)
df_OCN = xr.open_dataset(OCNfname)

df_to_interp = df_CLASS
df_to_use = df_OCN

# Check that the np.arange arrays are equal to the OCN lat-lon arrays.
np.all(np.arange(-89.5, 90.5, 1) == df_to_use.latitude.values)
np.all(np.arange(-179.5, 180.5, 1) == df_to_use.longitude.values)

# Use the xr.DataArray.interp function as shown below.
interpdf = df_to_interp.nbp.interp(coords={
    'longitude': df_to_use.longitude,
    'latitude': df_to_use.latitude
})

# Sum of fluxes will be slightly different as a result of interpolation.
interpdf.sum()
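
# To make the comment above concrete, the two global totals can be printed side
# by side (a hypothetical check, not part of the original snippet); they should
# be close but not identical after regridding.
print(float(df_to_interp.nbp.sum()), float(interpdf.sum()))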
""" TESTING """
# Test this functionality as implemented when a TRENDYf.SpatialAgg class is
# initialised.
Tdf = TRENDYf.SpatialAgg(CLASSfname)
Tdf.regional_cut(lats=(-30, 30), lons=(0, 40))