i += 1

with open(f + '_time_against_temp_and_sd_onshore_1.csv', 'w',
          newline='') as a_file:
    writer = csv.writer(a_file)
    writer.writerow(["index", "time", "temp", "temp_sd", "difference"])
    for i in range(len(temp_sd)):
        if (not np.isnan(temp[i])) and (i > 285):
            writer.writerow([i + 1, t[i], temp[i], temp_sd[i], diff_arr[i]])

t = [st.toordinal() for st in t]
t = np.array(t)
temp = np.array(temp)
mhws, clim = detect(t, temp, climatologyPeriod=[1992, 2020], maxPadLength=4)
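# (maxPadLength=4 interpolates over gaps of up to 4 missing days before detection)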

start = [date.fromordinal(int(st)) for st in mhws['time_start']]
end = [date.fromordinal(int(st)) for st in mhws['time_end']]
duration = mhws['duration']
peak_time = [date.fromordinal(int(st)) for st in mhws['time_peak']]
int_max = mhws['intensity_max']
int_cum = mhws['intensity_cumulative']
int_mean = mhws['intensity_mean']

with open(f + '_analysis_onshore_1.csv', 'w', newline='') as a_file:
    writer = csv.writer(a_file)

    writer.writerow([
        "index_number", "time_start", "time_end", "duration", "peak_time",
        "max_intensity", "mean_intensity", "cumulative_intensity"
    ])
Example #2
## apply the MHW code
####################################
# time vector for the mhw function
# needs to be generated with datetime format!
    MinYear = 1982
    MaxYear = 2018  # warning: the record does not cover all of 2018
    NumYears = MaxYear - MinYear + 1
    MaxNumLeapYear = NumYears // 4 + 1  # integer division (+1 in case the
                                        # first year is a leap year)
    NumDays = 365 * NumYears + MaxNumLeapYear
    # warning: the time axis ends 2018-05-19, not 31 Dec
    dtime = np.arange(date(MinYear, 1, 1).toordinal(),
                      date(MaxYear, 5, 19).toordinal() + 1)

    sst_mean_d = np.array(ds_oisst_d)
    dT = np.array(ds_hadisst_offset)
    mhws, clim = mhw.detect(dtime, sst_mean_d, climatologyPeriod=[1982, 2005],
                            alternateClimatology=[dtime, sst_mean_d - dT])
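    # alternateClimatology supplies a separate [time, temp] series from which
    # the climatology and threshold are computed (here the OISST series minus
    # the HadISST-derived offset dT)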

# time vector for plotting with mhw
    tim_vec_plot = pd.date_range('1982-01-01','2018-05-19',name='time',freq='D')

# SSTa mean from the daily data
    '''
    Ind = -146
    Ind_end = -29
    SSTa_area_mean = np.mean(sst_mean_d[Ind:Ind_end+1] - clim['seas'][Ind:Ind_end+1])
    print('SSTa area mean for the period starting from ' , tim_vec_plot[Ind], 'until' , tim_vec_plot[Ind_end], ': ')
    print('SSTa = ', SSTa_area_mean)
    '''
'''
extra info for indexes when using the file finishing in Feb 2018:
1Nov2017: -120
'''
Example #3
def simulate(t, sst_obs, seas_obs, sst_trend_obs, N_ens, params=None):
    '''
    Fit AR1 model to sst time series and simulate MHW property trends
    t is the time vector, daily
    sst_trend_obs is the trend in units of decade^-1
    N_ens is the number of ensembles per trend value
    params=(a, sig_eps) specifies the AR1 model parameters; None by default,
                        which forces the AR1 model to be fit to the sst data
    '''

    # Variables for AR1 process (simulated SST)

    if params is None:
        a, tmp, sig_eps = ar1fit(signal.detrend(sst_obs - seas_obs))
    else:
        a = params[0]
        sig_eps = params[1]

    tau = -1 / np.log(a)
    var_eps = sig_eps**2

    var_sst = var_eps * a / (1 - a**2)  # AR1 process variance
    sig_sst = np.sqrt(var_sst)
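    # NB: the textbook stationary AR(1) variance is var_eps / (1 - a**2);
    # check whether the extra factor of a above is intended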

    # Variables for large-ensemble experiment with multiple trend values

    keys = ['count', 'intensity_mean', 'duration', 'intensity_max_max']
    N_keys = len(keys)
    trends = {}
    means = {}
    for key in keys:
        trends[key] = np.zeros((N_ens))
        means[key] = np.zeros((N_ens))

    # Loop over trend values and ensemble members, save MHW property trends

    T = len(t)
    for i_ens in range(N_ens):
        # Initialize sst and noise variables
        #sst = np.zeros(T)
        #eps = sig_eps*np.random.randn(T)
        # Initial condition of sst is Normal random variable with mean 0, variance given by theoretical AR1 variance
        #sst[0] = sig_sst*np.random.randn(1)
        # Generate AR1 process
        #for tt in range(1,T):
        #    sst[tt] = a*sst[tt-1] + eps[tt]
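        # draw an AR(1) series via statsmodels (ar=[1, -a], ma=[1]);
        # NB: recent statsmodels versions renamed the sigma= keyword to scale=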
        sst = tsa.arima_process.arma_generate_sample([1, -a], [1],
                                                     T,
                                                     sigma=sig_eps,
                                                     burnin=100)
        # Add linear trend (sst_trend_obs is per decade; convert to per day)
        sst = sst + sst_trend_obs * (t - t[0]) / 10. / 365.25
        # Apply Marine Heat Wave definition
        mhws, clim = mhw.detect(t, sst)
        mhwBlock = mhw.blockAverage(t, mhws)
        mean, trend, dtrend = meanTrend_TS(mhwBlock)  #mhw.meanTrend(mhwBlock)
        # Save trends
        for key in keys:
            trends[key][i_ens] = trend[key]
            means[key][i_ens] = mean[key]

    # Output results

    return tau, sig_eps, trends, means
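
# A minimal usage sketch for simulate() (illustrative, assuming t, sst_obs and
# clim['seas'] come from an earlier mhw.detect call, and an observed trend in
# degC/decade):
# tau, sig_eps, trends, means = simulate(t, sst_obs, clim['seas'],
#                                        sst_trend_obs, N_ens=100)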
Example #4
                 (sst[exp][which_pre2100, j, ens] <
                  -0).sum() > 0):  # check for land, ice
     continue
 # MHW detection
 # pick the matching historical ensemble member for the climatology,
 # capped at the number of available 'hist' members
 if exp in ('histNat', 'rcp45', 'rcp85'):
     ens_clim = min(ens, NENS[model]['hist'] - 1)
 else:
     ens_clim = ens
 mhws, clim = mhw.detect(t[model][exp][which_pre2100],
                         sst[exp][which_pre2100, j, ens],
                         climatologyPeriod=climPeriod,
                         alternateClimatology=[
                             t[model]['hist'],
                             sst['hist'][:, j, ens_clim]
                         ])
 mhwBlock = mhw.blockAverage(t[model][exp][which_pre2100],
                             mhws,
                             temp=sst[exp][which_pre2100, j,
                                           ens],
                             clim=clim)
 #else:
 #    mhws, clim = mhw.detect(t[model][exp], sst[exp][:,j,ens], climatologyPeriod=climPeriod)
 #    mhwBlock = mhw.blockAverage(t[model][exp], mhws, temp=sst[exp][:,j,ens], clim=clim)
 # Calculate new measure of annual duration, intensity
 mhwBlock['count_new'] = np.zeros(mhwBlock['count'].shape)
 mhwBlock['duration_new'] = np.zeros(
     mhwBlock['duration'].shape)
Example #5
fileobj = Dataset(file0, mode='r')
lon = fileobj.variables['lon'][:].astype(float)
lat = fileobj.variables['lat'][:].astype(float)
fill_value = fileobj.variables['sst']._FillValue.astype(float)
scale = fileobj.variables['sst'].scale_factor.astype(float)
offset = fileobj.variables['sst'].add_offset.astype(float)
fileobj.close()

#
# Size of mhwBlock variable
#

matobj = io.loadmat(header + 'timeseries/avhrr-only-v2.ts.' +
                    str(300).zfill(4) + '.mat')
mhws, clim = mhw.detect(t, matobj['sst_ts'][300, :])
mhwBlock = mhw.blockAverage(t, mhws)
years = mhwBlock['years_centre']
NB = len(years)

#
# initialize some variables
#

pctile = 98  # percentile for the MHW definition (the conventional choice is 90)
alpha = 0.05
X = len(lon)
Y = len(lat)
#i_which = range(0,X,10)
#j_which = range(0,Y,10)
#i_which = range(0,X,4)
Example #6
    sst = matobj['sst_ts'][j, :]

# For up-to-date point loc recent data
#t, dates, T, year, month, day, do = ecj.timevector([1982,1,1], [2015,6,7])
#matobj = io.loadmat(header + 'timeseries/sst_ts_NWAtl.mat')
# t, dates, T, year, month, day, do = ecj.timevector([1982,1,1], [2015,7,22])
# matobj = io.loadmat(header + 'timeseries/sst_ts_Med.mat')
# sst = np.NaN*np.zeros((N_locs,T))
# sst[0,:] = matobj['sst_ts'][0,:]

#
# Apply Marine Heat Wave definition
#

n = 0
mhws, clim = mhw.detect(t, sst, coldSpells=coldSpells)
mhwBlock = mhw.blockAverage(t, mhws, temp=sst)


def meanTrend_OLS(mhwBlock, alpha=0.05):
    # Initialize mean and trend dictionaries
    mean = {}
    trend = {}
    dtrend = {}
    #
    # Construct matrix of predictors, first column is all ones to estimate the mean,
    # second column is the time vector, equal to zero at mid-point.
    t = mhwBlock['years_centre']
    X = np.array([np.ones(t.shape), t - t.mean()]).T
    #
    # Loop over all keys in mhwBlock
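    # (sketch of the cut-off remainder: the same OLS fit via lstsq used
    #  elsewhere in this file; the confidence-interval part using alpha is
    #  omitted here)
    for key in mhwBlock.keys():
        if key.startswith('years'):
            continue
        valid = ~np.isnan(mhwBlock[key])
        if valid.sum() > 2:
            beta = np.linalg.lstsq(X[valid, :], mhwBlock[key][valid],
                                   rcond=None)[0]
            mean[key] = beta[0]   # intercept = mean (time is centred)
            trend[key] = beta[1]  # slope = trend per year
        else:
            mean[key] = np.nan
            trend[key] = np.nan
        dtrend[key] = np.nan  # CI at level alpha not computed in this sketch
    return mean, trend, dtrend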
Example #7
    NumYears = MaxYear-MinYear+1
    MaxNumLeapYear = NumYears//4 + 1 # integer division (+1 in case the
                                     # first year is a leap year)
    NumDays = 365*NumYears + MaxNumLeapYear
    dtime = np.arange(date(MinYear,1,1).toordinal(),date(MaxYear,12,31).toordinal()+1)

    SST = np.empty((len(dtime)))
    SST.fill(np.nan)
    SST[0:len(sst_obs)] = sst_obs
    SST[len(sst_obs):len(sst_obs)+len(ds_timeserie)] = ds_timeserie

    t_read = time.time()
# apply the mhw framework to the daily average over the small region
# NB: this is not an area average, but the data points are on a regular
# quarter-degree grid, so it should be fine
    mhws, clim_h = mhw.detect(dtime, SST, climatologyPeriod=[1982, 2005])
    # (optionally: coldSpells=False, which is the default)
    elapsed_mhw = time.time() - t_read
    print('elapsed time for mhw.detect: ', elapsed_mhw)

#######
# plot
##########
# plot SSTa or daily time series
plt.figure(figsize=(13,13))
'''
ax = plt.subplot(211)
plt.plot(tim_vec,ssta_obs)
plt.title('SSTa TAS point time series -- 42S 148E')
plt.grid()
'''
Example #8
    tim_mem_plot = pd.date_range('2007-01-01',
                                 '2010-11-22',
                                 name='time',
                                 freq='D')
else:
    dtime = np.arange(date(MinYear, 1, 1).toordinal(),
                      date(MaxYear, 11, 22).toordinal() + 1)
    # time vector for plotting with mhw
    tim_vec_plot = pd.date_range('2003-01-01',
                                 '2010-11-22',
                                 name='time',
                                 freq='D')
    sst_mean_d = np.array(sst_aa)
if WHICH != 'all':
    sst_mean_d = np.array(sst_aa)
    mhws, clim = mhw.detect(dtime, sst_mean_d, climatologyPeriod=[2003, 2006])

####################################
# Plotting
####################################
plt.figure(figsize=(13, 12))

if WHICH != 'all':
    ax = plt.subplot(211)
    plt.plot(tim_vec_plot, clim['seas'], '0.5')
    plt.plot(tim_vec_plot, sst_mean_d, 'k')
    plt.plot(tim_vec_plot, clim['thresh'], 'b')
    plt.legend(['climatology', 'SST daily', 'threshold'])
    ax.set_xlim(['2003-01-01', '2010-12-31'])
    ax.set_ylim([10, 22])
    #ax.fill_between(tim_vec_plot, sst_mean_d, \
Example #9
temp_CM = data['sst'].item()
t_CM = data['t'].item()
dates_CM = data['dates'].item()
lon_CM = data['lon'].item()
lat_CM = data['lat'].item()
depth_CM = data['depth'].item()

# Sites with long enough records

sites2016 = ['Cape_Peron', 'Schouten_Island', 'Coles_Bay', 'Swansea',
             'Bicheno', 'Magistrates_Point', 'Wineglass_Bay', 'George_III_Reef',
             'Iron_Pot', 'Mouldies_Hole', 'One_Tree_Point', 'Wedge_Island']
mhws_CM = {}
clim_CM = {}
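# 3-day running mean, then MHW detection; the climatology period trims the
# first and last (possibly partial) calendar years of each record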
for site in sites2016:
    temp_CM[site] = ecj.runavg(temp_CM[site], 3)
    missing = np.isnan(temp_CM[site])
    mhws_CM[site], clim_CM[site] = mhw.detect(
        t_CM[site], temp_CM[site],
        climatologyPeriod=[dates_CM[site][0].year + 1,
                           dates_CM[site][-1].year - 1])
    temp_CM[site][missing] = np.nan

#
# Load MITS
#

# BGC data
file = header + 'IMOS_NRSMAI/IMOS_NRSMAI_BGC_daily.mat'
matobj = io.loadmat(file)
t_temp = matobj['t_d'].flatten()
dates_temp = matobj['dates_d']
depth_temp = np.nanmean(matobj['depth_d'], axis=0)
temp = matobj['temp_d']
salt = matobj['salt_d']
dox1 = matobj['dox1_d']
Example #10
import netCDF4 as nc
import numpy as np
from datetime import date
import marineHeatWaves as mhw
t = np.arange(date(1982, 1, 1).toordinal(), date(2016, 12, 31).toordinal() + 1)
nc_obj = nc.Dataset("sst_1_2.nc")
sst_1_2 = nc_obj.variables['sst'][:]
mhws, clim = mhw.detect(t, sst_1_2, climatologyPeriod=[1982, 2005])

da = nc.Dataset("threshold_climatology", "w", format="NETCDF4")
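# 12784 days = 1982-01-01 through 2016-12-31, i.e. len(t); using
# createDimension("time", len(t)) would keep the dimension in sync with
# clim['thresh'] and clim['seas']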
da.createDimension("time", 12784)
da.createVariable("threshold", "f8", ("time"))
da.createVariable("climatology", "f8", ("time"))
da.variables["threshold"][:] = clim['thresh']
da.variables['climatology'][:] = clim['seas']
da.close()
Example #11
    for j in range(LAT):
        if lat_data[j] - res_data / 2. < lat.min():
            j1 = 0
        else:
            j1 = np.where(lat > lat_data[j] - res_data / 2.)[0][0]
        if lat_data[j] + res_data / 2. > lat.max():
            j2 = len(lat) - 1
        else:
            j2 = np.where(lat > lat_data[j] + res_data / 2.)[0][0]
        sst = np.nanmean(np.nanmean(sst_ts[j1:j2 + 1, :, :], axis=2), axis=0)
        if np.logical_not(np.isfinite(sst.sum())) + (
            (sst < -1).sum() > 0):  # check for land, ice
            continue
#   Count number of MHWs of each length
        mhws, clim = mhw.detect(
            t[avhrr_match],
            sst,
            climatologyPeriod=[1983, np.min([dataEnd, 2012])])
        mhwBlock = mhw.blockAverage(t[avhrr_match], mhws)
        years = mhwBlock['years_centre']
        #   Skip proxy_fit calculation if not enough data
        if ((proxy['threshCount'][avhrr, j, i] > 0).sum() <= 5) + (
            (~np.isnan(proxy['maxAnom'][avhrr, j, i])).sum() <= 5) + (
                (~np.isnan(proxy['threshAnom'][avhrr, j, i])).sum() <= 5):
            continue
# Loop over all combinations of MHW properties and proxy keys, to build and test all models
        for key in MHW_keys:
            if key in ('total_days', 'total_icum'):
                continue
            else:
                for pkey in proxy.keys():
                    rho[key][pkey][j, i], p[key][pkey][
Example #12
# Analysis
#

# MHW approach

#mhws_obs, clim = mhw.detect(t_obs[year_obs<=2005], sst_obs[year_obs<=2005], climatologyPeriod=[1982,2005])
#mhws_obs_2016, clim = mhw.detect(t_obs, sst_obs, climatologyPeriod=[1982,2005])

#climPeriod = [1911, 1940]
climPeriod = [1881, 1910]
#climPeriod = [1961, 1990]
#climPeriod = [1982, 2005]

# MHW detection, using the climPeriod->(1982-2005) warming signal from HadISST
# (dt) to offset the time series, and (1982-2005) as the base period
dt = (np.nanmean(sst_had[(year_had >= 1982) * (year_had <= 2005)]) -
      np.nanmean(sst_had[(year_had >= climPeriod[0]) * (year_had <= climPeriod[1])]))
mhws_obs, clim = mhw.detect(t_obs[year_obs <= 2005], sst_obs[year_obs <= 2005] + dt,
                            climatologyPeriod=[1982, 2005],
                            alternateClimatology=[t_obs, sst_obs])
mhws_obs_2016, clim = mhw.detect(t_obs, sst_obs + dt, climatologyPeriod=[1982, 2005],
                                 alternateClimatology=[t_obs, sst_obs])

## King et al. 2015 correction to histNat
#for model in models:
#    tt_climPeriod = (year['hist'][model]>=climPeriod[0]) * (year['hist'][model]<=climPeriod[1])
#    tt_histPeriod = (year['hist'][model]>=1911) * (year['hist'][model]<=1940)
#    dT = (np.nanmean(T_ts['hist'][model][tt_histPeriod, hist, :]) - np.nanmean(T_ts['hist'][model][tt_climPeriod, hist, :])) - (np.nanmean(T_ts['hist'][model][tt_histPeriod, histNat, :]) - np.nanmean(T_ts['hist'][model][tt_climPeriod, histNat, :]))
#    T_ts['hist'][model][:,histNat,:] = T_ts['hist'][model][:,histNat,:] + dT
#    print model, dT

per = 'hist'
mhws_hist_recent = {}
mhws_hist = {}
mhws_histNat = {}
clim_hist_recent = {}
Example #13
     year[tt] = date.fromordinal(t[tt]).year
     month[tt] = date.fromordinal(t[tt]).month
     day[tt] = date.fromordinal(t[tt]).day
 if sw_write:
     outfile_train.write(
         str(year.min().astype(int)) + '-' + str(year.max().astype(int)) +
         ', ')
     outfile_valid.write(
         str(year.min().astype(int)) + '-' + str(year.max().astype(int)) +
         ', ')
 # Apply Marine Heat Wave definition
 print('\n' + station + '\n')
 print('Proportion of valid values: ' +
       str(100. - 100. * np.isnan(sst).sum() / len(sst)))
 mhws, clim = mhw.detect(t,
                         sst,
                         climatologyPeriod=climPeriod,
                         maxPadLength=maxPadLength)
 mhwBlock = mhw.blockAverage(t, mhws, clim, removeMissing=True, temp=sst)
 mean, trend, dtrend = mhw.meanTrend(mhwBlock)
 # Number of years with no MHW events pre and post 1950
 #count_pre50 = mhwBlock['count'][(mhwBlock['years_centre']<=1950)*(~np.isnan(mhwBlock['count']))]
 #count_post50 = mhwBlock['count'][(mhwBlock['years_centre']>1950)*(~np.isnan(mhwBlock['count']))]
 #print 'Pre-1950 0-count: ' + str(1.*np.sum(count_pre50==0)/len(count_pre50)) + ' post-1950 0-count: ' + str(1.*np.sum(count_post50==0)/len(count_post50)) + ' Difference: ' + str(1.*np.sum(count_post50==0)/len(count_post50) - 1.*np.sum(count_pre50==0)/len(count_pre50))
 years = mhwBlock['years_centre']
 #Ny = len(mhwBlock['years_centre'])/2
 ttPre = (years >= 1925) * (years <= 1954)
 ttPost = (years >= 1984) * (years <= 2013)
 count_preMid = ecj.nonans(mhwBlock['count'][ttPre])
 count_postMid = ecj.nonans(mhwBlock['count'][ttPost])
 print('Pre-midpoint 0-count: ' + str(
     1. * np.sum(count_preMid == 0) /
Example #14
import numpy as np
import pandas as pd
from datetime import date
import marineHeatWaves as mhw
import pickle as pkl

# load raw data
df = pd.read_csv('data.csv', index_col=0, parse_dates=[0])

# preprocess according to quality control flags
qc_codes = {0: '0 Blank', 1: '1 Good', 2: '', 3: '3 Doubtful',
            4: '4 Erroneous', 5: '5 Changes',
            6: '6 Acceptable', 7: '7 Off position'}
df = df[df.Q_FLAG.isin([1, 3, 5, 6])]

# resample as daily average
df_daily = df.resample('D').mean()

# create arrays and lists consistent with the mhw package
dates = [tt.date() for tt in df_daily.index.to_pydatetime()]
t = np.array([tt.toordinal() for tt in dates])
sst = np.array(df_daily.SSTP)

# detect marine heat waves
mhws, clim = mhw.detect(t, sst)
                  
# save results
with open('mhws_data.pkl', 'wb') as f:
    pkl.dump([dates, t, sst, mhws, clim], f)
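
# to reload the saved results later (round-trip sketch):
# with open('mhws_data.pkl', 'rb') as f:
#     dates, t, sst, mhws, clim = pkl.load(f)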
Example #15
    i = np.where(lon > locations['lon'][n])[0][0]
    j = np.where(lat > locations['lat'][n])[0][0]
    matobj = io.loadmat(header + 'timeseries/avhrr-only-v2.ts.' +
                        str(i + 1).zfill(4) + '.mat')
    sst[site] = matobj['sst_ts'][j, :]

#site = 'E_Tas'
#site = 'WA'
#site = 'Med'
#site = 'NW_Atl'
site = 'Blob'

t_obs, dates_obs, T_obs, year_obs, month_obs, day_obs, doy_obs = ecj.timevector(
    [1982, 1, 1], [2016, 12, 31])
sst_obs = sst[site]  #np.loadtxt('data/sst_' + site + '.csv', delimiter=',')
mhws_obs, clim_obs = mhw.detect(t_obs, sst_obs)
mhwBlock_obs = mhw.blockAverage(t_obs, mhws_obs)
mean_obs, trend_obs, dtrend_obs = meanTrend_TS(
    mhwBlock_obs)  #mhw.meanTrend(mhwBlock_obs)

# SST trend

years = mhwBlock_obs['years_centre']
SST_block = np.zeros(years.shape)
for yr in range(len(years)):
    SST_block[yr] = np.mean(sst_obs[year_obs == years[yr]])
X = np.array([np.ones(years.shape), years - years.mean()]).T
beta = linalg.lstsq(X, SST_block)[0]
sst_trend_obs = beta[1] * 10

# Loop over SST trend values and simulate MHW property trends
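# (sketch of that loop, reusing simulate() from Example #3; the trend sweep and
#  ensemble size are illustrative assumptions)
# for tr in np.linspace(0., 2. * sst_trend_obs, 10):
#     tau, sig_eps, trends, means = simulate(t_obs, sst_obs, clim_obs['seas'],
#                                            tr, N_ens=100)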
Example #16
fileobj = Dataset(file0, mode='r')
lon = fileobj.variables['lon'][:].astype(float)
lat = fileobj.variables['lat'][:].astype(float)
fill_value = fileobj.variables['sst']._FillValue.astype(float)
scale = fileobj.variables['sst'].scale_factor.astype(float)
offset = fileobj.variables['sst'].add_offset.astype(float)
fileobj.close()

#
# Size of mhwBlock variable
#

matobj = io.loadmat(header + 'timeseries/avhrr-only-v2.ts.' +
                    str(300).zfill(4) + '.mat')
mhws, clim = mhw.detect(t, matobj['sst_ts'][300, tt_1982_2005])
mhwBlock = mhw.blockAverage(t, mhws)
years = mhwBlock['years_centre']
NB = len(years)

#
# initialize some variables
#

pctile = 90  # Percentile for calculation of MHWs
X = len(lon)
Y = len(lat)
i_which = range(0, X)
j_which = range(0, Y)
DIM = (len(j_which), len(i_which))
SST_mean = np.nan * np.zeros(DIM)
fileobj = Dataset(file0, 'r')
lon = fileobj.variables['lon'][:].astype(float)
lat = fileobj.variables['lat'][:].astype(float)
fill_value = fileobj.variables['sst']._FillValue.astype(float)
scale = fileobj.variables['sst'].scale_factor.astype(float)
offset = fileobj.variables['sst'].add_offset.astype(float)
fileobj.close()

#
# Size of mhwBlock variable
#

matobj = io.loadmat(header + 'timeseries/avhrr-only-v2.ts.' +
                    str(300).zfill(4) + '.mat')
mhws, clim = mhw.detect(t, matobj['sst_ts'][300, :])
mhwBlock = mhw.blockAverage(t, mhws)
years = mhwBlock['years_centre']
NB = len(years)

#
# initialize some variables
#

X = len(lon)
Y = len(lat)
i_which = range(0, X)  #,10)
j_which = range(0, Y)  #,10)
#i_which = range(0,X,4)
#j_which = range(0,Y,4)
DIM = (len(j_which), len(i_which))