import csv
from datetime import date

import marineHeatWaves as mhw


def write_mhw_analyse_csv(mhws, file_name, location_type="onshore"):

    '''
    Write a csv file containing some characteristics of the detected marine
    heatwaves.

    Inputs:
    mhws        The output of the detect function. It contains all the
                characteristics of the marine heatwaves in the box from
                1993 to 2019.
    file_name   The name of the netCDF file (used to build the csv file name).

    Options:
    location_type   Either "onshore" or "offshore"; the location of the box.
                    The default value is "onshore".
    '''

    category = mhws['category']

    # Map each intensity category onto its numeric level, following the
    # standard MHW category convention (1=Moderate, 2=Strong, 3=Severe,
    # 4=Extreme); the original snippet swapped Moderate and Strong.
    category_to_level = {'Moderate': 1, 'Strong': 2, 'Severe': 3, 'Extreme': 4}
    intensity_level = [category_to_level.get(ca, 'ERROR') for ca in category]

    # Variables (characteristics of the marine heatwaves) written to the csv file
    start = [date.fromordinal(int(st)) for st in mhws['time_start']]  # date format
    end = [date.fromordinal(int(en)) for en in mhws['time_end']]
    duration = mhws['duration']
    peak_time = [date.fromordinal(int(pk)) for pk in mhws['time_peak']]
    int_max = mhws['intensity_max']
    int_cum = mhws['intensity_cumulative']
    int_mean = mhws['intensity_mean']

    f = file_name + "_" + location_type
    with open(f + '_analys.csv', 'w', newline='') as a_file:
        writer = csv.writer(a_file)
        writer.writerow(["index_number", "time_start", "time_end", "duration",
                         "peak_time", "max_intensity", "mean_intensity",
                         "cumulative_intensity", "intensity_level",
                         "intensity_category"])
        for i in range(len(start)):
            writer.writerow([i + 1, start[i], end[i], duration[i], peak_time[i],
                             int_max[i], int_mean[i], int_cum[i],
                             intensity_level[i], category[i]])
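
# A minimal usage sketch (hypothetical names: t and sst come from an earlier
# data-loading step, and "sst_box" stands in for the netCDF file name):
# mhws, clim = mhw.detect(t, sst)
# write_mhw_analyse_csv(mhws, "sst_box", location_type="offshore")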


    mhwBlock = mhw.blockAverage(t, mhws)
    mhwBlock_3y = mhw.blockAverage(t, mhws,blockLength=3)
    mhwBlock_9y = mhw.blockAverage(t, mhws,blockLength=9)
def write_mhwBlock_csv(t,mhws,block_length,file_name,location_type="onshore"):

    '''
    Write a csv file containing block-averaged characteristics of all marine
    heatwaves in each time block.

    Inputs:
    t               The time vector.
    mhws            The output of the detect function. It contains all the
                    characteristics of the marine heatwaves in the box from
                    1993 to 2019.
    block_length    The length of the time blocks, in years.
    file_name       The name of the netCDF file (used to build the csv file name).

    Options:
    location_type   Either "onshore" or "offshore"; the location of the box.
                    The default value is "onshore".
    '''

    mhwBlock = mhw.blockAverage(t, mhws, blockLength=block_length)

    start_year = mhwBlock['years_start']               # Start year of blocks (inclusive)
    end_year = mhwBlock['years_end']                   # End year of blocks (inclusive)
    centre_year = mhwBlock['years_centre']             # Decimal year at centre of blocks
    frequency = mhwBlock['count']                      # Total MHW count in each block
    duration = mhwBlock['duration']                    # Average MHW duration in each block [days]
    max_intensity = mhwBlock['intensity_max']          # Average MHW "maximum (peak) intensity" in each block [deg. C]
    max_max_intensity = mhwBlock['intensity_max_max']  # Maximum MHW "maximum (peak) intensity" in each block [deg. C]
    mean_intensity = mhwBlock['intensity_mean']        # Average MHW "mean intensity" in each block [deg. C]
    intensity_var = mhwBlock['intensity_var']          # Average MHW "intensity variability" in each block [deg. C]
    cumulative_intensity = mhwBlock['intensity_cumulative']  # Average MHW "cumulative intensity" in each block [deg. C x days]
    rate_onset = mhwBlock['rate_onset']                # Average MHW onset rate in each block [deg. C / day]
    rate_decline = mhwBlock['rate_decline']            # Average MHW decline rate in each block [deg. C / day]
    mhw_days = mhwBlock['total_days']                  # Total number of MHW days in each block [days]
    total_cumu_int = mhwBlock['total_icum']            # Total cumulative intensity over all MHWs in each block [deg. C x days]

    f = "average_"+block_length+"_year_"+file_name+"_"+location_type
    with open(f+'_analys.csv','w',newline = '') as a_file:
        writer = csv.writer(a_file)
        writer.writerow(["index_number","start_year","end_year","centre_year","frequency","duration","max_intensity","max_max_intensity","mean_intensity","intensity_variablity","cumulative_intensity","rate_onset","rate_decline","mhw_days","total_cumulative_intensity"])
        i = 0
        while i < len(start_year):
            writer.writerow([i+1,start_year[i],end_year[i],centre_year[i],frequency[i],duration[i],max_intensity[i],max_max_intensity[i],mean_intensity[i],intensity_var[i],cumulative_intensity[i],rate_onset[i],rate_decline[i],mhw_days[i],total_cumu_int[i]])
            i += 1
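
# As above, a minimal usage sketch with the hypothetical names t, sst and
# "sst_box"; here with 3-year blocks:
# mhws, clim = mhw.detect(t, sst)
# write_mhwBlock_csv(t, mhws, 3, "sst_box", location_type="offshore")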
Example #3
# Imports assumed for this excerpt (file0, header, t and tt_1982_2005 are
# defined earlier in the full script):
from netCDF4 import Dataset
from scipy import io
import numpy as np
import marineHeatWaves as mhw

fileobj = Dataset(file0, mode='r')
lon = fileobj.variables['lon'][:].astype(float)
lat = fileobj.variables['lat'][:].astype(float)
fill_value = fileobj.variables['sst']._FillValue.astype(float)
scale = fileobj.variables['sst'].scale_factor.astype(float)
offset = fileobj.variables['sst'].add_offset.astype(float)
fileobj.close()

#
# Size of mhwBlock variable
#

matobj = io.loadmat(header + 'timeseries/avhrr-only-v2.ts.' +
                    str(300).zfill(4) + '.mat')
mhws, clim = mhw.detect(t, matobj['sst_ts'][300, tt_1982_2005])
mhwBlock = mhw.blockAverage(t, mhws)
years = mhwBlock['years_centre']
NB = len(years)
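# NB is the number of year blocks returned by blockAverage; the full script
# uses it to size per-year output arrays (e.g. N_ts in a later excerpt).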

#
# initialize some variables
#

pctile = 90  # Percentile for calculation of MHWs
X = len(lon)
Y = len(lat)
i_which = range(0, X)
j_which = range(0, Y)
DIM = (len(j_which), len(i_which))
SST_mean = np.nan * np.zeros(DIM)
MHW_total = np.nan * np.zeros(DIM)
     if ens > NENS[model]['hist'] - 1:
         ens_clim = NENS[model]['hist'] - 1
     else:
         ens_clim = ens
 else:
     ens_clim = ens
 mhws, clim = mhw.detect(t[model][exp][which_pre2100],
                         sst[exp][which_pre2100, j, ens],
                         climatologyPeriod=climPeriod,
                         alternateClimatology=[
                             t[model]['hist'],
                             sst['hist'][:, j, ens_clim]
                         ])
 mhwBlock = mhw.blockAverage(t[model][exp][which_pre2100],
                             mhws,
                             temp=sst[exp][which_pre2100, j,
                                           ens],
                             clim=clim)
 #else:
 #    mhws, clim = mhw.detect(t[model][exp], sst[exp][:,j,ens], climatologyPeriod=climPeriod)
 #    mhwBlock = mhw.blockAverage(t[model][exp], mhws, temp=sst[exp][:,j,ens], clim=clim)
 # Calculate new measure of annual duration, intensity
 mhwBlock['count_new'] = np.zeros(mhwBlock['count'].shape)
 mhwBlock['duration_new'] = np.zeros(
     mhwBlock['duration'].shape)
 mhwBlock['intensity_max_max_new'] = np.zeros(
     mhwBlock['intensity_max_max'].shape)
 for ev in range(mhws['n_events']):
     # Block index for year of each MHW (MHW year defined by start year)
     iBlock = np.where((mhwBlock['years_start'] >=
                        mhws['date_start'][ev].year) *
Example #5
# One possible set of imports for this excerpt (assumed, not shown in the
# original); ar1fit and meanTrend_TS are helpers defined elsewhere in the
# full script.
import numpy as np
from scipy import signal
from statsmodels import tsa
import statsmodels.tsa.arima_process  # makes tsa.arima_process available
import marineHeatWaves as mhw


def simulate(t, sst_obs, seas_obs, sst_trend_obs, N_ens, params=None):
    '''
    Fit an AR1 model to the sst time series and simulate MHW property trends.

    t               time vector, daily
    sst_trend_obs   trend in units of decade^-1
    N_ens           number of ensemble members per trend value
    params          (a, sig_eps) specified AR1 model parameters; None by
                    default, which forces the AR1 model to be fit to sst data
    '''

    # Variables for AR1 process (simulated SST)

    if params is None:
        a, tmp, sig_eps = ar1fit(signal.detrend(sst_obs - seas_obs))
    else:
        a = params[0]
        sig_eps = params[1]

    tau = -1 / np.log(a)
    var_eps = sig_eps**2

    var_sst = var_eps / (1 - a**2)  # stationary AR1 process variance
    sig_sst = np.sqrt(var_sst)
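    # tau is the AR1 e-folding decorrelation time in days; sig_sst (the
    # stationary standard deviation) is used only by the commented-out
    # manual initialization below.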

    # Variables for large-ensemble experiment with multiple trend values

    keys = ['count', 'intensity_mean', 'duration', 'intensity_max_max']
    N_keys = len(keys)
    trends = {}
    means = {}
    for key in keys:
        trends[key] = np.zeros((N_ens))
        means[key] = np.zeros((N_ens))

    # Loop over trend values and ensemble members, save MHW property trends

    T = len(t)
    for i_ens in range(N_ens):
        # Initialize sst and noise variables
        #sst = np.zeros(T)
        #eps = sig_eps*np.random.randn(T)
        # Initial condition of sst is Normal random variable with mean 0, variance given by theoretical AR1 variance
        #sst[0] = sig_sst*np.random.randn(1)
        # Generate AR1 process
        #for tt in range(1,T):
        #    sst[tt] = a*sst[tt-1] + eps[tt]
        # Generate the AR1 series with statsmodels instead of the manual loop
        # above (note: newer statsmodels versions name the noise-std argument
        # `scale` rather than `sigma`)
        sst = tsa.arima_process.arma_generate_sample([1, -a], [1],
                                                     T,
                                                     sigma=sig_eps,
                                                     burnin=100)
        # Add trend
        sst = sst + sst_trend_obs * (t - t[0]) / 10. / 365.25
        # Apply Marine Heat Wave definition
        mhws, clim = mhw.detect(t, sst)
        mhwBlock = mhw.blockAverage(t, mhws)
        mean, trend, dtrend = meanTrend_TS(mhwBlock)  #mhw.meanTrend(mhwBlock)
        # Save trends
        for key in keys:
            trends[key][i_ens] = trend[key]
            means[key][i_ens] = mean[key]

    # Output results

    return tau, sig_eps, trends, means
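
# A minimal usage sketch (illustrative only): t_obs, sst_obs, clim_obs and
# sst_trend_obs are produced by the observational excerpt in Example #6 below.
# tau, sig_eps, trends, means = simulate(t_obs, sst_obs, clim_obs['seas'],
#                                        sst_trend_obs, N_ens=100)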
Example #6
    j = np.where(lat > locations['lat'][n])[0][0]
    matobj = io.loadmat(header + 'timeseries/avhrr-only-v2.ts.' +
                        str(i + 1).zfill(4) + '.mat')
    sst[site] = matobj['sst_ts'][j, :]

# Pick one site; only the final assignment takes effect:
#site = 'E_Tas'
#site = 'WA'
#site = 'Med'
#site = 'NW_Atl'
site = 'Blob'

t_obs, dates_obs, T_obs, year_obs, month_obs, day_obs, doy_obs = ecj.timevector(
    [1982, 1, 1], [2016, 12, 31])
sst_obs = sst[site]  #np.loadtxt('data/sst_' + site + '.csv', delimiter=',')
mhws_obs, clim_obs = mhw.detect(t_obs, sst_obs)
mhwBlock_obs = mhw.blockAverage(t_obs, mhws_obs)
mean_obs, trend_obs, dtrend_obs = meanTrend_TS(
    mhwBlock_obs)  #mhw.meanTrend(mhwBlock_obs)

# SST trend

years = mhwBlock_obs['years_centre']
SST_block = np.zeros(years.shape)
for yr in range(len(years)):
    SST_block[yr] = np.mean(sst_obs[year_obs == years[yr]])
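# Least-squares linear fit of block-mean SST on centred years: X holds an
# intercept column and a (years - mean) column, so beta[1] is the slope in
# deg. C per year; the x10 below converts it to deg. C per decade.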
X = np.array([np.ones(years.shape), years - years.mean()]).T
beta = linalg.lstsq(X, SST_block)[0]
sst_trend_obs = beta[1] * 10

# Loop over SST trend values and simulate MHW property trends
Example #7
        else:
            j1 = np.where(lat > lat_data[j] - res_data / 2.)[0][0]
        if lat_data[j] + res_data / 2. > lat.max():
            j2 = len(lat) - 1
        else:
            j2 = np.where(lat > lat_data[j] + res_data / 2.)[0][0]
        sst = np.nanmean(np.nanmean(sst_ts[j1:j2 + 1, :, :], axis=2), axis=0)
        if np.logical_not(np.isfinite(sst.sum())) + (
            (sst < -1).sum() > 0):  # check for land, ice
            continue
        # Count number of MHWs of each length
        mhws, clim = mhw.detect(
            t[avhrr_match],
            sst,
            climatologyPeriod=[1983, np.min([dataEnd, 2012])])
        mhwBlock = mhw.blockAverage(t[avhrr_match], mhws)
        years = mhwBlock['years_centre']
        #   Skip proxy_fit calculation if not enough data
        if ((proxy['threshCount'][avhrr, j, i] > 0).sum() <= 5) + (
            (~np.isnan(proxy['maxAnom'][avhrr, j, i])).sum() <= 5) + (
                (~np.isnan(proxy['threshAnom'][avhrr, j, i])).sum() <= 5):
            continue
        # Loop over all combinations of MHW properties and proxy keys, to build and test all models
        for key in MHW_keys:
            if (key == 'total_days') + (key == 'total_icum'):
                continue
            else:
                for pkey in proxy.keys():
                    rho[key][pkey][j, i], p[key][pkey][
                        j, i], MHW_ts[key][pkey][j, i, :], MHW_CI[key][pkey][
                            j, i, :, :] = proxy_fit(key,
Example #8
 if sw_write:
     outfile_train.write(
         str(year.min().astype(int)) + '-' + str(year.max().astype(int)) +
         ', ')
     outfile_valid.write(
         str(year.min().astype(int)) + '-' + str(year.max().astype(int)) +
         ', ')
 # Apply Marine Heat Wave definition
 print('\n' + station + '\n')
 print('Proportion of valid values: ' +
       str(100. - 100. * np.isnan(sst).sum() / len(sst)))
 mhws, clim = mhw.detect(t,
                         sst,
                         climatologyPeriod=climPeriod,
                         maxPadLength=maxPadLength)
 mhwBlock = mhw.blockAverage(t, mhws, clim, removeMissing=True, temp=sst)
 mean, trend, dtrend = mhw.meanTrend(mhwBlock)
 # Number of years with no MHW events pre and post 1950
 #count_pre50 = mhwBlock['count'][(mhwBlock['years_centre']<=1950)*(~np.isnan(mhwBlock['count']))]
 #count_post50 = mhwBlock['count'][(mhwBlock['years_centre']>1950)*(~np.isnan(mhwBlock['count']))]
 #print 'Pre-1950 0-count: ' + str(1.*np.sum(count_pre50==0)/len(count_pre50)) + ' post-1950 0-count: ' + str(1.*np.sum(count_post50==0)/len(count_post50)) + ' Difference: ' + str(1.*np.sum(count_post50==0)/len(count_post50) - 1.*np.sum(count_pre50==0)/len(count_pre50))
 years = mhwBlock['years_centre']
 #Ny = len(mhwBlock['years_centre'])/2
 ttPre = (years >= 1925) * (years <= 1954)
 ttPost = (years >= 1984) * (years <= 2013)
 count_preMid = ecj.nonans(mhwBlock['count'][ttPre])
 count_postMid = ecj.nonans(mhwBlock['count'][ttPost])
 print('Pre-midpoint 0-count: ' +
       str(1. * np.sum(count_preMid == 0) / len(count_preMid)) +
       ' post-midpoint 0-count: ' +
       str(1. * np.sum(count_postMid == 0) / len(count_postMid)))

fileobj = Dataset(file0, 'r')
lon = fileobj.variables['lon'][:].astype(float)
lat = fileobj.variables['lat'][:].astype(float)
fill_value = fileobj.variables['sst']._FillValue.astype(float)
scale = fileobj.variables['sst'].scale_factor.astype(float)
offset = fileobj.variables['sst'].add_offset.astype(float)
fileobj.close()

#
# Size of mhwBlock variable
#

matobj = io.loadmat(header + 'timeseries/avhrr-only-v2.ts.' +
                    str(300).zfill(4) + '.mat')
mhws, clim = mhw.detect(t, matobj['sst_ts'][300, :])
mhwBlock = mhw.blockAverage(t, mhws)
years = mhwBlock['years_centre']
NB = len(years)

#
# initialize some variables
#

X = len(lon)
Y = len(lat)
i_which = range(0, X)  #,10)
j_which = range(0, Y)  #,10)
#i_which = range(0,X,4)
#j_which = range(0,Y,4)
DIM = (len(j_which), len(i_which))
N_ts = np.zeros((len(j_which), len(i_which), NB))