def plot_SST_SIC_extent(sst_fname, sic_fname, hadisst_fname):

    # load the data
    sst_fh = netcdf_file(sst_fname, 'r')
    sic_fh = netcdf_file(sic_fname, 'r')
    had_fh = netcdf_file(hadisst_fname, 'r')
    
    sst_var = sst_fh.variables["sst"]
    sic_var = sic_fh.variables["sic"]
    lat_var = sst_fh.variables["latitude"]
    lon_var = sst_fh.variables["longitude"]
    had_sic_var = had_fh.variables["sic"]
    had_sst_var = had_fh.variables["sst"]
    mv = sic_var._attributes["_FillValue"]
    
    sst_data = sst_var[:]
    sic_data = numpy.array(sic_var[:])

    had_sic_data = had_sic_var[:] 
    had_sst_data = had_sst_var[:]
    lat_data = lat_var[:]
    d_lon = lon_var[1] - lon_var[0]

    # calculate the sea-ice extent
    sic_arctic, sic_antarctic = calc_sea_ice_extent(sic_data, lat_data, d_lon, mv, 1e-6)
    had_sic_arctic, had_sic_antarctic = calc_sea_ice_extent(had_sic_data, lat_data, d_lon, mv, 1e-6)
    sst_arctic, sst_antarctic = calc_sst_arctic_means(sst_data, lat_data, d_lon, mv)
    had_sst_arctic, had_sst_antarctic = calc_sst_arctic_means(had_sst_data, lat_data, d_lon, mv)
    gs = gridspec.GridSpec(2,10)
    
    sp0 = plt.subplot(gs[0,:])
    sp1 = sp0.twinx()
    sp2 = plt.subplot(gs[1,:])
    sp3 = sp2.twinx()
    x = [1900 + 1.0/12*i for i in range(0, sst_data.shape[0]-12)]
    x2 =[1850 + 1.0/12*i for i in range(0, had_sst_data.shape[0])]
    l="-"
    sic_arctic = sic_arctic[:-12]
    sic_antarctic = sic_antarctic[:-12]
#    for m in range(0,12):
    if True:
        m = 0
        print len(x[m::12]), sic_arctic[m::12].shape
        sp1.plot(x[m::12], sic_arctic[m::12]*1.1, 'r'+l)
        sp3.plot(x[m::12], sic_antarctic[m::12]*1.1, 'r'+l)
        sp1.plot(x2[m::12], had_sic_arctic[m::12], 'k'+l)
        sp3.plot(x2[m::12], had_sic_antarctic[m::12], '#808080')
            
#        sp0.plot(x[m::12], sst_arctic[m::12], 'r'+l, lw=2.0)
#        sp2.plot(x[m::12], sst_antarctic[m::12], 'b'+l, lw=2.0)
#        sp0.plot(x2[m::12], had_sst_arctic[m::12], 'k'+l, lw=2.0)
#        sp2.plot(x2[m::12], had_sst_antarctic[m::12], '#808080', lw=2.0)
#        l = "--"
        
#    sp1.set_ylim([0,1100])
#    sp3.set_ylim([0,600])
    plt.show()
    
    sst_fh.close()
    sic_fh.close()
    had_fh.close()
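
# calc_sea_ice_extent and calc_sst_arctic_means are imported from elsewhere
# and are not defined in this file. Minimal sketches of what they could look
# like, inferred from the call sites above: extent sums the area of grid
# cells whose concentration exceeds the usual 15% extent threshold, split by
# hemisphere and scaled by S; the means are area-weighted hemispheric mean
# SSTs. The 15% threshold, the regular lat/lon cell-area formula and the
# earth radius are assumptions, not the original implementation.
def calc_sea_ice_extent(sic_data, lat_data, d_lon, mv, S=1e-3,
                        grid_areas=None, threshold=0.15):
    R = 6371.0                      # earth radius in km (assumed)
    if grid_areas is None:
        # area in km^2 of one cell in each latitude row of a regular grid
        d_lat = abs(lat_data[1] - lat_data[0])
        cell = (numpy.radians(d_lon) * numpy.radians(d_lat) * R * R *
                numpy.cos(numpy.radians(lat_data)))
        grid_areas = cell.reshape([1, cell.shape[0], 1])
    ice = (sic_data != mv) & (sic_data >= threshold)
    nh = lat_data > 0
    arctic = numpy.sum((ice * grid_areas)[:, nh, :], axis=(1, 2)) * S
    antarctic = numpy.sum((ice * grid_areas)[:, ~nh, :], axis=(1, 2)) * S
    return arctic, antarctic

def calc_sst_arctic_means(sst_data, lat_data, d_lon, mv):
    # cos(latitude) area weighting, ignoring cells set to the fill value;
    # d_lon is unused in this sketch
    w = numpy.cos(numpy.radians(lat_data)).reshape([1, lat_data.shape[0], 1])
    valid = (sst_data != mv)
    nh = lat_data > 0
    def hemi_mean(sel):
        num = (sst_data * valid * w)[:, sel, :].sum(axis=(1, 2))
        den = numpy.maximum((valid * w)[:, sel, :].sum(axis=(1, 2)), 1e-30)
        return num / den
    return hemi_mean(nh), hemi_mean(~nh)
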
def calc_sic_extent_data(run_type, ref_sy, ref_ey):
    # 1. load the data
    # 2. calculate the sea ice extent for arctic and antarctic
    # 3. store it in a numpy array
    for a in range(1, 100):
        fname = get_sic_fname(run_type, ref_sy, ref_ey, a)
        fh = netcdf_file(fname)
        sic = fh.variables["sic"][:]
        lats = fh.variables["latitude"][:]
        lonv = fh.variables["longitude"]
        mv = fh.variables["sic"]._attributes["_FillValue"]
        lon_d = lonv[1] - lonv[0]
        if a == 1:
            grid_areas = calc_grid_areas(lats, lon_d)
            grid_areas = grid_areas.reshape([1, grid_areas.shape[0], 1])

        arctic_sic, antarctic_sic = calc_sea_ice_extent(sic,
                                                        lats,
                                                        lon_d,
                                                        mv,
                                                        1e-3,
                                                        grid_areas=grid_areas)

        if a == 1:
            sic_all_extent = numpy.zeros([2, 99, arctic_sic.shape[0]], 'f')
        sic_all_extent[0, a - 1] = arctic_sic
        sic_all_extent[1, a - 1] = antarctic_sic
        fh.close()

    # save the file out

    out_fname = get_sic_extent_fname(run_type, ref_sy, ref_ey)
    out_fh = netcdf_file(out_fname, "w")
    regi_out_dim = out_fh.createDimension("region", sic_all_extent.shape[0])
    samp_out_dim = out_fh.createDimension("sample", sic_all_extent.shape[1])
    time_out_dim = out_fh.createDimension("time", sic_all_extent.shape[2])

    regi_out_var = out_fh.createVariable("region", sic_all_extent.dtype,
                                         ("region", ))
    samp_out_var = out_fh.createVariable("sample", sic_all_extent.dtype,
                                         ("sample", ))
    time_out_var = out_fh.createVariable("time", sic_all_extent.dtype,
                                         ("time", ))
    data_out_var = out_fh.createVariable("sic_extent", sic_all_extent.dtype, (
        "region",
        "sample",
        "time",
    ))

    # get the time data
    fname = get_sic_fname(run_type, ref_sy, ref_ey, 1)
    fh = netcdf_file(fname)
    time_out_var._attributes = fh.variables["time"]._attributes
    time_out_var[:] = fh.variables["time"][:]
    samp_out_var[:] = numpy.arange(1, 100)
    regi_out_var[:] = numpy.arange(0, 2)
    data_out_var[:] = sic_all_extent[:]

    fh.close()
    out_fh.close()
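
# calc_grid_areas is also undefined in this file. A sketch under the same
# assumed spherical-cell geometry as above: the area, in km^2, of one grid
# cell in each latitude row of a regular lat/lon grid with spacing lon_d.
def calc_grid_areas(lats, lon_d):
    R = 6371.0                      # earth radius in km (assumed)
    lat_d = abs(lats[1] - lats[0])
    return (numpy.radians(lon_d) * numpy.radians(lat_d) * R * R *
            numpy.cos(numpy.radians(lats)))
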
def plot_CMIP5_GMSST_siex_corr(run_type, ref_start, ref_end, monthly=False):
    # get the GMT/GMSST anomaly timeseries filename
    gmt_gmsst_fname = get_gmt_gmsst_anom_ts_fname(run_type, ref_start, ref_end,
                                                  monthly)
    siex_fname = get_siex_anom_ts_fname(run_type, ref_start, ref_end, monthly)

    # load the gmsst data
    fh_gmsst = netcdf_file(gmt_gmsst_fname)
    gmsst = fh_gmsst.variables["tos"][:]
    fh_gmsst.close()

    # load the sea ice extent data
    fh_siex = netcdf_file(siex_fname)
    nh_siex = fh_siex.variables["nh_siex"][:]
    sh_siex = fh_siex.variables["sh_siex"][:]
    fh_siex.close()

    y = 2050 - 1899

    # calculate the percentile values for the 1st and 99th for the sea ice extent
    p1 = numpy.percentile(nh_siex.flatten(), 1)
    p99 = numpy.percentile(nh_siex.flatten(), 99)

    y_gmsst = gmsst[:, y].flatten()
    y_nh_siex = nh_siex[:, y].flatten()
    y_nh_siex = y_nh_siex[y_gmsst < 1e10].flatten()
    y_gmsst = y_gmsst[y_gmsst < 1e10].flatten()

    plt.plot(y_gmsst, y_nh_siex, 'k.')
    s, i, r, p, err = linregress(y_gmsst, y_nh_siex)
    p = numpy.array([numpy.min(y_gmsst), numpy.max(y_gmsst)], 'f')
    plt.plot(p, p * s + i, '-', lw=2.0)
    plt.show()

    for e in range(0, nh_siex.shape[0]):
        if gmsst[e, 0] > 1e10:
            continue
        x = numpy.where((nh_siex[e] < p1))
        if x[0].shape[0] == 0:
            #            plt.plot(gmsst[e], nh_siex[e], '.')
            s, i, r, p, err = linregress(gmsst[e], nh_siex[e])
            p = numpy.array([numpy.min(gmsst[e]), numpy.max(gmsst[e])], 'f')
            plt.plot(p, p * s + i, '-', lw=2.0)
    plt.show()

    # calculate the percentile values for the 1st and 99th for the sea ice extent
    p1 = numpy.percentile(sh_siex.flatten(), 1)
    p99 = numpy.percentile(sh_siex.flatten(), 99)

    for e in range(0, sh_siex.shape[0]):
        if gmsst[e, 0] > 1e10:
            continue
        x = numpy.where((sh_siex[e] < p1))
        if x[0].shape[0] == 0:
            #            plt.plot(gmsst[e], sh_siex[e], '.')
            s, i, r, p, err = linregress(gmsst[e], sh_siex[e])
            p = numpy.array([numpy.min(gmsst[e]), numpy.max(gmsst[e])], 'f')
            plt.plot(p, p * s + i, '-', lw=2.0)
    plt.show()
def plot_cmip5_sic_extent(sp0, rcp, hemi):
    dirc = "/Users/Neil/Coding/CREDIBLE_output/output/" + rcp + "_2006_2100/sic/"
    if hemi == 0:
        fname = "atlas_sic_OImon_arctic_" + rcp + "_ens_mean_200601-210012_1x1_yrmns.nc"
    else:
        fname = "atlas_sic_OImon_antarctic_" + rcp + "_ens_mean_200601-210012_1x1_yrmns.nc"

    ncfh = netcdf_file(dirc + fname)
    sic_var = ncfh.variables["sic"]
    lat_var = ncfh.variables["latitude"]
    mv = sic_var._attributes["_FillValue"]
    sic_data = numpy.array(sic_var[:])
    sic_data[sic_data < 0] = 0
    lat_data = lat_var[:]
    sic_arctic, sic_antarctic = calc_sea_ice_extent(sic_data,
                                                    lat_data,
                                                    1.0,
                                                    mv,
                                                    S=1e-3)

    cmip_x = numpy.arange(2006, 2101)
    if hemi == 0:
        sic_extent = sic_arctic
    else:
        sic_extent = sic_antarctic
    sp0.plot(cmip_x, sic_extent, 'k-', alpha=1.0, lw=2, zorder=2)
    sp0.text(cmip_x[-1] - 5, sic_extent[-1], "CMIP5 MM", color='k')
    ncfh.close()
def smooth_concat_sst_anoms_model_means(run_type, ref_start, ref_end, start_idx, end_idx):
    # Smooth the anomaly files created in the above function using the
    # running mean, running gradient filter
    # also subtract the ensemble mean
    # get the filtered set of cmip5 models / runs
    cmip5_rcp_idx = read_cmip5_model_mean_index_file(run_type, ref_start, ref_end)
    n_ens = len(cmip5_rcp_idx)
    
    histo_sy, histo_ey, rcp_sy, rcp_ey = get_start_end_periods()
    
    # get the ensemble mean
    ens_mean_fname = get_concat_anom_sst_ens_mean_fname(run_type, ref_start, ref_end)
    ens_mean_fh = netcdf_file(ens_mean_fname)
    ens_mean = ens_mean_fh.variables["tos"][:].byteswap().newbyteorder()
    ens_mean_fh.close()
        
    for idx in range(start_idx, end_idx):
        print cmip5_rcp_idx[idx][0]
        concat_anom_fname = get_concat_anom_sst_output_fname(cmip5_rcp_idx[idx][0], 
                                                             cmip5_rcp_idx[idx][1],
                                                             run_type, ref_start, ref_end)
                                                             
        # read the file in and extract the ssts
        sst_data, lons_var, lats_var, attrs, t_var = load_3d_file(concat_anom_fname, "tos")
        sst_data = sst_data.byteswap().newbyteorder()
        depart_from_ens_mean = sst_data - ens_mean
        P = 40
        mv = attrs["_FillValue"]
        smoothed_data = running_gradient_3D(depart_from_ens_mean, P, mv)
        # save the data
        out_fname = get_concat_anom_sst_smooth_model_mean_fname(cmip5_rcp_idx[idx][0], 
                                                     cmip5_rcp_idx[idx][1],
                                                     run_type, ref_start, ref_end)
        save_3d_file(out_fname, smoothed_data, lons_var, lats_var, attrs, t_var)
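
# running_gradient_3D is imported from elsewhere; the comment above calls it
# a "running mean, running gradient filter" with window P (40 years here).
# One plausible reading, sketched as an assumption: fit a straight line in a
# sliding P-point window along the time axis and take its value at the
# window centre. A real implementation would also exclude fill values inside
# each window; this sketch only restores them afterwards.
def running_gradient_3D(data, P, mv=2e20):
    out = numpy.zeros(data.shape, 'f')
    n_t = data.shape[0]
    h = P // 2
    for t in range(0, n_t):
        s = max(0, t - h)
        e = min(n_t, t + h + 1)
        w = data[s:e]                       # window over the time axis
        x = numpy.arange(s, e, dtype='f')
        xm = x.mean()
        dx = (x - xm).reshape([-1, 1, 1])
        wm = w.mean(axis=0)
        # least-squares slope for every grid cell at once
        slope = (dx * (w - wm)).sum(axis=0) / (dx * dx).sum()
        out[t] = wm + slope * (t - xm)
    out[data == mv] = mv
    return out
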
def create_HadISST_monthly_smoothed(histo_sy, histo_ey, run_n):
    # create the monthly smoothed version of HadISST2
    # this consists of applying the 40 year smoother to individual months
    # i.e. apply smoother to all Januaries, all Februaries etc.
    # load the data in
    in_fname = get_HadISST_input_filename(run_n)
    out_fname = get_HadISST_month_smooth_filename(histo_sy, histo_ey, run_n)
    
    # load the data - use cdo to select the year and create a temporary file
    cdo = Cdo()
    monthly_file = cdo.selyear(str(histo_sy)+"/"+str(histo_ey)+" "+in_fname)
    fh_m = netcdf_file(monthly_file, 'r')
    lon_var = fh_m.variables["longitude"]
    lat_var = fh_m.variables["latitude"]
    t_var   = fh_m.variables["time"]
    sst_var = fh_m.variables["sst"]
    P = 40
    mv = sst_var._attributes["_FillValue"]
    hadisst_sst = numpy.array(sst_var[:])
    hadisst_sst = hadisst_sst.byteswap().newbyteorder()

    # create the output - same shape as the input
    month_smoothed_hadisst_sst = numpy.zeros(hadisst_sst.shape, hadisst_sst.dtype)
    # now do the monthly smoothing
    for m in range(0,12):
        month_smoothed_hadisst_sst[m::12] = running_gradient_3D(hadisst_sst[m::12], P, mv)
    
    # save the file
    save_3d_file(out_fname, month_smoothed_hadisst_sst, lon_var, lat_var, sst_var._attributes, t_var)

    fh_m.close()
def save_3d_file(out_fname, out_data, out_lon_var, out_lat_var, out_attrs, out_t_var, out_vname="sst"):
    # open the file
    out_fh = netcdf_file(out_fname, "w")
    # create latitude and longitude dimensions - copy from the ens_mean file
    lon_data = numpy.array(out_lon_var[:])
    lat_data = numpy.array(out_lat_var[:])
    time_data = numpy.array(out_t_var[:])
    
    lon_out_dim = out_fh.createDimension("longitude", lon_data.shape[0])
    lat_out_dim = out_fh.createDimension("latitude", lat_data.shape[0])
    lon_out_var = out_fh.createVariable("longitude", lon_data.dtype, ("longitude",))
    lat_out_var = out_fh.createVariable("latitude", lat_data.dtype, ("latitude",))
    time_out_dim = out_fh.createDimension("time", time_data.shape[0])
    time_out_var = out_fh.createVariable("time", time_data.dtype, ("time",))

    lon_out_var[:] = lon_data
    lat_out_var[:] = lat_data
    time_out_var[:] = time_data
    
    lon_out_var._attributes = out_lon_var._attributes
    lat_out_var._attributes = out_lat_var._attributes
    time_out_var._attributes = out_t_var._attributes
    
    data_out_var = out_fh.createVariable(out_vname, out_data.dtype, ("time", "latitude", "longitude"))
    data_out_var[:] = out_data[:]
    data_out_var._attributes = out_attrs
    out_fh.close()  
def load_3d_file(fname, var_name="sst"):
    # get the longitude, latitude and attributes
    in_fh = netcdf_file(fname, "r")
    sst_var = in_fh.variables[var_name]
    sst_data = sst_var[:]
    # find lat and lon name
    for k in in_fh.variables.keys():
        if "lat" in k:
            lat_name = k
        if "lon" in k:
            lon_name = k
        if "time" in k:
            t_name = k
    lats_var = in_fh.variables[lat_name]
    lons_var = in_fh.variables[lon_name]
    t_var = in_fh.variables[t_name]

    # mask the array
    attrs = sst_var._attributes
    if "missing_value" in attrs.keys():
        mv = attrs["missing_value"]
    elif "_FillValue" in attrs.keys():
        mv = attrs["_FillValue"]
    sst_data = numpy.ma.masked_equal(sst_data, mv)
    return sst_data, lons_var, lats_var, attrs, t_var
def create_HadISST_smoothed(histo_sy, histo_ey, run_n):
    # create a smoothed version of HadISST by first taking yearly means
    # and then running the running mean / running gradient filter over these
    # yearly mean values

    # get the filenames
    in_fname = get_HadISST_input_filename(run_n)
    out_fname = get_HadISST_smooth_fname(histo_sy, histo_ey, run_n)

    # use cdo to calculate the yearly mean and return the temporary file
    cdo = Cdo()
    year_mean_file = cdo.yearmean(input=" -selyear," + str(histo_sy) + "/" +
                                  str(histo_ey) + " " + in_fname)
    cdf_fh = netcdf_file(year_mean_file, 'r')
    # get the ssts, time variable, lon and lat variables
    sst_var = cdf_fh.variables["sst"]
    time_var = cdf_fh.variables["time"]
    lon_var = cdf_fh.variables["longitude"]
    lat_var = cdf_fh.variables["latitude"]

    # have to byteswap the data out of a netcdf file
    sst_data = numpy.array(sst_var[:])
    sst_data = sst_data.byteswap().newbyteorder()
    # run the running gradient filter on this data
    P = 40
    mv = sst_var._attributes["_FillValue"]
    smoothed_data = running_gradient_3D(sst_data, P, mv)
    # save the file, then close the input handle (it was closed twice before,
    # once before its variables had been written out)
    save_3d_file(out_fname, smoothed_data, lon_var, lat_var,
                 sst_var._attributes, time_var)
    cdf_fh.close()
def load_wind(wind_file):
    fh = netcdf_file(wind_file)
    lon = fh.variables["longitude1"][:]
    lat = fh.variables["latitude1"][:]
    wnd = fh.variables["field50"][:]
    print "Max " + str(numpy.max(wnd))

    return wnd, lon, lat, numpy.max(wnd)
def plot_tas_ar5(run_type, ref_start, ref_end):
    sp = plt.subplot(111)
    Y0 = 2009.0
    Y1 = 2025.5
    Y2 = 2035
    C = 0.16

    grad0 = (0.3-C)/(Y1 - Y0)
    grad1 = (0.7-C)/(Y1 - Y0)

    ym0 = grad0*(2016-Y0)+C - 0.1
    yx0 = grad1*(2016-Y0)+C + 0.1
    ym1 = grad0*(2035-Y0)+C - 0.1
    yx1 = grad1*(2035-Y0)+C + 0.1
    
    sp.plot([2016,2035,2035,2016,2016],[ym0,ym1,yx1,yx0,ym0], 'k', lw=2.0, zorder=3)
    t_var = numpy.arange(1899,2100+1)
    print t_var.shape

    for rcp in ["rcp26", "rcp45", "rcp85"]:
        if rcp == "rcp26":
            col = '#888888'
        if rcp == "rcp45":
            col = 'r'
        if rcp == "rcp85":
            col = 'c'
        out_dir = get_output_directory(rcp, ref_start, ref_end)
        out_name = out_dir + "/" + out_dir.split("/")[1] + "_tos_tas_GM_ts.nc"

        fh = netcdf_file(out_name, 'r')
        tas = numpy.array(fh.variables["tas"][:])
        smoothed_tas = numpy.zeros(tas.shape, 'f')
        
        n_ens = tas.shape[0]
        X = numpy.arange(1899,2101)
        c = 0
        for e in range(0, n_ens):
            if tas[e,0] < 1000:
                tas_e = tas[e].byteswap().newbyteorder()
                TAS = running_gradient_3D(tas_e.reshape(tas_e.shape[0],1,1), 10)
                smoothed_tas[c] = TAS.flatten()
                c += 1
        tas_min = numpy.min(smoothed_tas[:c], axis=0)
        tas_max = numpy.max(smoothed_tas[:c], axis=0)
        tas_5 = numpy.percentile(smoothed_tas[:c], 5, axis=0)
        tas_95 = numpy.percentile(smoothed_tas[:c], 95, axis=0)
        tas_50 = numpy.percentile(smoothed_tas[:c], 50, axis=0)
        sp.plot(t_var, tas_min, '-', c=col, lw=2.0, zorder=0, alpha=0.5)
        sp.plot(t_var, tas_max, '-', c=col, lw=2.0, zorder=0, alpha=0.5)
        sp.plot(t_var, tas_50, '-', c=col, lw=2.0, zorder=2, alpha=0.5)
        sp.fill_between(t_var, tas_5, tas_95, edgecolor=col, facecolor=col, zorder=1, alpha=0.5)

    plt.gca().set_ylim([-0.5,2.5])
    plt.gca().set_xlim([1986,2050])
    f = plt.gcf()
    f.set_size_inches(10.5, 5.0)
    plt.savefig("ar5_ch11_fig25.pdf")
def get_HadISST_lon_lat_vars(rn):
    start = 1899
    end = 2010
    sst_fname, sic_fname = get_HadISST_monthly_anomaly_filenames(start, end, rn)

    fh = netcdf_file(sst_fname)
    lon_var = fh.variables["longitude"]
    lat_var = fh.variables["latitude"]
    return lon_var, lat_var
def load_original_grid(orig_mesh_file, orig_mesh_var):
    nc_fh = netcdf_file(orig_mesh_file)
    nc_var = nc_fh.variables[orig_mesh_var]
    # get lat / lon dimension - assume 4D variable
    # should do this via looking at the axis info.
    lat_dim = nc_var.dimensions[2]
    lon_dim = nc_var.dimensions[3]
    lon_vals = nc_fh.variables[lon_dim][:]
    lat_vals = nc_fh.variables[lat_dim][:]
    return lat_vals, lon_vals
def plot_sic_extents(sp0, time_data, sic_extent_fname, h=0):
    # load the 99 percentiles of the sic_extent data
    fh = netcdf_file(sic_extent_fname)
    sic_data = fh.variables["sic_extent"][:]
    sp0.fill_between(time_data, sic_data[h,0], sic_data[h,-1], facecolor='r', alpha=0.2, zorder=0)
    for a in range(0, sic_data.shape[1]):
        ldata = sic_data[h,a]
        sp0.plot(time_data, ldata, 'r-', lw=1, alpha=0.5, zorder=0)
    
    fh.close()
def get_HadISST_lon_lat_time_vars():
    rn = 400
    start = 1899
    end = 2010
    sst_fname, sic_fname = get_HadISST_monthly_anomaly_filenames(
        start, end, rn)

    fh = netcdf_file(sst_fname)
    lon_var = fh.variables["longitude"]
    lat_var = fh.variables["latitude"]
    time_var = fh.variables["time"]
    return lon_var, lat_var, time_var
def save_eigenvalues(out_fname, out_data, out_attrs):
    # open the file
    out_fh = netcdf_file(out_fname, "w")
    mon_dim = out_fh.createDimension("month", out_data.shape[0])
    eig_out_dim = out_fh.createDimension("eigenvalue", out_data.shape[1])
    mon_var = out_fh.createVariable("month", numpy.dtype('i4'), ("month",))
    eig_out_var = out_fh.createVariable("eigenvalue", numpy.dtype('i4'), ("eigenvalue",))
    mon_var[:] = numpy.arange(0, out_data.shape[0])
    eig_out_var[:] = numpy.arange(0, out_data.shape[1])
    sst_out_var = out_fh.createVariable("sst", out_data.dtype, ("month", "eigenvalue",))
    sst_out_var[:] = out_data[:]
    sst_out_var._attributes = out_attrs
    out_fh.close()
def save_pcs_scale(fname, offset, scale, t_var):
    out_fh = netcdf_file(fname, 'w')
    n_eofs = scale.shape[1]
    pc_out_dim  = out_fh.createDimension("principal_component", n_eofs)
    t_out_dim   = out_fh.createDimension("time", t_var.shape[0])
    pc_out_var  = out_fh.createVariable("principal_component", numpy.dtype('i4'), ("principal_component",))
    t_out_var   = out_fh.createVariable("time", t_var[:].dtype, ("time",))
    pc_out_var[:]  = numpy.arange(0, n_eofs)
    t_out_var[:]   = t_var[:]
    t_out_var._attributes = t_var._attributes
    scale_var  = out_fh.createVariable("sst_scale", numpy.dtype('f4'), ("time", "principal_component",))
    offset_var = out_fh.createVariable("sst_offset", numpy.dtype('f4'), ("time", "principal_component",))
    scale_var[:] = scale
    offset_var[:] = offset
    out_fh.close()
def save_pcs(out_fname, out_data, out_attrs):
    # open the file
    out_fh = netcdf_file(out_fname, "w")
    mon_dim = out_fh.createDimension("month", out_data.shape[0])
    ens_mem_dim = out_fh.createDimension("ensemble_member", out_data.shape[1])
    pc_out_dim  = out_fh.createDimension("principal_component", out_data.shape[2])
    ens_mem_var = out_fh.createVariable("ensemble_member", numpy.dtype('i4'), ("ensemble_member",))
    mon_var = out_fh.createVariable("month", numpy.dtype('i4'), ("month",))
    pc_out_var  = out_fh.createVariable("principal_component", numpy.dtype('i4'), ("principal_component",))
    mon_var[:] = numpy.arange(0, out_data.shape[0])
    ens_mem_var[:] = numpy.arange(0, out_data.shape[1])
    pc_out_var[:] = numpy.arange(0, out_data.shape[2])
    out_var = out_fh.createVariable("sst", out_data.dtype, ("month", "ensemble_member", "principal_component"))
    out_var[:] = out_data[:]
    out_fh.close()
def calc_CMIP5_EOFs(run_type, ref_start, ref_end, eof_year, model_mean=False, monthly=False):

    # get the lats / lons from the first ensemble member
    if model_mean:
        cmip5_rcp_idx = read_cmip5_model_mean_index_file(run_type, ref_start, ref_end)
        concat_smooth_fname = get_concat_anom_sst_smooth_model_mean_fname(cmip5_rcp_idx[0][0], 
                                                               cmip5_rcp_idx[0][1], 
                                                               run_type, ref_start, ref_end,
                                                               monthly)
    else:
        cmip5_rcp_idx = read_cmip5_index_file(run_type, ref_start, ref_end)
        concat_smooth_fname = get_concat_anom_sst_smooth_fname(cmip5_rcp_idx[0][0], 
                                                               cmip5_rcp_idx[0][1], 
                                                               run_type, ref_start, ref_end,
                                                               monthly)
                                                               
    fh = netcdf_file(concat_smooth_fname, 'r')
    lons_var = fh.variables["longitude"]
    lats_var = fh.variables["latitude"]
    attrs = fh.variables["sst"]._attributes
    mv = attrs["_FillValue"]

    eof_solvers = calc_EOFs(run_type, ref_start, ref_end, eof_year, model_mean, monthly)

    # n_eofs = None - get all eofs
    n_eofs = None
    # get the principal components, eofs and eigenvalues
    pcs = []
    eofs = []
    evs = []
    for eof_solver in eof_solvers:
        pcs.append(eof_solver.pcs(pcscaling=0, npcs=n_eofs))
        eofs.append(eof_solver.eofs(eofscaling=0, neofs=n_eofs))
        evs.append(eof_solver.eigenvalues(neigs=n_eofs))
    # convert the lists to numpy arrays
    pcs = numpy.array(pcs)
    eofs = numpy.array(eofs)
    print eofs.shape
    evs = numpy.array(evs)
    # save the principal components
    pcs_fname = get_cmip5_PC_filename(run_type, ref_start, ref_end, eof_year, model_mean, monthly)
    save_pcs(pcs_fname, pcs, attrs)
    # save the eigenvalues
    eig_fname = get_cmip5_eigen_filename(run_type, ref_start, ref_end, eof_year, model_mean, monthly)
    save_eigenvalues(eig_fname, evs, attrs)
    # save the Eofs
    eof_fname = get_cmip5_EOF_filename(run_type, ref_start, ref_end, eof_year, model_mean, monthly)
    save_eofs(eof_fname, eofs, attrs, lats_var, lons_var)
def create_concat_sst_anoms_ens_mean_smoothed(run_type, ref_start, ref_end, monthly):
    in_fname = get_concat_anom_sst_ens_mean_fname(run_type, ref_start, ref_end, monthly)
    out_fname = get_concat_anom_sst_ens_mean_smooth_fname(run_type, ref_start, ref_end, monthly)
    
    # load the input netcdf file
    in_fh = netcdf_file(in_fname)
    in_var = in_fh.variables["tos"]
    lon_var = in_fh.variables["longitude"]
    lat_var = in_fh.variables["latitude"]
    t_var = in_fh.variables["time"]
    mv = in_var._attributes["_FillValue"]
    
    in_data = in_var[:].byteswap().newbyteorder()
    P = 40
    smoothed_data = running_gradient_3D(in_data, P, mv)
    save_3d_file(out_fname, smoothed_data, lon_var, lat_var, in_var._attributes, t_var)
    in_fh.close()
def save_pcs_ts(pc_fname, out_data, n_eofs, t_var, n_ens):
    # create the file with ensemble_member, eof_number and time
    # ensemble member is the unlimited dimension
    out_fh = netcdf_file(pc_fname, 'w')
    ens_mem_dim = out_fh.createDimension("ensemble_member", n_ens)
    pc_out_dim  = out_fh.createDimension("principal_component", n_eofs)
    t_out_dim   = out_fh.createDimension("time", t_var.shape[0])
    ens_mem_var = out_fh.createVariable("ensemble_member", numpy.dtype('i4'), ("ensemble_member",))
    pc_out_var  = out_fh.createVariable("principal_component", numpy.dtype('i4'), ("principal_component",))
    t_out_var   = out_fh.createVariable("time", t_var[:].dtype, ("time",))
    ens_mem_var[:] = numpy.arange(0, n_ens)
    pc_out_var[:]  = numpy.arange(0, n_eofs)
    t_out_var[:]   = t_var[:]
    t_out_var._attributes = t_var._attributes
    out_var = out_fh.createVariable("sst_pc", numpy.dtype('f4'), ("ensemble_member", "time", "principal_component",))
    out_var[:] = out_data
    out_fh.close()
def calc_HadISST_SIC_corr(sp0, rn):
    hadisst_name = get_HadISST_input_filename(rn)
    print hadisst_name
    nc_fh = netcdf_file(hadisst_name)
    sst_var = nc_fh.variables["sst"]
    sic_var = nc_fh.variables["sic"]
    
    start = 1850
    d1 = 1978
    d2 = 2010
    nm=12
    s = (d1-start)*nm
    e = (d2-start)*nm

    stderr = numpy.zeros([sst_var.shape[2], sst_var.shape[1]], 'f')

    c = ['ko','bo','go','ro','yo','co','mo','ks','bs','gs','rs','ys']
    lines = []
    mons=['jan','feb','mar','apr','may','jun','jul','aug','sep','oct','nov','dec']
    for m in range(0, 12):
        sst_data = numpy.array(sst_var[s+m:e+m:nm,14,14] - 273.15) # wonky with deg 4
        sic_data = numpy.array(sic_var[s+m:e+m:nm,14,14])          # wonky
#        sst_data = numpy.array(sst_var[s+m:e+m:nm,33,121] - 273.15)
#        sic_data = numpy.array(sic_var[s+m:e+m:nm,33,121])
        sic_idx = numpy.where(sic_data > 0.0)
        sst_data = sst_data[sic_idx]
        sic_data = sic_data[sic_idx]
        sst_idx = numpy.where(sst_data < -1.75)
        sic_data[sst_idx] = 1.0
        if sst_data.shape[0] > 5:
            deg = find_polyfit(sst_data, sic_data)
#            deg = 4
            pf = numpy.polyfit(sst_data, sic_data, deg)
            R = numpy.max(sst_data)+0.1 - numpy.min(sst_data)-0.1
            S = R/100
            if S < 0.001:
                S = 0.001
            ip = numpy.arange(numpy.min(sst_data)-0.1, numpy.max(sst_data)+0.1, S)
            pp = calc_polynomial(pf, ip, deg)
            pp[pp>1.0] = 1.0
            pp[pp<0.0] = 0.0
            sp0.plot(ip,pp,c[m][0]+"-")
        l = sp0.plot(sst_data,sic_data, c[m])
        lines.append(l[0])
    sp0.legend(lines, mons)
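
# find_polyfit and calc_polynomial are external helpers. calc_polynomial is
# evidently polynomial evaluation over coefficients from numpy.polyfit;
# find_polyfit presumably chooses a fitting degree. Sketches, with the
# degree-selection rule (stop once the residual no longer improves by more
# than tol, fractionally) being an assumption:
def calc_polynomial(pf, ip, deg):
    # evaluate the polynomial with coefficients pf at the points ip;
    # deg is implied by len(pf) and kept only to match the call site above
    return numpy.polyval(pf, ip)

def find_polyfit(x, y, max_deg=6, tol=0.05):
    prev = None
    for deg in range(1, max_deg + 1):
        pf, res, rank, sv, rcond = numpy.polyfit(x, y, deg, full=True)
        r = res[0] if len(res) > 0 else 0.0
        if prev is not None and (prev - r) < tol * prev:
            return deg - 1
        prev = r
    return max_deg
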
def smooth_concat_sst_anoms(run_type, ref_start, ref_end, start_idx, end_idx,
                            monthly):
    # Smooth the anomaly files created in the above function using the
    # running mean, running gradient filter
    # also subtract the ensemble mean
    # get the filtered set of cmip5 models / runs
    cmip5_rcp_idx = read_cmip5_index_file(run_type, ref_start, ref_end)
    n_ens = len(cmip5_rcp_idx)

    histo_sy, histo_ey, rcp_sy, rcp_ey = get_start_end_periods()

    # get the ensemble mean
    ens_mean_fname = get_concat_anom_sst_ens_mean_fname(
        run_type, ref_start, ref_end, monthly)
    ens_mean_fh = netcdf_file(ens_mean_fname)
    ens_mean = ens_mean_fh.variables["tos"][:].byteswap().newbyteorder()
    ens_mean_fh.close()

    for idx in range(start_idx, end_idx):
        print cmip5_rcp_idx[idx][0]
        concat_anom_fname = get_concat_anom_sst_output_fname(
            cmip5_rcp_idx[idx][0], cmip5_rcp_idx[idx][1], run_type, ref_start,
            ref_end, monthly)

        # read the file in and extract the ssts
        sst_data, lons_var, lats_var, attrs, t_var = load_3d_file(
            concat_anom_fname, "tos")
        sst_data = sst_data.byteswap().newbyteorder()
        depart_from_ens_mean = sst_data - ens_mean
        P = 40
        mv = attrs["_FillValue"]
        if monthly:
            smoothed_data = running_gradient_3D_monthly(
                depart_from_ens_mean, P, mv)
        else:
            smoothed_data = running_gradient_3D(depart_from_ens_mean, P, mv)
        # save the data
        out_fname = get_concat_anom_sst_smooth_fname(cmip5_rcp_idx[idx][0],
                                                     cmip5_rcp_idx[idx][1],
                                                     run_type, ref_start,
                                                     ref_end, monthly)
        save_3d_file(out_fname, smoothed_data, lons_var, lats_var, attrs,
                     t_var)
def plot_hadisst_sic_extent(sp0, hemi):
    # load the hadisst_sic
    fname = "/Users/Neil/ClimateData/HadISST2/HadISST.2.1.0.0_realisation_dec2010_400_yrmn.nc"
    ncfh = netcdf_file(fname)
    sic_var = ncfh.variables["sic"]
    lat_var = ncfh.variables["latitude"]
    mv = sic_var._attributes["_FillValue"]
    sic_data = sic_var[:]
    print numpy.max(sic_data)
    lat_data = lat_var[:]
    sic_arctic, sic_antarctic = calc_sea_ice_extent(sic_data, lat_data, 1.0, mv, S=1e-3)
    
    hadisst_x = numpy.arange(1850,2011)
    if hemi == 0:
        sic_extent = sic_arctic
    else:
        sic_extent = sic_antarctic
    sp0.plot(hadisst_x, sic_extent, 'b-', alpha=1.0, lw=2, zorder=2)
    sp0.text(hadisst_x[-1]+2, sic_extent[-1], "HadISST2", color='b')
    ncfh.close()
def calc_CMIP5_PC_proj_scaling(run_type, ref_start, ref_end, eof_year, model_mean=False, monthly=False):
    # load the previously calculated PCs
    pc_fname = get_cmip5_proj_PC_filename(run_type, ref_start, ref_end, eof_year, model_mean, monthly)
    pcs = load_data(pc_fname, "sst_pc")
    fh = netcdf_file(pc_fname, 'r')
    t_var = fh.variables["time"]
    
    # get the anomalies in the decade centred on the eof_year (-5/+4 inc.)
    histo_sy, histo_ey, rcp_sy, rcp_ey = get_start_end_periods()
    # calculate the start and end index into the netCDF files
    ri = eof_year-histo_sy
    if monthly:
        ri *= 12
    
    # For each year regress the pcs on the pcs in 2050 to determine the
    # relationship between the eof_year and the years in the time series
    npcs  = pcs.shape[2]
    n_t   = pcs.shape[1]
    offset = numpy.zeros([n_t, npcs], 'f')
    scale  = numpy.zeros([n_t, npcs], 'f')
    
    for t in range(0, n_t):
        tts_pcs = pcs[:,t,:].squeeze()
        # which month are we in?
        if monthly:
            mon = t % 12
        else:
            ref_pcs = pcs[:,ri,:].squeeze()
        for pc in range(0, npcs):
            # get the reference pcs in the eof_year - if monthly we want to get 12 
            # reference pcs
            if monthly:
                ref_pcs = pcs[:,ri+mon,:].squeeze()
            s, i, r, p, err = scipy.stats.linregress(ref_pcs[:,pc], tts_pcs[:,pc])
            scale [t,pc] = s
            offset[t,pc] = i
    
    # save the scalings
    out_name = get_cmip5_proj_PC_scale_filename(run_type, ref_start, ref_end, eof_year, model_mean, monthly)
    save_pcs_scale(out_name, offset, scale, t_var)
    fh.close()
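
# load_data is another external helper; a sketch that reads one variable
# from a netCDF file into a numpy array and byteswaps it, matching how data
# read via netcdf_file is treated elsewhere in this file:
def load_data(fname, var_name):
    fh = netcdf_file(fname, 'r')
    data = numpy.array(fh.variables[var_name][:])
    fh.close()
    return data.byteswap().newbyteorder()
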
def save_eofs(out_fname, out_data, out_attrs, in_lats, in_lons):
    # open the file
    out_fh = netcdf_file(out_fname, "w")
    # create latitude and longitude dimensions - copy from the ens_mean file
    lon_data = numpy.array(in_lons[:])
    lat_data = numpy.array(in_lats[:])
    lon_out_dim = out_fh.createDimension("longitude", lon_data.shape[0])
    lat_out_dim = out_fh.createDimension("latitude", lat_data.shape[0])
    lon_out_var = out_fh.createVariable("longitude", lon_data.dtype, ("longitude",))
    lat_out_var = out_fh.createVariable("latitude", lat_data.dtype, ("latitude",))
    ens_out_dim = out_fh.createDimension("ensemble_member", out_data.shape[1])
    ens_out_var = out_fh.createVariable("ensemble_member", numpy.dtype('i4'), ("ensemble_member",))
    mon_out_dim = out_fh.createDimension("month", out_data.shape[0])
    mon_out_var = out_fh.createVariable("month", numpy.dtype('i4'), ("month",))
    lon_out_var[:] = lon_data
    lat_out_var[:] = lat_data
    lon_out_var._attributes = in_lons._attributes
    lat_out_var._attributes = in_lats._attributes
    ens_out_var[:] = numpy.arange(0, out_data.shape[1])
    mon_out_var[:] = numpy.arange(0, out_data.shape[0])
    data_out_var = out_fh.createVariable("sst", out_data.dtype, ("month", "ensemble_member", "latitude", "longitude"))
    data_out_var[:] = out_data[:]
    data_out_var._attributes = out_attrs
    out_fh.close() 
def calc_GMT_GMSST_anom_ts(run_type, ref_start, ref_end, monthly=False, lat=-1.0, lon=-1.0):
    # get the filtered list of CMIP5 ensemble members
    cmip5_rcp_idx = read_cmip5_index_file(run_type, ref_start, ref_end)

    histo_sy, histo_ey, rcp_sy, rcp_ey = get_start_end_periods()

    n_ens = len(cmip5_rcp_idx)
    if monthly:
        f = 12
    else:
        f = 1
    n_t = (rcp_ey - histo_sy + 1)*f
    all_tos = numpy.zeros([n_ens, n_t], 'f')
    all_tas = numpy.zeros([n_ens, n_t], 'f')
    t_attr = None
    t_vals = None
    
    for idx in range(0, n_ens):
        cdo = Cdo()
        print cmip5_rcp_idx[idx][0]
        # get the tos filenames for the rcp and historical simulation
        tos_rcp_fname = get_cmip5_tos_fname(run_type, cmip5_rcp_idx[idx][0], cmip5_rcp_idx[idx][1])
        tos_histo_fname = get_cmip5_tos_fname("historical", cmip5_rcp_idx[idx][0], cmip5_rcp_idx[idx][1])

        # get the tas filenames for the rcp and historical simulation
        tas_rcp_fname = get_cmip5_tas_fname(run_type, cmip5_rcp_idx[idx][0], cmip5_rcp_idx[idx][1])
        tas_histo_fname = get_cmip5_tas_fname("historical", cmip5_rcp_idx[idx][0], cmip5_rcp_idx[idx][1])

        # create the reference files
        tos_ref_fname = create_tmp_ref_file(tos_histo_fname, ref_start, ref_end, "tos", monthly, lat, lon)
        tas_ref_fname = create_tmp_ref_file(tas_histo_fname, ref_start, ref_end, "tas", monthly, lat, lon)
        
        # do the anomalies
        tos_histo_anom_fname = create_tmp_anom_file(tos_histo_fname, tos_ref_fname, histo_sy, histo_ey, "tos", monthly, lat, lon)
        tos_rcp_anom_fname = create_tmp_anom_file(tos_rcp_fname, tos_ref_fname, rcp_sy, rcp_ey, "tos", monthly, lat, lon)
        cdo.cat(input = tos_histo_anom_fname + " " + tos_rcp_anom_fname, output = "tos_temp.nc")
        
        tas_histo_anom_fname = create_tmp_anom_file(tas_histo_fname, tas_ref_fname, histo_sy, histo_ey, "tas", monthly, lat, lon)
        tas_rcp_anom_fname = create_tmp_anom_file(tas_rcp_fname, tas_ref_fname, rcp_sy, rcp_ey, "tas", monthly, lat, lon)
        cdo.cat(input = tas_histo_anom_fname + " " + tas_rcp_anom_fname, output = "tas_temp.nc")
                
        # read the temporary files in and add to the numpy array
        fh_tos = netcdf_file("tos_temp.nc")
        fh_tas = netcdf_file("tas_temp.nc")
        try:
            all_tos[idx] = fh_tos.variables["tos"][:].squeeze()
            all_tas[idx] = fh_tas.variables["tas"][:].squeeze()
        except:
            all_tos[idx] = 1e20
            all_tas[idx] = 1e20
            
        # get the time values / attributes
        if idx == 0:#n_ens-1:
            t_vals = numpy.array(fh_tos.variables["time"][:])
            t_attr = fh_tos.variables["time"]._attributes
            
        os.remove("tas_temp.nc")
        os.remove(tas_ref_fname)
        os.remove(tas_histo_anom_fname)
        os.remove(tas_rcp_anom_fname)
        os.remove("tos_temp.nc")
        os.remove(tos_ref_fname)
        os.remove(tos_histo_anom_fname)
        os.remove(tos_rcp_anom_fname)
        fh_tos.close()
        fh_tas.close()

    # clean up last
    
    # save the all tos / all tas file
    out_name = get_gmt_gmsst_anom_ts_fname(run_type, ref_start, ref_end, monthly, lat, lon)
    out_fh = netcdf_file(out_name, "w")
    # create dimensions and variables
    time_out_dim = out_fh.createDimension("time", t_vals.shape[0])
    time_out_var = out_fh.createVariable("time", t_vals.dtype, ("time",))
    ens_out_dim = out_fh.createDimension("ens", n_ens)
    ens_out_var = out_fh.createVariable("ens", 'f', ("ens",))
    tos_out_var = out_fh.createVariable("tos", all_tos.dtype, ("ens", "time",))
    tas_out_var = out_fh.createVariable("tas", all_tas.dtype, ("ens", "time",))
    # write out variables
    time_out_var._attributes = t_attr
    time_out_var[:] = t_vals[:]
    ens_out_var[:] = numpy.arange(0, n_ens)
    # write out data
    tos_out_var[:] = all_tos[:]
    tas_out_var[:] = all_tas[:]
    
    out_fh.close()
    hadisst_sy = 1899  # assumed value; this snippet is truncated, and HadISST starts in 1899 elsewhere in this file
    hadisst_ey = 2010

    ref_start = 1986
    ref_end = 2005
    rn = 400
    hadisst_deg = 1
    RCP = "rcp45"

    hadisst_fit_file = get_HadISST_SST_SIC_mapping_fname(hadisst_sy,
                                                         hadisst_ey,
                                                         rn,
                                                         hadisst_deg,
                                                         anoms=True)
    cmip5_fit_file = get_CMIP5_SST_SIC_mapping_fname(RCP)

    fh = netcdf_file(hadisst_fit_file)
    hadisst_fit_data = fh.variables["polyfit"][:]
    fh.close()

    fh = netcdf_file(cmip5_fit_file)
    cmip5_fit_data = fh.variables["polyfit"][:]
    fh.close()

    print hadisst_fit_data.shape, cmip5_fit_data.shape

    years = get_year_intervals()
    sp0 = plt.subplot(111)
    sp1 = sp0.twinx()
    sy = years[0][0]
    ey = years[-1][1]
    mv = -1e30
    # assumed getopt call head; the original snippet is truncated at this point
    opts, args = getopt.getopt(sys.argv[1:], 'r:s:e:y:m',
                               ['run_type=', 'ref_start=', 'ref_end=',
                                'eof_year=', 'monthly'])

    for opt, val in opts:
        if opt in ['--run_type', '-r']:
            run_type = val
        if opt in ['--ref_start', '-s']:
            ref_start = int(val)
        if opt in ['--ref_end', '-e']:
            ref_end = int(val)
        if opt in ['--monthly', '-m']:
            monthly = True

    # read the (already computed) yearly mean cmip5 anomalies for this RCP scenario
    cmip5_tos_tas_ts_fname = get_gmt_gmsst_anom_ts_fname(run_type, ref_start, ref_end, monthly=monthly)
    fh = netcdf_file(cmip5_tos_tas_ts_fname)
    tos_data = fh.variables["tos"][:]
    tos_data = tos_data.byteswap().newbyteorder()
    fh.close()
    
    # load HadISST data
    hadisst_fname = "/Users/Neil/Coding/CREDIBLE_output/output/HadISST_1899_2010_400/hadisst_hist_1899_2010_1986_2005_400_anoms_gmsst_decmn.nc"
    fh = netcdf_file(hadisst_fname)
    hadisst_data = fh.variables["sst"][:].byteswap().newbyteorder()
    fh.close()
    
    # plot the CMIP5 timeseries data
    sp0 = plt.subplot(111)
    time_data = numpy.arange(1899,2101)
    plot_CMIP5_timeseries(sp0, tos_data, time_data)
    # plot the HadISST data
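
# plot_CMIP5_timeseries is not defined in this fragment. A minimal sketch:
# draw each ensemble member's time series, skipping members filled with the
# 1e20 missing value used elsewhere in this file.
def plot_CMIP5_timeseries(sp, data, time_data):
    for e in range(0, data.shape[0]):
        if data[e, 0] > 1e10:
            continue
        sp.plot(time_data, data[e], '-', c='#888888', lw=0.5, alpha=0.5,
                zorder=0)
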
def calc_sst_sic_corr():
    # calculate the correlation between the sea-ice concentration and the
    # sea-surface temperature in the CMIP5 simulations

    # get HadISST file
    fname = get_HadISST2_filepath()

    # create the output bin
    sic_bw = 0.025
    tos_bw = 0.1
    tos_min = -2
    tos_max = 2
    tos_range = tos_max - tos_min
    n_tos = int(tos_range / tos_bw)
    n_sic = int((1 + sic_bw) / sic_bw)
    out_bin_nh = numpy.zeros([n_tos, n_sic])
    out_bin_sh = numpy.zeros([n_tos, n_sic])
    sic_vals = numpy.array([float(x) * sic_bw for x in range(0, n_sic)], 'f')
    tos_vals = numpy.array([x * tos_bw + tos_min for x in range(0, n_tos)],
                           'f')

    # loop through each ensemble member
    #
    # read the files in
    fh_hadisst = netcdf_file(fname, 'r')

    # get the variables from the files
    sic_hadisst = fh_hadisst.variables["sic"][:]
    tos_hadisst = fh_hadisst.variables["sst"][:]

    # get the missing value - assume it's the same for each file in
    # the (individual) model ensemble
    mv = 1000.0

    for t in range(0, 100):  #sic_hadisst.shape[0]):
        for y in range(0, sic_hadisst.shape[1]):
            for x in range(0, sic_hadisst.shape[2]):
                cell_sic = sic_hadisst[t, y, x]
                cell_tos = tos_hadisst[t, y, x]
                # determine where the sea ice is > 0.0 and get the corresponding ssts
                if abs(cell_tos) > mv or abs(cell_sic) > mv:
                    continue
                if cell_sic == 0.0:
                    continue
                sic_idx = int(cell_sic / sic_bw)
                tos_idx = int(((cell_tos - 273.15) - tos_min) / tos_bw + 0.5)
                if tos_idx >= 0 and sic_idx >= 0 and tos_idx < n_tos and sic_idx < n_sic:
                    # split into hemispheres
                    if y < sic_hadisst.shape[1] / 2:
                        out_bin_nh[tos_idx, sic_idx] += 1
                    else:
                        out_bin_sh[tos_idx, sic_idx] += 1

    fh_hadisst.close()

    # missing values
    out_bin_nh[out_bin_nh == 0.0] = -1e20
    out_bin_sh[out_bin_sh == 0.0] = -1e20

    # save the output file

    out_bin = out_bin_nh
    fname = "output/sea_ice_tos_corr_HadISST_nh.nc"
    for i in range(0, 2):
        out_fh = netcdf_file(fname, 'w')
        # create the dimensions
        sic_vals = numpy.array([x * sic_bw for x in range(0, n_sic)])
        tos_vals = numpy.array([x * tos_bw + tos_min for x in range(0, n_tos)])
        tos_out_dim = out_fh.createDimension("tos", tos_vals.shape[0])
        sic_out_dim = out_fh.createDimension("sic", sic_vals.shape[0])
        tos_out_var = out_fh.createVariable("tos", tos_vals.dtype, ("tos", ))
        sic_out_var = out_fh.createVariable("sic", 'f', ("sic", ))

        tos_out_var[:] = tos_vals
        sic_out_var[:] = sic_vals

        data_out_var = out_fh.createVariable("freq", out_bin.dtype,
                                             ("tos", "sic"))
        data_out_var._attributes = {
            "_FillValue": -1e20,
            "missing_value": -1e20
        }
        data_out_var[:] = out_bin[:]

        out_fh.close()
        fname = "output/sea_ice_tos_corr_HadISST_sh.nc"
        out_bin = out_bin_sh
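

# A vectorized sketch of the triple loop above (not the original author's
# code): numpy.histogram2d bins every (tos, sic) pair at once.  The bin
# widths, ranges and missing-value convention follow the loop above; the
# edge-based binning only approximates the loop's rounding of tos_idx.
def bin_sic_tos(sic, tos, mv=1000.0, sic_bw=0.025, tos_bw=0.1,
                tos_min=-2.0, tos_max=2.0):
    sic = sic.flatten()
    tos = tos.flatten() - 273.15  # Kelvin to Celsius
    # keep only valid, ice-covered cells, as the loop above does
    keep = (numpy.abs(sic) < mv) & (numpy.abs(tos) < mv) & (sic > 0.0)
    n_tos = int((tos_max - tos_min) / tos_bw)
    n_sic = int((1 + sic_bw) / sic_bw)
    freq, _, _ = numpy.histogram2d(
        tos[keep], sic[keep], bins=[n_tos, n_sic],
        range=[[tos_min, tos_max], [0.0, 1.0 + sic_bw]])
    return freq

# usage for one hemisphere, e.g.:
#   ny = sic_hadisst.shape[1]
#   out_bin_nh = bin_sic_tos(sic_hadisst[:, 0:ny // 2],
#                            tos_hadisst[:, 0:ny // 2])
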
import sys
from netcdf_file import *
import matplotlib.pyplot as plt
import numpy

fh = netcdf_file("../CREDIBLE_output/output/HadISST_1899_2005_rcp45_2006_2100_r1986_2005_y2050/varmon/sic/HadISST_1899_2005_rcp45_2006_2100_r1986_2005_y2050_f2050_n6_a50_varmon_sic_mon.nc")
fh_sst = netcdf_file("../CREDIBLE_output/output/HadISST_1899_2005_rcp45_2006_2100_r1986_2005_y2050/varmon/sst/HadISST_1899_2005_rcp45_2006_2100_r1986_2005_y2050_f2050_n6_a50_varmon_ssts_mon.nc")
sic_var = fh.variables["sic"]
sst_var = fh_sst.variables["sst"]
m = 3  # month index: 3 = April, if the series starts in January
sic_ts_data = sic_var[m::12, 0:60, :]
sst_ts_data = sst_var[m::12, 0:60, :]
dates = numpy.arange(1899, 2100)
sic_ms_data = numpy.ma.masked_less(sic_ts_data, -1)
sst_ms_data = numpy.ma.masked_less(sst_ts_data, -1)
sp0 = plt.subplot(211)
sp1 = plt.subplot(212)
sp0.plot(dates, sic_ms_data[:, 90 - 68, 4:15])
sp1.plot(dates, sst_ms_data[:-1, 90 - 68, 4:15])

#for x in range(0, 360):
#    v1 = sic_ms_data[2004-1899,90-68,x:x+1]
#    v2 = sic_ms_data[2010-1899,90-68,x:x+1]
#    if not v1.mask:
#        print x, v2-v1
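# a masked-array equivalent of the loop above (a sketch): compute all the
# longitude differences in one step and keep only the unmasked ones
#   diff = sic_ms_data[2010 - 1899, 90 - 68] - sic_ms_data[2004 - 1899, 90 - 68]
#   print diff.compressed()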
    
plt.show()
fh.close()
fh_sst.close()
Example #44
def calc_sst_sic_corr(run_type):
    # calculate the correlation between the sea-ice concentration and the
    # sea-surface temperature in the CMIP5 simulations

    # get the index file
    ref_start = 1986
    ref_end = 2005
    cmip5_idx = read_cmip5_index_file(run_type, ref_start, ref_end)

    # create the output bin
    sic_bw = 2.5
    tos_bw = 0.1
    tos_min = -2
    tos_max = 2
    tos_range = tos_max - tos_min
    n_tos = int(tos_range / tos_bw)
    n_sic = int((100 + sic_bw) / sic_bw)
    out_bin_nh = numpy.zeros([n_tos, n_sic])
    out_bin_sh = numpy.zeros([n_tos, n_sic])
    sic_vals = numpy.array([float(x) * sic_bw for x in range(0, n_sic)], 'f')
    tos_vals = numpy.array([x * tos_bw + tos_min for x in range(0, n_tos)],
                           'f')

    # loop through each ensemble member
    for ens_mem in cmip5_idx:
        print ens_mem[0]
        # get the filenames for all the files we're using
        rcp_sic_fname = get_cmip5_sic_fname(run_type, ens_mem[0], ens_mem[1])
        hist_sic_fname = get_cmip5_sic_fname("historical", ens_mem[0],
                                             ens_mem[1])
        rcp_tos_fname = get_cmip5_tos_fname(run_type, ens_mem[0], ens_mem[1])
        hist_tos_fname = get_cmip5_tos_fname("historical", ens_mem[0],
                                             ens_mem[1])
        #
        if os.path.exists(rcp_sic_fname) and os.path.exists(rcp_tos_fname):
            # read the files in
            fh_sic_rcp = netcdf_file(rcp_sic_fname, 'r')
            fh_tos_rcp = netcdf_file(rcp_tos_fname, 'r')

            # start / end indices
            stidx = (2050 - 2006) * 12
            edidx = stidx + 12

            # get the variables from the files
            sic_rcp = fh_sic_rcp.variables["sic"][stidx:edidx, :, :]
            tos_rcp = fh_tos_rcp.variables["tos"][stidx:edidx, :, :]

            # missing-value threshold: any cell whose magnitude exceeds this
            # is treated as missing (the file's _FillValue is not read here)
            mv = 1000.0

            if sic_rcp.shape != tos_rcp.shape:
                continue

            for t in range(0, sic_rcp.shape[0]):
                for y in range(0, sic_rcp.shape[1]):
                    for x in range(0, sic_rcp.shape[2]):
                        cell_sic = sic_rcp[t, y, x]
                        cell_tos = tos_rcp[t, y, x]
                        # determine where the sea ice is > 0.0 and get the corresponding ssts
                        if abs(cell_tos) > mv:
                            continue
                        if cell_sic < 1.0:
                            continue
                        sic_idx = int(cell_sic / sic_bw + 0.5)
                        tos_idx = int((
                            (cell_tos - 273.15) - tos_min) / tos_bw + 0.5)
                        if tos_idx >= 0 and sic_idx >= 0 and tos_idx < n_tos and sic_idx < n_sic:
                            # split into hemispheres
                            if y < sic_rcp.shape[1] / 2:
                                out_bin_nh[tos_idx, sic_idx] += 1
                            else:
                                out_bin_sh[tos_idx, sic_idx] += 1

            fh_sic_rcp.close()
            fh_tos_rcp.close()
    # save the output file

    out_bin_nh[out_bin_nh == 0.0] = -1e20
    out_bin_sh[out_bin_sh == 0.0] = -1e20

    # write the northern hemisphere file first, then the southern hemisphere
    out_bin = out_bin_nh
    fname = "output/sea_ice_tos_corr_CMIP5_nh.nc"
    for i in range(0, 2):
        out_fh = netcdf_file(fname, 'w')
        # create the dimensions
        sic_vals = numpy.array([x * sic_bw for x in range(0, n_sic)])
        tos_vals = numpy.array([x * tos_bw + tos_min for x in range(0, n_tos)])
        tos_out_dim = out_fh.createDimension("tos", tos_vals.shape[0])
        sic_out_dim = out_fh.createDimension("sic", sic_vals.shape[0])
        tos_out_var = out_fh.createVariable("tos", tos_vals.dtype, ("tos", ))
        sic_out_var = out_fh.createVariable("sic", 'f', ("sic", ))

        tos_out_var[:] = tos_vals
        sic_out_var[:] = sic_vals

        data_out_var = out_fh.createVariable("freq", out_bin.dtype,
                                             ("tos", "sic"))
        data_out_var._attributes = {
            "_FillValue": -1e20,
            "missing_value": -1e20
        }
        print data_out_var.shape, out_bin.shape
        data_out_var[:] = out_bin[:]

        out_fh.close()
        fname = "output/sea_ice_tos_corr_CMIP5_sh.nc"
        out_bin = out_bin_sh
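

# A sketch (not part of the original example) of how the frequency file
# written above can be read back and shown as a heat map; the file and
# variable names follow the code above, the plotting itself is an
# assumption.
import matplotlib.pyplot as plt


def plot_sic_tos_freq(fname="output/sea_ice_tos_corr_CMIP5_nh.nc"):
    fh = netcdf_file(fname, 'r')
    tos_vals = fh.variables["tos"][:]
    sic_vals = fh.variables["sic"][:]
    freq = numpy.ma.masked_equal(fh.variables["freq"][:], -1e20)
    fh.close()
    sp = plt.subplot(111)
    # log-scale the counts so sparsely populated bins stay visible
    sp.pcolormesh(sic_vals, tos_vals, numpy.ma.log10(freq))
    sp.set_xlabel("sic (%)")
    sp.set_ylabel("tos (degC)")
    plt.show()
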
Example #46
def create_siex_anoms_ts(run_type, ref_start, ref_end, monthly):
    # get the filtered list of CMIP5 ensemble members
    cmip5_rcp_idx = read_cmip5_index_file(run_type, ref_start, ref_end)
    histo_sy, histo_ey, rcp_sy, rcp_ey = get_start_end_periods()

    # create the storage
    n_ens = len(cmip5_rcp_idx)
    if monthly:
        f = 12
    else:
        f = 1
    n_t = (rcp_ey - histo_sy + 1) * f
    nh_siex = numpy.zeros([n_ens, n_t], 'f')
    sh_siex = numpy.zeros([n_ens, n_t], 'f')

    # create the area - load the lat and lon coords in
    fn = get_concat_anom_sic_output_fname(cmip5_rcp_idx[0][0],
                                          cmip5_rcp_idx[0][1], run_type,
                                          ref_start, ref_end)
    fh = netcdf_file(fn, 'r')
    lon = fh.variables['longitude'][:]
    lat = fh.variables['latitude'][:]
    t_attr = fh.variables['time']._attributes
    t_vals = fh.variables['time'][:]
    fh.close()

    areas = numpy.zeros([lat.shape[0], lon.shape[0]], 'f')
    # loop through each latitude
    lat_d = lat[1] - lat[0]
    lon_d = lon[1] - lon[0]
    for y in range(0, lat.shape[0]):
        # gA gives the area (in m^2) of the cell around lat[y]; divide by
        # 1000^2 to convert to km^2
        areas[y, :] = gA(lat[y] - lat_d, 0.0, lat[y] + lat_d,
                         lon_d) / (1000 ** 2)
    # integer row index of the equator, used to split the hemispheres
    # (lat.shape[0] * 0.5 gave a float, which is not a valid slice index)
    eq = lat.shape[0] // 2

    for idx in range(0, n_ens):
        print cmip5_rcp_idx[idx][0] + ", " + cmip5_rcp_idx[idx][
            1] + ", " + str(idx)

        fn = get_concat_anom_sic_output_fname(cmip5_rcp_idx[idx][0],
                                              cmip5_rcp_idx[idx][1], run_type,
                                              ref_start, ref_end)
        fh = netcdf_file(fn, 'r')
        sic_var = fh.variables['sic']
        mv = sic_var._attributes["_FillValue"]
        sic = numpy.ma.masked_equal(sic_var[:], mv)
        # sic expressed in percentages, hence 0.01
        ext = sic * 0.01 * areas
        nh_siex[idx] = numpy.sum(numpy.sum(ext[:, 0:eq], axis=1), axis=1)
        sh_siex[idx] = numpy.sum(numpy.sum(ext[:, eq:], axis=1), axis=1)
        fh.close()

    # save the northern and southern hemisphere sea ice extent anomaly timeseries
    out_name = get_siex_anom_ts_fname(run_type, ref_start, ref_end, monthly)
    out_fh = netcdf_file(out_name, "w")
    # create dimensions and variables
    time_out_dim = out_fh.createDimension("time", t_vals.shape[0])
    time_out_var = out_fh.createVariable("time", t_vals.dtype, ("time", ))
    ens_out_dim = out_fh.createDimension("ens", n_ens)
    ens_out_var = out_fh.createVariable("ens", 'f', ("ens", ))
    nh_out_var = out_fh.createVariable("nh_siex", nh_siex.dtype,
                                       ("ens", "time"))
    sh_out_var = out_fh.createVariable("sh_siex", sh_siex.dtype,
                                       ("ens", "time"))
    # write out variables
    time_out_var._attributes = t_attr
    time_out_var[:] = t_vals[:]
    ens_out_var[:] = numpy.arange(0, n_ens)
    # write out data
    # note: nh_siex and sh_siex are crossed over here; this is only correct
    # if latitude runs south to north, so that rows 0:eq (summed into
    # nh_siex above) actually hold the southern hemisphere -- verify the
    # latitude ordering of the input files before trusting these labels
    nh_out_var[:] = sh_siex[:]
    sh_out_var[:] = nh_siex[:]

    out_fh.close()

# fragment of a separate driver script appended to this example; the getopt
# preamble below is reconstructed from the option loop (an assumption, not
# the original code), with default values taken from the reference period
# used elsewhere in these examples
if __name__ == "__main__":
    import getopt
    import sys

    run_type = "rcp45"  # default values are assumptions
    ref_start = 1986
    ref_end = 2005
    monthly = False
    opts, args = getopt.getopt(sys.argv[1:], 'r:s:e:m',
                               ['run_type=', 'ref_start=', 'ref_end=',
                                'monthly'])
    for opt, val in opts:
        if opt in ['--run_type', '-r']:
            run_type = val
        if opt in ['--ref_start', '-s']:
            ref_start = int(val)
        if opt in ['--ref_end', '-e']:
            ref_end = int(val)
        if opt in ['--monthly', '-m']:
            monthly = True

    # read the (already computed) yearly mean cmip5 anomalies for this RCP scenario
    cmip5_tos_tas_ts_fname = get_gmt_gmsst_anom_ts_fname(run_type,
                                                         ref_start,
                                                         ref_end,
                                                         monthly=monthly)
    fh = netcdf_file(cmip5_tos_tas_ts_fname)
    tos_data = fh.variables["tos"][:]
    # convert to native byte order for downstream numpy operations
    tos_data = tos_data.byteswap().newbyteorder()
    fh.close()

    # load HadISST data
    hadisst_fname = "/Users/Neil/Coding/CREDIBLE_output/output/HadISST_1899_2010_400/hadisst_hist_1899_2010_1986_2005_400_anoms_gmsst_decmn.nc"
    fh = netcdf_file(hadisst_fname)
    hadisst_data = fh.variables["sst"][:].byteswap().newbyteorder()
    fh.close()

    # plot the CMIP5 timeseries data
    sp0 = plt.subplot(111)
    time_data = numpy.arange(1899, 2101)
    plot_CMIP5_timeseries(sp0, tos_data, time_data)
    # plot the HadISST data
def create_concat_sst_anoms(run_type, ref_start, ref_end, start_idx, end_idx,
                            monthly):
    # Build a time series of concatenated sst anomalies (wrt 1986->2005)
    # from 1899->2100

    # get the filtered set of cmip5 models / runs
    cmip5_rcp_idx = read_cmip5_index_file(run_type, ref_start, ref_end)
    n_ens = len(cmip5_rcp_idx)

    # create the cdo object
    cdo = Cdo()

    # variable name
    sst_var_name = "tos"
    sic_var_name = "sic"
    histo_sy, histo_ey, rcp_sy, rcp_ey = get_start_end_periods()

    for idx in range(start_idx, end_idx):
        try:
            print cmip5_rcp_idx[idx][0]
            out_path = get_concat_anom_sst_output_fname(
                cmip5_rcp_idx[idx][0], cmip5_rcp_idx[idx][1], run_type,
                ref_start, ref_end, monthly)
            print out_path
            # get the tos filenames for the rcp and historical simulation
            sst_rcp_fname = get_cmip5_tos_fname(run_type,
                                                cmip5_rcp_idx[idx][0],
                                                cmip5_rcp_idx[idx][1])
            sst_histo_fname = get_cmip5_tos_fname("historical",
                                                  cmip5_rcp_idx[idx][0],
                                                  cmip5_rcp_idx[idx][1])

            sic_rcp_fname = get_cmip5_sic_fname(run_type,
                                                cmip5_rcp_idx[idx][0],
                                                cmip5_rcp_idx[idx][1])
            sic_histo_fname = get_cmip5_sic_fname("historical",
                                                  cmip5_rcp_idx[idx][0],
                                                  cmip5_rcp_idx[idx][1])

            if "HadGEM2-" in cmip5_rcp_idx[idx][0]:
                rcp_sy -= 1  # met office files run from 2005/12-> for rcp scenarios
            sst_rcp_remap_string = create_remapped_field(
                sst_rcp_fname, rcp_sy, rcp_ey, sst_var_name, monthly)
            sst_histo_remap_string = create_remapped_field(
                sst_histo_fname, histo_sy, histo_ey, sst_var_name, monthly)
            sic_rcp_remap_string = create_remapped_field(
                sic_rcp_fname, rcp_sy, rcp_ey, sic_var_name, monthly)
            sic_histo_remap_string = create_remapped_field(
                sic_histo_fname, histo_sy, histo_ey, sic_var_name, monthly)

            print "SIC histo:" + sic_histo_fname

            # addc(0) adds zero: a no-op that forces cdo to evaluate the
            # chained operator strings and materialise temporary files
            sst_rcp_remap_temp = cdo.addc(0, input=sst_rcp_remap_string)
            sst_histo_remap_temp = cdo.addc(0, input=sst_histo_remap_string)
            sic_rcp_remap_temp = cdo.addc(0, input=sic_rcp_remap_string)
            sic_histo_remap_temp = cdo.addc(0, input=sic_histo_remap_string)

            # cat the files together
            tmp_sst_name = rand_string() + "_tmp_sst.nc"
            tmp_sic_name = rand_string() + "_tmp_sic.nc"
            cdo.cat(input=sst_histo_remap_temp + " " + sst_rcp_remap_temp,
                    output=tmp_sst_name)
            cdo.cat(input=sic_histo_remap_temp + " " + sic_rcp_remap_temp,
                    output=tmp_sic_name)

            # fix the file to replace missing value in sst with -1.8 if
            # sic > 0 (the replacement itself is currently commented out
            # below; see the sketch after this example)
            sst_fh = netcdf_file(tmp_sst_name, 'r')
            sic_fh = netcdf_file(tmp_sic_name, 'r')
            sst_var = sst_fh.variables[sst_var_name]
            sst_data = numpy.array(sst_var[:])
            lon_var = sst_fh.variables["lon"]
            lat_var = sst_fh.variables["lat"]
            t_var = sst_fh.variables["time"]
            sic_data = sic_fh.variables[sic_var_name][:]
            mv = sst_var._attributes["_FillValue"]

            # replace
            #            for t in range(0, sic_data.shape[0]):
            #                sic_data_idx = numpy.where(sic_data[t] > 1)
            #                sst_data[t][sic_data_idx] = 273.15 - (1.8 * sic_data[t][sic_data_idx] * 0.01)
            sst_fh.close()
            sic_fh.close()

            # save the file
            tmp_sst2_name = rand_string() + "_tmp_sst2.nc"
            tmp_sst3_name = rand_string() + "_tmp_sst3.nc"
            save_3d_file(tmp_sst2_name, sst_data, lon_var, lat_var,
                         sst_var._attributes, t_var, sst_var_name)

            # add the lsm from hadisst
            lsm_path = "/soge-home/staff/coml0118/LSM/HadISST2_lsm.nc"
            cdo.add(input=" -smooth9 " + tmp_sst2_name + " " + lsm_path,
                    output=tmp_sst3_name)

            # calculate the temporary reference file
            tmp_ref_name = create_tmp_ref_field(tmp_sst3_name, ref_start,
                                                ref_end, sst_var_name, monthly)

            # calculate the timeseries of anomalies for both the historical
            # and RCP runs as running decadal means
            anom_string = get_calc_anom_string(tmp_ref_name, tmp_sst3_name,
                                               histo_sy, rcp_ey, sst_var_name,
                                               monthly)
            cdo.addc(0, input=anom_string, output=out_path)
            os.remove(tmp_sst_name)
            os.remove(tmp_sic_name)
            os.remove(tmp_sst2_name)
            os.remove(tmp_sst3_name)
            os.remove(tmp_ref_name)
        except Exception, e:
            # skip ensemble members whose files are missing or malformed,
            # but report the failure rather than silently swallowing it
            print cmip5_rcp_idx[idx][0] + " failed: " + str(e)
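

# A sketch of the replacement step that is commented out above (not the
# author's final code): where sea ice is present, overwrite the SST with a
# concentration-weighted freezing temperature, exactly as in the disabled
# lines.
def fill_sst_under_ice(sst_data, sic_data):
    # sic is in percent; 273.15 K is 0 degC, and -1.8 degC is roughly the
    # freezing point of sea water at full ice cover
    for t in range(0, sic_data.shape[0]):
        sic_data_idx = numpy.where(sic_data[t] > 1)
        sst_data[t][sic_data_idx] = 273.15 - (
            1.8 * sic_data[t][sic_data_idx] * 0.01)
    return sst_data
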
Example #49
def process_netcdf(in_ncf, base_path, field):

	out_name = ""  # defined up front so the except clause can always use it
	try:

		in_ncf_end = os.path.basename(in_ncf)
		o_field_name = get_output_field_name(field)
		umid, datestamp = in_ncf_end[:-3].split(field[0])
		out_name = base_path + "/" + field[0] + "/" + o_field_name + "/" + o_field_name + "_" + umid + "_" + um_to_timestamp(datestamp) + ".nc"
	#	print in_ncf,out_name
	
		# open as netCDF to a temporary file
		nc_in_file = netcdf_file(in_ncf,'r')
		# create the output netCDF file
		# check whether it exists
		if os.path.exists(out_name):
			return out_name
		# get the variable from the input
		if not field[1] in nc_in_file.variables.keys():
			print "Could not extract field: " + field[1] + " from file: " + in_ncf_end
			return
		nc_out_file = netcdf_file(out_name, "w")
	
		nc_in_var = nc_in_file.variables[field[1]]
		in_dimensions = []
		process = field[3]
		v_min = field[4]
		v_max = field[5]
		# now copy the dimensions from input netcdf
		for d in nc_in_var.dimensions:
			# get the input dimension and the data
			dim_in_var = nc_in_file.variables[d]
			dim_in_data = dim_in_var[:]
			in_dimensions.append([d, dim_in_data])

		# get the rotated pole definition	
		plon, plat = get_rotated_pole(nc_in_var._attributes, nc_in_file)
		# subset the dimensions to create the out_dimensions
		out_dims, subset_dims, lon_lat_idxs, remap_data = subset_dimensions(in_dimensions, field, plon, plat)
		# if the longitude and latitude indexes are < 0 then we need to remap the data so that 0deg is
		# in the middle of the field, not at the beginning
		if remap_data:
			in_data = nc_in_var[:,:,lon_lat_idxs[1]:lon_lat_idxs[3],:]	# get the input data - can subset latitude early
			new_data = numpy.zeros(in_data.shape, 'f') # create a new store
			d_len = in_data.shape[3]					# get the longitude length
			d_len_d2 = d_len / 2						# lon length div 2
			new_data[:,:,:,0:d_len_d2] = in_data[:,:,:,d_len_d2:d_len]	# copy right hand half to left hand
			new_data[:,:,:,d_len_d2:d_len] = in_data[:,:,:,0:d_len_d2]  # copy left hand half to right hand
			var_out_data = new_data[:,:,:,lon_lat_idxs[0]:lon_lat_idxs[2]] # get the subset data
		else:		
			var_out_data = nc_in_var[:,:,lon_lat_idxs[1]:lon_lat_idxs[3], lon_lat_idxs[0]:lon_lat_idxs[2]]
	
		# if the data is going to be processed then do the processing here
		if process != "all":
			# get the missing value first
			mv = get_missing_value(nc_in_var._attributes)

			var_out_data = process_data(var_out_data, process, mv, plon, plat, subset_dims, v_min, v_max)
		
		for d in out_dims:
			# create the output dimension and variable
			nc_out_file.createDimension(d[0], d[1].shape[0])
			dim_out_var = nc_out_file.createVariable(d[0], d[1].dtype, (d[0],))
			# assign the output variable data and attributes from the input
			if d[0] in nc_in_file.variables.keys():
				dim_in_var = nc_in_file.variables[d[0]]
				dim_out_var._attributes = dim_in_var._attributes
			elif d[0] == "pt":
				# if it's the "pt" dimension then create an attribute indicating the domain of the
				# mean-ed / max-ed / min-ed variable
				dom_str = ""
				if field[2] == []:
					dom_str = "global  "
				else:
					for i in range(0, 4):
						dom_str += str(field[2][i]) + ", "
				dim_out_var._attributes["domain"] = dom_str[:-2]
			dim_out_var[:] = d[1][:]
		
		# create the variable
		out_dim_names = [d[0] for d in out_dims]
		nc_out_var = nc_out_file.createVariable(field[1], var_out_data.dtype, out_dim_names)
		# assign the attributes
		nc_out_var._attributes = nc_in_var._attributes
		# remove the grid mapping and coordinates from the dictionary if they exist and process is not all
		if process != "all":
			if "grid_mapping" in nc_out_var._attributes:
				del nc_out_var._attributes["grid_mapping"]
			if "coordinates" in nc_out_var._attributes:
				del nc_out_var._attributes["coordinates"]
			if "cell_method" in nc_out_var._attributes:
				nc_out_var._attributes["cell_method"] += ", area: " + process + " "
			
		# assign the data
		nc_out_var[:] = var_out_data
	
		# check for rotated pole and copy variable if it exists
		if "grid_mapping" in nc_out_var._attributes and len(out_dims) == 4:
			grid_map_name = nc_out_var._attributes["grid_mapping"]
			grid_map_var = nc_in_file.variables[grid_map_name]
			grid_map_out_var = nc_out_file.createVariable(grid_map_name, 'c', ())
			grid_map_out_var._attributes = grid_map_var._attributes
			# get the global longitude / global latitude vars
			coords = (nc_out_var._attributes["coordinates"]).split(" ")
			global_lon_var = nc_in_file.variables[coords[0]]
			global_lat_var = nc_in_file.variables[coords[1]]
			global_lon_data = global_lon_var[lon_lat_idxs[1]:lon_lat_idxs[3], lon_lat_idxs[0]:lon_lat_idxs[2]]
			global_lat_data = global_lat_var[lon_lat_idxs[1]:lon_lat_idxs[3], lon_lat_idxs[0]:lon_lat_idxs[2]]
			# create the global latitude / global longitude variables
			out_global_lon_var = nc_out_file.createVariable(coords[0], global_lon_data.dtype, (out_dims[2][0], out_dims[3][0]))
			out_global_lon_var[:] = global_lon_data
			out_global_lat_var = nc_out_file.createVariable(coords[1], global_lat_data.dtype, (out_dims[2][0], out_dims[3][0]))
			out_global_lat_var[:] = global_lat_data
			out_global_lon_var._attributes = global_lon_var._attributes
			out_global_lat_var._attributes = global_lat_var._attributes
		
		nc_out_file.close()
		nc_in_file.close()
	except Exception, e:
		# out_name is initialised before the try block, so it is always
		# defined here; remove any partially written file and report
		print 'Failed to create netcdf file', os.path.basename(out_name)
		print e
		if os.path.exists(out_name):
			os.remove(out_name)
		return False
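

# The manual half-and-half copy inside process_netcdf (moving the 0-degree
# meridian into the middle of the field) is equivalent to rolling the array
# along its longitude axis; a sketch, assuming an even longitude count:
import numpy


def centre_zero_meridian(data):
    # shift by half the longitude length so 0 degrees sits mid-field
    return numpy.roll(data, data.shape[-1] // 2, axis=-1)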