def test_obs_ncobsplatform_xylocsa():
    """Check x/y location perturbations of a profiles obs platform.

    Activating a perturbation must move the coordinates; deactivating
    it must restore them exactly.
    """
    # Load platform
    obs = NcObsPlatform(NCFILE_OBS_PROFILES, name='profiles')

    # Keep a copy of the unperturbed coordinates
    lons_orig = obs.lons.copy()
    lats_orig = obs.lats.copy()

    # Init output
    saout = obs.xylocsa_init_pert_output()
    assert saout.shape == (4, len(lons_orig))

    # Loop on perturbation indices
    for ipert in obs.xylocsa_get_pert_indices_iter():

        # X: activate must move lons, deactivate must restore them
        obs.xylocsa_activate_pert('+x', ipert)
        assert abs(obs.lons - lons_orig).ptp() > 0.0001
        obs.xylocsa_deactivate_pert()
        assert_allclose(obs.lons, lons_orig)

        # Y: activate must move lats, deactivate must restore them
        obs.xylocsa_activate_pert('+y', ipert)
        assert abs(obs.lats - lats_orig).ptp() > 0.0001
        obs.xylocsa_deactivate_pert()
        assert_allclose(obs.lats, lats_orig)
def test_fcore_computeensstats():
    """Test the :func:`sonat._fcore.f_computeensstats` function.

    Reads a small ensemble of state vectors from text files, removes the
    ensemble mean, then checks the skewness/kurtosis computed by the
    Fortran routine against reference values.
    """
    # Inits
    nfiles = 5
    nstate = 4
    inpath = 'inputs'
    infile = 'fieldA_{it}.txt'
    states = N.zeros((nstate, nfiles), order='f')
    element = 0

    # Read states (one column per file)
    # NOTE: format the file name explicitly instead of the fragile
    # os.path.join(...).format(**locals()) idiom.
    for it in range(1, nfiles + 1):
        states[:, it - 1] = N.loadtxt(
            os.path.join(THIS_DIR, inpath, infile.format(it=it)))

    # Anomaly: remove the ensemble mean from each member
    meanstate = states.mean(axis=1)
    states -= meanstate.reshape(1, nstate).T

    # Compute stats
    skewness, kurtosis, status = f_computeensstats(element, meanstate, states)

    # Checks against reference values
    assert_allclose(status, 0)
    assert_allclose([skewness, kurtosis], [-1.2529E+00, -1.3728E+00],
                    rtol=1e-4)
def test_arm_arm_indirect_spectrum():
    """Check that the ARM spectrum is recovered from the PCs.

    The spectrum computed indirectly from the projection of the raw
    array modes must match the direct spectrum.
    """
    # Load ARM
    arm = get_arm()

    # Indirect spectrum: project then sum squares over modes
    projection = N.dot(arm.S.T, arm.raw_arm)
    indirect_spect = (projection ** 2).sum(axis=0)

    # Compare with direct spectrum
    assert_allclose(indirect_spect, arm.raw_spect, atol=1e-7)
def test_obs_obsmanager_project_model():
    """Project a model surface temperature onto the obs manager."""
    # Manager built by the surface-initialisation test
    manager = test_obs_obsmanager_init_surf()

    # Read the model surface temperature
    ds = DS(NCFILE_MANGA0, 'mars', level='surf', logger_level='error')
    temp = ds('temp')
    ds.close()
    temp.id += '_surf'

    # Interpolate the model onto the observation locations
    otemp = manager.project_model(temp)
    assert_allclose(otemp[1][0], [12.91558515, 10.58179214])

    return manager
def test_fcore_eofcovar():
    """Test the :func:`sonat._fcore.f_eofcovar` function.

    Reads a small ensemble of state vectors, runs the Fortran EOF
    decomposition, and compares singular values, EOFs and mean state
    with the sangoma reference outputs.
    """
    # Inits
    nfiles = 5
    nstate = 4
    inpath = 'inputs'
    infile = 'fieldA_{it}.txt'
    outfile_eof = 'sangoma_eofcovar_uv_eof_{}.txt'  # files holding EOFs
    outfile_svals = 'sangoma_eofcovar_uv_svals.txt'  # singular values
    outfile_mstate = 'sangoma_eofcovar_uv_meanstate.txt'  # mean state
    states = N.zeros((nstate, nfiles), order='f')
    dim_fields = N.array([1])
    offsets = 1
    remove_mstate = 1
    do_mv = 0
    meanstate = N.zeros(states.shape[0], order='f')

    # Read states (one column per file)
    # NOTE: format the file name explicitly instead of the fragile
    # os.path.join(...).format(**locals()) idiom.
    for it in range(1, nfiles + 1):
        states[:, it - 1] = N.loadtxt(
            os.path.join(THIS_DIR, inpath, infile.format(it=it)))

    # EOF decomposition
    stddev, svals, svec, status = f_eofcovar(dim_fields, offsets,
                                             remove_mstate, do_mv,
                                             states, meanstate)

    # True values from the sangoma reference files
    stddev_true = 1.
    svals_true = N.loadtxt(os.path.join(THIS_DIR, outfile_svals))
    svec_true = N.array([
        N.loadtxt(os.path.join(THIS_DIR, outfile_eof.format(i + 1)))
        for i in xrange(nfiles - 1)
    ]).T
    meanstate_true = N.loadtxt(os.path.join(THIS_DIR, outfile_mstate))

    # Checks
    assert_allclose(status, 0)
    assert_allclose(stddev, stddev_true)
    assert_allclose(svals[:nfiles - 1], svals_true)
    assert_allclose(svec[:, :nfiles - 1], svec_true)
    assert_allclose(meanstate, meanstate_true)
def test_arm_arm_init():
    """Initialise an ARM instance and check norm synchronisation."""
    # Ensemble
    ens = Ensemble.from_file(ENS_NCFILE, checkvars=True, logger=LOGGER)

    # Observation platforms gathered into a manager
    platforms = [
        NcObsPlatform(NCFILE_OBS_HFRADARS),
        NcObsPlatform(NCFILE_OBS_PROFILES),
        NcObsPlatform(NCFILE_OBS_SATSST),
    ]
    obsmanager = ObsManager(platforms)

    # Bathymetry, subsampled by 2 in each direction
    bathy = get_bathy()[::2, ::2]

    # Init ARM with synchronised norms
    arm = ARM(ens, obsmanager, syncnorms=True, bathy=bathy)

    # Checks: obs temp norms must match the ensemble temp norms
    assert_allclose(arm.obsmanager[2].norms + arm.obsmanager[1].norms[:1],
                    arm.ens.norms[arm.ens.varnames.index('temp')])

    return arm
def test_ens_ensemble_init():
    """Init an :class:`Ensemble` from variables and from file and compare.

    Both construction paths must yield the same variable ids and the
    same stacked data.
    """
    # Load selected variables from the ensemble file
    ncfile = ENS_NCFILE
    varnames = ['sal', 'temp', 'temp_surf', 'u_surf', 'v_surf']
    variables = []  # renamed from 'vars' to avoid shadowing the builtin
    f = cdms2.open(ncfile)
    for vname in f.listvariables():
        if vname in varnames:
            variables.append(f(vname))
    f.close()
    varnames = [v.id for v in variables]

    # Init from variables
    ensv = Ensemble(variables, checkvars=True)

    # Init from file
    ensf = Ensemble.from_file(ncfile, checkvars=True, logger=LOGGER)

    # Checks
    assert [v.id for v in ensv.variables] == varnames
    assert [v.id for v in ensf.variables] == varnames
    assert_allclose(ensv.stacked_data, ensf.stacked_data)
def test_obs_obsmanager_init_surf():
    """Stack two surface obs platforms into an :class:`ObsManager`."""
    # Two surface platforms covering different longitude ranges
    obs_west = NcObsPlatform(NCFILE_OBS_SURF, lon=(-7, -5),
                             varnames=['temp'], norms=0.2,
                             name='surf_west')
    obs_east = NcObsPlatform(NCFILE_OBS_SURF, lon=(-5, 0),
                             varnames=['temp', 'sal'], norms=[0.2, 0.1],
                             name='surf_east')

    # Setup manager
    manager = ObsManager([obs_west, obs_east])

    # Checks on stacked data and coordinates
    stacked = manager.stacked_data
    assert_allclose(stacked, [1., 2.5, 1.5, 4., 1.5, 4.])
    assert_allclose(manager.lons, [-5.8, -5.7, -4.6, -2.8])
    assert_allclose(manager.lats, [48.1, 47.5, 47.4, 47.3])
    assert manager.varnames == ['temp', 'sal']

    # Checks on the model specs needed to feed this manager
    model_specs = manager.get_model_specs()
    assert sorted(model_specs.keys()) == ['depths', 'lat', 'lon', 'varnames']
    assert model_specs['varnames'] == ['temp', 'sal']
    assert model_specs['depths']['temp'] == ('surf', )
    assert_allclose(model_specs['lat'][:2], (47.3, 48.1))
    assert_allclose(model_specs['lon'][:2], (-5.8, -2.8))

    # Renorm by name: halving the norm doubles the stacked values
    manager.set_named_norms(temp=0.1)
    assert manager.stacked_data[0] == 2 * stacked[0]

    # Cache for reuse by other tests
    CACHE['manager_surf'] = manager
    return manager
def test_fcore_arm():
    """Test the :func:`sonat._fcore.f_arm` function"""
    # State-space samples
    ssamples = N.array([[0.24, -0.38, 0.14], [0.51, -0.75, 0.24]], order='F')
    nens = ssamples.shape[1]

    # Observation operator
    H = N.array([[0.0, 1.0], [1.0, 0.0]], order='F')

    # Data-space samples
    dsamples = N.dot(H, ssamples)

    # Observation error covariance
    R = N.asfortranarray(N.diag([0.1, 0.1]))

    # Call ARM
    ndof = min(dsamples.shape)
    arm_spect, arm, arm_rep, status = f_arm(ndof, ssamples, dsamples, R)

    # PCs of the array modes
    pcs = N.dot(dsamples.T / N.sqrt(0.1 * (nens - 1)), arm)

    # Reference values from the sangoma example outputs
    arm_spect_true = N.loadtxt(
        os.path.join(THIS_DIR, 'sangoma_example_arm_output--RM_spectrum'),
        usecols=[1])
    arm_true = N.array([
        N.loadtxt(
            os.path.join(
                THIS_DIR,
                'sangoma_example_arm_output--mode{:04d}'.format(imode + 1)),
            usecols=[1])
        for imode in xrange(dsamples.shape[0])
    ]).T
    arm_rep_true = N.array([
        N.loadtxt(
            os.path.join(
                THIS_DIR,
                'sangoma_example_arm_output--modrep{:04d}'.format(imode + 1)),
            usecols=[1])
        for imode in xrange(ssamples.shape[0])
    ]).T

    # Checks: status, values, orthonormality and spectrum from PCs
    assert_allclose(status, 0)
    assert_allclose(arm_spect, arm_spect_true)
    assert_allclose(arm, arm_true)
    assert_allclose(arm_rep, arm_rep_true)
    assert_allclose(N.dot(arm.T, arm), N.diag(N.ones(ndof)), atol=1e-7)
    assert_allclose((pcs ** 2).sum(axis=0), arm_spect)
def test_obs_ncobsplatform_surf():
    """Load a surface obs platform, renorm it and project a model field."""
    # Load and stack obs
    obs = NcObsPlatform(NCFILE_OBS_SURF, logger=LOGGER, lat=(45, 47.8),
                        norms=[0.2, 0.1])
    stacked = obs.stacked_data.copy()
    assert obs.lons.shape == (3, )
    assert obs.ns == 6
    assert obs.ndim == 1
    assert_allclose(obs.means, [0, 0], atol=1e-7)
    assert obs.depths == 'surf'
    assert_allclose(stacked, [2.5, 1.5, 4., 1., 1.5, 4.])

    # Named norms: halving the temp norm doubles the temp block only
    notnormed = obs.set_named_norms(temp=0.1)
    assert obs.norms == [0.1, 0.1]
    assert_allclose(obs.stacked_data[:3], 2 * stacked[:3])
    assert_allclose(obs.stacked_data[3:], stacked[3:])
    obs.set_named_norms(temp=0.2)  # restore

    # Read model fields at the platform level
    ds = DS(NCFILE_MANGA0, 'mars', level=obs.depths, logger_level='error')
    temp = ds('temp')
    sal = ds('sal')
    ds.close()
    temp.id += '_surf'
    sal.id += '_surf'

    # Interpolate model onto the observations
    otemp = obs.project_model(temp)
    osal = obs.project_model(sal)
    otem_true = [12.97311556, 12.91558515, 10.58179214]
    assert_allclose(otemp[0], otem_true)

    # Stack the projected (anomalised) model
    otemp[:] -= 11.5
    osal[:] -= 35.5
    stacked_data = obs.restack([otemp, osal])
    assert stacked_data.shape == (6, 15)
    assert_allclose(stacked_data[:3, 0] * obs.norms[0] + 11.5, otem_true)

    return obs
def test_stack_mv2_scattered_without_record_fixed_norm():
    """Stack two scattered masked arrays with fixed norms, no record dim."""
    # Fake data: first array, with some points masked
    norm0 = 20.
    lons, lats, data0 = create_mv2_scattered_xyzt(nt=0)
    data0[:, 3:5] = MV2.masked
    # Fake data: second array
    norm1 = 8.
    lons, lats, data1 = create_mv2_scattered_xyzt(nt=0, np=20, nz=0)
    data1[10:12] = MV2.masked

    # Stack: fixed norm, no record dim, no anomaly
    stacker = Stacker([data0, data1], norms=[norm0, norm1], nordim=True,
                      mean=False, logger=LOGGER)
    stacked = stacker.stacked_data

    # Unstack
    unstacked0, unstacked1 = stacker.unstack(stacker.stacked_data)

    # Restack
    restacked = stacker.restack([data0, data1])

    # Renorm then norm back
    norms = stacker.norms
    stacker.set_norms([norm * 2 for norm in norms])
    renormed = stacker.stacked_data.copy()
    stacker.set_norms(norms)
    backnormed = stacker.stacked_data.copy()

    # Checks: roundtrips must be exact, renorming must halve the data
    assert_allclose(stacked.shape,
                    ((~data0.mask).sum() + (~data1.mask).sum(), ))
    assert_allclose(stacked, N.concatenate((data0.compressed() / norm0,
                                            data1.compressed() / norm1)))
    assert_allclose(unstacked0, data0)
    assert_allclose(unstacked1, data1)
    assert_allclose(restacked, stacked)
    assert_allclose(renormed, stacked / 2)
    assert_allclose(backnormed, stacked)

    return stacker
def test_stack_mv2_with_record():
    """Stack two gridded arrays sharing a record (member) axis."""
    # Fake data: first array on a rotated grid
    nt = 5
    data0 = create_mv2_gridder_xyzt(nt=nt, rotate=30)
    data0.id = 'data0'
    data0.units = 'units0'
    data0[:, :, 3:5, 2:4] = MV2.masked
    # Turn the time axis into a generic 'member' record axis
    raxis = data0.getTime()
    del raxis.axis
    del raxis.units
    raxis.id = 'member'
    # Fake data: second array on another grid, same record axis
    data1 = create_mv2_gridder_xyzt(rotate=0, nx=10, ny=9, nz=4)
    data1.id = 'data1'
    data1.long_name = 'long_name1'
    data1[:, :, 5:7, 6:7] = MV2.masked
    data1.setAxis(0, raxis)

    # Stack
    stacker = Stacker([data0, data1], nordim=False, logger=LOGGER)
    stacked = stacker.stacked_data

    # Unstack: all records, then only the first half
    unstacked0, unstacked1 = stacker.unstack(stacker.stacked_data, format=2)
    unstacked0b, unstacked1b = stacker.unstack(
        stacker.stacked_data[:, :nt // 2])

    # Restack
    restacked = stacker.restack([data0, data1])

    # Renorm then norm back
    norms = stacker.norms
    stacker.set_norms([norm * 2 for norm in norms])
    renormed = stacker.stacked_data.copy()
    stacker.set_norms(norms)
    backnormed = stacker.stacked_data.copy()

    # Checks: shapes, attributes, roundtrips and renorming
    assert_allclose(stacked.shape, ((~data0[0].mask).sum() +
                                    (~data1[0].mask).sum(), data0.shape[0]))
    assert_allclose(unstacked0, data0)
    assert unstacked0.id == 'data0'
    assert unstacked0.units == 'units0'
    assert_allclose(unstacked1, data1)
    assert_allclose(unstacked0[:nt // 2], unstacked0b)
    assert_allclose(unstacked1[:nt // 2], unstacked1b)
    assert_allclose(restacked, stacked)
    assert_allclose(renormed, stacked / 2)
    assert_allclose(backnormed, stacked)

    return stacker
def test_ens_generate_pseudo_ensemble():
    """Generate a pseudo ensemble directly and with EOF enrichment.

    First calls :func:`generate_pseudo_ensemble` without enrichment and
    checks shapes against the source model file, then with enrichment
    (``getmodes=True``) and checks EOF normalisation, orthogonality and
    explained variance, finally saving everything to the ensemble file.
    """
    # Specs
    ncpat = NCPAT_MANGA
    varnames = ['temp', 'sal', 'u', 'v']
    time = ('2014-01-01 13', '2014-01-25 12')
    nrens = 14
    # nrens = 2
    enrich = 1.5
    dtfile = (15, 'day')
    ncfile = ENS_NCFILE
    # 'temp' is requested both in 3D and at the surface
    level = {'temp':('3d', 'surf'), 'u':'surf', 'v':'surf'}
    depths = create_dep([-40., -30, -20, -10, 0.])

    # Direct generation: enrich <= 1 disables enrichment
    enrich = 0 # <= 1
    (temp, temp_surf, sal, u_surf, v_surf) = generate_pseudo_ensemble(
        ncpat, nrens=nrens, enrich=enrich, time=time, varnames=varnames,
        dtfile=dtfile, logger=LOGGER, anomaly=False, level=level,
        depths=depths)
    # Shape checks: member axis first, then depths for 3D temp
    assert temp.shape[0]==nrens
    assert sal.shape[0]==nrens
    assert temp.ndim==4
    assert temp.shape[1]==len(depths)
    assert temp_surf.ndim==3
    assert v_surf.ndim==3
    # First surface member must match the model file content
    f = cdms2.open(NCFILE_MANGA0)
    temp0 = f('TEMP', time=slice(1, 2), level=slice(-1, None), squeeze=1)
    f.close()
    assert_allclose(temp0, temp_surf[0])
    tsum = temp.sum()

    # Enrichment: also get the EOF modes
    enrich = 1.5
    ens = generate_pseudo_ensemble(
        ncpat, nrens=nrens, enrich=enrich, varnames=varnames, time=time,
        dtfile=dtfile, logger=LOGGER, getmodes=True, level=level,
        depths=depths)
    (temp, temp_surf, sal, u_surf, v_surf), modes = ens
    (temp_eof, temp_surf_eof, sal_eof, u_surf_eof, v_surf_eof) = modes['eofs']
    ev = modes['eigenvalues']
    temp_var, temp_surf_var, sal_var, u_surf_var, v_surf_var = (
        modes['variance'])
    # Enriched ensemble must differ from the direct one
    assert tsum!=temp.sum()
    assert temp.shape[0]==nrens
    assert sal.shape[0]==nrens
    # First EOF must be normalised over all variables
    eof0 = N.concatenate(
        (temp_eof[0].compressed(), temp_surf_eof[0].compressed(),
         sal_eof[0].compressed(), u_surf_eof[0].compressed(),
         v_surf_eof[0].compressed()))
    assert_allclose((eof0**2).sum(), 1)
    # First two EOFs must be orthogonal
    eof1 = N.concatenate(
        (temp_eof[1].compressed(), temp_surf_eof[1].compressed(),
         sal_eof[1].compressed(), u_surf_eof[1].compressed(),
         v_surf_eof[1].compressed()))
    assert_allclose((eof0*eof1).sum(), 0, atol=1e-7)
    assert_allclose(ev.total_variance, eof0.size)
    # Explained variance fractions must be high but below 1
    expv = (ev**2).sum()/ev.total_variance
    assert expv > .8 and expv < 1
    expvm = temp.var(axis=0).mean()/temp_var.mean()
    assert expvm > .8 and expvm < 1

    # Save ensemble
    f = cdms2.open(ncfile, 'w')
    for var in (
            temp, temp_surf, sal, u_surf, v_surf,
            temp_eof, temp_surf_eof, sal_eof, u_surf_eof, v_surf_eof,
            temp_var, temp_surf_var, sal_var, u_surf_var, v_surf_var,
            ev):
        f.write(var)
    f.close()
def test_pack_mv2_scattered_without_record_fixed_norm():
    """Pack a scattered masked array with a fixed norm, no record dim."""
    # Fake data: scattered points, some masked, squared in place
    lons, lats, data = create_mv2_scattered_xyzt(nt=0)
    data[:, 3:5] = MV2.masked
    data[:] *= data
    norm = data.std() * 2.

    # Pack with a fixed norm and no mean removal
    packer = Packer(data, nordim=True, logger=LOGGER, mean=False, norm=norm)
    packed = packer.packed_data.copy()

    # Unpack
    unpacked = packer.unpack(packer.packed_data)

    # Repack
    repacked = packer.repack(data)

    # Renorm then norm back
    norm = packer.norm
    packer.set_norm(norm * 2)
    renormed = packer.packed_data.copy()
    packer.set_norm(norm)
    backnormed = packer.packed_data.copy()

    # Expected packed values computed by hand
    svalid = ~data.mask
    cdata = data.compressed()
    cnorm = cdata.std() * 2
    cdata /= cnorm

    # Checks: metadata, roundtrips and renorming
    assert_allclose(packer.good, svalid)
    assert_allclose(packer.sshape, data.shape)
    assert_allclose(packer.norm, data.asma().std() * 2)
    assert_allclose(packer.mean, 0)
    assert_allclose(packed.shape, svalid.sum())
    assert_allclose(packed, cdata)
    assert_allclose(unpacked, data)
    assert_allclose(repacked, packer.packed_data)
    assert_allclose(renormed, packed / 2)
    assert_allclose(backnormed, packed)

    return packer
def test_pack_mv2_curved_with_record():
    """Pack a curvilinear gridded array that has a record (member) axis."""
    # Fake data on a rotated grid, with some points masked
    nt = 5
    data = create_mv2_gridder_xyzt(nt=nt, rotate=30)
    data.id = 'mydata'
    data.long_name = 'My long name'
    data[:, :, 3:5, 2:4] = MV2.masked
    # Turn the time axis into a generic 'member' record axis
    raxis = data.getTime()
    del raxis.axis
    del raxis.units
    raxis.id = 'member'

    # Pack
    packer = Packer(data, nordim=False, logger=LOGGER)
    packed = packer.packed_data.copy()

    # Unpack: all records, then only the first half
    unpacked = packer.unpack(packed, format=2)
    unpacked2 = packer.unpack(packed[:, :nt // 2])

    # Repack
    repacked = packer.repack(data)

    # Renorm then norm back
    norm = packer.norm
    packer.set_norm(norm * 2)
    renormed = packer.packed_data.copy()
    packer.set_norm(norm)
    backnormed = packer.packed_data.copy()

    # Expected packed values computed by hand (anomaly, normalised)
    svalid = ~data[0].mask
    cdata = data.asma().reshape(data.shape[0], -1).compress(svalid.ravel(),
                                                            axis=1)
    cdata -= cdata.mean(axis=0)
    cnorm = cdata.std()
    cdata /= cnorm
    dmean = data.asma().mean(axis=0)

    # Checks: metadata, roundtrips, attributes and renorming
    assert_allclose(packer.good, svalid)
    assert_allclose(packer.sshape, data.shape[1:])
    assert_allclose(packer.mean, dmean)
    assert_allclose(packer.norm, (data.asma() - dmean).std())
    assert_allclose(packed.shape, (svalid.sum(), data.shape[0]))
    assert_allclose(packed, cdata.T)
    assert_allclose(unpacked, data)
    assert unpacked.id == 'mydata'
    assert unpacked.long_name == 'My long name'
    assert_allclose(unpacked2, unpacked[:nt // 2])
    assert_allclose(repacked, packer.packed_data)
    assert_allclose(renormed, packed / 2)
    assert_allclose(backnormed, packed)

    return packer