def test_rasterize_init(self):
    """Rasterization must reject mismatched target grids and a missing radius."""
    x = Data(None, None)
    x._init_sample_object(ny=1, nx=272)
    # random station coordinates: lon in 5 ... 15, lat in 0 ... 20
    x.lon = np.random.random(272) * 10. + 5.
    x.lat = np.random.random(272) * 20. + 0.

    # target lon/lat grids with inconsistent shapes --> ValueError expected
    with self.assertRaises(ValueError):
        x._rasterize(np.random.random((10, 20)), np.random.random((30, 20)), radius=0.1)

    # consistent shapes, but no radius given --> ValueError expected
    with self.assertRaises(ValueError):
        x._rasterize(np.random.random((10, 20)), np.random.random((10, 20)), radius=None)
def test_rasterize_init(self):
    """Invalid rasterization arguments must raise ValueError."""
    x = Data(None, None)
    x._init_sample_object(ny=1, nx=272)
    x.lon = np.random.random(272) * 10. + 5.   # longitudes: 5 ... 15
    x.lat = np.random.random(272) * 20. + 0.   # latitudes: 0 ... 20

    bad_lon = np.random.random((10, 20))
    bad_lat = np.random.random((30, 20))
    # shape mismatch between target lon and lat grids
    with self.assertRaises(ValueError):
        x._rasterize(bad_lon, bad_lat, radius=0.1)

    good_lon = np.random.random((10, 20))
    good_lat = np.random.random((10, 20))
    # consistent grids, but radius=None is not allowed
    with self.assertRaises(ValueError):
        x._rasterize(good_lon, good_lat, radius=None)
def test_rasterize_data(self):
    """
    Rasterize four irregular samples onto a regular 3x3 grid.

    testdataset (expected raster content; rows from north to south):

        +---+---+---+
        |1.2|2.3|   |
        +---+---+---+
        |   |   |0.7|
        +---+---+---+
        |   |5.2|   |
        +---+---+---+
    """
    x = Data(None, None)
    x._init_sample_object(ny=1, nx=272)
    # irregular sample coordinates and values (four points)
    x.lon = np.asarray([2.25, 2.45, 1.8, 3.6])
    x.lat = np.asarray([11.9, 10.1, 10.2, 11.3])
    x.data = np.asarray([5.2, 2.3, 1.2, 0.7])

    # target grid
    lon = np.asarray([1.5, 2.5, 3.5])
    lat = np.asarray([10., 11., 12.])
    LON, LAT = np.meshgrid(lon, lat)

    # rasterize data
    # a tiny search radius matches nothing --> everything masked
    res = x._rasterize(LON, LAT, radius=0.000001, return_object=True)
    self.assertEqual(res.data.mask.sum(), np.prod(LON.shape))
    # return_object=False with no valid data is an error
    with self.assertRaises(ValueError):
        res = x._rasterize(LON, LAT, radius=0.000001, return_object=False)

    # check valid results with a radius large enough to capture the samples
    res = x._rasterize(LON, LAT, radius=0.5, return_object=True)
    self.assertEqual(res.data[0, 0], 1.2)
    self.assertEqual(res.data[0, 1], 2.3)
    self.assertEqual(res.data[1, 2], 0.7)
    # exactly four cells should carry valid (unmasked) values
    self.assertEqual(res.ny * res.nx - res.data.mask.sum(), 4)
def test_rasterize_data(self):
    """
    Rasterize four irregular samples onto a regular 3x3 target grid.

    Expected raster content (rows from north to south):

        +---+---+---+
        |1.2|2.3|   |
        +---+---+---+
        |   |   |0.7|
        +---+---+---+
        |   |5.2|   |
        +---+---+---+
    """
    x = Data(None, None)
    x._init_sample_object(ny=1, nx=272)
    x.lon = np.asarray([2.25, 2.45, 1.8, 3.6])
    x.lat = np.asarray([11.9, 10.1, 10.2, 11.3])
    x.data = np.asarray([5.2, 2.3, 1.2, 0.7])

    # build the regular target grid
    target_lon = np.asarray([1.5, 2.5, 3.5])
    target_lat = np.asarray([10., 11., 12.])
    LON, LAT = np.meshgrid(target_lon, target_lat)

    # a vanishingly small radius matches no sample: all cells masked
    tiny_radius = 0.000001
    res = x._rasterize(LON, LAT, radius=tiny_radius, return_object=True)
    self.assertEqual(res.data.mask.sum(), np.prod(LON.shape))

    # without a return object, an all-masked result is an error
    with self.assertRaises(ValueError):
        x._rasterize(LON, LAT, radius=tiny_radius, return_object=False)

    # with a sensible radius the expected cells are filled
    res = x._rasterize(LON, LAT, radius=0.5, return_object=True)
    expected_cells = [((0, 0), 1.2), ((0, 1), 2.3), ((1, 2), 0.7)]
    for (row, col), value in expected_cells:
        self.assertEqual(res.data[row, col], value)
    # exactly four grid cells carry valid data
    self.assertEqual(res.ny * res.nx - res.data.mask.sum(), 4)
def test_lomb_basic(self):
    """
    Basic checks of the Lomb-Scargle periodogram implementation.

    Fits known cosine signals and verifies that the recovered amplitude
    and phase at the injected periods agree with the inputs to within 5%.
    NOTE(review): relies on self.t being a time axis in days — confirm in setUp.
    """

    def _sample_data(t, w, A, B):
        # noise term is deliberately zeroed out (deterministic test signal)
        e = np.random.random(len(t)) * 0.
        y = A * np.cos(w * self.t + B)
        return y, e

    def _test_ratio(x, y, thres=0.05):
        # assert that x/y deviates from 1 by no more than `thres`
        r = np.abs(1. - x / y)
        print r, x / y
        self.assertTrue(r <= thres)  # accuracy of ratio within 5%

    # test with single frequency
    p_ref = 10.
    w = 2. * np.pi / p_ref
    y, e = _sample_data(self.t, w, 5., 0.1)
    P = np.arange(2., 20., 2.)  # target period [days]
    Ar, Br = lomb_scargle_periodogram(self.t, P, y + e, corr=False)
    # P[4] == 10 days == the injected period
    _test_ratio(Ar[4], 5.)
    _test_ratio(Br[4], 0.1)
    Ar, Br, Rr, Pr = lomb_scargle_periodogram(self.t, P, y)
    _test_ratio(Ar[4], 5.)
    _test_ratio(Br[4], 0.1)
    #~ self.assertEqual(Rr[4], 1.)
    #~ self.assertEqual(Pr[4], 0.)

    # test for functions with overlapping frequencies
    # (identical periods: amplitudes should add up)
    p_ref1 = 365.
    p_ref2 = 365.
    w1 = 2. * np.pi / p_ref1
    w2 = 2. * np.pi / p_ref2
    y1, e1 = _sample_data(self.t, w1, 4., 0.1)
    y2, e2 = _sample_data(self.t, w2, 3.6, 0.1)
    P = np.arange(1., 366., 1.)  # target period [days]
    Ar, Br = lomb_scargle_periodogram(self.t, P, y1 + e1 + y2 + e2, corr=False)
    _test_ratio(Ar[-1], 7.6)
    _test_ratio(Br[-1], 0.1)

    # overlapping frequencies 2 (two distinct periods: 100 and 200 days)
    p_ref1 = 100.
    p_ref2 = 200.
    w1 = 2. * np.pi / p_ref1
    w2 = 2. * np.pi / p_ref2
    y1, e1 = _sample_data(self.t, w1, 2., np.pi * 0.3)  # don't choose pi for phase, as this will result in an optimization with negative amplitude and zero phase (= sin)
    y2, e2 = _sample_data(self.t, w2, 3., np.pi * 0.5)
    P = np.arange(1., 366., 1.)  # target period [days]
    hlp = y1 + e1 + y2 + e2
    Ar, Br = lomb_scargle_periodogram(self.t, P, hlp, corr=False)

    # sample data object; len(y) == len(self.t), so nt matches the signal length
    D = Data(None, None)
    D._init_sample_object(nt=len(y), ny=1, nx=1)
    D.data[:, 0, 0] = np.ma.array(hlp, mask=hlp != hlp)
    D.time = self.t

    D_dummy = Data(None, None)
    D_dummy._init_sample_object(nt=len(y), ny=1, nx=1)
    with self.assertRaises(ValueError):
        D_dummy.time_str = 'hours since 2001-01-01'  # only days currently supported!
        xx, yy = D_dummy.lomb_scargle_periodogram(P, return_object=False)

    AD, BD = D.lomb_scargle_periodogram(P, return_object=False, corr=False)
    AD1, BD1 = D.lomb_scargle_periodogram(P, return_object=True, corr=False)
    self.assertEqual(AD.shape, BD.shape)
    self.assertEqual(D.ny, AD.shape[1])
    self.assertEqual(D.nx, AD.shape[2])

    # index 99 corresponds to the 100-day period, 199 to the 200-day period
    _test_ratio(Ar[99], 2.)
    _test_ratio(AD[99, 0, 0], 2.)
    _test_ratio(AD1.data[99, 0, 0], 2.)
    _test_ratio(Ar[199], 3.)
    _test_ratio(AD[199, 0, 0], 3.)
    _test_ratio(AD1.data[199, 0, 0], 3.)
    _test_ratio(Br[99], np.pi * 0.3)
    _test_ratio(BD[99, 0, 0], np.pi * 0.3)
    _test_ratio(BD1.data[99, 0, 0], np.pi * 0.3)
    _test_ratio(Br[199], np.pi * 0.5)
    _test_ratio(BD[199, 0, 0], np.pi * 0.5)
    _test_ratio(BD1.data[199, 0, 0], np.pi * 0.5)

    # test for data with gaps
    # tests are not very robust yet as results depend on noise applied!
    p_ref1 = 100.
    p_ref2 = 200.
    w1 = 2. * np.pi / p_ref1
    w2 = 2. * np.pi / p_ref2
    y1, e1 = _sample_data(self.t, w1, 2., np.pi * 0.3)  # don't choose pi for phase, as this will result in an optimization with negative amplitude and zero phase (= sin)
    y2, e2 = _sample_data(self.t, w2, 3., np.pi * 0.5)
    P = np.arange(1., 366., 1.)  # target period [days]
    # randomly drop ~10% of the samples to emulate gaps
    ran = np.random.random(len(self.t))
    msk = ran > 0.1
    tmsk = self.t[msk]
    yref = y1 + e1 + y2 + e2
    ymsk = yref[msk]
    Ar, Br = lomb_scargle_periodogram(tmsk, P, ymsk, corr=False)
class TestPycmbsPlots(unittest.TestCase):
    """Smoke tests for the pycmbs plotting classes (mostly "does it run")."""

    def setUp(self):
        # small sample dataset shared by most tests
        self.D = Data(None, None)
        self.D._init_sample_object(nt=1000, ny=1, nx=1)
        self._tmpdir = tempfile.mkdtemp()

    def test_ReichlerPlotGeneral(self):
        RP = ReichlerPlot()
        for i in xrange(10):
            RP.add([i * 12.], 'test' + str(i))
        RP.simple_plot()
        RP.bar(title='some title', vmin=-10., vmax=10.)
        #~ RP.circle_plot()

    def test_ScatterPlot_General(self):
        x = self.D
        S = ScatterPlot(x)
        S.plot(x)
        S.legend()

    def test_rotate_ticks(self):
        f = plt.figure()
        ax = f.add_subplot(111)
        ax.plot(np.random.random(1000))
        rotate_ticks(ax, 20.)

    def test_correlation_analysis(self):
        x = self.D
        y = self.D
        C = CorrelationAnalysis(x, y)
        C.do_analysis()

    def test_ScatterPlot_GeneralWithNormalization(self):
        x = self.D
        S = ScatterPlot(x, normalize_data=True)
        S.plot(x)
        S.legend()

    def test_ScatterPlot_FldemeanFalse(self):
        x = self.D
        S = ScatterPlot(x)
        S.plot(x, fldmean=False)
        S.legend()

    def test_ScatterPlot_InvalidShape(self):
        # 4D data is not plottable --> ValueError expected
        x = self.D
        S = ScatterPlot(x)
        y = self.D.copy()
        y.data = np.random.random((10, 20, 30, 40))
        with self.assertRaises(ValueError):
            S.plot(y, fldmean=False)

    def test_LinePlot_General(self):
        x = self.D
        L = LinePlot()
        L1 = LinePlot(regress=True)
        L.plot(x)
        L1.plot(x)

    def test_LinePlot_WithAxis(self):
        x = self.D
        f = plt.figure()
        ax = f.add_subplot(111)
        L = LinePlot(ax=ax)
        L.plot(x)

    def test_HistogrammPlot_General(self):
        H = HistogrammPlot(normalize=True)
        H.plot(self.D, bins=10, shown=True)

    def test_ZonalPlot(self):
        Z = ZonalPlot()
        Z.plot(self.D)

    def test_map_difference_General(self):
        map_difference(self.D, self.D)

    def test_GlecklerPlot_InvalidNumberOfObservations(self):
        # five observations for one model/variable is more than supported
        G = GlecklerPlot()
        G.add_model('echam5')
        G.add_model('mpi-esm')
        G.add_variable('ta')
        G.add_data('ta', 'echam5', 0.5, pos=1)
        G.add_data('ta', 'echam5', 0.25, pos=2)
        G.add_data('ta', 'echam5', -0.25, pos=3)
        G.add_data('ta', 'mpi-esm', -0.25, pos=4)
        G.add_data('ta', 'mpi-esm', -0.25, pos=5)
        with self.assertRaises(ValueError):
            G.plot()

    def test_GlecklerPlot_4obs(self):
        G = GlecklerPlot()
        G.add_model('echam5')
        G.add_model('mpi-esm')
        G.add_variable('ta')
        G.add_variable('P')
        G.add_data('P', 'echam5', 0.5, pos=1)
        G.add_data('P', 'echam5', 0.25, pos=2)
        G.add_data('P', 'echam5', -0.25, pos=3)
        G.add_data('P', 'mpi-esm', -0.25, pos=4)
        G.plot()

    def test_GlecklerPlot(self):
        G = GlecklerPlot()
        G.add_model('echam5')
        G.add_model('mpi-esm')
        G.add_variable('ta')
        G.add_variable('P')
        G.add_data('ta', 'echam5', 0.5, pos=1)
        G.add_data('P', 'echam5', 0.25, pos=1)
        G.add_data('P', 'echam5', -0.25, pos=2)
        G.add_data('P', 'mpi-esm', -0.25, pos=1)
        G.plot()
        G.plot_model_error('ta')
        G.plot_model_ranking('ta')
        # explicit .tex suffix
        G.write_ranking_table('ta', self._tmpdir + os.sep + 'nix.tex', fmt='latex')
        self.assertTrue(os.path.exists(self._tmpdir + os.sep + 'nix.tex'))
        if os.path.exists(self._tmpdir + os.sep + 'nix.tex'):
            os.remove(self._tmpdir + os.sep + 'nix.tex')
        # suffix should be appended automatically when missing
        G.write_ranking_table('ta', self._tmpdir + os.sep + 'nix1', fmt='latex')
        self.assertTrue(os.path.exists(self._tmpdir + os.sep + 'nix1.tex'))
        if os.path.exists(self._tmpdir + os.sep + 'nix1.tex'):
            os.remove(self._tmpdir + os.sep + 'nix1.tex')

    def test_old_map_plot(self):
        xx_map_plot(self.D)
        #~ xx_map_plot(self.D, use_basemap=True)

    def test_HstackTimeSeries(self):
        HT = HstackTimeseries()
        for i in xrange(15):
            x = np.random.random(100) * 2. - 1.
            HT.add_data(x, 'model' + str(i).zfill(3))
        HT.plot(cmap='RdBu_r', interpolation='nearest', vmin=-1., vmax=1., nclasses=15, title='Testtitle')

    def test_HstackTimeSeries_invalidData(self):
        # 2D input is rejected
        HT = HstackTimeseries()
        x = np.random.random((50, 60)) * 2. - 1.
        with self.assertRaises(ValueError):
            HT.add_data(x, 'test')

    def test_HstackTimeSeries_inconsistent_data_geometries(self):
        # second series with different length is rejected
        HT = HstackTimeseries()
        x = np.random.random(100) * 2. - 1.
        y = np.random.random(1000) * 2. - 1.
        HT.add_data(x, 'A')
        with self.assertRaises(ValueError):
            HT.add_data(y, 'B')

    def test_HstackTimeSeries_duplicate_keys(self):
        HT = HstackTimeseries()
        x = np.random.random(100) * 2. - 1.
        HT.add_data(x, 'A')
        with self.assertRaises(ValueError):
            HT.add_data(x, 'A')

    def test_violin_plot(self):
        Violin_example()

    def test_globalmeanplot(self):
        G = GlobalMeanPlot()
        with self.assertRaises(ValueError):
            G.plot(self.D, stat_type='no_stat_type')
        G.plot(self.D, show_std=True)
        G.plot_mean_result()

    def test_HstackTimeSeries_monthly_ticks(self):
        HT = HstackTimeseries()
        f = plt.figure()
        ax = f.add_subplot(111)
        ax.plot(np.arange(12))
        HT._set_monthly_xtick_labels(ax)

    def test_HstackTimeSeries_noplotdone(self):
        # colorbar requires a plot first
        HT = HstackTimeseries()
        with self.assertRaises(ValueError):
            HT._add_colorbar()

    def test_GlobalMean(self):
        GM1 = GlobalMeanPlot(climatology=False)
        self.assertEqual(GM1.nplots, 1)
        f = plt.figure()
        ax1 = f.add_subplot(111)
        axA = f.add_subplot(211)
        axB = f.add_subplot(212)
        # climatology mode needs two axes; a single one is an error
        with self.assertRaises(ValueError):
            GM2 = GlobalMeanPlot(climatology=True, ax=ax1)
        GM2 = GlobalMeanPlot(climatology=True, ax=axA, ax1=axB)
        self.assertEqual(GM2.nplots, 2)

    def test_Hovmoeller(self):
        H = HovmoellerPlot(self.D)
        with self.assertRaises(ValueError):
            H.plot()
        H.plot(climits=[0., 1.])
class TestPycmbsBenchmarkingModels(unittest.TestCase):
    """Tests for the benchmarking Model classes (save/load, CMIP5 handling)."""

    def setUp(self):
        self.D = Data(None, None)
        self.D._init_sample_object(nt=1000, ny=1, nx=1)

        # generate dummy Model object
        data_dir = './test/'
        varmethods = {'albedo': 'get_albedo()', 'sis': 'get_sis()'}
        self.model = models.Model(data_dir, varmethods, name='testmodel', intervals='monthly')

        sis = self.D.copy()
        sis.mulc(5., copy=False)
        sis.label = 'sisdummy'

        alb = self.D.copy()
        alb.label = 'albedodummy'

        # add some dummy data variable
        self.model.variables = {'albedo': alb, 'sis': sis}

    def test_save_prefix_missing(self):
        # save() without a prefix must fail
        m = self.model
        odir = tempfile.mkdtemp() + os.sep
        with self.assertRaises(ValueError):
            m.save(odir)

    def test_save_create_odir(self):
        # output directory is created on demand
        m = self.model
        odir = tempfile.mkdtemp() + os.sep
        if os.path.exists(odir):
            os.system('rm -rf ' + odir)
        m.save(odir, prefix='test')
        self.assertTrue(os.path.exists(odir))
        os.system('rm -rf ' + odir)

    def test_save(self):
        # one output file per model variable is written
        m = self.model
        odir = tempfile.mkdtemp() + os.sep
        sisfile = odir + 'testoutput_SIS.nc'
        albfile = odir + 'testoutput_ALBEDO.nc'
        if os.path.exists(sisfile):
            os.remove(sisfile)
        if os.path.exists(albfile):
            os.remove(albfile)
        m.save(odir, prefix='testoutput')
        self.assertTrue(os.path.exists(sisfile))
        self.assertTrue(os.path.exists(albfile))
        if os.path.exists(sisfile):
            os.remove(sisfile)
        if os.path.exists(albfile):
            os.remove(albfile)
        os.system('rm -rf ' + odir)

    def test_cmip5_init_singlemember(self):
        data_dir = tempfile.mkdtemp()
        # invalid model identifier (missing/duplicated '#<ens>' suffix)
        with self.assertRaises(ValueError):
            M = models.CMIP5RAW_SINGLE(data_dir, 'MPI-M:MPI-ESM-LR1', 'amip', {}, intervals='monthly')
        with self.assertRaises(ValueError):
            M = models.CMIP5RAW_SINGLE(data_dir, 'MPI-M:MPI-ESM-LR#1#2', 'amip', {}, intervals='monthly')
        M1 = models.CMIP5RAW_SINGLE(data_dir, 'MPI-M:MPI-ESM-LR#1', 'amip', {}, intervals='monthly')
        M2 = models.CMIP5RAW_SINGLE(data_dir, 'MPI-M:MPI-ESM-LR#728', 'amip', {}, intervals='monthly')
        self.assertEqual(M1.ens_member, 1)
        self.assertEqual(M2.ens_member, 728)

    def test_cmip5_singlemember_filename(self):
        data_dir = tempfile.mkdtemp()

        # generate testfile mimicking the CMIP5 directory layout
        testfile = data_dir + os.sep + 'MPI-M' + os.sep + 'MPI-ESM-LR' + os.sep + 'amip' + os.sep + 'mon' + os.sep + 'atmos' + os.sep + 'Amon' + os.sep + 'r1i1p1' + os.sep + 'ta' + os.sep + 'ta_Amon_MPI-ESM-LR_amip_r1i1p1_197901-200812.nc'
        os.makedirs(os.path.dirname(testfile))
        os.system('touch ' + testfile)
        self.assertTrue(os.path.exists(testfile))

        M = models.CMIP5RAW_SINGLE(data_dir, 'MPI-M:MPI-ESM-LR#1', 'amip', {}, intervals='monthly')
        f = M.get_single_ensemble_file('ta', mip='Amon', realm='atmos')
        self.assertTrue(os.path.exists(f))
        self.assertEqual(f, testfile)
class TestPycmbsPlots(unittest.TestCase):
    """Smoke tests for the pycmbs plotting classes (mostly "does it run")."""

    def setUp(self):
        # small sample dataset shared by most tests
        self.D = Data(None, None)
        self.D._init_sample_object(nt=1000, ny=1, nx=1)
        self._tmpdir = tempfile.mkdtemp()

    def test_ReichlerPlotGeneral(self):
        RP = ReichlerPlot()
        for i in xrange(10):
            RP.add([i * 12.], 'test' + str(i))
        RP.simple_plot()
        RP.bar(title='some title', vmin=-10., vmax=10.)
        #~ RP.circle_plot()

    def test_ScatterPlot_General(self):
        x = self.D
        S = ScatterPlot(x)
        S.plot(x)
        S.legend()

    def test_rotate_ticks(self):
        f = plt.figure()
        ax = f.add_subplot(111)
        ax.plot(np.random.random(1000))
        rotate_ticks(ax, 20.)

    def test_correlation_analysis(self):
        x = self.D
        y = self.D
        C = CorrelationAnalysis(x, y)
        C.do_analysis()

    def test_ScatterPlot_GeneralWithNormalization(self):
        x = self.D
        S = ScatterPlot(x, normalize_data=True)
        S.plot(x)
        S.legend()

    def test_ScatterPlot_FldemeanFalse(self):
        x = self.D
        S = ScatterPlot(x)
        S.plot(x, fldmean=False)
        S.legend()

    def test_ScatterPlot_InvalidShape(self):
        # 4D data is not plottable --> ValueError expected
        x = self.D
        S = ScatterPlot(x)
        y = self.D.copy()
        y.data = np.random.random((10, 20, 30, 40))
        with self.assertRaises(ValueError):
            S.plot(y, fldmean=False)

    def test_LinePlot_General(self):
        x = self.D
        L = LinePlot()
        L1 = LinePlot(regress=True)
        L.plot(x)
        L1.plot(x)

    def test_LinePlot_WithAxis(self):
        x = self.D
        f = plt.figure()
        ax = f.add_subplot(111)
        L = LinePlot(ax=ax)
        L.plot(x)

    def test_HistogrammPlot_General(self):
        H = HistogrammPlot(normalize=True)
        H.plot(self.D, bins=10, shown=True)

    def test_ZonalPlot(self):
        Z = ZonalPlot()
        Z.plot(self.D)

    def test_map_difference_General(self):
        map_difference(self.D, self.D)

    def test_GlecklerPlot_InvalidNumberOfObservations(self):
        # five observations for one model/variable is more than supported
        G = GlecklerPlot()
        G.add_model('echam5')
        G.add_model('mpi-esm')
        G.add_variable('ta')
        G.add_data('ta', 'echam5', 0.5, pos=1)
        G.add_data('ta', 'echam5', 0.25, pos=2)
        G.add_data('ta', 'echam5', -0.25, pos=3)
        G.add_data('ta', 'mpi-esm', -0.25, pos=4)
        G.add_data('ta', 'mpi-esm', -0.25, pos=5)
        with self.assertRaises(ValueError):
            G.plot()

    def test_GlecklerPlot_4obs(self):
        G = GlecklerPlot()
        G.add_model('echam5')
        G.add_model('mpi-esm')
        G.add_variable('ta')
        G.add_variable('P')
        G.add_data('P', 'echam5', 0.5, pos=1)
        G.add_data('P', 'echam5', 0.25, pos=2)
        G.add_data('P', 'echam5', -0.25, pos=3)
        G.add_data('P', 'mpi-esm', -0.25, pos=4)
        G.plot()

    def test_GlecklerPlot(self):
        G = GlecklerPlot()
        G.add_model('echam5')
        G.add_model('mpi-esm')
        G.add_variable('ta')
        G.add_variable('P')
        G.add_data('ta', 'echam5', 0.5, pos=1)
        G.add_data('P', 'echam5', 0.25, pos=1)
        G.add_data('P', 'echam5', -0.25, pos=2)
        G.add_data('P', 'mpi-esm', -0.25, pos=1)
        G.plot()
        G.plot_model_error('ta')
        G.plot_model_ranking('ta')
        # explicit .tex suffix
        G.write_ranking_table('ta', self._tmpdir + os.sep + 'nix.tex', fmt='latex')
        self.assertTrue(os.path.exists(self._tmpdir + os.sep + 'nix.tex'))
        if os.path.exists(self._tmpdir + os.sep + 'nix.tex'):
            os.remove(self._tmpdir + os.sep + 'nix.tex')
        # suffix should be appended automatically when missing
        G.write_ranking_table('ta', self._tmpdir + os.sep + 'nix1', fmt='latex')
        self.assertTrue(os.path.exists(self._tmpdir + os.sep + 'nix1.tex'))
        if os.path.exists(self._tmpdir + os.sep + 'nix1.tex'):
            os.remove(self._tmpdir + os.sep + 'nix1.tex')

    def test_old_map_plot(self):
        xx_map_plot(self.D)
        #~ xx_map_plot(self.D, use_basemap=True)

    def test_HstackTimeSeries(self):
        HT = HstackTimeseries()
        for i in xrange(15):
            x = np.random.random(100) * 2. - 1.
            HT.add_data(x, 'model' + str(i).zfill(3))
        HT.plot(cmap='RdBu_r', interpolation='nearest', vmin=-1., vmax=1., nclasses=15, title='Testtitle')

    def test_HstackTimeSeries_invalidData(self):
        # 2D input is rejected
        HT = HstackTimeseries()
        x = np.random.random((50, 60)) * 2. - 1.
        with self.assertRaises(ValueError):
            HT.add_data(x, 'test')

    def test_HstackTimeSeries_inconsistent_data_geometries(self):
        # second series with different length is rejected
        HT = HstackTimeseries()
        x = np.random.random(100) * 2. - 1.
        y = np.random.random(1000) * 2. - 1.
        HT.add_data(x, 'A')
        with self.assertRaises(ValueError):
            HT.add_data(y, 'B')

    def test_HstackTimeSeries_duplicate_keys(self):
        HT = HstackTimeseries()
        x = np.random.random(100) * 2. - 1.
        HT.add_data(x, 'A')
        with self.assertRaises(ValueError):
            HT.add_data(x, 'A')

    def test_violin_plot(self):
        Violin_example()

    def test_globalmeanplot(self):
        G = GlobalMeanPlot()
        with self.assertRaises(ValueError):
            G.plot(self.D, stat_type='no_stat_type')
        G.plot(self.D, show_std=True)
        G.plot_mean_result()

    def test_HstackTimeSeries_monthly_ticks(self):
        HT = HstackTimeseries()
        f = plt.figure()
        ax = f.add_subplot(111)
        ax.plot(np.arange(12))
        HT._set_monthly_xtick_labels(ax)

    def test_HstackTimeSeries_noplotdone(self):
        # colorbar requires a plot first
        HT = HstackTimeseries()
        with self.assertRaises(ValueError):
            HT._add_colorbar()

    def test_GlobalMean(self):
        GM1 = GlobalMeanPlot(climatology=False)
        self.assertEqual(GM1.nplots, 1)
        f = plt.figure()
        ax1 = f.add_subplot(111)
        axA = f.add_subplot(211)
        axB = f.add_subplot(212)
        # climatology mode needs two axes; a single one is an error
        with self.assertRaises(ValueError):
            GM2 = GlobalMeanPlot(climatology=True, ax=ax1)
        GM2 = GlobalMeanPlot(climatology=True, ax=axA, ax1=axB)
        self.assertEqual(GM2.nplots, 2)

    def test_Hovmoeller(self):
        H = HovmoellerPlot(self.D)
        with self.assertRaises(ValueError):
            H.plot()
        H.plot(climits=[0., 1.])
class TestMapPlotGeneric(unittest.TestCase):
    """Smoke tests for the mapping module (SingleMap and friends)."""

    def setUp(self):
        self.map_plot = mapping.MapPlotGeneric()
        self.D = Data(None, None)
        self.D._init_sample_object(nt=1000, ny=10, nx=20)
        self._tmpdir = tempfile.mkdtemp()

    def test_SingleMap_Init(self):
        try:
            import cartopy.crs as ccrs
        except:
            return True  # no testing if cartopy not installed
        # just test if things pass
        SM1 = mapping.SingleMap(self.D)
        SM2 = mapping.SingleMap(self.D, stat_type='sum')
        proj_prop = {'projection': 'robin'}
        SM3 = mapping.SingleMap(self.D, backend='basemap', stat_type='median')
        SM4 = mapping.SingleMap(self.D, backend='cartopy')
        SM1.plot(show_zonal=True)
        SM2.plot(show_zonal=True, colorbar_orientation='horizontal')
        SM3.plot(show_zonal=True, colorbar_orientation='horizontal', proj_prop=proj_prop)
        SM4.plot(show_zonal=True, colorbar_orientation='horizontal', proj_prop=proj_prop)

    def test_SingleMap_WithoutColorbar(self):
        SM = mapping.SingleMap(self.D)
        SM.plot(show_colorbar=False)

    def test_invalid_colorbar_orientation(self):
        SM = mapping.SingleMap(self.D)
        with self.assertRaises(ValueError):
            SM.plot(colorbar_orientation='something')

    def test_SingleMap_WithPredefinedAxis(self):
        f = plt.figure()
        ax = f.add_subplot(2, 1, 1)
        SM1 = mapping.SingleMap(self.D, ax=ax)

    def test_SingleMap_WithPredefinedAxisButWhichIsNone(self):
        ax = None
        SM1 = mapping.SingleMap(self.D, ax=ax)

    def test_SingleMap_InitWithInvalidBackend(self):
        # just test if things pass
        with self.assertRaises(ValueError):
            SM = mapping.SingleMap(self.D, backend='some_invalid_backend')

    def test_SingleMap_InitWithMissingProjectionProperties(self):
        # just test if things pass; cartopy backend without proj_prop must fail on plot
        with self.assertRaises(ValueError):
            SM = mapping.SingleMap(self.D, backend='cartopy')
            SM.plot()

    def test_SingleMap_Save(self):
        SM = mapping.SingleMap(self.D, savefile=self._tmpdir + os.sep + 'my_test_save_file.nc')
        SM.save(save_mean=True, save_all=True)
        self.assertTrue(os.path.exists(self._tmpdir + os.sep + 'my_test_save_file.nc_timmean.nc'))
        self.assertTrue(os.path.exists(self._tmpdir + os.sep + 'my_test_save_file.nc_timmean.nc_all.nc'))
        if os.path.exists(self._tmpdir + os.sep + 'my_test_save_file.nc_timmean.nc'):
            os.remove(self._tmpdir + os.sep + 'my_test_save_file.nc_timmean.nc')
        if os.path.exists(self._tmpdir + os.sep + 'my_test_save_file.nc_timmean.nc_all.nc'):
            os.remove(self._tmpdir + os.sep + 'my_test_save_file.nc_timmean.nc_all.nc')

    @unittest.skip('skip as only for local testing')
    def test_SingleMap_add_cyclic(self):
        # NOTE(review): hard-coded local path; only runnable on the author's machine
        file = '/home/m300028/shared/data/SEP/variables/land/Ta_2m/cru_ts_3_00.1901.2006.tmp_miss_t63.nc'
        ofile = 'world.png'
        if os.path.exists(ofile):
            os.remove(ofile)
        d = Data(file, 'tmp', read=True)
        map_plot(d, use_basemap=True, savegraphicfile=ofile)
        if os.path.exists(ofile):
            os.remove(ofile)
class TestData(unittest.TestCase):
    """Tests for Polygon and Raster geometry helpers."""

    def setUp(self):
        self.D = Data(None, None)
        self.D._init_sample_object(nt=1000, ny=1, nx=1)

    def test_is_closed(self):
        # open polygon (last vertex != first vertex)
        poly = [(150., 20.), (-160., 30.), (-170., 10.), (170., 10.)]
        P = Polygon(3, poly)
        self.assertFalse(P.is_closed())
        # closed polygon (first vertex repeated at the end)
        poly1 = [(150., 20.), (-160., 30.), (-170., 10.), (170., 10.), (150., 20.)]
        P1 = Polygon(3, poly1)
        self.assertTrue(P1.is_closed())

    @unittest.skip('OGR import causes trouble in import locally, therefore currently skipping this test')
    def test_convertOGR(self):
        poly = [(150., 20.), (-160., 30.), (-170., 10.), (170., 10.)]
        P = Polygon(3, poly)
        A = P.convertToOGRPolygon()
        B = P.convertToOGRPolygon(ensure_positive=True)

    def test_shift(self):
        poly3 = [(150., 20.), (-160., 30.), (-170., 10.), (170., 10.)]
        P3 = Polygon(3, poly3)
        P3._shift_coordinates()  # shift longitudes by 200 degree
        self.assertEqual(P3.poly[0][0], 150.)
        self.assertEqual(P3.poly[1][0], 200.)
        self.assertEqual(P3.poly[2][0], 190.)
        self.assertEqual(P3.poly[3][0], 170.)

    def test_point_in_polygon(self):
        x = 1
        y = 1
        poly = [(0, 0), (2, 0), (2, 2), (0, 2)]
        P = Polygon(1, poly)
        self.assertTrue(P.point_in_poly(x, y))
        x = 4
        y = 4
        self.assertFalse(P.point_in_poly(x, y))

    @unittest.skip('OGR import causes trouble in import locally, therefore currently skipping this test')
    def test_point_in_polygon_latlon(self):
        # test for point in polygon across dateline
        x1 = -175.
        y1 = 50.
        poly1 = [(150., 60.), (-160., 60.), (-170., 45.), (170., 45.)]
        P1 = Polygon(1, poly1)
        self.assertTrue(P1.point_in_poly_latlon(x1, y1))

    def test_polygon_min_max(self):
        # bbox() returns (xmin, xmax, ymin, ymax)
        x = 1
        y = 1
        poly = [(-5, 0), (2, 0), (2, 2), (0, 6)]
        P = Polygon(1, poly)
        bbox = P.bbox()
        self.assertEqual(bbox[0], -5.)
        self.assertEqual(bbox[1], 2.)
        self.assertEqual(bbox[2], 0.)
        self.assertEqual(bbox[3], 6.)

    def test_raster_wrong_geometry(self):
        # lon/lat shape mismatch must raise
        lon = np.random.random((10, 20))
        lat = np.random.random((11, 20))
        with self.assertRaises(ValueError):
            R = Raster(lon, lat)

    def test_raster_wrong_latlon(self):
        # 1D coordinates are not accepted
        lon = np.random.random(10)
        lat = np.random.random(10)
        print lon.ndim
        with self.assertRaises(ValueError):
            R = Raster(lon, lat)

    def test_raster_no_Polygon(self):
        lon = np.random.random((10, 20))
        lat = np.random.random((10, 20))
        R = Raster(lon, lat)
        with self.assertRaises(ValueError):
            # an ndarray is not a Polygon
            P = np.arange(10)
            R._rasterize_single_polygon(P)

    def test_raster_single_polygon(self):
        lon = np.linspace(-180., 180., 361)
        lat = np.linspace(-90., 90., 181)
        LON, LAT = np.meshgrid(lon, lat)

        # test a single polygon; its id (5) should be the only value rasterized
        poly = [(-10., -10.), (-10., 20), (15., 0.), (0., -25.)]
        P = Polygon(5, poly)
        R = Raster(LON, LAT)
        R.mask = np.zeros(LON.shape) * np.nan
        R._rasterize_single_polygon(P)
        print np.unique(R.mask)
        R.mask = np.ma.array(R.mask, mask=np.isnan(R.mask))

        u = np.unique(R.mask[~R.mask.mask])
        print R.mask
        print u
        self.assertTrue(len(u) == 1)
        self.assertTrue(5. in u)

    #~ xxxxxxxxdef test_raster_single_polygon_fast(self):
        #~ lon = np.linspace(-180., 180., 361)
        #~ lat = np.linspace(-90., 90., 181)
        #~ LON,LAT=np.meshgrid(lon, lat)
        #~ # test a single polygon
        #~ poly = [(-10.,-10.), (-10.,20), (15.,0.), (0.,-25.)]
        #~ P = Polygon(5, poly)
        #~ R=Raster(LON,LAT)
        #~ R.mask = np.zeros(LON.shape)*np.nan
        #~ R._rasterize_single_polygon(P, method='fast')
        #~ R.mask = np.ma.array(R.mask, mask=np.isnan(R.mask))
        #~ u = np.unique(R.mask[~R.mask.mask])
        #~ self.assertTrue(len(u) == 1)
        #~ self.assertTrue(5. in u)

    def test_raster_multiple_polygon(self):
        # this is quite slow!
        lon = np.linspace(-180., 180., 361)
        lat = np.linspace(-90., 90., 181)
        LON, LAT = np.meshgrid(lon, lat)

        # two disjoint polygons with ids 1 and 2
        poly = []
        poly1 = [(-10., -10.), (-10., 20), (15., 0.), (0., -15.)]
        poly.append(Polygon(1, poly1))

        poly2 = [(-50., -80.), (-50., -70.), (-40., -70.), (-40., -75.)]
        poly.append(Polygon(2, poly2))

        R = Raster(LON, LAT)
        R.rasterize_polygons(poly)

        u = np.unique(R.mask[~R.mask.mask])
        self.assertTrue(len(u) == 2)
        self.assertTrue(1 in u)
        self.assertTrue(2 in u)
class TestPycmbsBenchmarkingModels(unittest.TestCase):
    """Tests for the benchmarking Model classes (save/load, CMIP5 handling)."""

    def setUp(self):
        self.D = Data(None, None)
        self.D._init_sample_object(nt=1000, ny=1, nx=1)

        # generate dummy Model object
        data_dir = '.' + os.sep + 'test' + os.sep
        varmethods = {'albedo': 'get_albedo()', 'sis': 'get_sis()'}
        self.model = models.Model(data_dir, varmethods, name='testmodel', intervals='monthly')

        sis = self.D.copy()
        sis.mulc(5., copy=False)
        sis.label = 'sisdummy'

        alb = self.D.copy()
        alb.label = 'albedodummy'

        # add some dummy data variable
        self.model.variables = {'albedo': alb, 'sis': sis}

    def test_save_prefix_missing(self):
        # save() without a prefix must fail
        m = self.model
        odir = tempfile.mkdtemp() + os.sep
        with self.assertRaises(ValueError):
            m.save(odir)

    def test_save_create_odir(self):
        # output directory is created on demand
        m = self.model
        odir = tempfile.mkdtemp() + os.sep
        if os.path.exists(odir):
            os.system('rm -rf ' + odir)
        m.save(odir, prefix='test')
        self.assertTrue(os.path.exists(odir))
        os.system('rm -rf ' + odir)

    def test_save(self):
        # one output file per model variable is written
        m = self.model
        odir = tempfile.mkdtemp() + os.sep
        sisfile = odir + 'testoutput_SIS.nc'
        albfile = odir + 'testoutput_ALBEDO.nc'
        if os.path.exists(sisfile):
            os.remove(sisfile)
        if os.path.exists(albfile):
            os.remove(albfile)
        m.save(odir, prefix='testoutput')
        self.assertTrue(os.path.exists(sisfile))
        self.assertTrue(os.path.exists(albfile))
        if os.path.exists(sisfile):
            os.remove(sisfile)
        if os.path.exists(albfile):
            os.remove(albfile)
        os.system('rm -rf ' + odir)

    def test_cmip5_init_singlemember(self):
        data_dir = tempfile.mkdtemp()
        # invalid model identifier (missing/duplicated '#<ens>' suffix)
        with self.assertRaises(ValueError):
            M = models.CMIP5RAW_SINGLE(data_dir, 'MPI-M:MPI-ESM-LR1', 'amip', {}, intervals='monthly')
        with self.assertRaises(ValueError):
            M = models.CMIP5RAW_SINGLE(data_dir, 'MPI-M:MPI-ESM-LR#1#2', 'amip', {}, intervals='monthly')
        M1 = models.CMIP5RAW_SINGLE(data_dir, 'MPI-M:MPI-ESM-LR#1', 'amip', {}, intervals='monthly')
        M2 = models.CMIP5RAW_SINGLE(data_dir, 'MPI-M:MPI-ESM-LR#728', 'amip', {}, intervals='monthly')
        self.assertEqual(M1.ens_member, 1)
        self.assertEqual(M2.ens_member, 728)

    def test_cmip5_singlemember_filename(self):
        data_dir = tempfile.mkdtemp()

        # generate testfile mimicking the CMIP5 directory layout
        testfile = data_dir + os.sep + 'MPI-M' + os.sep + 'MPI-ESM-LR' + os.sep + 'amip' + os.sep + 'mon' + os.sep + 'atmos' + os.sep + 'Amon' + os.sep + 'r1i1p1' + os.sep + 'ta' + os.sep + 'ta_Amon_MPI-ESM-LR_amip_r1i1p1_197901-200812.nc'
        os.makedirs(os.path.dirname(testfile))
        os.system('touch ' + testfile)
        self.assertTrue(os.path.exists(testfile))

        M = models.CMIP5RAW_SINGLE(data_dir, 'MPI-M:MPI-ESM-LR#1', 'amip', {}, intervals='monthly')
        kwargs = {'CMIP5RAWSINGLE': {'mip': 'Amon', 'realm': 'atmos', 'temporal_resolution': 'mon'}}
        f = M.get_raw_filename('ta', **kwargs)
        self.assertTrue(os.path.exists(f))
        self.assertEqual(f, testfile)
class TestData(unittest.TestCase):
    """Tests for Polygon and Raster geometry helpers."""

    def setUp(self):
        self.D = Data(None, None)
        self.D._init_sample_object(nt=1000, ny=1, nx=1)

    def test_is_closed(self):
        # open polygon (last vertex != first vertex)
        poly = [(150., 20.), (-160., 30.), (-170., 10.), (170., 10.)]
        P = Polygon(3, poly)
        self.assertFalse(P.is_closed())
        # closed polygon (first vertex repeated at the end)
        poly1 = [(150., 20.), (-160., 30.), (-170., 10.), (170., 10.), (150., 20.)]
        P1 = Polygon(3, poly1)
        self.assertTrue(P1.is_closed())

    @unittest.skip('OGR import causes trouble in import locally, therefore currently skipping this test')
    def test_convertOGR(self):
        poly = [(150., 20.), (-160., 30.), (-170., 10.), (170., 10.)]
        P = Polygon(3, poly)
        A = P.convertToOGRPolygon()
        B = P.convertToOGRPolygon(ensure_positive=True)

    def test_shift(self):
        poly3 = [(150., 20.), (-160., 30.), (-170., 10.), (170., 10.)]
        P3 = Polygon(3, poly3)
        P3._shift_coordinates()  # shift longitudes by 200 degree
        self.assertEqual(P3.poly[0][0], 150.)
        self.assertEqual(P3.poly[1][0], 200.)
        self.assertEqual(P3.poly[2][0], 190.)
        self.assertEqual(P3.poly[3][0], 170.)

    def test_point_in_polygon(self):
        x = 1
        y = 1
        poly = [(0, 0), (2, 0), (2, 2), (0, 2)]
        P = Polygon(1, poly)
        self.assertTrue(P.point_in_poly(x, y))
        x = 4
        y = 4
        self.assertFalse(P.point_in_poly(x, y))

    @unittest.skip('OGR import causes trouble in import locally, therefore currently skipping this test')
    def test_point_in_polygon_latlon(self):
        # test for point in polygon across dateline
        x1 = -175.
        y1 = 50.
        poly1 = [(150., 60.), (-160., 60.), (-170., 45.), (170., 45.)]
        P1 = Polygon(1, poly1)
        self.assertTrue(P1.point_in_poly_latlon(x1, y1))

    def test_polygon_min_max(self):
        # bbox() returns (xmin, xmax, ymin, ymax)
        x = 1
        y = 1
        poly = [(-5, 0), (2, 0), (2, 2), (0, 6)]
        P = Polygon(1, poly)
        bbox = P.bbox()
        self.assertEqual(bbox[0], -5.)
        self.assertEqual(bbox[1], 2.)
        self.assertEqual(bbox[2], 0.)
        self.assertEqual(bbox[3], 6.)

    def test_raster_wrong_geometry(self):
        # lon/lat shape mismatch must raise
        lon = np.random.random((10, 20))
        lat = np.random.random((11, 20))
        with self.assertRaises(ValueError):
            R = Raster(lon, lat)

    def test_raster_wrong_latlon(self):
        # 1D coordinates are not accepted
        lon = np.random.random(10)
        lat = np.random.random(10)
        print lon.ndim
        with self.assertRaises(ValueError):
            R = Raster(lon, lat)

    def test_raster_no_Polygon(self):
        lon = np.random.random((10, 20))
        lat = np.random.random((10, 20))
        R = Raster(lon, lat)
        with self.assertRaises(ValueError):
            # an ndarray is not a Polygon
            P = np.arange(10)
            R._rasterize_single_polygon(P)

    def test_raster_single_polygon(self):
        lon = np.linspace(-180., 180., 361)
        lat = np.linspace(-90., 90., 181)
        LON, LAT = np.meshgrid(lon, lat)

        # test a single polygon; its id (5) should be the only value rasterized
        poly = [(-10., -10.), (-10., 20), (15., 0.), (0., -25.)]
        P = Polygon(5, poly)
        R = Raster(LON, LAT)
        R.mask = np.zeros(LON.shape) * np.nan
        R._rasterize_single_polygon(P)
        print np.unique(R.mask)
        R.mask = np.ma.array(R.mask, mask=np.isnan(R.mask))

        u = np.unique(R.mask[~R.mask.mask])
        print R.mask
        print u
        self.assertTrue(len(u) == 1)
        self.assertTrue(5. in u)

    #~ xxxxxxxxdef test_raster_single_polygon_fast(self):
        #~ lon = np.linspace(-180., 180., 361)
        #~ lat = np.linspace(-90., 90., 181)
        #~ LON,LAT=np.meshgrid(lon, lat)
        #~ # test a single polygon
        #~ poly = [(-10.,-10.), (-10.,20), (15.,0.), (0.,-25.)]
        #~ P = Polygon(5, poly)
        #~ R=Raster(LON,LAT)
        #~ R.mask = np.zeros(LON.shape)*np.nan
        #~ R._rasterize_single_polygon(P, method='fast')
        #~ R.mask = np.ma.array(R.mask, mask=np.isnan(R.mask))
        #~ u = np.unique(R.mask[~R.mask.mask])
        #~ self.assertTrue(len(u) == 1)
        #~ self.assertTrue(5. in u)

    def test_raster_multiple_polygon(self):
        # this is quite slow!
        lon = np.linspace(-180., 180., 361)
        lat = np.linspace(-90., 90., 181)
        LON, LAT = np.meshgrid(lon, lat)

        # two disjoint polygons with ids 1 and 2
        poly = []
        poly1 = [(-10., -10.), (-10., 20), (15., 0.), (0., -15.)]
        poly.append(Polygon(1, poly1))

        poly2 = [(-50., -80.), (-50., -70.), (-40., -70.), (-40., -75.)]
        poly.append(Polygon(2, poly2))

        R = Raster(LON, LAT)
        R.rasterize_polygons(poly)

        u = np.unique(R.mask[~R.mask.mask])
        self.assertTrue(len(u) == 2)
        self.assertTrue(1 in u)
        self.assertTrue(2 in u)
class TestMapPlotGeneric(unittest.TestCase):
    """Tests for the generic map-plotting classes in the mapping module."""

    def setUp(self):
        self.map_plot = mapping.MapPlotGeneric()
        self.D = Data(None, None)
        self.D._init_sample_object(nt=1000, ny=10, nx=20)
        self._tmpdir = tempfile.mkdtemp()

    def tearDown(self):
        # remove the temporary directory created in setUp; it was never
        # cleaned up before, leaking one directory per test run
        import shutil
        shutil.rmtree(self._tmpdir, ignore_errors=True)

    def test_SingleMap_Init(self):
        """Smoke test: construct and plot SingleMaps with all backends."""
        try:
            import cartopy.crs as ccrs
        except ImportError:
            # was a bare 'except: return True' which silently swallowed ALL
            # errors; only a missing cartopy should skip this test
            self.skipTest('cartopy not installed')

        # just test if things pass
        SM1 = mapping.SingleMap(self.D)
        SM2 = mapping.SingleMap(self.D, stat_type='sum')
        proj_prop = {'projection': 'robin'}
        SM3 = mapping.SingleMap(self.D, backend='basemap', stat_type='median')
        SM4 = mapping.SingleMap(self.D, backend='cartopy')
        SM1.plot(show_zonal=True)
        SM2.plot(show_zonal=True, colorbar_orientation='horizontal')
        SM3.plot(show_zonal=True, colorbar_orientation='horizontal',
                 proj_prop=proj_prop)
        SM4.plot(show_zonal=True, colorbar_orientation='horizontal',
                 proj_prop=proj_prop)

    def test_SingleMap_WithoutColorbar(self):
        SM = mapping.SingleMap(self.D)
        SM.plot(show_colorbar=False)

    def test_invalid_colorbar_orientation(self):
        SM = mapping.SingleMap(self.D)
        with self.assertRaises(ValueError):
            SM.plot(colorbar_orientation='something')

    def test_SingleMap_WithPredefinedAxis(self):
        f = plt.figure()
        ax = f.add_subplot(2, 1, 1)
        SM1 = mapping.SingleMap(self.D, ax=ax)

    def test_SingleMap_WithPredefinedAxisButWhichIsNone(self):
        ax = None
        SM1 = mapping.SingleMap(self.D, ax=ax)

    def test_SingleMap_InitWithInvalidBackend(self):
        with self.assertRaises(ValueError):
            SM = mapping.SingleMap(self.D, backend='some_invalid_backend')

    def test_SingleMap_InitWithMissingProjectionProperties(self):
        # cartopy backend requires projection properties; plot() must fail
        with self.assertRaises(ValueError):
            SM = mapping.SingleMap(self.D, backend='cartopy')
            SM.plot()

    def test_SingleMap_Save(self):
        """save() writes both the time-mean file and the '_all' file."""
        # hoist the repeated path expressions into locals
        savefile = self._tmpdir + os.sep + 'my_test_save_file.nc'
        mean_file = savefile + '_timmean.nc'
        all_file = mean_file + '_all.nc'

        SM = mapping.SingleMap(self.D, savefile=savefile)
        SM.save(save_mean=True, save_all=True)
        self.assertTrue(os.path.exists(mean_file))
        self.assertTrue(os.path.exists(all_file))

        if os.path.exists(mean_file):
            os.remove(mean_file)
        if os.path.exists(all_file):
            os.remove(all_file)

    @unittest.skip('skip as only for local testing')
    def test_SingleMap_add_cyclic(self):
        # local-only test data; renamed 'file' so it no longer shadows the builtin
        fname = '/home/m300028/shared/data/SEP/variables/land/Ta_2m/cru_ts_3_00.1901.2006.tmp_miss_t63.nc'
        ofile = 'world.png'
        if os.path.exists(ofile):
            os.remove(ofile)
        d = Data(fname, 'tmp', read=True)
        map_plot(d, use_basemap=True, savegraphicfile=ofile)
        if os.path.exists(ofile):
            os.remove(ofile)
def test_lomb_basic(self):
    """Check Lomb-Scargle amplitude/phase estimates against analytic cosines."""

    def _sample_data(t, w, A, B):
        """Return a noise-free cosine A*cos(w*t + B) and a zero error term."""
        e = np.random.random(len(t)) * 0.  # noise deliberately switched off
        # BUGFIX: previously used self.t instead of the 't' argument, so the
        # time axis passed by the caller was silently ignored
        y = A * np.cos(w * t + B)
        return y, e

    def _test_ratio(x, y, thres=0.05):
        # accuracy of ratio within 5%
        # (removed leftover py2 debug 'print r, x / y')
        r = np.abs(1. - x / y)
        self.assertTrue(r <= thres)

    # test with single frequency
    p_ref = 10.
    w = 2. * np.pi / p_ref
    y, e = _sample_data(self.t, w, 5., 0.1)
    P = np.arange(2., 20., 2.)  # target period [days]
    Ar, Br = lomb_scargle_periodogram(self.t, P, y + e, corr=False)
    _test_ratio(Ar[4], 5.)
    _test_ratio(Br[4], 0.1)
    Ar, Br, Rr, Pr = lomb_scargle_periodogram(self.t, P, y)
    _test_ratio(Ar[4], 5.)
    _test_ratio(Br[4], 0.1)

    # test for functions with overlapping frequencies
    p_ref1 = 365.
    p_ref2 = 365.
    w1 = 2. * np.pi / p_ref1
    w2 = 2. * np.pi / p_ref2
    y1, e1 = _sample_data(self.t, w1, 4., 0.1)
    y2, e2 = _sample_data(self.t, w2, 3.6, 0.1)
    P = np.arange(1., 366., 1.)  # target period [days]
    Ar, Br = lomb_scargle_periodogram(self.t, P, y1 + e1 + y2 + e2,
                                      corr=False)
    _test_ratio(Ar[-1], 7.6)
    _test_ratio(Br[-1], 0.1)

    # overlapping frequencies 2
    p_ref1 = 100.
    p_ref2 = 200.
    w1 = 2. * np.pi / p_ref1
    w2 = 2. * np.pi / p_ref2
    # don't choose pi for phase, as this will result in an optimization with
    # negative amplitude and zero phase (= sin)
    y1, e1 = _sample_data(self.t, w1, 2., np.pi * 0.3)
    y2, e2 = _sample_data(self.t, w2, 3., np.pi * 0.5)
    P = np.arange(1., 366., 1.)  # target period [days]
    hlp = y1 + e1 + y2 + e2
    Ar, Br = lomb_scargle_periodogram(self.t, P, hlp, corr=False)

    # sample data object
    D = Data(None, None)
    D._init_sample_object(nt=len(y), ny=1, nx=1)
    D.data[:, 0, 0] = np.ma.array(hlp, mask=hlp != hlp)
    D.time = self.t

    D_dummy = Data(None, None)
    D_dummy._init_sample_object(nt=len(y), ny=1, nx=1)
    with self.assertRaises(ValueError):
        D_dummy.time_str = 'hours since 2001-01-01'  # only days currently supported!
        xx, yy = D_dummy.lomb_scargle_periodogram(P, return_object=False)

    AD, BD = D.lomb_scargle_periodogram(P, return_object=False, corr=False)
    AD1, BD1 = D.lomb_scargle_periodogram(P, return_object=True, corr=False)
    self.assertEqual(AD.shape, BD.shape)
    self.assertEqual(D.ny, AD.shape[1])
    self.assertEqual(D.nx, AD.shape[2])

    # amplitudes at the two reference periods (index 99 -> P=100, 199 -> P=200)
    _test_ratio(Ar[99], 2.)
    _test_ratio(AD[99, 0, 0], 2.)
    _test_ratio(AD1.data[99, 0, 0], 2.)
    _test_ratio(Ar[199], 3.)
    _test_ratio(AD[199, 0, 0], 3.)
    _test_ratio(AD1.data[199, 0, 0], 3.)
    # phases at the two reference periods
    _test_ratio(Br[99], np.pi * 0.3)
    _test_ratio(BD[99, 0, 0], np.pi * 0.3)
    _test_ratio(BD1.data[99, 0, 0], np.pi * 0.3)
    _test_ratio(Br[199], np.pi * 0.5)
    _test_ratio(BD[199, 0, 0], np.pi * 0.5)
    _test_ratio(BD1.data[199, 0, 0], np.pi * 0.5)

    # test for data with gaps
    # tests are not very robust yet as results depend on noise applied!
    p_ref1 = 100.
    p_ref2 = 200.
    w1 = 2. * np.pi / p_ref1
    w2 = 2. * np.pi / p_ref2
    y1, e1 = _sample_data(self.t, w1, 2., np.pi * 0.3)
    y2, e2 = _sample_data(self.t, w2, 3., np.pi * 0.5)
    P = np.arange(1., 366., 1.)  # target period [days]
    ran = np.random.random(len(self.t))
    msk = ran > 0.1  # randomly drop ~10% of the samples
    tmsk = self.t[msk]
    yref = y1 + e1 + y2 + e2
    ymsk = yref[msk]
    Ar, Br = lomb_scargle_periodogram(tmsk, P, ymsk, corr=False)
class TestData(unittest.TestCase):
    """Tests for reading Data objects with separate geometry files."""

    def setUp(self):
        # grid size of the sample data object
        self.nx = 20
        self.ny = 10
        # temporary data file plus three geometry files
        self.tempfile = tempfile.mktemp(suffix='.nc')
        self.gfile1 = tempfile.mktemp(suffix='.nc')
        self.gfile2 = tempfile.mktemp(suffix='.nc')
        self.gfile3 = tempfile.mktemp(suffix='.nc')

        self.x = Data(None, None)
        self.x._init_sample_object(nt=10, ny=self.ny, nx=self.nx)
        self.x.save(self.tempfile, varname='myvar')

        # geometry file using 'lat'/'lon' variable names
        self._write_geometry_file(self.gfile1, 'lat', 'lon',
                                  self.ny, self.nx, 5., 3.)
        # geometry file using 'latitude'/'longitude' variable names
        self._write_geometry_file(self.gfile2, 'latitude', 'longitude',
                                  self.ny, self.nx, 7., 8.)
        # geometry file whose shape does NOT match the data geometry
        self._write_geometry_file(self.gfile3, 'latitude', 'longitude',
                                  self.ny * 2, self.nx * 3, 7., 8.)

    def _write_geometry_file(self, fname, latname, lonname, ny, nx,
                             latval, lonval):
        """Write a NetCDF geometry file with constant lat/lon fields."""
        F = NetCDFHandler()
        F.open_file(fname, 'w')
        F.create_dimension('ny', size=ny)
        F.create_dimension('nx', size=nx)
        F.create_variable(latname, 'd', ('ny', 'nx'))
        F.create_variable(lonname, 'd', ('ny', 'nx'))
        F.assign_value(latname, np.ones((ny, nx)) * latval)
        F.assign_value(lonname, np.ones((ny, nx)) * lonval)
        F.close()

    def tearDown(self):
        # remove the temporary files created in setUp (previously leaked)
        for fname in (self.tempfile, self.gfile1, self.gfile2, self.gfile3):
            if os.path.exists(fname):
                os.remove(fname)

    def test_read_coordinates(self):
        # read data normal
        x1 = Data(self.tempfile, 'myvar', read=True)
        self.assertEqual(x1.nx, self.nx)
        self.assertEqual(x1.ny, self.ny)

        # read data with separate geometry file 'lat', 'lon' names
        x2 = Data(self.tempfile, 'myvar', read=True,
                  geometry_file=self.gfile1)
        self.assertTrue(np.all(x2.lat == 5.))
        self.assertTrue(np.all(x2.lon == 3.))

        # read data with separate geometry file 'latitude', 'longitude' names
        x3 = Data(self.tempfile, 'myvar', read=True,
                  geometry_file=self.gfile2)
        self.assertTrue(np.all(x3.lat == 7.))
        self.assertTrue(np.all(x3.lon == 8.))

        # separate geometry file with invalid geometry must be rejected
        with self.assertRaises(ValueError):
            x4 = Data(self.tempfile, 'myvar', read=True,
                      geometry_file=self.gfile3)
# NOTE(review): this class redefines 'TestData' and thereby shadows the
# identical class defined earlier in this module -- only this second
# definition is actually collected by the test runner. This looks like a
# copy/paste or merge artifact; one of the two duplicates should be removed.
class TestData(unittest.TestCase):
    """Tests for reading Data objects with separate geometry files."""

    def setUp(self):
        # grid size of the sample data object
        self.nx = 20
        self.ny = 10
        # temporary data file plus three geometry files
        self.tempfile = tempfile.mktemp(suffix='.nc')
        self.gfile1 = tempfile.mktemp(suffix='.nc')
        self.gfile2 = tempfile.mktemp(suffix='.nc')
        self.gfile3 = tempfile.mktemp(suffix='.nc')

        self.x = Data(None, None)
        self.x._init_sample_object(nt=10, ny=self.ny, nx=self.nx)
        self.x.save(self.tempfile, varname='myvar')

        # generate some arbitrary geometry file
        # geometry file using 'lat'/'lon' variable names, constant values
        F = NetCDFHandler()
        F.open_file(self.gfile1, 'w')
        F.create_dimension('ny', size=self.ny)
        F.create_dimension('nx', size=self.nx)
        F.create_variable('lat', 'd', ('ny', 'nx'))
        F.create_variable('lon', 'd', ('ny', 'nx'))
        F.assign_value('lat', np.ones((self.ny,self.nx)) * 5.)
        F.assign_value('lon', np.ones((self.ny,self.nx)) * 3.)
        F.close()

        # geometry file using 'latitude'/'longitude' variable names
        F = NetCDFHandler()
        F.open_file(self.gfile2, 'w')
        F.create_dimension('ny', size=self.ny)
        F.create_dimension('nx', size=self.nx)
        F.create_variable('latitude', 'd', ('ny', 'nx'))
        F.create_variable('longitude', 'd', ('ny', 'nx'))
        F.assign_value('latitude', np.ones((self.ny,self.nx)) * 7.)
        F.assign_value('longitude', np.ones((self.ny,self.nx)) * 8.)
        F.close()

        # geometry file whose shape does NOT match the data geometry
        F = NetCDFHandler()
        F.open_file(self.gfile3, 'w')
        F.create_dimension('ny', size=self.ny*2)
        F.create_dimension('nx', size=self.nx*3)
        F.create_variable('latitude', 'd', ('ny', 'nx'))
        F.create_variable('longitude', 'd', ('ny', 'nx'))
        F.assign_value('latitude', np.ones((self.ny*2,self.nx*3)) * 7.)
        F.assign_value('longitude', np.ones((self.ny*2,self.nx*3)) * 8.)
        F.close()

    def test_read_coordinates(self):
        # read data normal
        x1 = Data(self.tempfile, 'myvar', read=True)
        self.assertEqual(x1.nx,self.nx)
        self.assertEqual(x1.ny,self.ny)

        # read data with separate geometry file 'lat', 'lon' names
        x2 = Data(self.tempfile, 'myvar', read=True, geometry_file=self.gfile1)
        self.assertTrue(np.all(x2.lat == 5.))
        self.assertTrue(np.all(x2.lon == 3.))

        # read data with separate geometry file 'latitude', 'longitude' names
        x3 = Data(self.tempfile, 'myvar', read=True, geometry_file=self.gfile2)
        self.assertTrue(np.all(x3.lat == 7.))
        self.assertTrue(np.all(x3.lon == 8.))

        # read data with separate geometry file 'lat', 'lon' names, invalid geometry
        with self.assertRaises(ValueError):
            x4 = Data(self.tempfile, 'myvar', read=True, geometry_file=self.gfile3)