def get_dataset_reader(self):
    """Build the dataset reader used to answer this request.

    If the request carries a non-empty ``qmacro`` value, a query-macro
    dataset is built from the server configuration; otherwise a dataset
    merging everything the server publishes is returned.
    """
    cfg_buf = io.StringIO()
    self.handler.server.cfg.write(cfg_buf)
    cfg_text = cfg_buf.getvalue()
    macro_name = self.request.values.get("qmacro", "").strip()
    if macro_name:
        macro_query = self.request.values.get("query", "").strip()
        return arki.make_qmacro_dataset(
            "url = " + self.handler.server.url,
            cfg_text,
            macro_name,
            macro_query)
    # No macro requested: merge all configured datasets
    return arki.make_merged_dataset(cfg_text)
def get_dataset_reader(self):
    """Build the dataset reader used to answer this request.

    When the request carries a non-empty ``qmacro`` value, build a
    query-macro dataset from the server configuration; otherwise fall
    back to a dataset merging everything the server publishes (this is
    the behavior the old commented-out C++ hinted at with
    ``dataset::AutoMerged(req.arki_conf)``, and matches the sibling
    implementation of this method).
    """
    cfg = io.StringIO()
    self.handler.server.cfg.write(cfg)
    qmacro = self.request.values.get("qmacro", "").strip()
    if not qmacro:
        # No macro name given: merge all datasets we have
        return arki.make_merged_dataset(cfg.getvalue())
    return arki.make_qmacro_dataset(
        "url = " + self.handler.server.url,
        cfg.getvalue(),
        qmacro,
        self.request.values.get("query", "").strip())
def test_noopcopy(self):
    """Querymacro script that simply passes the queries through,
    making temporary copies of the data."""
    with self.datasets() as env:
        reader = arki.make_qmacro_dataset(
            arki.cfg.Section(), env.config, "noopcopy", "testds")
        results = reader.query_data()
        self.assertEqual(len(results), 9)
        # The first results must carry a data source
        for md in results[:3]:
            self.assertTrue(md.has_source())
        summary = reader.query_summary()
        self.assertEqual(summary.count, 9)
def test_expa(self):
    """Query two products through the "expa" query macro."""
    with self.datasets() as env:
        expa_query = (
            "ds:testds. d:2009-08-07. t:0000. s:AN. l:G00. v:GRIB1/200/140/229.\n"
            "ds:testds. d:2009-08-07. t:0000. s:GRIB1/1. l:MSL. v:GRIB1/80/2/2.\n"
        )
        reader = arki.make_qmacro_dataset(
            arki.cfg.Section(), env.config, "expa", expa_query)
        results = reader.query_data()
        self.assertEqual(len(results), 2)
        for md in results:
            self.assertTrue(md.has_source())
        summary = reader.query_summary()
        self.assertEqual(summary.count, 2)
def test_expa_inline(self):
    """Try "expa" matchers with the inline option."""
    with self.datasets() as env:
        expa_query = (
            "ds:testds. d:2009-08-07. t:0000. s:AN. l:G00. v:GRIB1/200/140/229.\n"
            "ds:testds. d:2009-08-07. t:0000. s:GRIB1/1. l:MSL. v:GRIB1/80/2/2.\n"
        )
        reader = arki.make_qmacro_dataset(
            arki.cfg.Section(), env.config, "expa", expa_query)
        results = reader.query_data(with_data=True)
        self.assertEqual(len(results), 2)
        # Ensure that the inline data is reachable and complete
        for md in results:
            self.assertEqual(len(md.data), md.data_size)
        summary = reader.query_summary()
        self.assertEqual(summary.count, 2)
def test_expa_sort(self):
    """Try "expa" matchers together with the sort option."""
    with self.datasets() as env:
        expa_query = (
            "ds:testds. d:2009-08-07. t:0000. s:AN. l:G00. v:GRIB1/200/140/229.\n"
            "ds:testds. d:2009-08-08. t:0000. s:GRIB1/1. l:MSL. v:GRIB1/80/2/2.\n"
        )
        reader = arki.make_qmacro_dataset(
            arki.cfg.Section(), env.config, "expa", expa_query)
        results = reader.query_data(sort="month:-reftime")
        self.assertEqual(len(results), 2)
        # TODO: ensure sorting
        # Within the month, reftimes must come out newest first
        expected_reftimes = ["2009-08-08T00:00:00Z", "2009-08-07T00:00:00Z"]
        for md, reftime in zip(results, expected_reftimes):
            self.assertTrue(md.has_source())
            self.assertEqual(md["reftime"], reftime)
        summary = reader.query_summary()
        self.assertEqual(summary.count, 2)
def test_query_data_qmacro(self):
    """query_data on a qmacro dataset delivers results to the
    on_metadata callback.

    Fix: ``assertEquals`` is a deprecated alias removed in Python 3.12;
    use ``assertEqual`` instead.
    """
    ds = arki.make_qmacro_dataset(
        {},
        """
[test200]
format = grib
name = test.grib1
path = inbound/test.grib1
type = file
""",
        "expa 2007-07-08",
        "ds:test200. d:@. t:1300. s:GRIB1/0/0h/0h. l:GRIB1/1. v:GRIB1/200/140/229.\n",
    )
    count = 0

    def count_results(md):
        nonlocal count
        count += 1

    # No arguments
    ds.query_data(on_metadata=count_results)
    self.assertEqual(count, 1)