def test_initialize_several_variables(self):
    """Ensure the aggregation file is created correctly according to the variable config."""
    config = Config.from_dict(
        {
            "dimensions": [{"name": "x", "size": None}, {"name": "y", "size": 10}],
            "variables": [
                {
                    "name": "foo",
                    "dimensions": ["x", "y"],
                    "datatype": "float32",
                    "attributes": {"units": "seconds"},
                },
                {
                    "name": "foo_x",
                    "dimensions": ["x"],
                    "datatype": "float64",
                    "attributes": {"units": "floops", "created_by": "the flooper"},
                },
            ],
            "global attributes": [],
        }
    )
    initialize_aggregation_file(config, self.filename)
    with nc.Dataset(self.filename) as nc_check:
        self.assertEqual(len(nc_check.variables), 2)
        self.assertEqual(nc_check.variables["foo"].dimensions, ("x", "y"))
        self.assertEqual(nc_check.variables["foo"].datatype, np.dtype(np.float32))
        self.assertEqual(nc_check.variables["foo"].units, "seconds")
        self.assertEqual(nc_check.variables["foo_x"].dimensions, ("x",))
        self.assertEqual(nc_check.variables["foo_x"].datatype, np.dtype(np.float64))
        self.assertEqual(nc_check.variables["foo_x"].units, "floops")
        self.assertEqual(
            nc_check.variables["foo_x"].getncattr("created_by"), "the flooper"
        )
def setUp(self):
    # tmp file to aggregate to
    _, self.nc_out_filename = tempfile.mkstemp()
    pwd = os.path.dirname(__file__)
    self.files = sorted(glob.glob(os.path.join(pwd, "data", "*.nc")))
    with open(os.path.join(pwd, "new_dim_config.json")) as config_in:
        self.config = Config.from_dict(json.load(config_in))
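# setUp creates a temporary output file but this fragment shows no matching cleanup; a
# minimal sketch, assuming the test does not need the file after it finishes (this
# method is an addition for illustration, not part of the original suite):
def tearDown(self):
    # remove the temporary aggregation output created in setUp
    os.remove(self.nc_out_filename)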
def test_initialize_basic(self):
    """Ensure the aggregation file is created with the proper dimensions according to the config."""
    config = Config.from_dict(
        {
            "dimensions": [{"name": "x", "size": None}, {"name": "y", "size": 10}],
            "variables": [
                {"name": "x", "dimensions": ["x", "y"], "datatype": "int8"}
            ],
            "global attributes": [],
        }
    )
    initialize_aggregation_file(config, self.filename)
    with nc.Dataset(self.filename) as nc_check:
        self.assertEqual(len(nc_check.dimensions), 2)
        self.assertEqual(nc_check.dimensions["y"].size, 10)
        self.assertFalse(nc_check.dimensions["y"].isunlimited())
        self.assertTrue(nc_check.dimensions["x"].isunlimited())
def test_initialize_with_list_attribute(self):
    """Ensure list-valued variable attributes are written to the aggregation file correctly."""
    config = Config.from_dict(
        {
            "dimensions": [{"name": "x", "size": None}, {"name": "y", "size": 10}],
            "variables": [
                {
                    "name": "x",
                    "dimensions": ["x", "y"],
                    "datatype": "int8",
                    "attributes": {"valid_range": [0, 10]},
                }
            ],
            "global attributes": [],
        }
    )
    initialize_aggregation_file(config, self.filename)
    with nc.Dataset(self.filename) as nc_check:
        self.assertEqual(len(nc_check.dimensions), 2)
        self.assertEqual(nc_check.variables["x"].valid_range[0], 0)
        self.assertEqual(nc_check.variables["x"].valid_range[1], 10)
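# For orientation, a minimal sketch of the behavior the initialization tests above
# assert, written against the raw config dict (before Config.from_dict) so it does not
# assume Config internals. This is an illustrative approximation using the standard
# netCDF4-python API, not the project's actual initialize_aggregation_file:
#
#     def sketch_initialize(config_dict, filename):
#         with nc.Dataset(filename, "w") as ds:
#             for dim in config_dict["dimensions"]:
#                 ds.createDimension(dim["name"], dim["size"])  # size None -> unlimited
#             for var in config_dict["variables"]:
#                 v = ds.createVariable(var["name"], var["datatype"], var["dimensions"])
#                 for key, value in var.get("attributes", {}).items():
#                     v.setncattr(key, value)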
def setUpClass(cls):
    super(TestEvaluateAggregationList, cls).setUpClass()
    pwd = os.path.dirname(__file__)
    cls.start_time = datetime(2017, 6, 8, 16, 45)
    cls.end_time = datetime(2017, 6, 8, 16, 50)
    cls.files = glob.glob(os.path.join(pwd, "data", "*.nc"))
    with open(os.path.join(pwd, "seis-l1b-sgps-east.json")) as product_config_file:
        cls.config = Config.from_dict(json.load(product_config_file))
    cls.config.dims["report_number"].update(
        {
            "index_by": "L1a_SciData_TimeStamp",
            # for convenience, will convert according to index_by units if this is a datetime
            "min": cls.start_time,
            "max": cls.end_time,
            "expected_cadence": {"report_number": 1, "sensor_unit": 0},
        }
    )
    _, cls.filename = tempfile.mkstemp()
    agg_list = generate_aggregation_list(cls.config, cls.files)
    evaluate_aggregation_list(cls.config, agg_list, cls.filename)
    cls.output = nc.Dataset(cls.filename, "r")
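# setUpClass opens cls.output and leaves the temporary file on disk; a minimal cleanup
# sketch, assuming nothing else relies on the file persisting after the test class runs
# (this classmethod is an addition for illustration, not part of the original suite):
@classmethod
def tearDownClass(cls):
    super(TestEvaluateAggregationList, cls).tearDownClass()
    cls.output.close()  # close the dataset opened in setUpClass
    os.remove(cls.filename)  # remove the temporary aggregation output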