def test_add_data(data):
    """Adding timeseries data to a Datasource stores it intact and attaches the expected metadata."""
    datasource = Datasource()

    ch4_metadata = data["ch4"]["metadata"]
    ch4_dataset = data["ch4"]["data"]

    # Sanity-check the fixture before storing it
    assert ch4_dataset["ch4"][0] == pytest.approx(1959.55)
    assert ch4_dataset["ch4_variability"][0] == pytest.approx(0.79)
    assert ch4_dataset["ch4_number_of_observations"][0] == pytest.approx(26.0)

    datasource.add_data(metadata=ch4_metadata, data=ch4_dataset, data_type="timeseries")
    datasource.save()

    bucket = get_local_bucket()

    # Now read it out and make sure it's what we expect
    data_chunks = [
        Datasource.load_dataset(bucket=bucket, key=key) for key in datasource.data_keys()
    ]
    combined = xr.concat(data_chunks, dim="time")

    assert combined.equals(ch4_dataset)

    expected_metadata = {
        "site": "bsd",
        "instrument": "picarro",
        "sampling_period": "60",
        "inlet": "248m",
        "port": "9",
        "type": "air",
        "network": "decc",
        "species": "ch4",
        "scale": "wmo-x2004a",
        "long_name": "bilsdale",
        "data_owner": "simon o'doherty",
        "data_owner_email": "*****@*****.**",
        "inlet_height_magl": "248m",
        "comment": "cavity ring-down measurements. output from gcwerks",
        "source": "in situ measurements of air",
        "conventions": "cf-1.6",
        "calibration_scale": "wmo-x2004a",
        "station_longitude": -1.15033,
        "station_latitude": 54.35858,
        "station_long_name": "bilsdale, uk",
        "station_height_masl": 380.0,
        "data_type": "timeseries",
    }

    assert datasource.metadata() == expected_metadata
def test_from_data(data):
    """A Datasource serialized with to_data() round-trips through from_data() with keys and metadata intact."""
    original = Datasource()

    ch4_metadata = data["ch4"]["metadata"]
    ch4_dataset = data["ch4"]["data"]

    original.add_data(metadata=ch4_metadata, data=ch4_dataset, data_type="timeseries")
    original.save()

    serialized = original.to_data()
    bucket = get_local_bucket()

    # Create a new object with the data from the original Datasource
    restored = Datasource.from_data(bucket=bucket, data=serialized, shallow=False)

    restored_metadata = restored.metadata()
    assert restored_metadata["site"] == "bsd"
    assert restored_metadata["instrument"] == "picarro"
    assert restored_metadata["sampling_period"] == "60"
    assert restored_metadata["inlet"] == "248m"

    assert sorted(restored.data_keys()) == sorted(original.data_keys())
    assert restored.metadata() == original.metadata()