# NOTE: the project-specific names used below (get_local_bucket, get_datapath,
# ObsSurface, Datasource, CRDS, GCWERKS, search, recombine_sections,
# process_files, assign_attributes, get_object_names, exists, hfd_filepath and
# the service-layer classes) come from the surrounding codebase and test
# helpers; their import lines are not part of this section.
import os

import pytest
import xarray
from pandas import Timestamp


def test_read_noaa():
    get_local_bucket(empty=True)

    data_filepath = get_datapath(filename="co_pocn25_surface-flask_1_ccgg_event.txt", data_type="NOAA")

    results = ObsSurface.read_file(filepath=data_filepath, data_type="NOAA")

    uuid = results["co_pocn25_surface-flask_1_ccgg_event.txt"]["co_pocn25_surface-flask_1_ccgg_event_co"]

    co_data = Datasource.load(uuid=uuid, shallow=False).data()

    assert len(co_data.keys()) == 95

    old_data = co_data["1990-12-02-12:23:00+00:00_1990-12-02-12:23:00+00:00"]

    assert old_data.time[0] == Timestamp("1990-12-02T12:23:00")
    assert old_data.time[-1] == Timestamp("1990-12-02T12:23:00")
    assert old_data["co"][0] == 141.61
    assert old_data["co"][-1] == 141.61
    assert old_data["co_repeatability"][0] == -999.99
    assert old_data["co_repeatability"][-1] == -999.99
    assert old_data["co_selection_flag"][0] == 0
    assert old_data["co_selection_flag"][-1] == 0

    obs = ObsSurface.load()
    assert list(obs._datasource_names.keys())[0] == "co_pocn25_surface-flask_1_ccgg_event_co"
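
# ``get_datapath`` is used throughout this module but defined elsewhere in the
# test suite. A hypothetical sketch only, assuming the data files live under
# ../data/proc_test_data/<data_type> relative to this file (the same layout
# the explicit os.path.join calls below use); the real helper may differ.
from pathlib import Path


def get_datapath(filename, data_type):
    # Hypothetical: resolve a test data file within the assumed layout
    return Path(__file__).resolve().parent.joinpath(f"../data/proc_test_data/{data_type}/{filename}")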

def crds_read():
    get_local_bucket(empty=True)
    test_data = "../data/search_data"
    folder_path = os.path.join(os.path.dirname(__file__), test_data)
    ObsSurface.read_folder(folder_path=folder_path, data_type="CRDS", extension="dat")

def test_read_thames_barrier():
    get_local_bucket(empty=True)

    data_filepath = get_datapath(filename="thames_test_20190707.csv", data_type="THAMESBARRIER")

    results = ObsSurface.read_file(filepath=data_filepath, data_type="THAMESBARRIER")

    expected_keys = sorted(["thames_test_20190707_CH4", "thames_test_20190707_CO2", "thames_test_20190707_CO"])

    assert sorted(list(results["thames_test_20190707.csv"].keys())) == expected_keys

    uuid = results["thames_test_20190707.csv"]["thames_test_20190707_CO2"]

    data = Datasource.load(uuid=uuid, shallow=False).data()
    data = data["2019-07-01-00:39:55+00:00_2019-08-01-00:10:30+00:00"]

    assert data.time[0] == Timestamp("2019-07-01T00:39:55")
    assert data.time[-1] == Timestamp("2019-08-01T00:10:30")
    assert data["co2"][0] == pytest.approx(417.97344761)
    assert data["co2"][-1] == pytest.approx(417.80000653)
    assert data["co2_variability"][0] == 0
    assert data["co2_variability"][-1] == 0

    obs = ObsSurface.load()
    assert sorted(obs._datasource_names.keys()) == expected_keys

def crds():
    get_local_bucket(empty=True)
    filename = "hfd.picarro.1minute.100m.min.dat"
    filepath = get_datapath(filename=filename, data_type="CRDS")
    ObsSurface.read_file(filepath=filepath, data_type="CRDS")

def crds(authenticated_user):
    get_local_bucket(empty=True)

    # Upload the data file to the storage service and create a PAR so the
    # processing service can read it
    creds = StorageCreds(user=authenticated_user, service_url="storage")
    drive = Drive(creds=creds, name="test_drive")

    filepath = os.path.join(
        os.path.dirname(__file__),
        "../../../tests/data/proc_test_data/CRDS/bsd.picarro.1minute.248m.dat",
    )

    filemeta = drive.upload(filepath)

    par = PAR(location=filemeta.location(), user=authenticated_user)

    hugs = Service(service_url="hugs")
    par_secret = hugs.encrypt_data(par.secret())

    auth = Authorisation(resource="process", user=authenticated_user)

    args = {
        "authorisation": auth.to_data(),
        "par": {"data": par.to_data()},
        "par_secret": {"data": par_secret},
        "data_type": "CRDS",
        "source_name": "bsd.picarro.1minute.248m",
    }

    hugs.call_function(function="process", args=args)

def test_read_CRDS():
    get_local_bucket(empty=True)

    filepath = get_datapath(filename="bsd.picarro.1minute.248m.dat", data_type="CRDS")

    results = ObsSurface.read_file(filepath=filepath, data_type="CRDS")

    keys = results["bsd.picarro.1minute.248m.dat"].keys()

    expected_keys = sorted(
        [
            "bsd.picarro.1minute.248m_ch4",
            "bsd.picarro.1minute.248m_co",
            "bsd.picarro.1minute.248m_co2",
        ]
    )
    assert sorted(keys) == expected_keys

    # Load up the assigned Datasources and check they contain the correct data
    data = results["bsd.picarro.1minute.248m.dat"]

    ch4_data = Datasource.load(uuid=data["bsd.picarro.1minute.248m_ch4"]).data()
    ch4_data = ch4_data["2014-01-30-10:52:30+00:00_2014-01-30-14:20:30+00:00"]

    assert ch4_data.time[0] == Timestamp("2014-01-30T10:52:30")
    assert ch4_data["ch4"][0] == 1960.24
    assert ch4_data["ch4"][-1] == 1952.24
    assert ch4_data["ch4_stdev"][-1] == 0.674
    assert ch4_data["ch4_n_meas"][-1] == 25.0

    obs = ObsSurface.load()
    assert sorted(obs._datasource_names.keys()) == expected_keys

def test_recombination_CRDS():
    get_local_bucket(empty=True)

    filename = "hfd.picarro.1minute.100m.min.dat"
    filepath = get_datapath(filename=filename, data_type="CRDS")

    crds = CRDS()

    ObsSurface.read_file(filepath, data_type="CRDS")

    gas_data = crds.read_data(data_filepath=filepath, site="HFD")

    ch4_data_read = gas_data["ch4"]["data"]

    gas_name = "ch4"
    location = "hfd"

    keys = search(species=gas_name, locations=location)

    to_download = keys["ch4_hfd_100m_picarro"]["keys"]["2013-12-04-14:02:30_2019-05-21-15:46:30"]

    ch4_data_recombined = recombine_sections(data_keys=to_download)

    ch4_data_recombined.attrs = {}

    # Data recombined from the object store should match the file read directly
    assert ch4_data_read.time.equals(ch4_data_recombined.time)
    assert ch4_data_read["ch4"].equals(ch4_data_recombined["ch4"])

def crds():
    get_local_bucket(empty=True)
    dir_path = os.path.dirname(__file__)
    test_data = "../data/proc_test_data/CRDS"
    filename = "hfd.picarro.1minute.100m.min.dat"
    filepath = os.path.join(dir_path, test_data, filename)
    ObsSurface.read_file(filepath=filepath, data_type="CRDS")

def test_upload_same_file_twice_raises():
    get_local_bucket(empty=True)

    data_filepath = get_datapath(filename="tta.co2.1minute.222m.min.dat", data_type="ICOS")

    ObsSurface.read_file(filepath=data_filepath, data_type="ICOS")

    # A second upload of the identical file should be rejected
    with pytest.raises(ValueError):
        ObsSurface.read_file(filepath=data_filepath, data_type="ICOS")

def gc_read():
    get_local_bucket(empty=True)
    data_file = "capegrim-medusa.18.C"
    prec_file = "capegrim-medusa.18.precisions.C"
    dir_path = os.path.dirname(__file__)
    test_data = "../data/proc_test_data/GC"
    data_filepath = os.path.join(dir_path, test_data, data_file)
    prec_filepath = os.path.join(dir_path, test_data, prec_file)
    ObsSurface.read_file(filepath=(data_filepath, prec_filepath), data_type="GCWERKS")

def test_load_dataset():
    filename = "WAO-20magl_EUROPE_201306_small.nc"
    dir_path = os.path.dirname(__file__)
    test_data = "../data/emissions"
    filepath = os.path.join(dir_path, test_data, filename)

    ds = xarray.load_dataset(filepath)

    metadata = {"some": "metadata"}

    d = Datasource("dataset_test")
    d.add_data(metadata=metadata, data=ds, data_type="footprint")
    d.save()

    keys = d._data_keys["latest"]["keys"]
    key = list(keys.values())[0]

    bucket = get_local_bucket()

    loaded_ds = Datasource.load_dataset(bucket=bucket, key=key)

    assert loaded_ds.equals(ds)

def test_save_footprint():
    bucket = get_local_bucket(empty=True)

    metadata = {"test": "testing123"}

    dir_path = os.path.dirname(__file__)
    test_data = "../data/emissions"
    filename = "WAO-20magl_EUROPE_201306_downsampled.nc"
    filepath = os.path.join(dir_path, test_data, filename)

    data = xarray.open_dataset(filepath)

    datasource = Datasource(name="test_name")
    datasource.add_data(metadata=metadata, data=data, data_type="footprint")
    datasource.save()

    prefix = f"{Datasource._datasource_root}/uuid/{datasource._uuid}"
    objs = get_object_names(bucket, prefix)

    datasource_2 = Datasource.load(bucket=bucket, key=objs[0])

    date_key = "2013-06-02-00:00:00+00:00_2013-06-30-00:00:00+00:00"
    data = datasource_2._data[date_key]

    assert float(data.pressure[0].values) == pytest.approx(1023.971)
    assert float(data.pressure[2].values) == pytest.approx(1009.940)
    assert float(data.pressure[-1].values) == pytest.approx(1021.303)

def test_delete_Datasource():
    bucket = get_local_bucket(empty=True)

    data_filepath = get_datapath(filename="tta.co2.1minute.222m.min.dat", data_type="ICOS")

    ObsSurface.read_file(filepath=data_filepath, data_type="ICOS")

    obs = ObsSurface.load()

    datasources = obs.datasources()
    uuid = datasources[0]

    datasource = Datasource.load(uuid=uuid)

    data = datasource.data()["2011-12-07-01:38:00+00:00_2011-12-31-19:57:00+00:00"]

    assert data["co2"][0] == pytest.approx(397.334)
    assert data.time[0] == Timestamp("2011-12-07T01:38:00")

    data_keys = datasource.data_keys()
    key = data_keys[0]

    assert exists(bucket=bucket, key=key)

    obs.delete(uuid=uuid)

    # Both the Datasource record and its data should be gone
    assert uuid not in obs.datasources()
    assert not exists(bucket=bucket, key=key)

def test_process_files():
    get_local_bucket(empty=True)

    dir_path = os.path.dirname(__file__)
    test_data = "../data/proc_test_data/CRDS"
    filename = "hfd.picarro.1minute.100m.min.dat"
    filepath = os.path.join(dir_path, test_data, filename)

    results = process_files(files=filepath, site="hfd", instrument="picarro", network="DECC", data_type="CRDS")

    results = results["hfd.picarro.1minute.100m.min.dat"]

    assert "hfd.picarro.1minute.100m.min_ch4" in results
    assert "hfd.picarro.1minute.100m.min_co2" in results

def load_two_data(authenticated_user):
    get_local_bucket(empty=True)

    def test_folder(filename):
        dir_path = os.path.dirname(__file__)
        test_folder = "../../../tests/data/search_data"
        return os.path.join(dir_path, test_folder, filename)

    crds_files = [
        "bsd.picarro5310.1minute.108m.min.dat",
        "bsd.picarro5310.1minute.248m.min.dat",
        "hfd.picarro.1minute.100m.min.dat",
    ]
    filepaths = [test_folder(filename=f) for f in crds_files]

    process = Process(service_url="hugs")

    process.process_files(
        user=authenticated_user,
        files=filepaths,
        data_type="CRDS",
        hugs_url="hugs",
        storage_url="storage",
    )

    dir_path = os.path.dirname(__file__)
    test_data = "../../../tests/data/proc_test_data/GC"
    data = os.path.join(dir_path, test_data, "capegrim-medusa.18.C")
    precision = os.path.join(dir_path, test_data, "capegrim-medusa.18.precisions.C")

    gc_files = [data, precision]
    # gc_files = [test_folder(f) for f in gc_files]

    process.process_files(
        user=authenticated_user,
        files=gc_files,
        data_type="GC",
        hugs_url="hugs",
        storage_url="storage",
    )

def test_save(mock_uuid2):
    bucket = get_local_bucket()

    datasource = Datasource(name="test_name")
    datasource.add_metadata(key="data_type", value="timeseries")
    datasource.save(bucket)

    prefix = f"{Datasource._datasource_root}/uuid/{datasource._uuid}"
    objs = get_object_names(bucket, prefix)

    # The saved object's key should end with the mocked UUID
    assert objs[0].split("/")[-1] == mock_uuid2

def test_recombination_GC():
    get_local_bucket(empty=True)

    gc = GCWERKS()

    data = get_datapath(filename="capegrim-medusa.18.C", data_type="GC")
    precision = get_datapath(filename="capegrim-medusa.18.precisions.C", data_type="GC")

    ObsSurface.read_file((data, precision), data_type="GCWERKS")

    data = gc.read_data(data_filepath=data, precision_filepath=precision, site="CGO", instrument="medusa")

    toluene_data = data["toluene"]["data"]

    gas_name = "toluene"
    location = "CGO"

    keys = search(species=gas_name, locations=location)

    to_download = keys["toluene_cgo_75m_4_medusa"]["keys"]["2018-01-01-02:24:00_2018-01-31-23:33:00"]

    toluene_data_recombined = recombine_sections(data_keys=to_download)

    toluene_data.attrs = {}
    toluene_data_recombined.attrs = {}

    assert toluene_data.time.equals(toluene_data_recombined.time)
    assert toluene_data["toluene"].equals(toluene_data_recombined["c6h5ch3"])
    assert toluene_data["toluene repeatability"].equals(toluene_data_recombined["c6h5ch3_repeatability"])
    assert toluene_data["toluene status_flag"].equals(toluene_data_recombined["c6h5ch3_status_flag"])
    assert toluene_data["toluene integration_flag"].equals(toluene_data_recombined["c6h5ch3_integration_flag"])

def test_read_cranfield():
    get_local_bucket(empty=True)

    data_filepath = get_datapath(filename="THB_hourly_means_test.csv", data_type="Cranfield_CRDS")

    results = ObsSurface.read_file(filepath=data_filepath, data_type="CRANFIELD")

    expected_keys = sorted(
        [
            "THB_hourly_means_test_ch4",
            "THB_hourly_means_test_co2",
            "THB_hourly_means_test_co",
        ]
    )
    assert sorted(results["THB_hourly_means_test.csv"].keys()) == expected_keys

    uuid = results["THB_hourly_means_test.csv"]["THB_hourly_means_test_ch4"]

    ch4_data = Datasource.load(uuid=uuid, shallow=False).data()
    ch4_data = ch4_data["2018-05-05-00:00:00+00:00_2018-05-13-16:00:00+00:00"]

    assert ch4_data.time[0] == Timestamp("2018-05-05")
    assert ch4_data.time[-1] == Timestamp("2018-05-13T16:00:00")
    assert ch4_data["ch4"][0] == pytest.approx(2585.651)
    assert ch4_data["ch4"][-1] == pytest.approx(1999.018)
    assert ch4_data["ch4 variability"][0] == pytest.approx(75.50218)
    assert ch4_data["ch4 variability"][-1] == pytest.approx(6.48413)

    # Check obs has stored the keys correctly
    obs = ObsSurface.load()
    assert sorted(list(obs._datasource_names.keys())) == expected_keys

def load_crds(authenticated_user):
    get_local_bucket(empty=True)

    def test_folder(filename):
        dir_path = os.path.dirname(__file__)
        test_folder = "../../../tests/data/proc_test_data/CRDS"
        return os.path.join(dir_path, test_folder, filename)

    files = [
        "hfd.picarro.1minute.100m.min.dat",
        "hfd.picarro.1minute.50m.min.dat",
    ]
    filepaths = [test_folder(f) for f in files]

    process = Process(service_url="hugs")

    process.process_files(
        user=authenticated_user,
        files=filepaths,
        data_type="CRDS",
        hugs_url="hugs",
        storage_url="storage",
    )

def test_from_data(data):
    d = Datasource(name="testing_123")

    metadata = data["ch4"]["metadata"]
    ch4_data = data["ch4"]["data"]

    d.add_data(metadata=metadata, data=ch4_data, data_type="timeseries")

    obj_data = d.to_data()

    bucket = get_local_bucket()

    # Create a new object with the data from d
    d_2 = Datasource.from_data(bucket=bucket, data=obj_data, shallow=False)

    metadata = d_2.metadata()
    assert metadata["site"] == "bsd"
    assert metadata["instrument"] == "picarro"
    assert metadata["time_resolution"] == "1_minute"
    assert metadata["inlet"] == "248m"

    assert d_2.to_data() == d.to_data()

def run_before_tests():
    get_local_bucket(empty=True)
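
# The setup-style functions in this section (``run_before_tests``, ``crds``,
# ``crds_read``, ``gc_read``, ``populate_store``, ``load_crds``,
# ``load_two_data``) read as pytest fixtures whose decorators did not survive
# extraction. If so, ``run_before_tests`` above would typically be registered
# as an autouse fixture so every test starts from an empty object store.
# A sketch, assuming pytest; the decorator is not present in the original:
#
#     @pytest.fixture(autouse=True)
#     def run_before_tests():
#         get_local_bucket(empty=True)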

def populate_store():
    get_local_bucket(empty=True)
    filepath = hfd_filepath()
    ObsSurface.read_file(filepath=filepath, data_type="CRDS", site="hfd")

def test_read_GC():
    get_local_bucket(empty=True)

    data_filepath = get_datapath(filename="capegrim-medusa.18.C", data_type="GC")
    precision_filepath = get_datapath(filename="capegrim-medusa.18.precisions.C", data_type="GC")

    results = ObsSurface.read_file(filepath=(data_filepath, precision_filepath), data_type="GCWERKS")

    expected_keys = sorted(
        [
            "capegrim-medusa.18_NF3",
            "capegrim-medusa.18_CF4",
            "capegrim-medusa.18_PFC-116",
            "capegrim-medusa.18_PFC-218",
            "capegrim-medusa.18_PFC-318",
            "capegrim-medusa.18_C4F10",
            "capegrim-medusa.18_C6F14",
            "capegrim-medusa.18_SF6",
            "capegrim-medusa.18_SO2F2",
            "capegrim-medusa.18_SF5CF3",
            "capegrim-medusa.18_HFC-23",
            "capegrim-medusa.18_HFC-32",
            "capegrim-medusa.18_HFC-125",
            "capegrim-medusa.18_HFC-134a",
            "capegrim-medusa.18_HFC-143a",
            "capegrim-medusa.18_HFC-152a",
            "capegrim-medusa.18_HFC-227ea",
            "capegrim-medusa.18_HFC-236fa",
            "capegrim-medusa.18_HFC-245fa",
            "capegrim-medusa.18_HFC-365mfc",
            "capegrim-medusa.18_HFC-4310mee",
            "capegrim-medusa.18_HCFC-22",
            "capegrim-medusa.18_HCFC-124",
            "capegrim-medusa.18_HCFC-132b",
            "capegrim-medusa.18_HCFC-133a",
            "capegrim-medusa.18_HCFC-141b",
            "capegrim-medusa.18_HCFC-142b",
            "capegrim-medusa.18_CFC-11",
            "capegrim-medusa.18_CFC-12",
            "capegrim-medusa.18_CFC-13",
            "capegrim-medusa.18_CFC-112",
            "capegrim-medusa.18_CFC-113",
            "capegrim-medusa.18_CFC-114",
            "capegrim-medusa.18_CFC-115",
            "capegrim-medusa.18_H-1211",
            "capegrim-medusa.18_H-1301",
            "capegrim-medusa.18_H-2402",
            "capegrim-medusa.18_CH3Cl",
            "capegrim-medusa.18_CH3Br",
            "capegrim-medusa.18_CH3I",
            "capegrim-medusa.18_CH2Cl2",
            "capegrim-medusa.18_CHCl3",
            "capegrim-medusa.18_CCl4",
            "capegrim-medusa.18_CH2Br2",
            "capegrim-medusa.18_CHBr3",
            "capegrim-medusa.18_CH3CCl3",
            "capegrim-medusa.18_TCE",
            "capegrim-medusa.18_PCE",
            "capegrim-medusa.18_ethyne",
            "capegrim-medusa.18_ethane",
            "capegrim-medusa.18_propane",
            "capegrim-medusa.18_c-propane",
            "capegrim-medusa.18_benzene",
            "capegrim-medusa.18_toluene",
            "capegrim-medusa.18_COS",
            "capegrim-medusa.18_desflurane",
        ]
    )
    assert sorted(list(results["capegrim-medusa.18.C"].keys())) == expected_keys

    # Load in some data
    uuid = results["capegrim-medusa.18.C"]["capegrim-medusa.18_HFC-152a"]

    hfc152a_data = Datasource.load(uuid=uuid, shallow=False).data()
    hfc152a_data = hfc152a_data["2018-01-01-02:24:00+00:00_2018-01-31-23:33:00+00:00"]

    assert hfc152a_data.time[0] == Timestamp("2018-01-01T02:24:00")
    assert hfc152a_data.time[-1] == Timestamp("2018-01-31T23:33:00")
    assert hfc152a_data["hfc152a"][0] == 4.409
    assert hfc152a_data["hfc152a"][-1] == 4.262
    assert hfc152a_data["hfc152a_repeatability"][0] == 0.03557
    assert hfc152a_data["hfc152a_repeatability"][-1] == 0.03271
    assert hfc152a_data["hfc152a_status_flag"][0] == 0
    assert hfc152a_data["hfc152a_status_flag"][-1] == 0
    assert hfc152a_data["hfc152a_integration_flag"][0] == 0
    assert hfc152a_data["hfc152a_integration_flag"][-1] == 0

    # Check we have the Datasource info saved
    obs = ObsSurface.load()
    assert sorted(obs._datasource_names.keys()) == expected_keys

    del hfc152a_data.attrs["File created"]

    assert hfc152a_data.attrs == {
        "data_owner": "Paul Krummel",
        "data_owner_email": "*****@*****.**",
        "inlet_height_magl": "75m_4",
        "comment": "Medusa measurements. Output from GCWerks. See Miller et al. (2008).",
        "Conditions of use": "Ensure that you contact the data owner at the outset of your project.",
        "Source": "In situ measurements of air",
        "Conventions": "CF-1.6",
        "Processed by": "*****@*****.**",
        "species": "hfc152a",
        "Calibration_scale": "SIO-05",
        "station_longitude": 144.689,
        "station_latitude": -40.683,
        "station_long_name": "Cape Grim, Tasmania",
        "station_height_masl": 94.0,
    }

def test_crds_attributes():
    _ = get_local_bucket(empty=True)

    crds = CRDS()

    filepath = get_datapath(filename="tac.picarro.1minute.100m.test.dat", data_type="CRDS")

    combined = crds.read_data(data_filepath=filepath, site="tac")

    combined_attributes = assign_attributes(data=combined, site="tac")

    # for key in combined_attributes:
    #     ds = combined_attributes[key]["data"]
    #     ds.to_netcdf(f"/tmp/testfile_{key}.nc")

    ch4_data = combined_attributes["ch4"]["data"]
    co2_data = combined_attributes["co2"]["data"]

    ch4_attr = ch4_data.attrs
    co2_attr = co2_data.attrs

    ch4_attr_complete = ch4_attr.copy()
    co2_attr_complete = co2_attr.copy()

    # Drop the attributes that vary by species/run before comparing the rest
    for attrs in (ch4_attr, co2_attr):
        for key in ("File created", "species", "Calibration_scale", "data_owner_email", "data_owner"):
            del attrs[key]

    global_attributes = {
        "inlet_height_magl": "100m",
        "comment": "Cavity ring-down measurements. Output from GCWerks",
        "Conditions of use": "Ensure that you contact the data owner at the outset of your project.",
        "Source": "In situ measurements of air",
        "Conventions": "CF-1.6",
        "Processed by": "*****@*****.**",
        "station_longitude": 1.13872,
        "station_latitude": 52.51775,
        "station_long_name": "Tacolneston Tower, UK",
        "station_height_masl": 50.0,
    }

    assert ch4_attr == global_attributes
    assert co2_attr == global_attributes

    assert ch4_attr_complete["species"] == "ch4"
    assert co2_attr_complete["species"] == "co2"

    # Check the individual variables' attributes
    time_attributes = {
        "label": "left",
        "standard_name": "time",
        "comment": "Time stamp corresponds to beginning of sampling period. Time since midnight UTC of reference date. Note that sampling periods are approximate.",
    }

    assert ch4_data.time.attrs == time_attributes
    assert co2_data.time.attrs == time_attributes

    # Check individual variables
    assert ch4_data["ch4"].attrs == {
        "long_name": "mole_fraction_of_methane_in_air",
        "units": "1e-9",
    }
    assert ch4_data["ch4_stdev"].attrs == {
        "long_name": "mole_fraction_of_methane_in_air_stdev",
        "units": "1e-9",
    }
    assert ch4_data["ch4_n_meas"].attrs == {
        "long_name": "mole_fraction_of_methane_in_air_n_meas"
    }
    assert co2_data["co2"].attrs == {
        "long_name": "mole_fraction_of_carbon_dioxide_in_air",
        "units": "1e-6",
    }
    assert co2_data["co2_stdev"].attrs == {
        "long_name": "mole_fraction_of_carbon_dioxide_in_air_stdev",
        "units": "1e-6",
    }
    assert co2_data["co2_n_meas"].attrs == {
        "long_name": "mole_fraction_of_carbon_dioxide_in_air_n_meas"
    }

def test_read_icos():
    get_local_bucket(empty=True)

    data_filepath = get_datapath(filename="tta.co2.1minute.222m.min.dat", data_type="ICOS")

    results = ObsSurface.read_file(filepath=data_filepath, data_type="ICOS")

    assert list(results["tta.co2.1minute.222m.min.dat"].keys())[0] == "tta.co2.1minute.222m.min_co2"

    uuid = results["tta.co2.1minute.222m.min.dat"]["tta.co2.1minute.222m.min_co2"]

    data = Datasource.load(uuid=uuid, shallow=False).data()

    assert sorted(list(data.keys())) == sorted(
        [
            "2011-12-07-01:38:00+00:00_2011-12-31-19:57:00+00:00",
            "2011-06-01-05:54:00+00:00_2011-08-31-17:58:00+00:00",
            "2011-03-30-08:52:00+00:00_2011-05-31-20:59:00+00:00",
            "2011-09-01-11:20:00+00:00_2011-11-30-03:39:00+00:00",
            "2012-12-01-04:03:00+00:00_2012-12-31-15:41:00+00:00",
            "2012-06-01-11:15:00+00:00_2012-08-07-19:16:00+00:00",
            "2012-04-07-06:20:00+00:00_2012-05-31-18:00:00+00:00",
            "2012-09-05-02:15:00+00:00_2012-11-30-19:08:00+00:00",
            "2013-01-01-00:01:00+00:00_2013-01-17-18:06:00+00:00",
        ]
    )

    co2_data = data["2012-12-01-04:03:00+00:00_2012-12-31-15:41:00+00:00"]

    assert co2_data.time[0] == Timestamp("2012-12-01T04:03:00")
    assert co2_data.time[-1] == Timestamp("2012-12-31T15:41:00")
    assert co2_data["co2"][0] == 397.765
    assert co2_data["co2"][-1] == 398.374
    assert co2_data["co2_variability"][0] == 0.057
    assert co2_data["co2_variability"][-1] == 0.063
    assert co2_data["co2_number_of_observations"][0] == 12
    assert co2_data["co2_number_of_observations"][-1] == 13

    del co2_data.attrs["File created"]

    assert co2_data.attrs == {
        "Conditions of use": "Ensure that you contact the data owner at the outset of your project.",
        "Source": "In situ measurements of air",
        "Conventions": "CF-1.6",
        "Processed by": "*****@*****.**",
        "species": "co2",
        "Calibration_scale": "unknown",
        "station_longitude": -2.98598,
        "station_latitude": 56.55511,
        "station_long_name": "Angus Tower, UK",
        "station_height_masl": 300.0,
    }

    obs = ObsSurface.load()
    assert list(obs._datasource_names.keys())[0] == "tta.co2.1minute.222m.min_co2"