Example 1
def test_get_run_attributes() -> None:
    """Check that run attributes loaded from the db match the dataset.

    Creates a completed dataset with one metadata entry, then verifies
    both the raw attribute view (``get_raw_run_attributes``) and the
    parsed view (``get_run_attributes``) against the in-memory dataset.
    """
    name = "test-dataset"
    ds = new_data_set(name)
    ds.mark_started()
    ds.mark_completed()
    ds.add_metadata("foo", "bar")

    loaded_raw_attrs = get_raw_run_attributes(ds.conn, ds.guid)
    assert loaded_raw_attrs is not None

    assert loaded_raw_attrs["run_id"] == ds.run_id
    assert loaded_raw_attrs["counter"] == ds.counter
    assert loaded_raw_attrs["captured_counter"] == ds.captured_counter
    assert loaded_raw_attrs["captured_run_id"] == ds.captured_run_id
    assert loaded_raw_attrs["experiment"] == get_experiment_attributes_by_exp_id(
        ds.conn, ds.exp_id
    )
    assert loaded_raw_attrs["experiment"]["exp_id"] == ds.exp_id
    assert loaded_raw_attrs["experiment"]["name"] == ds.exp_name
    assert loaded_raw_attrs["experiment"]["sample_name"] == ds.sample_name
    assert loaded_raw_attrs["name"] == name
    # raw view keeps timestamps as stored (floats), links as JSON string
    assert loaded_raw_attrs["run_timestamp"] == ds.run_timestamp_raw
    assert loaded_raw_attrs["completed_timestamp"] == ds.completed_timestamp_raw
    assert loaded_raw_attrs["parent_dataset_links"] == "[]"
    assert "interdependencies" in loaded_raw_attrs["run_description"]
    assert loaded_raw_attrs["snapshot"] is None
    assert loaded_raw_attrs["metadata"] == {"foo": "bar"}

    loaded_attrs = get_run_attributes(ds.conn, ds.guid)
    assert loaded_attrs is not None

    assert loaded_attrs["run_id"] == ds.run_id
    assert loaded_attrs["counter"] == ds.counter
    assert loaded_attrs["captured_counter"] == ds.captured_counter
    assert loaded_attrs["captured_run_id"] == ds.captured_run_id
    assert loaded_attrs["experiment"] == get_experiment_attributes_by_exp_id(
        ds.conn, ds.exp_id
    )
    assert loaded_attrs["experiment"]["exp_id"] == ds.exp_id
    assert loaded_attrs["experiment"]["name"] == ds.exp_name
    assert loaded_attrs["experiment"]["sample_name"] == ds.sample_name
    assert loaded_attrs["name"] == name
    # parsed view converts timestamps to strings, links/description to objects
    assert loaded_attrs["run_timestamp"] == raw_time_to_str_time(ds.run_timestamp_raw)
    assert loaded_attrs["completed_timestamp"] == raw_time_to_str_time(
        ds.completed_timestamp_raw
    )
    assert loaded_attrs["parent_dataset_links"] == []
    assert isinstance(loaded_attrs["run_description"], RunDescriber)
    assert loaded_attrs["snapshot"] is None
    assert loaded_attrs["metadata"] == {"foo": "bar"}
Example 2
    def _load_from_db(cls, conn: ConnectionPlus, guid: str) -> DataSetInMem:
        """Build an in-memory dataset from the db run identified by ``guid``.

        Raises a ``RuntimeError`` when no run with that GUID exists, and
        restores the cache from the exported netcdf file when one is
        recorded in the run's export info.
        """
        attrs = get_raw_run_attributes(conn, guid)
        if attrs is None:
            raise RuntimeError(
                f"Could not find the requested run with GUID: {guid} in the db"
            )

        meta = attrs["metadata"]
        # export info is persisted as a string inside the metadata mapping
        export_info = ExportInfo.from_str(meta.get("export_info", ""))
        experiment = attrs["experiment"]

        dataset = cls(
            run_id=attrs["run_id"],
            captured_run_id=attrs["captured_run_id"],
            counter=attrs["counter"],
            captured_counter=attrs["captured_counter"],
            name=attrs["name"],
            exp_id=experiment["exp_id"],
            exp_name=experiment["name"],
            sample_name=experiment["sample_name"],
            guid=guid,
            path_to_db=conn.path_to_dbfile,
            run_timestamp_raw=attrs["run_timestamp"],
            completed_timestamp_raw=attrs["completed_timestamp"],
            metadata=meta,
            rundescriber=serial.from_json_to_current(attrs["run_description"]),
            parent_dataset_links=str_to_links(attrs["parent_dataset_links"]),
            export_info=export_info,
            snapshot=attrs["snapshot"],
        )

        # may be None when the dataset was never exported to netcdf
        cls._set_cache_from_netcdf(dataset, export_info.export_paths.get("nc"))
        return dataset
Example 3
    def _load_from_netcdf(
            cls,
            path: Union[Path, str],
            path_to_db: Optional[Union[Path, str]] = None) -> DataSetInMem:
        """
        Create a in memory dataset from a netcdf file.
        The netcdf file is expected to contain a QCoDeS dataset that
        has been exported using the QCoDeS netcdf export functions.

        Args:
            path: Path to the netcdf file to import.
            path_to_db: Optional path to a database where this dataset may be
                exported to. If not supplied the path can be given at export time
                or the dataset exported to the default db as set in the QCoDeS config.

        Returns:
            The loaded dataset.
        """
        # in the code below floats and ints loaded from attributes are explicitly casted
        # this is due to some older versions of qcodes writing them with a different backend
        # reading them back results in a numpy array of one element

        import xarray as xr

        loaded_data = xr.load_dataset(path, engine="h5netcdf")

        # links are stored as a JSON string attribute; absent means no parents
        parent_dataset_links = str_to_links(
            loaded_data.attrs.get("parent_dataset_links", "[]"))
        if path_to_db is not None:
            path_to_db = str(path_to_db)

        # Open a (possibly default) db connection just long enough to look up
        # whether this run already exists there; closing() guarantees cleanup.
        with contextlib.closing(
                conn_from_dbpath_or_conn(conn=None,
                                         path_to_db=path_to_db)) as conn:
            run_data = get_raw_run_attributes(conn, guid=loaded_data.guid)
            # record the resolved db path (default db if none was supplied)
            path_to_db = conn.path_to_dbfile

        if run_data is not None:
            # run is known to the db: reuse its local run_id/counter
            run_id = run_data["run_id"]
            counter = run_data["counter"]
        else:
            # not in the db: fall back to the captured ids stored in the file
            run_id = int(loaded_data.captured_run_id)
            counter = int(loaded_data.captured_counter)

        path = str(path)
        path = os.path.abspath(path)

        # remember this file as the dataset's netcdf export location
        export_info = ExportInfo.from_str(
            loaded_data.attrs.get("export_info", ""))
        export_info.export_paths["nc"] = path
        # file attributes that are dataset fields, not user metadata
        non_metadata = {
            "run_timestamp_raw",
            "completed_timestamp_raw",
            "ds_name",
            "exp_name",
            "sample_name",
            "export_info",
            "parent_dataset_links",
        }

        # everything else that is not a runs-table column is user metadata
        metadata_keys = (set(loaded_data.attrs.keys()) -
                         set(RUNS_TABLE_COLUMNS) - non_metadata)
        metadata = {}
        for key in metadata_keys:
            data = loaded_data.attrs[key]
            if isinstance(data, np.ndarray) and data.size == 1:
                # unwrap one-element arrays written by older netcdf backends
                data = data[0]
            metadata[key] = data

        ds = cls(
            run_id=run_id,
            captured_run_id=int(loaded_data.captured_run_id),
            counter=counter,
            captured_counter=int(loaded_data.captured_counter),
            name=loaded_data.ds_name,
            # NOTE(review): exp_id is hard-coded to 0 for file-loaded datasets;
            # presumably a placeholder since the experiment is not in a db — confirm
            exp_id=0,
            exp_name=loaded_data.exp_name,
            sample_name=loaded_data.sample_name,
            guid=loaded_data.guid,
            path_to_db=path_to_db,
            # explicit float casts: see backend note at the top of this method
            run_timestamp_raw=float(loaded_data.run_timestamp_raw),
            completed_timestamp_raw=float(loaded_data.completed_timestamp_raw),
            metadata=metadata,
            rundescriber=serial.from_json_to_current(
                loaded_data.run_description),
            parent_dataset_links=parent_dataset_links,
            export_info=export_info,
            snapshot=loaded_data.snapshot,
        )
        # populate the cache directly from the file contents
        ds._cache = DataSetCacheInMem(ds)
        ds._cache._data = cls._from_xarray_dataset_to_qcodes_raw_data(
            loaded_data)

        return ds