def test_new_to_old(some_paramspecbases):
    """new_to_old must translate both dependencies and inferences."""
    ps1, ps2, ps3, ps4 = some_paramspecbases

    def _plain(base, **links):
        # Build an old-style ParamSpec from a ParamSpecBase, optionally
        # attaching depends_on / inferred_from link lists.
        return ParamSpec(name=base.name, paramtype=base.type,
                         label=base.label, unit=base.unit, **links)

    # Case 1: ps1 depends on (ps2, ps3); ps4 is standalone
    idps_new = InterDependencies_(dependencies={ps1: (ps2, ps3)},
                                  standalones=(ps4,))
    idps_old_expected = InterDependencies(
        _plain(ps2),
        _plain(ps3),
        _plain(ps1, depends_on=[ps2.name, ps3.name]),
        _plain(ps4))
    assert new_to_old(idps_new) == idps_old_expected

    # Case 2: ps1 is inferred from (ps2, ps3); ps4 is standalone
    idps_new = InterDependencies_(inferences={ps1: (ps2, ps3)},
                                  standalones=(ps4,))
    idps_old_expected = InterDependencies(
        _plain(ps2),
        _plain(ps3),
        _plain(ps1, inferred_from=[ps2.name, ps3.name]),
        _plain(ps4))
    assert new_to_old(idps_new) == idps_old_expected
def test_convert_v0_to_newer(some_paramspecs):
    """A version-0 run-describer dict must convert cleanly to v1 and v2."""
    group = some_paramspecs[1]
    interdeps = InterDependencies(group['ps1'], group['ps2'], group['ps3'],
                                  group['ps4'], group['ps6'])
    v0 = RunDescriberV0Dict(interdependencies=interdeps._to_dict(),
                            version=0)

    v1 = v0_to_v1(v0)
    v2 = v0_to_v2(v0)

    _assert_dicts_are_related_as_expected(v0, v1, v2)
def test_old_to_new(some_paramspecs):
    """old_to_new must classify paramspecs as deps, inferences, standalones."""
    group = some_paramspecs[1]
    ps1, ps2, ps3, ps4, ps5, ps6 = (group[f'ps{i}'] for i in range(1, 7))
    (ps1_base, ps2_base, ps3_base,
     ps4_base, ps5_base, ps6_base) = (ps.base_version()
                                      for ps in (ps1, ps2, ps3,
                                                 ps4, ps5, ps6))

    # Subset with one inference (ps3 <- ps1) and one standalone (ps2)
    idps_new = old_to_new(InterDependencies(ps1, ps2, ps3))
    assert idps_new.dependencies == {}
    assert idps_new.inferences == {ps3_base: (ps1_base,)}
    assert idps_new.standalones == {ps2_base}
    expected_specs = (ps1_base, ps2_base, ps3_base)
    assert idps_new._id_to_paramspec == {ps.name: ps
                                         for ps in expected_specs}

    # Full graph (duplicates allowed): ps5/ps6 depend on (ps3, ps4),
    # ps3 <- ps1 and ps4 <- ps2, so nothing is standalone
    idps_new = old_to_new(InterDependencies(ps2, ps4, ps1, ps2,
                                            ps3, ps5, ps6))
    assert idps_new.dependencies == {ps5_base: (ps3_base, ps4_base),
                                     ps6_base: (ps3_base, ps4_base)}
    assert idps_new.inferences == {ps3_base: (ps1_base,),
                                   ps4_base: (ps2_base,)}
    assert idps_new.standalones == set()
    expected_specs = (ps1_base, ps2_base, ps3_base,
                      ps4_base, ps5_base, ps6_base)
    assert idps_new._id_to_paramspec == {ps.name: ps
                                         for ps in expected_specs}

    # Two unrelated parameters: both standalone
    idps_new = old_to_new(InterDependencies(ps1, ps2))
    assert idps_new.dependencies == {}
    assert idps_new.inferences == {}
    assert idps_new.standalones == {ps1_base, ps2_base}
    expected_specs = (ps1_base, ps2_base)
    assert idps_new._id_to_paramspec == {ps.name: ps
                                         for ps in expected_specs}
def upgrade_5_to_6(conn: ConnectionPlus) -> None:
    """
    Perform the upgrade from version 5 to version 6.

    The upgrade ensures that the runs_description has a top-level entry
    called 'version'. Note that version changes of the runs_description
    will not be tracked as schema upgrades.
    """
    highest_run_id = one(
        atomic_transaction(conn, "SELECT max(run_id) FROM runs"),
        'max(run_id)') or 0

    # If one run fails, we want the whole upgrade to roll back, hence the
    # entire upgrade is one atomic transaction
    with atomic(conn) as conn:
        progress = tqdm(range(1, highest_run_id + 1))
        progress.set_description("Upgrading database, version 5 -> 6")

        # Serialized empty interdependencies, reused for runs lacking one
        empty_idps_ser = InterDependencies()._to_dict()

        for run_id in progress:
            json_str = get_run_description(conn, run_id)
            if json_str is None:
                payload = {'version': 0,
                           'interdependencies': empty_idps_ser}
            else:
                old_ser = json.loads(json_str)
                # 'version' first so it is the first entry of the dict
                payload = {'version': 0,
                           'interdependencies':
                               old_ser['interdependencies']}
            update_run_description(conn, run_id, json.dumps(payload))
def test_wrong_input_raises():
    """
    InterDependencies must reject any argument that is not a ParamSpec.

    Each case mixes plain strings with real ParamSpecs in a different
    position, so the arguments are unpacked: passing the list itself (as
    the original code did) made all three cases equivalent, since a list
    is never a ParamSpec regardless of its contents.
    """
    bad_inputs = [['p1', 'p2', 'p3'],
                  [ParamSpec('p1', paramtype='numeric'), 'p2'],
                  ['p1', ParamSpec('p2', paramtype='text')]]
    for pspecs in bad_inputs:
        with pytest.raises(ValueError):
            # Unpack so each element is validated individually
            InterDependencies(*pspecs)
def test_construct_currect_rundesciber_from_v0(some_paramspecs):
    # NOTE(review): 'currect'/'rundesciber' typos left in the name so that
    # pytest discovery and any test selection by name stay unchanged.
    group = some_paramspecs[1]
    interdeps = InterDependencies(group['ps1'], group['ps2'], group['ps3'],
                                  group['ps4'], group['ps6'])
    v0 = RunDescriberV0Dict(interdependencies=interdeps._to_dict(),
                            version=0)

    rds1 = RunDescriber._from_dict(v0)
    rds2 = from_dict_to_current(v0)

    # Both construction paths must yield the current (v2) representation
    expected_v2_dict = RunDescriberV2Dict(
        interdependencies=interdeps._to_dict(),
        interdependencies_=old_to_new(interdeps)._to_dict(),
        version=2)

    assert DeepDiff(rds1._to_dict(), expected_v2_dict,
                    ignore_order=True) == {}
    assert DeepDiff(rds2._to_dict(), expected_v2_dict,
                    ignore_order=True) == {}
def _assert_dicts_are_related_as_expected(v0, v1, v2):
    """Check the invariants tying together v0, v1 and v2 describer dicts."""
    expected_new_idps = old_to_new(
        InterDependencies._from_dict(v0['interdependencies']))._to_dict()
    assert v1['interdependencies'] == expected_new_idps
    assert v1['version'] == 1
    assert len(v1) == 2

    # conversion does not preserve order in the dict so use deepdiff to compare
    assert DeepDiff(v2['interdependencies'], v0['interdependencies'],
                    ignore_order=True) == {}
    assert v2['interdependencies_'] == v1['interdependencies']
    assert v2['version'] == 2
    assert len(v2) == 3
def new_to_old(idps: InterDependencies_) -> InterDependencies:
    """
    Create a new InterDependencies object (old style)
    from an existing InterDependencies_ object (new style). Leaves the
    original object unchanged. Only meant to be used for ensuring
    backwards-compatibility until we update sqlite module to forget about
    ParamSpecs
    """
    paramspecs: Dict[str, ParamSpec] = {}

    def _as_plain(base) -> ParamSpec:
        # Old-style ParamSpec with no links attached yet
        return ParamSpec(name=base.name, paramtype=base.type,
                         label=base.label, unit=base.unit)

    # first the independent parameters
    for axes in idps.dependencies.values():
        for axis in axes:
            paramspecs[axis.name] = _as_plain(axis)

    for sources in idps.inferences.values():
        for source in sources:
            paramspecs[source.name] = _as_plain(source)

    # then everything registered in the object (overwrites are identical
    # plain copies, so order between these passes does not matter)
    for base in idps._paramspec_to_id:
        paramspecs[base.name] = _as_plain(base)

    # finally wire up the links on the dependent/inferred parameters
    for dependent, axes in idps.dependencies.items():
        paramspecs[dependent.name]._depends_on.extend(
            axis.name for axis in axes)
    for target, sources in idps.inferences.items():
        paramspecs[target.name]._inferred_from.extend(
            source.name for source in sources)

    return InterDependencies(*paramspecs.values())
def test_equality_old(some_paramspecs):
    # TODO: make this more fancy with itertools
    group = some_paramspecs[1]
    ps1, ps2, ps3, ps4, ps5, ps6 = (group[f'ps{i}'] for i in range(1, 7))

    # Equality must be insensitive to the order of constructor arguments
    assert InterDependencies(ps1, ps2, ps3) == InterDependencies(ps3, ps2, ps1)
    assert InterDependencies(ps1, ps6, ps3) == InterDependencies(ps3, ps6, ps1)
    assert InterDependencies(ps4, ps5, ps3) == InterDependencies(ps3, ps4, ps5)
def upgrade_2_to_3(conn: ConnectionPlus) -> None:
    """
    Perform the upgrade from version 2 to version 3

    Insert a new column, run_description, to the runs table and fill it out
    for existing runs with information retrieved from the layouts and
    dependencies tables represented as the json output of a RunDescriber
    object
    """
    highest_run_id = one(
        atomic_transaction(conn, "SELECT max(run_id) FROM runs"),
        'max(run_id)') or 0

    # If one run fails, we want the whole upgrade to roll back, hence the
    # entire upgrade is one atomic transaction
    with atomic(conn) as conn:
        transaction(conn, "ALTER TABLE runs ADD COLUMN run_description TEXT")

        # Bulk-load the legacy layout/dependency info once, up front
        result_tables = _2to3_get_result_tables(conn)
        layout_ids_all = _2to3_get_layout_ids(conn)
        indeps_all = _2to3_get_indeps(conn)
        deps_all = _2to3_get_deps(conn)
        layouts = _2to3_get_layouts(conn)
        dependencies = _2to3_get_dependencies(conn)

        progress = tqdm(range(1, highest_run_id + 1))
        progress.set_description("Upgrading database")

        for run_id in progress:
            if run_id in layout_ids_all:
                result_table_name = result_tables[run_id]
                layout_ids = list(layout_ids_all[run_id])
                independents = tuple(indeps_all.get(run_id, ()))
                dependents = tuple(deps_all.get(run_id, ()))

                paramspecs = _2to3_get_paramspecs(conn, layout_ids, layouts,
                                                  dependencies, dependents,
                                                  independents,
                                                  result_table_name)
                interdeps = InterDependencies(*paramspecs.values())
            else:
                # Run has no layouts: store empty interdependencies
                interdeps = InterDependencies()

            json_str = json.dumps(
                {'interdependencies': interdeps._to_dict()})

            sql = """
                  UPDATE runs
                  SET run_description = ?
                  WHERE run_id == ?
                  """
            cur = conn.cursor()
            cur.execute(sql, (json_str, run_id))
            log.debug(f"Upgrade in transition, run number {run_id}: OK")
def test_perform_upgrade_v3_to_v4():
    """
    Test that a db upgrade from version 3 to version 4 works correctly.

    (The previous docstring claimed "v2 to v4", but this test starts from
    a version-3 fixture and runs only the 3 -> 4 upgrade.)
    """
    v3fixpath = os.path.join(fixturepath, 'db_files', 'version3')
    dbname_old = os.path.join(v3fixpath, 'some_runs_upgraded_2.db')

    if not os.path.exists(dbname_old):
        pytest.skip("No db-file fixtures found. You can generate test db-files"
                    " using the scripts in the "
                    "https://github.com/QCoDeS/qcodes_generate_test_db/ repo")

    with temporarily_copied_DB(dbname_old, debug=False, version=3) as conn:
        assert get_user_version(conn) == 3

        sql = f"""
              SELECT run_description
              FROM runs
              WHERE run_id == 1
              """

        perform_db_upgrade_3_to_4(conn)

        c = atomic_transaction(conn, sql)
        json_str = one(c, 'run_description')

        unversioned_dict = json.loads(json_str)
        idp = InterDependencies._from_dict(
            unversioned_dict['interdependencies'])
        assert isinstance(idp, InterDependencies)

        def check_paramspec(name, depends_on, inferred_from, label, unit):
            # Assert both the legacy comma-joined string form and the
            # list form of the dependency/inference attributes.
            ps = next(p for p in idp.paramspecs if p.name == name)
            assert ps.depends_on == ', '.join(depends_on)
            assert ps.depends_on_ == list(depends_on)
            assert ps.inferred_from == ', '.join(inferred_from)
            assert ps.inferred_from_ == list(inferred_from)
            assert ps.label == label
            assert ps.unit == unit

        check_paramspec('p0', [], [], "Parameter 0", "unit 0")
        check_paramspec('p1', [], [], "Parameter 1", "unit 1")
        check_paramspec('p2', [], ['p0'], "Parameter 2", "unit 2")
        check_paramspec('p3', [], ['p1', 'p0'], "Parameter 3", "unit 3")
        check_paramspec('p4', ['p2', 'p3'], [], "Parameter 4", "unit 4")
        check_paramspec('p5', [], ['p0'], "Parameter 5", "unit 5")
def test_perform_actual_upgrade_2_to_3_some_runs():
    """Upgrade a version-2 fixture DB and verify the recovered descriptions."""
    v2fixpath = os.path.join(fixturepath, 'db_files', 'version2')
    dbname_old = os.path.join(v2fixpath, 'some_runs.db')

    if not os.path.exists(dbname_old):
        pytest.skip("No db-file fixtures found. You can generate test db-files"
                    " using the scripts in the"
                    "https://github.com/QCoDeS/qcodes_generate_test_db/ repo")

    with temporarily_copied_DB(dbname_old, debug=False, version=2) as conn:
        assert get_user_version(conn) == 2

        perform_db_upgrade_2_to_3(conn)

        desc_query = 'SELECT run_description FROM runs'
        c = atomic_transaction(conn, desc_query)
        assert len(c.fetchall()) == 10

        # retrieve the json string and recreate the object
        sql = f"""
              SELECT run_description
              FROM runs
              WHERE run_id == 1
              """
        c = atomic_transaction(conn, sql)
        json_str = one(c, 'run_description')

        unversioned_dict = json.loads(json_str)
        idp = InterDependencies._from_dict(
            unversioned_dict['interdependencies'])
        assert isinstance(idp, InterDependencies)

        # here we verify that the dependencies encoded in
        # tests/dataset/legacy_DB_generation/generate_version_2.py
        # are recovered

        def check_paramspec(name, depends_on, inferred_from, label, unit):
            # Both the comma-joined string form and the list form of the
            # dependency/inference attributes must agree.
            ps = next(p for p in idp.paramspecs if p.name == name)
            assert ps.depends_on == ', '.join(depends_on)
            assert ps.depends_on_ == list(depends_on)
            assert ps.inferred_from == ', '.join(inferred_from)
            assert ps.inferred_from_ == list(inferred_from)
            assert ps.label == label
            assert ps.unit == unit

        check_paramspec('p0', [], [], "Parameter 0", "unit 0")
        check_paramspec('p1', [], [], "Parameter 1", "unit 1")
        check_paramspec('p2', [], ['p0'], "Parameter 2", "unit 2")
        check_paramspec('p3', [], ['p1', 'p0'], "Parameter 3", "unit 3")
        check_paramspec('p4', ['p2', 'p3'], [], "Parameter 4", "unit 4")
        check_paramspec('p5', [], ['p0'], "Parameter 5", "unit 5")
def test_old_to_new_and_back(some_paramspecs):
    """Round trip: old -> new -> old must reproduce the original object."""
    idps_old = InterDependencies(*some_paramspecs[1].values())
    assert new_to_old(old_to_new(idps_old)) == idps_old
def upgrade_3_to_4(conn: ConnectionPlus) -> None:
    """
    Perform the upgrade from version 3 to version 4. This really
    repeats the version 3 upgrade as it originally had two bugs in
    the inferred annotation. inferred_from was passed incorrectly
    resulting in the parameter being marked inferred_from for each char
    in the inferred_from variable and inferred_from was not handled
    correctly for parameters that were neither dependencies nor dependent on
    other parameters. Both have since been fixed so rerun the upgrade.
    """
    highest_run_id = one(
        atomic_transaction(conn, "SELECT max(run_id) FROM runs"),
        'max(run_id)') or 0

    # If one run fails, we want the whole upgrade to roll back, hence the
    # entire upgrade is one atomic transaction
    with atomic(conn) as conn:
        # Bulk-load the legacy layout/dependency info once, up front
        result_tables = _2to3_get_result_tables(conn)
        layout_ids_all = _2to3_get_layout_ids(conn)
        indeps_all = _2to3_get_indeps(conn)
        deps_all = _2to3_get_deps(conn)
        layouts = _2to3_get_layouts(conn)
        dependencies = _2to3_get_dependencies(conn)

        progress = tqdm(range(1, highest_run_id + 1), file=sys.stdout)
        progress.set_description("Upgrading database; v3 -> v4")

        for run_id in progress:
            if run_id in layout_ids_all:
                result_table_name = result_tables[run_id]
                layout_ids = list(layout_ids_all[run_id])
                independents = tuple(indeps_all.get(run_id, ()))
                dependents = tuple(deps_all.get(run_id, ()))

                paramspecs = _2to3_get_paramspecs(conn, layout_ids, layouts,
                                                  dependencies, dependents,
                                                  independents,
                                                  result_table_name)
                interdeps = InterDependencies(*paramspecs.values())
            else:
                # Run has no layouts: store empty interdependencies
                interdeps = InterDependencies()

            json_str = json.dumps(
                {'interdependencies': interdeps._to_dict()})

            sql = """
                  UPDATE runs
                  SET run_description = ?
                  WHERE run_id == ?
                  """
            cur = conn.cursor()
            cur.execute(sql, (json_str, run_id))
            log.debug(f"Upgrade in transition, run number {run_id}: OK")
def __init__(self, path_to_db: Optional[str] = None,
             run_id: Optional[int] = None,
             conn: Optional[ConnectionPlus] = None,
             exp_id=None,
             name: Optional[str] = None,
             specs: Optional[SpecsOrInterDeps] = None,
             values=None,
             metadata=None) -> None:
    """
    Create a new DataSet object. The object can either hold a new run or
    an already existing run. If a run_id is provided, then an old run is
    looked up, else a new run is created.

    Args:
        path_to_db: path to the sqlite file on disk. If not provided, the
            path will be read from the config.
        run_id: provide this when loading an existing run, leave it
            as None when creating a new run
        conn: connection to the DB; if provided and `path_to_db` is
            provided as well, then a ValueError is raised (this is to
            prevent the possibility of providing a connection to a DB
            file that is different from `path_to_db`)
        exp_id: the id of the experiment in which to create a new run.
            Ignored if run_id is provided.
        name: the name of the dataset. Ignored if run_id is provided.
        specs: paramspecs belonging to the dataset. Ignored if run_id is
            provided.
        values: values to insert into the dataset. Ignored if run_id is
            provided.
        metadata: metadata to insert into the dataset. Ignored if run_id
            is provided.
    """
    # conn_from_dbpath_or_conn raises if both conn and path_to_db are given
    self.conn = conn_from_dbpath_or_conn(conn, path_to_db)

    self._run_id = run_id
    self._debug = False
    self.subscribers: Dict[str, _Subscriber] = {}
    # Declared here; assigned in both branches below
    self._interdeps: InterDependencies_

    if run_id is not None:
        # Load an existing run: state comes entirely from the database
        if not run_exists(self.conn, run_id):
            raise ValueError(f"Run with run_id {run_id} does not exist in "
                             f"the database")
        self._completed = completed(self.conn, self.run_id)
        run_desc = self._get_run_description_from_db()
        self._interdeps = run_desc.interdeps
        self._metadata = get_metadata_from_run_id(self.conn, run_id)
        # A run counts as started once it has a raw run timestamp
        self._started = self.run_timestamp_raw is not None
    else:
        # Actually perform all the side effects needed for the creation
        # of a new dataset. Note that a dataset is created (in the DB)
        # with no parameters; they are written to disk when the dataset
        # is marked as started
        if exp_id is None:
            # Default to the most recent experiment, if any exists
            if len(get_experiments(self.conn)) > 0:
                exp_id = get_last_experiment(self.conn)
            else:
                raise ValueError("No experiments found."
                                 "You can start a new one with:"
                                 " new_experiment(name, sample_name)")
        name = name or "dataset"
        _, run_id, __ = create_run(self.conn, exp_id, name,
                                   generate_guid(),
                                   parameters=None,
                                   values=values,
                                   metadata=metadata)
        # this is really the UUID (an ever increasing count in the db)
        self._run_id = run_id
        self._completed = False
        self._started = False
        # Accept either new-style interdependencies directly, a sequence
        # of old-style ParamSpecs (converted), or nothing at all
        if isinstance(specs, InterDependencies_):
            self._interdeps = specs
        elif specs is not None:
            self._interdeps = old_to_new(InterDependencies(*specs))
        else:
            self._interdeps = InterDependencies_()
        self._metadata = get_metadata_from_run_id(self.conn, self.run_id)
def _insert_run(
        conn: ConnectionPlus, exp_id: int, name: str,
        guid: str,
        parameters: Optional[List[ParamSpec]] = None,
):
    """Insert a new row into the runs table and return its bookkeeping info."""
    # get run counter and formatter from experiments
    run_counter, format_string = select_many_where(conn, "experiments",
                                                   "run_counter",
                                                   "format_string",
                                                   where_column="exp_id",
                                                   where_value=exp_id)
    run_counter += 1
    formatted_name = format_table_name(format_string, name, exp_id,
                                       run_counter)
    table = "runs"

    parameters = parameters or []
    run_desc = RunDescriber(old_to_new(InterDependencies(*parameters)))
    desc_str = serial.to_json_for_storage(run_desc)

    with atomic(conn) as conn:
        if parameters:
            # With parameters: also persist their names and register them
            # in the layouts/dependencies tables
            query = f"""
            INSERT INTO {table}
                (name,
                 exp_id,
                 guid,
                 result_table_name,
                 result_counter,
                 run_timestamp,
                 parameters,
                 is_completed,
                 run_description)
            VALUES
                (?,?,?,?,?,?,?,?,?)
            """
            cursor = transaction(conn, query, name, exp_id, guid,
                                 formatted_name, run_counter, None,
                                 ",".join(p.name for p in parameters),
                                 False, desc_str)
            _add_parameters_to_layout_and_deps(conn, formatted_name,
                                               *parameters)
        else:
            query = f"""
            INSERT INTO {table}
                (name,
                 exp_id,
                 guid,
                 result_table_name,
                 result_counter,
                 run_timestamp,
                 is_completed,
                 run_description)
            VALUES
                (?,?,?,?,?,?,?,?)
            """
            cursor = transaction(conn, query, name, exp_id, guid,
                                 formatted_name, run_counter, None,
                                 False, desc_str)
        run_id = cursor.lastrowid

    return run_counter, formatted_name, run_id