def test_perform_actual_upgrade_1_to_2():
    """
    Upgrade an empty v1 fixture database to v2 and check that the two
    indices (on exp_id and guid) have been created on the runs table.
    """
    v1fixpath = os.path.join(fixturepath, 'db_files', 'version1')

    dbname_old = os.path.join(v1fixpath, 'empty.db')

    if not os.path.exists(dbname_old):
        # CONSISTENCY FIX: point at the same fixture-generation repo as the
        # other upgrade tests instead of the stale legacy_DB_generation folder
        pytest.skip("No db-file fixtures found. You can generate test db-files"
                    " using the scripts in the "
                    "https://github.com/QCoDeS/qcodes_generate_test_db/ repo")

    with temporarily_copied_DB(dbname_old, debug=False, version=1) as conn:

        assert get_user_version(conn) == 1

        # the guid column exists in v1, but the db is empty
        guid_table_query = "SELECT guid FROM runs"

        c = atomic_transaction(conn, guid_table_query)
        assert len(c.fetchall()) == 0

        # no indices exist before the upgrade
        index_query = "PRAGMA index_list(runs)"

        c = atomic_transaction(conn, index_query)
        assert len(c.fetchall()) == 0

        perform_db_upgrade_1_to_2(conn)

        # the upgrade adds exactly two indices: IX_runs_exp_id and IX_runs_guid
        c = atomic_transaction(conn, index_query)
        assert len(c.fetchall()) == 2
def test_perform_actual_upgrade_0_to_1():
    """Upgrade an empty v0 fixture db and check the guid column appears."""
    # we cannot use the empty_temp_db, since that has already called connect
    # and is therefore latest version already
    fixture_dir = os.path.join(fixturepath, 'db_files', 'version0')
    old_db = os.path.join(fixture_dir, 'empty.db')

    if not os.path.exists(old_db):
        pytest.skip("No db-file fixtures found. You can generate test db-files"
                    " using the scripts in the "
                    "https://github.com/QCoDeS/qcodes_generate_test_db/ repo")

    with temporarily_copied_DB(old_db, debug=False, version=0) as conn:
        assert get_user_version(conn) == 0

        guid_table_query = "SELECT guid FROM runs"

        # the guid column does not exist before the upgrade
        with pytest.raises(RuntimeError) as excinfo:
            atomic_transaction(conn, guid_table_query)
        assert error_caused_by(excinfo, 'no such column: guid')

        perform_db_upgrade_0_to_1(conn)

        assert get_user_version(conn) == 1
        assert len(atomic_transaction(conn, guid_table_query).fetchall()) == 0
def test_perform_actual_upgrade_2_to_3_empty():
    """Upgrade an empty v2 fixture db; run_description column must appear."""
    fixture_dir = os.path.join(fixturepath, 'db_files', 'version2')
    old_db = os.path.join(fixture_dir, 'empty.db')

    if not os.path.exists(old_db):
        pytest.skip("No db-file fixtures found. You can generate test db-files"
                    " using the scripts in the "
                    "https://github.com/QCoDeS/qcodes_generate_test_db/ repo")

    with temporarily_copied_DB(old_db, debug=False, version=2) as conn:
        assert get_user_version(conn) == 2

        desc_query = 'SELECT run_description FROM runs'

        # the run_description column does not exist before the upgrade
        with pytest.raises(RuntimeError) as excinfo:
            atomic_transaction(conn, desc_query)
        assert error_caused_by(excinfo, 'no such column: run_description')

        perform_db_upgrade_2_to_3(conn)

        assert get_user_version(conn) == 3
        assert len(atomic_transaction(conn, desc_query).fetchall()) == 0
def test_atomic_transaction_raises(experiment):
    """Malformed SQL must surface as a RuntimeError."""
    bad_sql = '""'
    with pytest.raises(RuntimeError):
        mut_conn.atomic_transaction(experiment.conn, bad_sql)
def _update_experiment_run_counter(conn: ConnectionPlus, exp_id: int,
                                   run_counter: int) -> None:
    """
    Write a new run_counter value into the experiments table.

    Args:
        conn: database connection
        exp_id: id of the experiment row to update
        run_counter: the new counter value to store
    """
    sql = """
    UPDATE experiments
    SET run_counter = ?
    WHERE exp_id = ?
    """
    atomic_transaction(conn, sql, run_counter, exp_id)
def test_atomic_transaction_on_sqlite3_connection_raises(tmp_path):
    """Test that atomic_transaction does not work for sqlite3.Connection"""
    plain_conn = sqlite3.connect(str(tmp_path / 'temp.db'))

    expected = re.escape('atomic context manager only accepts ConnectionPlus '
                         'database connection objects.')
    with pytest.raises(ValueError, match=expected):
        atomic_transaction(plain_conn, 'whatever sql query')
def finish_experiment(conn: ConnectionPlus, exp_id: int):
    """ Finish experiment

    Stamps the experiment's end_time with the current wall-clock time.

    Args:
        conn: database connection
        exp_id: id of the experiment to finish
    """
    query = """
    UPDATE experiments SET end_time=? WHERE exp_id=?;
    """
    atomic_transaction(conn, query, time.time(), exp_id)
def update_where(conn: ConnectionPlus, table: str, where_column: str,
                 where_value: Any, **updates: Any) -> None:
    """
    Update the columns given as keyword arguments on every row of ``table``
    where ``where_column`` matches ``where_value``.

    Args:
        conn: database connection
        table: name of the table to update
        where_column: column to match on
        where_value: value to match in where_column
        **updates: column-name/new-value pairs to write
    """
    # _massage_dict turns the kwargs into a SET clause plus ordered values
    set_clause, set_values = _massage_dict(updates)
    query = f"""
    UPDATE
        '{table}'
    SET
        {set_clause}
    WHERE
        {where_column} = ?
    """
    atomic_transaction(conn, query, *set_values, where_value)
def test_perform_actual_upgrade_6_to_7():
    """
    Upgrade a v6 fixture db with 10 runs to v7 and verify that the new
    captured_run_id / captured_counter columns exist and mirror run_id /
    result_counter for every run.
    """
    fixpath = os.path.join(fixturepath, 'db_files', 'version6')
    db_file = 'some_runs.db'
    dbname_old = os.path.join(fixpath, db_file)

    if not os.path.exists(dbname_old):
        pytest.skip("No db-file fixtures found. You can generate test db-files"
                    " using the scripts in the "
                    "https://github.com/QCoDeS/qcodes_generate_test_db/ repo")

    with temporarily_copied_DB(dbname_old, debug=False, version=6) as conn:
        assert isinstance(conn, ConnectionPlus)
        perform_db_upgrade_6_to_7(conn)
        assert get_user_version(conn) == 7

        # the fixture file is known to contain 10 runs
        no_of_runs_query = "SELECT max(run_id) FROM runs"
        no_of_runs = one(atomic_transaction(conn, no_of_runs_query),
                         'max(run_id)')
        assert no_of_runs == 10

        # the upgrade must have added the two new columns
        columns = atomic_transaction(conn,
                                     "PRAGMA table_info(runs)").fetchall()
        col_names = [col['name'] for col in columns]

        assert 'captured_run_id' in col_names
        assert 'captured_counter' in col_names

        # loading by run_id and by captured_run_id must give the same dataset
        for run_id in range(1, no_of_runs + 1):
            ds1 = load_by_id(run_id, conn)
            ds2 = load_by_run_spec(captured_run_id=run_id, conn=conn)

            assert ds1.the_same_dataset_as(ds2)

            assert ds1.run_id == run_id
            assert ds1.run_id == ds1.captured_run_id
            assert ds2.run_id == run_id
            assert ds2.run_id == ds2.captured_run_id

        # likewise for counter and captured_counter within experiment 1
        exp_id = 1
        for counter in range(1, no_of_runs + 1):
            ds1 = load_by_counter(counter, exp_id, conn)
            ds2 = load_by_run_spec(captured_counter=counter, conn=conn)

            assert ds1.the_same_dataset_as(ds2)
            assert ds1.counter == counter
            assert ds1.counter == ds1.captured_counter
            assert ds2.counter == counter
            assert ds2.counter == ds2.captured_counter
def __init__(
        self,
        dataSet: 'DataSet',
        id_: str,
        callback: Callable[..., None],
        state: Optional[Any] = None,
        loop_sleep_time: int = 0,  # in milliseconds
        min_queue_length: int = 1,
        callback_kwargs: Optional[Dict[str, Any]] = None) -> None:
    """
    Subscriber setup: registers a SQLite user function and an AFTER INSERT
    trigger on the dataset's results table so that every inserted row is
    pushed onto an internal queue for the callback to consume.

    Args:
        dataSet: the dataset whose results table is being watched
        id_: unique identifier used to name the callback and trigger
        callback: callable invoked with the queued data
        state: arbitrary user state handed to the callback machinery
        loop_sleep_time: sleep between callback-loop iterations (ms)
        min_queue_length: minimum number of queued rows before a callback
        callback_kwargs: extra keyword arguments bound onto the callback
    """
    super().__init__()
    self._id = id_

    self.dataSet = dataSet
    self.table_name = dataSet.table_name
    self._data_set_len = len(dataSet)

    self.state = state

    # queue of rows delivered by the SQLite trigger, drained by the loop
    self.data_queue: Queue = Queue()
    self._queue_length: int = 0
    self._stop_signal: bool = False
    # convert milliseconds to seconds
    self._loop_sleep_time = loop_sleep_time / 1000
    self.min_queue_length = min_queue_length

    # bind the extra kwargs now so the loop can call self.callback(data)
    if callback_kwargs is None or len(callback_kwargs) == 0:
        self.callback = callback
    else:
        self.callback = functools.partial(callback, **callback_kwargs)

    self.callback_id = f"callback{self._id}"
    self.trigger_id = f"sub{self._id}"

    conn = dataSet.conn

    # expose the queue-filling method as a SQL function (-1: any arity)
    conn.create_function(self.callback_id, -1, self._cache_data_to_queue)

    parameters = dataSet.get_parameters()
    sql_param_list = ",".join([f"NEW.{p.name}" for p in parameters])
    # the trigger calls our SQL function with the newly inserted values
    sql_create_trigger_for_callback = f"""
    CREATE TRIGGER {self.trigger_id}
        AFTER INSERT ON '{self.table_name}' BEGIN
        SELECT {self.callback_id}({sql_param_list});
    END;"""
    atomic_transaction(conn, sql_create_trigger_for_callback)

    self.log = logging.getLogger(f"_Subscriber {self._id}")
def test_atomic_transaction(tmp_path):
    """Test that atomic_transaction works for ConnectionPlus"""
    db_path = str(tmp_path / 'temp.db')
    plus_conn = ConnectionPlus(sqlite3.connect(db_path))
    control_conn = sqlite3.connect(db_path)

    sql_create_table = 'CREATE TABLE smth (name TEXT)'
    sql_table_exists = 'SELECT sql FROM sqlite_master WHERE TYPE = "table"'

    atomic_transaction(plus_conn, sql_create_table)

    # a second, independent connection must see the committed table
    first_row = control_conn.execute(sql_table_exists).fetchall()[0]
    assert sql_create_table in first_row
def test_perform_actual_upgrade_5_to_6():
    """Upgrade the empty and the 10-run v5 fixture dbs and verify the
    run descriptions are versioned afterwards."""
    fixture_dir = os.path.join(fixturepath, 'db_files', 'version5')

    empty_db = os.path.join(fixture_dir, 'empty.db')
    if not os.path.exists(empty_db):
        pytest.skip("No db-file fixtures found. You can generate test db-files"
                    " using the scripts in the "
                    "https://github.com/QCoDeS/qcodes_generate_test_db/ repo")

    with temporarily_copied_DB(empty_db, debug=False, version=5) as conn:
        perform_db_upgrade_5_to_6(conn)
        assert get_user_version(conn) == 6

    runs_db = os.path.join(fixture_dir, 'some_runs.db')
    with temporarily_copied_DB(runs_db, debug=False, version=5) as conn:
        perform_db_upgrade_5_to_6(conn)
        assert get_user_version(conn) == 6

        run_count = one(
            atomic_transaction(conn, "SELECT max(run_id) FROM runs"),
            'max(run_id)')
        assert run_count == 10

        for run_id in range(1, run_count + 1):
            json_str = get_run_description(conn, run_id)

            # stored serialization is version 0; deserializing brings it to
            # the current in-memory version (1)
            assert json.loads(json_str)['version'] == 0
            assert serial.from_json_to_current(json_str)._version == 1
def fix_version_4a_run_description_bug(conn: ConnectionPlus) -> Dict[str, int]:
    """
    Fix function to fix a bug where the RunDescriber accidentally wrote itself
    to string using the (new) InterDependencies_ object instead of the (old)
    InterDependencies object. After the first call, this function should be
    idempotent.


    Args:
        conn: the connection to the database

    Returns:
        A dict with the fix results ('runs_inspected', 'runs_fixed')
    """
    user_version = get_user_version(conn)

    if user_version != 4:
        # BUG FIX: the message was previously a plain string, so the literal
        # text "{user_version}" was printed instead of the actual version
        raise RuntimeError('Database of wrong version. Will not apply fix. '
                           f'Expected version 4, found version {user_version}')

    no_of_runs_query = "SELECT max(run_id) FROM runs"
    no_of_runs = one(atomic_transaction(conn, no_of_runs_query), 'max(run_id)')
    # max(run_id) is NULL (None) for an empty runs table
    no_of_runs = no_of_runs or 0

    with atomic(conn) as conn:
        pbar = tqdm(range(1, no_of_runs+1))
        pbar.set_description("Fixing database")

        # collect some metrics
        runs_inspected = 0
        runs_fixed = 0

        old_style_keys = ['paramspecs']
        new_style_keys = ['parameters', 'dependencies', 'inferences',
                          'standalones']

        for run_id in pbar:

            desc_str = get_run_description(conn, run_id)
            desc_ser = json.loads(desc_str)
            idps_ser = desc_ser['interdependencies']

            if list(idps_ser.keys()) == old_style_keys:
                # already in the old (correct) format; nothing to fix
                pass
            elif list(idps_ser.keys()) == new_style_keys:
                # buggy new-style serialization: convert back and rewrite
                old_desc_ser = \
                    _convert_run_describer_v1_like_dict_to_v0_like_dict(
                        desc_ser)
                json_str = json.dumps(old_desc_ser)
                _update_run_description(conn, run_id, json_str)
                runs_fixed += 1
            else:
                raise RuntimeError(f'Invalid runs_description for run_id: '
                                   f'{run_id}')

            runs_inspected += 1

    return {'runs_inspected': runs_inspected, 'runs_fixed': runs_fixed}
def get_layout_id(conn: ConnectionPlus,
                  parameter: Union[ParamSpec, str],
                  run_id: int) -> int:
    """
    Get the layout id of a parameter in a given run

    Args:
        conn: The database connection
        parameter: A ParamSpec or the name of the parameter
        run_id: The run_id of the run in question
    """
    # get the parameter layout id
    sql = """
    SELECT layout_id FROM layouts
    WHERE parameter = ?
    and run_id = ?
    """

    if isinstance(parameter, str):
        name = parameter
    elif isinstance(parameter, ParamSpec):
        name = parameter.name
    else:
        raise ValueError('Wrong parameter type, must be ParamSpec or str, '
                         f'received {type(parameter)}.')

    cursor = atomic_transaction(conn, sql, name, run_id)
    return one(cursor, 'layout_id')
def perform_db_upgrade_6_to_7(conn: ConnectionPlus) -> None:
    """
    Perform the upgrade from version 6 to version 7

    Add a captured_run_id and captured_counter column to the runs table and
    assign the value from the run_id and result_counter to these columns.
    """
    cur = atomic_transaction(
        conn,
        "SELECT name FROM sqlite_master WHERE type='table' AND name='runs'")
    n_run_tables = len(cur.fetchall())

    # refuse to touch a database whose schema is not as expected
    if n_run_tables != 1:
        raise RuntimeError(f"found {n_run_tables} runs tables expected 1")

    with atomic(conn) as conn:
        transaction(conn, "ALTER TABLE runs ADD COLUMN captured_run_id")
        transaction(conn, "ALTER TABLE runs ADD COLUMN captured_counter")

        transaction(conn, """
                    UPDATE runs
                    SET captured_run_id = run_id,
                        captured_counter = result_counter
                    """)
def perform_db_upgrade_1_to_2(conn: ConnectionPlus) -> None:
    """
    Perform the upgrade from version 1 to version 2

    Add two indeces on the runs table, one for exp_id and one for GUID
    """
    cur = atomic_transaction(
        conn,
        "SELECT name FROM sqlite_master WHERE type='table' AND name='runs'")
    n_run_tables = len(cur.fetchall())

    # refuse to touch a database whose schema is not as expected
    if n_run_tables != 1:
        raise RuntimeError(f"found {n_run_tables} runs tables expected 1")

    index_statements = (
        """
        CREATE INDEX
        IF NOT EXISTS IX_runs_exp_id
        ON runs (exp_id DESC)
        """,
        """
        CREATE INDEX
        IF NOT EXISTS IX_runs_guid
        ON runs (guid DESC)
        """,
    )
    with atomic(conn) as conn:
        for statement in index_statements:
            transaction(conn, statement)
def get_data(
        conn: ConnectionPlus,
        table_name: str,
        columns: List[str],
        start: Optional[int] = None,
        end: Optional[int] = None,
) -> List[List[Any]]:
    """
    Get data from the columns of a table.
    Allows to specify a range of rows (1-based indexing, both ends are
    included).

    Args:
        conn: database connection
        table_name: name of the table
        columns: list of columns
        start: start of range; if None, then starts from the top of the table
        end: end of range; if None, then ends at the bottom of the table

    Returns:
        the data requested in the format of list of rows of values
    """
    if not columns:
        warnings.warn(
            'get_data: requested data without specifying parameters/columns.'
            'Returning empty list.')
        return [[]]

    cursor = atomic_transaction(
        conn, _build_data_query(table_name, columns, start, end))
    return many_many(cursor, *columns)
def perform_db_upgrade_6_to_7(conn: ConnectionPlus) -> None:
    """
    Perform the upgrade from version 6 to version 7

    Add a captured_run_id and captured_counter column to the runs table and
    assign the value from the run_id and result_counter to these columns.
    """

    sql = "SELECT name FROM sqlite_master WHERE type='table' AND name='runs'"
    cur = atomic_transaction(conn, sql)
    n_run_tables = len(cur.fetchall())

    if n_run_tables == 1:
        pbar = tqdm(range(1), file=sys.stdout)
        pbar.set_description("Upgrading database; v6 -> v7")
        # iterate through the pbar for the sake of the side effect; it
        # prints that the database is being upgraded
        for _ in pbar:
            # all three statements commit (or roll back) together
            with atomic(conn) as conn:
                sql = "ALTER TABLE runs ADD COLUMN captured_run_id"
                transaction(conn, sql)
                sql = "ALTER TABLE runs ADD COLUMN captured_counter"
                transaction(conn, sql)

                sql = f"""
                        UPDATE runs
                        SET captured_run_id = run_id,
                            captured_counter = result_counter
                        """
                transaction(conn, sql)
    else:
        # anything other than exactly one runs table means a corrupt schema
        raise RuntimeError(f"found {n_run_tables} runs tables expected 1")
def upgrade_5_to_6(conn: ConnectionPlus) -> None:
    """
    Perform the upgrade from version 5 to version 6.

    The upgrade ensures that the runs_description has a top-level entry
    called 'version'. Note that version changes of the runs_description will
    not be tracked as schema upgrades.
    """
    no_of_runs_query = "SELECT max(run_id) FROM runs"
    no_of_runs = one(atomic_transaction(conn, no_of_runs_query), 'max(run_id)')
    # max(run_id) is NULL (None) for an empty runs table
    no_of_runs = no_of_runs or 0

    # If one run fails, we want the whole upgrade to roll back, hence the
    # entire upgrade is one atomic transaction
    with atomic(conn) as conn:
        pbar = tqdm(range(1, no_of_runs + 1))
        pbar.set_description("Upgrading database, version 5 -> 6")

        empty_idps_ser = InterDependencies()._to_dict()

        for run_id in pbar:
            json_str = get_run_description(conn, run_id)
            if json_str is None:
                # a run without a description gets an empty, versioned one
                new_json = json.dumps({'version': 0,
                                       'interdependencies': empty_idps_ser})
            else:
                ser = json.loads(json_str)
                new_ser = {'version': 0}  # let 'version' be the first entry
                new_ser['interdependencies'] = ser['interdependencies']
                new_json = json.dumps(new_ser)
            update_run_description(conn, run_id, new_json)
def select_one_where(conn: ConnectionPlus, table: str, column: str,
                     where_column: str, where_value: VALUE) -> VALUE:
    """
    Select a value from a given column given a match of a value in a
    different column. If the given matched row/column intersect is empty
    None will be returned.

    Args:
        conn: Connection to the db
        table: Table to look for values in
        column: Column to return value from
        where_column: Column to match on
        where_value: Value to match in where_column

    Returns:
        Value found

    raises:
        RuntimeError if not exactly match is found.
    """
    query = f"""
    SELECT {column}
    FROM
        {table}
    WHERE
        {where_column} = ?
    """
    cursor = atomic_transaction(conn, query, where_value)
    return one(cursor, column)
def perform_db_upgrade_1_to_2(conn: ConnectionPlus) -> None:
    """
    Perform the upgrade from version 1 to version 2

    Add two indeces on the runs table, one for exp_id and one for GUID
    """
    cur = atomic_transaction(
        conn,
        "SELECT name FROM sqlite_master WHERE type='table' AND name='runs'")
    n_run_tables = len(cur.fetchall())

    # the progress bar is created before the schema check, matching the
    # original statement order
    pbar = tqdm(range(1), file=sys.stdout)
    pbar.set_description("Upgrading database; v1 -> v2")

    if n_run_tables != 1:
        raise RuntimeError(f"found {n_run_tables} runs tables expected 1")

    exp_id_index = """
    CREATE INDEX
    IF NOT EXISTS IX_runs_exp_id
    ON runs (exp_id DESC)
    """
    guid_index = """
    CREATE INDEX
    IF NOT EXISTS IX_runs_guid
    ON runs (guid DESC)
    """
    with atomic(conn) as conn:
        # iterate through the pbar for the sake of the side effect; it
        # prints that the database is being upgraded
        for _ in pbar:
            transaction(conn, exp_id_index)
            transaction(conn, guid_index)
def modify_values(
        conn: ConnectionPlus,
        formatted_name: str,
        index: int,
        columns: List[str],
        values: VALUES,
) -> int:
    """
    Modify values for the specified columns.
    If a column is in the table
    but not in the columns list is left untouched.
    If a column is mapped to None, it will be a null value.
    """
    # one "col=?" placeholder per column to modify
    assignments = ",".join(f"{col}=?" for col in columns)
    query = f"""
    UPDATE "{formatted_name}"
    SET
        {assignments}
    WHERE
        rowid = {index+1}
    """
    cursor = atomic_transaction(conn, query, *values)
    return cursor.rowcount
def insert_column(conn: ConnectionPlus, table: str, name: str, paramtype: Optional[str] = None) -> None: """Insert new column to a table Args: conn: database connection table: destination for the insertion name: column name type: sqlite type of the column """ # first check that the column is not already there # and do nothing if it is query = f'PRAGMA TABLE_INFO("{table}");' cur = atomic_transaction(conn, query) columns = many_many(cur, "name") if name in [col[0] for col in columns]: return with atomic(conn) as conn: if paramtype: transaction( conn, f'ALTER TABLE "{table}" ADD COLUMN "{name}" ' f'{paramtype}') else: transaction(conn, f'ALTER TABLE "{table}" ADD COLUMN "{name}"')
def mark_run_complete(conn: ConnectionPlus, run_id: int):
    """ Mark run complete

    Sets is_completed and stamps completed_timestamp with the current time.

    Args:
        conn: database connection
        run_id: id of the run to mark complete
    """
    query = """
    UPDATE
        runs
    SET
        completed_timestamp=?,
        is_completed=?
    WHERE run_id=?;
    """
    atomic_transaction(conn, query, time.time(), True, run_id)
def get_last_experiment(conn: ConnectionPlus) -> Optional[int]:
    """
    Return last started experiment id

    Returns None if there are no experiments in the database
    """
    cursor = atomic_transaction(conn, "SELECT MAX(exp_id) FROM experiments")
    # MAX() over an empty table yields a single row holding NULL (None)
    return cursor.fetchall()[0][0]
def get_dependents(conn: ConnectionPlus, run_id: int) -> List[int]:
    """
    Get dependent layout_ids for a certain run_id, i.e. the layout_ids of all
    the dependent variables
    """
    sql = """
    SELECT layout_id FROM layouts
    WHERE run_id=? and layout_id in (SELECT dependent FROM dependencies)
    """
    cursor = atomic_transaction(conn, sql, run_id)
    return [row[0] for row in many_many(cursor, 'layout_id')]
def select_one_where(conn: ConnectionPlus, table: str, column: str,
                     where_column: str, where_value: Any) -> Any:
    """
    Return the value of ``column`` for the single row of ``table`` where
    ``where_column`` equals ``where_value``.
    """
    query = f"""
    SELECT {column}
    FROM
        {table}
    WHERE
        {where_column} = ?
    """
    cursor = atomic_transaction(conn, query, where_value)
    return one(cursor, column)
def select_many_where(conn: ConnectionPlus, table: str, *columns: str,
                      where_column: str, where_value: Any) -> Any:
    """
    Return the values of ``columns`` for the single row of ``table`` where
    ``where_column`` equals ``where_value``.
    """
    column_list = ",".join(columns)
    query = f"""
    SELECT {column_list}
    FROM
        {table}
    WHERE
        {where_column} = ?
    """
    cursor = atomic_transaction(conn, query, where_value)
    return many(cursor, *columns)
def test_perform_actual_upgrade_8_to_9(db_file):
    """Upgrade a v8 fixture db and check one extra index appears."""
    fixture_dir = os.path.join(fixturepath, 'db_files', 'version8')
    dbname_old = os.path.join(fixture_dir, db_file + '.db')

    if not os.path.exists(dbname_old):
        pytest.skip("No db-file fixtures found. You can generate test db-files"
                    " using the scripts in the "
                    "https://github.com/QCoDeS/qcodes_generate_test_db/ repo")

    with temporarily_copied_DB(dbname_old, debug=False, version=8) as conn:
        index_query = "PRAGMA index_list(runs)"

        assert len(atomic_transaction(conn, index_query).fetchall()) == 2
        perform_db_upgrade_8_to_9(conn)
        assert len(atomic_transaction(conn, index_query).fetchall()) == 3
def unsubscribe_all(self):
    """
    Remove all subscribers

    Drops every trigger registered on the database (so no further inserts
    reach the subscriber queues), then stops and joins every subscriber
    thread and clears the subscriber registry.
    """
    sql = "select * from sqlite_master where type = 'trigger';"
    triggers = atomic_transaction(self.conn, sql).fetchall()
    with atomic(self.conn) as conn:
        # remove the triggers first so no new data is queued while stopping
        for trigger in triggers:
            remove_trigger(conn, trigger['name'])
        for sub in self.subscribers.values():
            sub.schedule_stop()
            sub.join()
        self.subscribers.clear()