Example 1
def test_perform_actual_upgrade_0_to_1():
    # we cannot use the empty_temp_db, since that has already called connect
    # and is therefore already at the latest version

    v0fixpath = os.path.join(fixturepath, 'db_files', 'version0')

    dbname_old = os.path.join(v0fixpath, 'empty.db')

    if not os.path.exists(dbname_old):
        pytest.skip("No db-file fixtures found. You can generate test db-files"
                    " using the scripts in the "
                    "https://github.com/QCoDeS/qcodes_generate_test_db/ repo")

    with temporarily_copied_DB(dbname_old, debug=False, version=0) as conn:

        assert get_user_version(conn) == 0

        guid_table_query = "SELECT guid FROM runs"

        with pytest.raises(RuntimeError) as excinfo:
            atomic_transaction(conn, guid_table_query)

        assert error_caused_by(excinfo, 'no such column: guid')

        perform_db_upgrade_0_to_1(conn)
        assert get_user_version(conn) == 1

        c = atomic_transaction(conn, guid_table_query)
        assert len(c.fetchall()) == 0
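A note on what this test pins down: between versions 0 and 1, the only observable changes asserted are that a guid column appears on the runs table and that the user version becomes 1. A minimal sketch of such an upgrade step, mirroring the ALTER TABLE pattern of Example 7 below; the real perform_db_upgrade_0_to_1 may well do more:

def sketch_upgrade_0_to_1(conn):
    # Hypothetical: reproduces only what the test above asserts.
    atomic_transaction(conn, 'ALTER TABLE "runs" ADD COLUMN "guid"')
    set_user_version(conn, 1)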
Example 2
def test_perform_actual_upgrade_2_to_3_empty():

    v2fixpath = os.path.join(fixturepath, 'db_files', 'version2')

    dbname_old = os.path.join(v2fixpath, 'empty.db')

    if not os.path.exists(dbname_old):
        pytest.skip("No db-file fixtures found. You can generate test db-files"
                    " using the scripts in the legacy_DB_generation folder")

    with temporarily_copied_DB(dbname_old, debug=False, version=2) as conn:

        assert get_user_version(conn) == 2

        desc_query = 'SELECT run_description FROM runs'

        with pytest.raises(RuntimeError) as excinfo:
            atomic_transaction(conn, desc_query)

        assert error_caused_by(excinfo, 'no such column: run_description')

        perform_db_upgrade_2_to_3(conn)

        assert get_user_version(conn) == 3

        c = atomic_transaction(conn, desc_query)
        assert len(c.fetchall()) == 0
Example 3
def test_perform_actual_upgrade_0_to_1():
    # we cannot use the empty_temp_db, since that has already called connect
    # and is therefore already at the latest version

    v0fixpath = os.path.join(fixturepath, 'db_files', 'version0')

    if not os.path.exists(v0fixpath):
        pytest.skip("No db-file fixtures found. You can generate test db-files"
                    " using the scripts in the legacy_DB_generation folder")

    dbname_old = os.path.join(v0fixpath, 'empty.db')

    with temporarily_copied_DB(dbname_old, debug=False, version=0) as conn:

        assert get_user_version(conn) == 0

        guid_table_query = "SELECT guid FROM runs"

        with pytest.raises(RuntimeError):
            atomic_transaction(conn, guid_table_query)

        perform_db_upgrade_0_to_1(conn)
        assert get_user_version(conn) == 1

        c = atomic_transaction(conn, guid_table_query)
        assert len(c.fetchall()) == 0
Example 4
def test_perform_actual_upgrade_1_to_2():

    v1fixpath = os.path.join(fixturepath, 'db_files', 'version1')

    if not os.path.exists(v1fixpath):
        pytest.skip("No db-file fixtures found. You can generate test db-files"
                    " using the scripts in the legacy_DB_generation folder")

    dbname_old = os.path.join(v1fixpath, 'empty.db')

    with temporarily_copied_DB(dbname_old, debug=False, version=1) as conn:

        assert get_user_version(conn) == 1

        guid_table_query = "SELECT guid FROM runs"

        c = atomic_transaction(conn, guid_table_query)
        assert len(c.fetchall()) == 0

        index_query = "PRAGMA index_list(runs)"

        c = atomic_transaction(conn, index_query)
        assert len(c.fetchall()) == 0

        perform_db_upgrade_1_to_2(conn)

        c = atomic_transaction(conn, index_query)
        assert len(c.fetchall()) == 2
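Example 4 only checks the observable effect of the 1-to-2 upgrade: two indices appear on the runs table. A minimal sketch of such a step, with invented index names and column choices (the real perform_db_upgrade_1_to_2 may differ):

def sketch_upgrade_1_to_2(conn):
    # Hypothetical: index names and columns are invented for illustration.
    atomic_transaction(conn, 'CREATE INDEX guid_index ON runs (guid)')
    atomic_transaction(conn, 'CREATE INDEX exp_id_index ON runs (exp_id)')
    set_user_version(conn, 2)  # assumed; Example 4 does not assert this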
Example 5
def test_atomic_transaction_raises(experiment):
    conn = experiment.conn

    bad_sql = '""'

    with pytest.raises(RuntimeError):
        mut.atomic_transaction(conn, bad_sql)
Example 6
def test_atomic_transaction_on_sqlite3_connection_raises(tmp_path):
    """Test that atomic_transaction does not work for sqlite3.Connection"""
    dbfile = str(tmp_path / 'temp.db')

    conn = sqlite3.connect(dbfile)

    match_str = re.escape('atomic context manager only accepts ConnectionPlus '
                          'database connection objects.')

    with pytest.raises(ValueError, match=match_str):
        atomic_transaction(conn, 'whatever sql query')
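A hedged counterpart to the failing call above: wrapping the raw sqlite3.Connection in a ConnectionPlus, as Example 10 does, makes the same kind of call succeed:

conn = ConnectionPlus(sqlite3.connect(dbfile))
atomic_transaction(conn, 'CREATE TABLE smth (name TEXT)')  # no ValueError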
Example 7
def test_database_upgrade(empty_temp_db):
    connection = connect(qc.config["core"]["db_location"],
                         qc.config["core"]["db_debug"])
    userversion = get_user_version(connection)
    if userversion != 0:
        raise RuntimeError("trying to upgrade from version 0"
                           " but your database is version"
                           " {}".format(userversion))
    sql = 'ALTER TABLE "runs" ADD COLUMN "quality"'

    atomic_transaction(connection, sql)
    set_user_version(connection, 1)
Example 8
def test_perform_actual_upgrade_2_to_3_some_runs():

    v2fixpath = os.path.join(fixturepath, 'db_files', 'version2')

    dbname_old = os.path.join(v2fixpath, 'some_runs.db')

    if not os.path.exists(dbname_old):
        pytest.skip("No db-file fixtures found. You can generate test db-files"
                    " using the scripts in the legacy_DB_generation folder")

    with temporarily_copied_DB(dbname_old, debug=False, version=2) as conn:

        assert get_user_version(conn) == 2

        perform_db_upgrade_2_to_3(conn)

        desc_query = 'SELECT run_description FROM runs'

        c = atomic_transaction(conn, desc_query)
        assert len(c.fetchall()) == 10

        # retrieve the json string and recreate the object

        sql = f"""
              SELECT run_description
              FROM runs
              WHERE run_id == 1
              """
        c = atomic_transaction(conn, sql)
        json_str = one(c, 'run_description')

        desc = RunDescriber.from_json(json_str)
        idp = desc.interdeps
        assert isinstance(idp, InterDependencies)

        # here we verify that the dependencies encoded in
        # tests/dataset/legacy_DB_generation/generate_version_2.py
        # are recovered

        p0 = [p for p in idp.paramspecs if p.name == 'p0'][0]
        assert p0.depends_on == ''
        assert p0.inferred_from == ''
        assert p0.label == "Parameter 0"
        assert p0.unit == "unit 0"

        p4 = [p for p in idp.paramspecs if p.name == 'p4'][0]
        assert p4.depends_on == 'p2, p3'
        assert p4.inferred_from == ''
        assert p4.label == "Parameter 4"
        assert p4.unit == "unit 4"
Example 9
    def __init__(self,
                 dataSet: 'DataSet',
                 id_: str,
                 callback: Callable[..., None],
                 state: Optional[Any] = None,
                 loop_sleep_time: int = 0,  # in milliseconds
                 min_queue_length: int = 1,
                 callback_kwargs: Optional[Dict[str, Any]] = None
                 ) -> None:
        super().__init__()

        self._id = id_

        self.dataSet = dataSet
        self.table_name = dataSet.table_name
        self._data_set_len = len(dataSet)

        self.state = state

        self.data_queue: Queue = Queue()
        self._queue_length: int = 0
        self._stop_signal: bool = False
        # convert milliseconds to seconds
        self._loop_sleep_time = loop_sleep_time / 1000
        self.min_queue_length = min_queue_length

        if callback_kwargs is None or len(callback_kwargs) == 0:
            self.callback = callback
        else:
            self.callback = functools.partial(callback, **callback_kwargs)

        self.callback_id = f"callback{self._id}"
        self.trigger_id = f"sub{self._id}"

        conn = dataSet.conn

        conn.create_function(self.callback_id, -1, self._cache_data_to_queue)

        parameters = dataSet.get_parameters()
        sql_param_list = ",".join([f"NEW.{p.name}" for p in parameters])
        sql_create_trigger_for_callback = f"""
        CREATE TRIGGER {self.trigger_id}
            AFTER INSERT ON '{self.table_name}'
        BEGIN
            SELECT {self.callback_id}({sql_param_list});
        END;"""
        atomic_transaction(conn, sql_create_trigger_for_callback)

        self.log = logging.getLogger(f"_Subscriber {self._id}")
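To make the trigger machinery concrete: with hypothetical values _id='A', table_name='results-1-1' and two parameters named x and y, the f-string above renders to the SQL below, so every INSERT into the results table invokes the Python callback (registered via create_function) with the new row's values:

CREATE TRIGGER subA
    AFTER INSERT ON 'results-1-1'
BEGIN
    SELECT callbackA(NEW.x,NEW.y);
END;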
Example 10
def test_atomic_transaction(tmp_path):
    """Test that atomic_transaction works for ConnectionPlus"""
    dbfile = str(tmp_path / 'temp.db')

    conn = ConnectionPlus(sqlite3.connect(dbfile))

    ctrl_conn = sqlite3.connect(dbfile)

    sql_create_table = 'CREATE TABLE smth (name TEXT)'
    sql_table_exists = 'SELECT sql FROM sqlite_master WHERE TYPE = "table"'

    atomic_transaction(conn, sql_create_table)

    assert sql_create_table in ctrl_conn.execute(
        sql_table_exists).fetchall()[0]
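Taken together, Examples 6 and 10 pin down the contract of atomic_transaction: it rejects plain sqlite3.Connection objects and executes the statement atomically on a ConnectionPlus, returning a cursor. A minimal sketch of that contract, assuming it simply delegates to the atomic context manager (the real implementation may differ):

def atomic_transaction_sketch(conn, sql, *args):
    # Hypothetical re-implementation, for illustration only.
    if not isinstance(conn, ConnectionPlus):
        raise ValueError('atomic context manager only accepts ConnectionPlus '
                         'database connection objects.')
    with atomic(conn) as atomic_conn:
        return atomic_conn.execute(sql, args)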
Example 11
def test_perform_actual_upgrade_0_to_1():
    # we cannot use the empty_temp_db, since that has already called connect
    # and is therefore already at the latest version
    connection = connect(':memory:', debug=False, version=0)

    assert get_user_version(connection) == 0

    guid_table_query = "SELECT guid FROM runs"

    with pytest.raises(RuntimeError):
        atomic_transaction(connection, guid_table_query)

    perform_db_upgrade_0_to_1(connection)
    assert get_user_version(connection) == 1

    c = atomic_transaction(connection, guid_table_query)
    assert len(c.fetchall()) == 0
Example 12
    def __init__(self,
                 dataSet,
                 sub_id: str,
                 callback: Callable[..., None],
                 state: Optional[Any] = None,
                 min_wait: int = 100,
                 min_count: int = 1,
                 callback_kwargs: Optional[Dict[str, Any]] = None) -> None:
        self.sub_id = sub_id
        # whether or not this is actually thread safe I am not sure :P
        self.dataSet = dataSet
        self.table_name = dataSet.table_name
        self.conn = dataSet.conn
        self.log = logging.getLogger(f"Subscriber {self.sub_id}")

        self.state = state
        self.min_wait = min_wait
        self.min_count = min_count
        self._send_queue: int = 0
        if callback_kwargs is None:
            self.callback = callback
        else:
            self.callback = functools.partial(callback, **callback_kwargs)
        self._stop_signal: bool = False

        parameters = dataSet.get_parameters()
        param_sql = ",".join([f"NEW.{p.name}" for p in parameters])
        self.callbackid = f"callback{self.sub_id}"

        self.conn.create_function(self.callbackid, -1, self.cache)
        sql = f"""
        CREATE TRIGGER sub{self.sub_id}
            AFTER INSERT ON '{self.table_name}'
        BEGIN
            SELECT {self.callbackid}({param_sql});
        END;"""
        atomic_transaction(self.conn, sql)
        self.data: Queue = Queue()
        self._data_set_len = len(dataSet)
        super().__init__()
Example 13
    def unsubscribe_all(self):
        """
        Remove all subscribers
        """
        sql = "select * from sqlite_master where type = 'trigger';"
        triggers = atomic_transaction(self.conn, sql).fetchall()
        with atomic(self.conn) as self.conn:
            for trigger in triggers:
                self._remove_trigger(trigger['name'])
            for sub in self.subscribers.values():
                sub.schedule_stop()
                sub.join()
            self.subscribers.clear()
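Two assumptions are baked into this loop: trigger['name'] requires rows that support string indexing (e.g. a sqlite3.Row row_factory on the connection), and _remove_trigger must drop a trigger by name. A hypothetical sketch of that helper:

    def _remove_trigger(self, trigger_name):
        # Hypothetical helper: IF EXISTS makes repeated calls harmless.
        self.conn.execute(f'DROP TRIGGER IF EXISTS "{trigger_name}"')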
Example 14
def fix_version_4a_run_description_bug(conn: ConnectionPlus) -> Dict[str, int]:
    """
    Fix function to fix a bug where the RunDescriber accidentally wrote itself
    to string using the (new) InterDependencies_ object instead of the (old)
    InterDependencies object. After the first run, this function should be
    idempotent.

    Args:
        conn: the connection to the database

    Returns:
        A dict with the fix results ('runs_inspected', 'runs_fixed')
    """

    user_version = get_user_version(conn)

    if user_version != 4:
        raise RuntimeError('Database of wrong version. Will not apply fix. '
                           f'Expected version 4, found version {user_version}')

    no_of_runs_query = "SELECT max(run_id) FROM runs"
    no_of_runs = one(atomic_transaction(conn, no_of_runs_query), 'max(run_id)')
    no_of_runs = no_of_runs or 0

    with atomic(conn) as conn:

        pbar = tqdm(range(1, no_of_runs + 1))
        pbar.set_description("Fixing database")

        # collect some metrics
        runs_inspected = 0
        runs_fixed = 0

        for run_id in pbar:

            desc_str = get_run_description(conn, run_id)
            desc_ser = json.loads(desc_str)
            idps_ser = desc_ser['interdependencies']

            # Only runs whose description is already new-style were hit by
            # the bug and need rewriting.
            if not RunDescriber._is_description_old_style(idps_ser):
                new_desc = RunDescriber.from_json(desc_str)
                update_run_description(conn, run_id, new_desc.to_json())
                runs_fixed += 1

            runs_inspected += 1

    return {'runs_inspected': runs_inspected, 'runs_fixed': runs_fixed}
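A usage sketch, assuming a connection to a version-4 database opened as in the other examples (the path is hypothetical):

conn = connect('experiments.db', version=4)
stats = fix_version_4a_run_description_bug(conn)
print(stats['runs_inspected'], stats['runs_fixed'])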
Example 15
def test_basic_subscription(dataset, basic_subscriber):
    xparam = ParamSpec(name='x',
                       paramtype='numeric',
                       label='x parameter',
                       unit='V')
    yparam = ParamSpec(name='y',
                       paramtype='numeric',
                       label='y parameter',
                       unit='Hz',
                       depends_on=[xparam])
    dataset.add_parameter(xparam)
    dataset.add_parameter(yparam)

    sub_id = dataset.subscribe(basic_subscriber,
                               min_wait=0,
                               min_count=1,
                               state={})

    assert len(dataset.subscribers) == 1
    assert list(dataset.subscribers.keys()) == [sub_id]

    expected_state = {}

    for x in range(10):
        y = -x**2
        dataset.add_result({'x': x, 'y': y})
        expected_state[x + 1] = [(x, y)]

        @retry_until_does_not_throw(exception_class_to_expect=AssertionError,
                                    delay=0,
                                    tries=10)
        def assert_expected_state():
            assert dataset.subscribers[sub_id].state == expected_state

        assert_expected_state()

    dataset.unsubscribe(sub_id)

    assert len(dataset.subscribers) == 0
    assert list(dataset.subscribers.keys()) == []

    # Ensure the trigger for the subscriber has been removed from the database
    get_triggers_sql = "SELECT * FROM sqlite_master WHERE TYPE = 'trigger';"
    triggers = atomic_transaction(dataset.conn, get_triggers_sql).fetchall()
    assert len(triggers) == 0
Example 16
def test_subscriptions(experiment, DAC, DMM):
    """
    Test that subscribers are called at the moment the data is flushed to
    the database.

    Note that for the purpose of this test, the flush_data_to_database method
    is called explicitly instead of waiting for the data to be flushed
    automatically after the write_period passes following an add_result call.
    """
    def collect_all_results(results, length, state):
        """
        Updates the *state* to contain all the *results* acquired
        during the experiment run
        """
        # Because subscribers by default hold only one data value in their
        # internal queue, this assignment should work (i.e. not overwrite
        # values in the "state" object), assuming that at the start of the
        # experiment the dataset and the *state* objects have the same length.
        state[length] = results

    def collect_values_larger_than_7(results, length, state):
        """
        Appends to the *state* only the values from *results*
        that are larger than 7
        """
        for result_tuple in results:
            state += [value for value in result_tuple if value > 7]

    meas = Measurement(exp=experiment)
    meas.register_parameter(DAC.ch1)
    meas.register_parameter(DMM.v1, setpoints=(DAC.ch1, ))

    # key is the number of the result tuple,
    # value is the result tuple itself
    all_results_dict = {}
    values_larger_than_7 = []

    meas.add_subscriber(collect_all_results, state=all_results_dict)
    assert len(meas.subscribers) == 1
    meas.add_subscriber(collect_values_larger_than_7,
                        state=values_larger_than_7)
    assert len(meas.subscribers) == 2

    meas.write_period = 0.2

    with meas.run() as datasaver:

        # Assert that the measurement, runner, and datasaver
        # have added subscribers to the dataset
        assert len(datasaver._dataset.subscribers) == 2

        assert all_results_dict == {}
        assert values_larger_than_7 == []

        dac_vals_and_dmm_vals = list(zip(range(5), range(3, 8)))
        values_larger_than_7__expected = []

        for num in range(5):
            (dac_val, dmm_val) = dac_vals_and_dmm_vals[num]
            values_larger_than_7__expected += \
                [val for val in (dac_val, dmm_val) if val > 7]

            datasaver.add_result((DAC.ch1, dac_val), (DMM.v1, dmm_val))

            # Ensure that data is flushed to the database despite the write
            # period, so that the database triggers are executed, which in turn
            # add data to the queues within the subscribers
            datasaver.flush_data_to_database()

            # In order to make this test deterministic, we need to ensure that
            # just enough time has passed between the moment the data is
            # flushed to the database and the moment the "state" object (that
            # is passed to the subscriber constructor) is updated by the
            # corresponding subscriber's callback function. At the moment,
            # there is no robust way to ensure this. The reason is that each
            # subscriber has an internal queue which is populated via a
            # trigger call from the SQL database, hence from this "main"
            # thread it is difficult to say whether the queue is empty because
            # the subscriber callbacks have already been executed or because
            # the triggers of the SQL database have not been executed yet.
            #
            # In order to overcome this problem, a special decorator is used
            # to wrap the assertions. This is going to ensure that some time
            # is given to the Subscriber threads to finish exhausting the queue.
            @retry_until_does_not_throw(
                exception_class_to_expect=AssertionError, delay=0.5, tries=10)
            def assert_states_updated_from_callbacks():
                assert values_larger_than_7 == values_larger_than_7__expected
                assert list(all_results_dict.keys()) == \
                    [result_index for result_index in range(1, num + 1 + 1)]

            assert_states_updated_from_callbacks()

    # Ensure that after exiting the "run()" context,
    # all subscribers get unsubscribed from the dataset
    assert len(datasaver._dataset.subscribers) == 0

    # Ensure that the triggers for each subscriber
    # have been removed from the database
    get_triggers_sql = "SELECT * FROM sqlite_master WHERE TYPE = 'trigger';"
    triggers = atomic_transaction(datasaver._dataset.conn,
                                  get_triggers_sql).fetchall()
    assert len(triggers) == 0
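The comments above lean on retry_until_does_not_throw to absorb the cross-thread timing uncertainty. A minimal sketch of such a decorator, assuming the (exception_class_to_expect, delay, tries) signature used in these tests; the real helper may differ:

import time
from functools import wraps

def retry_until_does_not_throw(exception_class_to_expect=AssertionError,
                               delay=0.5, tries=10):
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            for attempt in range(tries):
                try:
                    return func(*args, **kwargs)
                except exception_class_to_expect:
                    if attempt == tries - 1:
                        raise  # out of retries: let the failure surface
                    time.sleep(delay)
        return wrapper
    return decorator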
Example 17
    def number_of_results(self):
        tabnam = self.table_name
        # TODO: is it better/faster to use the max index?
        sql = f'SELECT COUNT(*) FROM "{tabnam}"'
        cursor = atomic_transaction(self.conn, sql)
        return one(cursor, 'COUNT(*)')
Example 18
    def number_of_results(self):
        sql = f'SELECT COUNT(*) FROM "{self.table_name}"'
        cursor = atomic_transaction(self.conn, sql)
        return one(cursor, 'COUNT(*)')
Example 19
def test_perform_upgrade_v3_to_v4():
    """
    Test that a db upgrade from v3 to v4 works correctly.
    """

    v3fixpath = os.path.join(fixturepath, 'db_files', 'version3')

    dbname_old = os.path.join(v3fixpath, 'some_runs_upgraded_2.db')

    if not os.path.exists(dbname_old):
        pytest.skip("No db-file fixtures found. You can generate test db-files"
                    " using the scripts in the "
                    "https://github.com/QCoDeS/qcodes_generate_test_db/ repo")

    with temporarily_copied_DB(dbname_old, debug=False, version=3) as conn:

        assert get_user_version(conn) == 3

        sql = f"""
              SELECT run_description
              FROM runs
              WHERE run_id == 1
              """

        perform_db_upgrade_3_to_4(conn)

        c = atomic_transaction(conn, sql)
        json_str = one(c, 'run_description')

        desc = RunDescriber.from_json(json_str)
        idp = desc.interdeps
        assert isinstance(idp, InterDependencies)

        p0 = [p for p in idp.paramspecs if p.name == 'p0'][0]
        assert p0.depends_on == ''
        assert p0.depends_on_ == []
        assert p0.inferred_from == ''
        assert p0.inferred_from_ == []
        assert p0.label == "Parameter 0"
        assert p0.unit == "unit 0"

        p1 = [p for p in idp.paramspecs if p.name == 'p1'][0]
        assert p1.depends_on == ''
        assert p1.depends_on_ == []
        assert p1.inferred_from == ''
        assert p1.inferred_from_ == []
        assert p1.label == "Parameter 1"
        assert p1.unit == "unit 1"

        p2 = [p for p in idp.paramspecs if p.name == 'p2'][0]
        assert p2.depends_on == ''
        assert p2.depends_on_ == []
        assert p2.inferred_from == 'p0'
        assert p2.inferred_from_ == ['p0']
        assert p2.label == "Parameter 2"
        assert p2.unit == "unit 2"

        p3 = [p for p in idp.paramspecs if p.name == 'p3'][0]
        assert p3.depends_on == ''
        assert p3.depends_on_ == []
        assert p3.inferred_from == 'p1, p0'
        assert p3.inferred_from_ == ['p1', 'p0']
        assert p3.label == "Parameter 3"
        assert p3.unit == "unit 3"

        p4 = [p for p in idp.paramspecs if p.name == 'p4'][0]
        assert p4.depends_on == 'p2, p3'
        assert p4.depends_on_ == ['p2', 'p3']
        assert p4.inferred_from == ''
        assert p4.inferred_from_ == []
        assert p4.label == "Parameter 4"
        assert p4.unit == "unit 4"

        p5 = [p for p in idp.paramspecs if p.name == 'p5'][0]
        assert p5.depends_on == ''
        assert p5.depends_on_ == []
        assert p5.inferred_from == 'p0'
        assert p5.inferred_from_ == ['p0']
        assert p5.label == "Parameter 5"
        assert p5.unit == "unit 5"
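The paired assertions above also pin down the relation between the two attribute forms the upgrade produces: depends_on / inferred_from are comma-plus-space joined strings, and depends_on_ / inferred_from_ are the corresponding lists. In other words, for every paramspec p checked here:

assert p.depends_on == ', '.join(p.depends_on_)
assert p.inferred_from == ', '.join(p.inferred_from_)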