def test_create_dataset_in_memory_explicit_db(empty_temp_db):
    """An in-memory dataset can be created against a non-default database file."""
    primary_db = qc.config["core"]["db_location"]
    secondary_db = str(Path(primary_db).parent / "extra.db")
    # The target DB needs an experiment before a run can be created in it.
    load_or_create_experiment(
        conn=connect(secondary_db),
        experiment_name="myexp",
        sample_name="mysample",
    )
    ds = DataSetInMem._create_new_run(name="foo", path_to_db=str(secondary_db))
    # Sanity: the two locations really are distinct files.
    assert primary_db != secondary_db
    assert ds.path_to_db == secondary_db
def test_write_metadata_to_explicit_db(empty_temp_db):
    """Metadata of an in-memory dataset can be written to a non-default DB file."""
    default_db_location = qc.config["core"]["db_location"]
    extra_db_location = str(Path(default_db_location).parent / "extra.db")
    # An experiment must exist in both DBs: the default one (where the run is
    # created) and the extra one (where the metadata will be written).
    load_or_create_experiment(experiment_name="myexp", sample_name="mysample")
    load_or_create_experiment(
        conn=connect(extra_db_location), experiment_name="myexp", sample_name="mysample"
    )
    ds = DataSetInMem._create_new_run(name="foo")
    assert ds._parameters is None
    assert ds.path_to_db == default_db_location
    ds.export("netcdf")
    ds.write_metadata_to_db(path_to_db=extra_db_location)
    loaded_ds = load_by_guid(ds.guid, conn=connect(extra_db_location))
    # BUG FIX: the comparison result was previously discarded, so the test
    # could never fail here; assert it explicitly.
    assert ds.the_same_dataset_as(loaded_ds)
def __set_qcodes(self):
    """Initialise the QCoDeS database, station, dummy instruments and experiment."""
    initialise_or_create_database_at(self.__database)
    self.station = Station()

    # Dummy instruments standing in for the real hardware drivers.
    self.dac = DummyInstrument('dac', gates=['amp'])
    self.dmm = DummyInstrument('dmm', gates=['v1'])

    # Extra manual parameter on the dac (no hardware get/set commands).
    self.dac.add_parameter('freq', label='Frequency', unit="Hz",
                           get_cmd=None, set_cmd=None)

    # Timestamp string used as the sample name so each session is unique.
    now = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
    print(now)

    # The experiment is the unit of data inside the database.
    self.exp = load_or_create_experiment(experiment_name=self.__exp_name,
                                         sample_name=now)
    self.dmm.v1 = dmm_parameter('dmm_v1', self.dac)
def test_load_2dsoftsweep():
    """A 2D soft sweep saved via qcodes can be read back as an equal DataDict."""
    qc.config.core.db_location = DBPATH
    initialise_database()
    exp = load_or_create_experiment('2d_softsweep', sample_name='no sample')

    n_dependents = 5
    meas = qc.Measurement(exp=exp)
    meas.register_custom_parameter('x')
    meas.register_custom_parameter('y')
    # An unused parameter must not leak into the loaded data.
    meas.register_custom_parameter('foo')

    dd_expected = DataDict(x=dict(values=np.array([])), y=dict(values=np.array([])))
    for n in range(n_dependents):
        meas.register_custom_parameter(f'z_{n}', setpoints=['x', 'y'])
        dd_expected[f'z_{n}'] = dict(values=np.array([]), axes=['x', 'y'])
    dd_expected.validate()

    with meas.run() as datasaver:
        for result in testdata.generate_2d_scalar_simple(3, 3, n_dependents):
            datasaver.add_result(*result.items(), ('foo', 1))
            dd_expected.add_data(**result)

    # Retrieve data back as a data dict and compare with what was written.
    run_id = datasaver.dataset.captured_run_id
    ddict = datadict_from_path_and_run_id(DBPATH, run_id)
    assert ddict == dd_expected
def database_with_three_datasets(empty_db_path):
    """Fixture of a database file with 3 DataSets."""
    # First experiment: one empty run and one run with a single result row.
    exp1 = load_or_create_experiment('get_runs_from_db', sample_name='qubit')
    meas1 = qc.Measurement(exp=exp1)
    meas1.register_custom_parameter('x', unit='cm')
    meas1.register_custom_parameter('y')
    meas1.register_custom_parameter('foo')
    for idx in range(2):
        meas1.register_custom_parameter(f'z_{idx}', setpoints=['x', 'y'])

    with meas1.run() as datasaver:
        dataset11 = datasaver.dataset  # intentionally left without results

    with meas1.run() as datasaver:
        datasaver.add_result(('x', 1.), ('y', 2.), ('z_0', 42.), ('z_1', 0.2))
        dataset12 = datasaver.dataset

    # Second experiment: a single run with two result rows.
    exp2 = load_or_create_experiment('give_em', sample_name='now')
    meas2 = qc.Measurement(exp=exp2)
    meas2.register_custom_parameter('a')
    meas2.register_custom_parameter('b', unit='mm')
    meas2.register_custom_parameter('c', setpoints=['a', 'b'])

    with meas2.run() as datasaver:
        datasaver.add_result(('a', 1.), ('b', 2.), ('c', 42.))
        datasaver.add_result(('a', 4.), ('b', 5.), ('c', 77.))
        dataset2 = datasaver.dataset

    datasets = (dataset11, dataset12, dataset2)
    yield empty_db_path, datasets

    # Teardown: release every sqlite connection opened above.
    for ds in datasets:
        ds.conn.close()
    exp1.conn.close()
    exp2.conn.close()
def measure_triggered_softsweep_vs_parameter(controller, sweep_param, sweep_vals,
                                             integration_time, parameter, values,
                                             exp_name=None, channel=0, **kw):
    """
    Set up and measure a series of triggered softsweeps, iterating over an
    additional parameter as an outer loop.

    :param controller: softsweep controller to use
    :param sweep_param: qcodes parameter to sweep over
    :param sweep_vals: values for the sweep
    :param integration_time: total integration time per point [s]
    :param parameter: additional qcodes parameter to iterate over (as an outer loop)
    :param values: values for the additional parameter
    :param exp_name: name of the experiment. if ``None``, determine one
        automatically from ``sweep_param.full_name``
    :param channel: Alazar channel that contains the data
    :param kw: forwarded to ``setup_triggered_softsweep``

    :return: qcodes dataset
    """
    station = qcodes.Station.default
    sample = qcodes.config.user.get('current_sample')
    if exp_name is None:
        exp_name = f'{sweep_param.full_name}_triggered_softsweep'
    exp = qcodes.load_or_create_experiment(exp_name, sample)

    meas = qcodes.Measurement(exp, station)
    # Amplitude and phase are recorded against (parameter, sweep_param).
    independents = [parameter, sweep_param]
    meas.register_parameter(parameter)
    meas.register_parameter(sweep_param, paramtype='array')
    meas.register_custom_parameter('amplitude', unit='V',
                                   setpoints=independents, paramtype='array')
    meas.register_custom_parameter('phase', unit='rad',
                                   setpoints=independents, paramtype='array')

    with meas.run() as datasaver:
        for i, v in enumerate(values):
            # AWG setup and verbose output only on the first iteration.
            setup_triggered_softsweep(controller, sweep_param, sweep_vals,
                                      integration_time,
                                      setup_awg=(i==0), verbose=(i==0), **kw)
            # Select the requested Alazar channel from the acquired block.
            data = np.squeeze(controller.acquisition())[..., channel]
            result = []
            result.append((parameter, v))
            result.append((sweep_param, sweep_vals))
            result.append(('amplitude', np.abs(data)))
            result.append(('phase', np.angle(data)))
            datasaver.add_result(*result)

    return datasaver.dataset
def test_update_qcloader(qtbot, empty_db_path):
    """Incrementally updating the QCodesDSLoader node tracks a live measurement."""
    db_path = empty_db_path
    exp = load_or_create_experiment('2d_softsweep', sample_name='no sample')
    N = 2
    m = qc.Measurement(exp=exp)
    m.register_custom_parameter('x')
    m.register_custom_parameter('y')
    dd_expected = DataDict(x=dict(values=np.array([])), y=dict(values=np.array([])))
    for n in range(N):
        m.register_custom_parameter(f'z_{n}', setpoints=['x', 'y'])
        dd_expected[f'z_{n}'] = dict(values=np.array([]), axes=['x', 'y'])
    dd_expected.validate()

    # setting up the flowchart
    fc = linearFlowchart(('loader', QCodesDSLoader))
    loader = fc.nodes()['loader']

    def check():
        # NOTE: closes over ``ds``, which is only bound once the measurement
        # below has started — do not call before entering the ``with`` block.
        nresults = ds.number_of_results
        loader.update()
        ddict = fc.output()['dataOut']
        if ddict is not None and nresults > 0:
            z_in = dd_expected.data_vals('z_1')
            z_out = ddict.data_vals('z_1')
            if z_out is not None:
                assert z_in.size == z_out.size
                assert np.allclose(z_in, z_out, atol=1e-15)

    with m.run() as datasaver:
        ds = datasaver.dataset
        run_id = datasaver.dataset.captured_run_id
        # Point the loader at the run that is currently being written.
        loader.pathAndId = db_path, run_id
        for result in testdata.generate_2d_scalar_simple(3, 3, N):
            row = [(k, v) for k, v in result.items()]
            datasaver.add_result(*row)
            dd_expected.add_data(**result)
            # Verify after every added result while the run is still open.
            check()
    # Final verification after the run has been completed and flushed.
    check()
def dataset_with_shape(empty_db_path):
    """Fixture of a database file with a shaped ('z_0') and an unshaped ('z_1') parameter."""
    exp = load_or_create_experiment('get_runs_from_db', sample_name='qubit')
    meas = qc.Measurement(exp=exp)
    meas.register_custom_parameter('x', unit='cm')
    meas.register_custom_parameter('y')
    for idx in range(2):
        meas.register_custom_parameter(f'z_{idx}', setpoints=['x', 'y'])

    # Only z_0 gets an explicit shape; z_1 stays unshaped.
    shapes = (10, 5)
    meas.set_shapes({'z_0': shapes})

    with meas.run() as datasaver:
        for x in np.linspace(0, 1, shapes[0]):
            for y in np.linspace(4, 6, shapes[1]):
                datasaver.add_result(('x', x), ('y', y),
                                     ('z_0', x + y), ('z_1', x**2 + y))
    dataset = datasaver.dataset

    yield dataset

    # Teardown: release sqlite connections.
    dataset.conn.close()
    exp.conn.close()
def experiment(empty_db_path):
    """Fixture yielding a '2d_softsweep' experiment; closes its connection on teardown."""
    soft_sweep_exp = load_or_create_experiment('2d_softsweep', sample_name='no sample')
    yield soft_sweep_exp
    soft_sweep_exp.conn.close()