def _execute_sample_2q_xeb_tasks_in_batches(
    tasks: List[_Sample2qXEBTask],
    sampler: 'cirq.Sampler',
    combinations_by_layer: List[CircuitLibraryCombination],
    repetitions: int,
    batch_size: int,
    progress_bar: Callable[..., ContextManager],
    dataset_directory: Optional[str] = None,
) -> List[Dict[str, Any]]:
    """Helper function used in `sample_2q_xeb_circuits` to batch and execute sampling tasks."""
    n_tasks = len(tasks)
    batched_tasks = [tasks[i : i + batch_size] for i in range(0, n_tasks, batch_size)]

    run_batch = _SampleInBatches(
        sampler=sampler, repetitions=repetitions, combinations_by_layer=combinations_by_layer
    )
    with ThreadPoolExecutor(max_workers=2) as pool:
        futures = [pool.submit(run_batch, task_batch) for task_batch in batched_tasks]

        records = []
        with progress_bar(total=len(batched_tasks) * batch_size) as progress:
            for future in concurrent.futures.as_completed(futures):
                new_records = future.result()
                if dataset_directory is not None:
                    os.makedirs(f'{dataset_directory}', exist_ok=True)
                    protocols.to_json(new_records, f'{dataset_directory}/xeb.{uuid.uuid4()}.json')
                records.extend(new_records)
                progress.update(batch_size)

    return records
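# The control flow above is easier to see in isolation. Below is a minimal,
# self-contained sketch of the same batch/submit/collect pattern; `toy_tasks`
# and `toy_run_batch` are hypothetical stand-ins for the real
# `_Sample2qXEBTask` list and `_SampleInBatches` callable, and `tqdm` is used
# as one possible `Callable[..., ContextManager]` progress-bar factory.
import concurrent.futures
from concurrent.futures import ThreadPoolExecutor

from tqdm import tqdm

toy_tasks = list(range(10))
toy_batch_size = 4
toy_batches = [toy_tasks[i : i + toy_batch_size] for i in range(0, len(toy_tasks), toy_batch_size)]


def toy_run_batch(batch):
    # Stand-in for `_SampleInBatches.__call__`: return one record dict per task.
    return [{'task': t} for t in batch]


toy_records = []
with ThreadPoolExecutor(max_workers=2) as pool:
    toy_futures = [pool.submit(toy_run_batch, b) for b in toy_batches]
    with tqdm(total=len(toy_batches) * toy_batch_size) as progress:
        for future in concurrent.futures.as_completed(toy_futures):
            toy_records.extend(future.result())
            progress.update(toy_batch_size)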
def save(task: Task, data: Dict[str, Any], base_dir: str, mode='x'):
    """Save the results of a task as a JSON file under `base_dir`.

    The saved record is `data` augmented with the task itself and an ISO-format
    timestamp. The destination is `{base_dir}/{task.fn}.json`; parent directories
    are created as needed. The default mode 'x' refuses to overwrite an existing file.
    """
    with_meta = {
        'timestamp': datetime.datetime.now().isoformat(),
        'task': task,
    }
    with_meta.update(data)

    fn = f'{base_dir}/{task.fn}.json'
    os.makedirs(os.path.dirname(fn), exist_ok=True)
    with open(fn, mode) as f:
        protocols.to_json(with_meta, f)
    return fn
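# A usage sketch for `save`. `ExampleTask` below is hypothetical: the real task
# type must expose an `fn` attribute (relative path without extension) and be
# serializable by cirq's JSON protocol (here via a `_json_dict_` method, which
# is assumed to be sufficient for writing in the cirq version in use).
import dataclasses


@dataclasses.dataclass
class ExampleTask:
    dataset_id: str

    @property
    def fn(self) -> str:
        return f'example-experiment/{self.dataset_id}'

    def _json_dict_(self):
        return {'dataset_id': self.dataset_id}


example_task = ExampleTask(dataset_id='2024-01-01-run1')
# Writes './data/example-experiment/2024-01-01-run1.json'. A second call with
# the same task raises FileExistsError because of the default mode 'x'.
path = save(example_task, {'energy': -1.23}, base_dir='./data')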
def maybe_to_json(self, obj: Any):
    """Call `cirq.to_json` with `obj` according to the configuration options in this class.

    If `checkpoint=False`, nothing will happen. Otherwise, we will use `checkpoint_fn` and
    `checkpoint_other_fn` as the destination JSON file as described in the class docstring.
    """
    if not self.checkpoint:
        return
    assert self.checkpoint_fn is not None, 'mypy'
    assert self.checkpoint_other_fn is not None, 'mypy'
    if os.path.exists(self.checkpoint_fn):
        os.replace(self.checkpoint_fn, self.checkpoint_other_fn)
    protocols.to_json(obj, self.checkpoint_fn)
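# The rotation above means an interrupted write can only corrupt `checkpoint_fn`;
# the previous state survives in `checkpoint_other_fn`. A hypothetical recovery
# helper (the function name and error handling are assumptions, not library API):
import os

import cirq


def load_latest_checkpoint(checkpoint_fn: str, checkpoint_other_fn: str):
    """Return the newest readable checkpoint, falling back to the previous copy."""
    for fn in (checkpoint_fn, checkpoint_other_fn):
        if os.path.exists(fn):
            try:
                return cirq.read_json(fn)
            except ValueError:
                # A partially written file fails JSON parsing; try the older copy.
                continue
    raise FileNotFoundError('No readable checkpoint found.')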
def save(params: Any, obj: Any, base_dir: str, mode: str = 'x') -> str:
    """Save an object to filesystem as a JSON file.

    Arguments:
        params: Parameters describing the object. This should have a `filename`
            attribute containing the filename with which to save the object.
        obj: The object to save.
        base_dir: The directory in which to save the object.
        mode: The mode with which to open the file to write. Defaults to 'x',
            which means that the save will fail if the file already exists.

    Returns:
        The full path to the saved JSON file.
    """
    filename = os.path.join(base_dir, params.filename)
    os.makedirs(os.path.dirname(filename), exist_ok=True)
    with open(filename, mode) as f:
        protocols.to_json(obj, f)
    return filename
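# A usage sketch for this variant of `save`. `SweepParams` is hypothetical; only
# its `filename` attribute matters here, and `obj` can be anything cirq's JSON
# protocol already knows how to serialize (a `cirq.Circuit` in this sketch).
import dataclasses

import cirq


@dataclasses.dataclass
class SweepParams:
    n_qubits: int

    @property
    def filename(self) -> str:
        return f'sweeps/n{self.n_qubits}/circuit.json'


sweep_params = SweepParams(n_qubits=2)
circuit_to_save = cirq.Circuit(cirq.H.on_each(cirq.LineQubit.range(2)))
path = save(sweep_params, circuit_to_save, base_dir='./data')
# -> './data/sweeps/n2/circuit.json'; pass mode='w' to allow overwriting.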
def measure_grouped_settings(
    circuit: 'cirq.Circuit',
    grouped_settings: Dict[InitObsSetting, List[InitObsSetting]],
    sampler: 'cirq.Sampler',
    stopping_criteria: StoppingCriteria,
    *,
    readout_symmetrization: bool = False,
    circuit_sweep: 'cirq.study.sweepable.SweepLike' = None,
    readout_calibrations: Optional[BitstringAccumulator] = None,
    checkpoint: bool = False,
    checkpoint_fn: Optional[str] = None,
    checkpoint_other_fn: Optional[str] = None,
) -> List[BitstringAccumulator]:
    """Measure a suite of grouped InitObsSetting settings.

    This is a low-level API for accessing the observable measurement framework.
    See also `measure_observables` and `measure_observables_df`.

    Args:
        circuit: The circuit. This can contain parameters, in which case
            you should also specify `circuit_sweep`.
        grouped_settings: A series of setting groups expressed as a dictionary.
            The key is the max-weight setting used for preparing single-qubit
            basis-change rotations. The value is a list of settings
            compatible with the maximal setting you desire to measure.
            Automated routing algorithms like `group_settings_greedy` can
            be used to construct this input.
        sampler: A sampler.
        stopping_criteria: A StoppingCriteria object that can report
            whether enough samples have been sampled.
        readout_symmetrization: If set to True, each `meas_spec` will be
            split into two runs: one normal and one where a bit flip is
            incorporated prior to measurement. In the latter case, the
            measured bit will be flipped back classically and accumulated
            together. This causes readout error to appear symmetric,
            p(0|0) = p(1|1).
        circuit_sweep: Additional parameter sweeps for parameters contained
            in `circuit`. The total sweep is the product of the circuit sweep
            with parameter settings for the single-qubit basis-change rotations.
        readout_calibrations: The result of `calibrate_readout_error`.
        checkpoint: If set to True, save cumulative raw results at the end
            of each iteration of the sampling loop. Load in these results
            with `cirq.read_json`.
        checkpoint_fn: The filename for the checkpoint file. If `checkpoint`
            is set to True and this is not specified, a file in a temporary
            directory will be used.
        checkpoint_other_fn: The filename for another checkpoint file, which
            contains the previous checkpoint. This lets us avoid losing data if
            a failure occurs during checkpoint writing. If `checkpoint`
            is set to True and this is not specified, a file in a temporary
            directory will be used. If `checkpoint` is set to True and
            `checkpoint_fn` is specified but this argument is *not* specified,
            "{checkpoint_fn}.prev.json" will be used.
    """
    if readout_calibrations is not None and not readout_symmetrization:
        raise ValueError("Readout calibration only works if `readout_symmetrization` is enabled.")

    checkpoint_fn, checkpoint_other_fn = _parse_checkpoint_options(
        checkpoint=checkpoint, checkpoint_fn=checkpoint_fn, checkpoint_other_fn=checkpoint_other_fn
    )
    qubits = sorted({q for ms in grouped_settings.keys() for q in ms.init_state.qubits})
    qubit_to_index = {q: i for i, q in enumerate(qubits)}

    needs_init_layer = _needs_init_layer(grouped_settings)
    measurement_param_circuit = _with_parameterized_layers(circuit, qubits, needs_init_layer)
    grouped_settings = {
        _pad_setting(max_setting, qubits): settings
        for max_setting, settings in grouped_settings.items()
    }
    circuit_sweep = study.UnitSweep if circuit_sweep is None else study.to_sweep(circuit_sweep)

    # meas_spec provides a key for accumulators.
    # meas_specs_todo is a mutable list. We will pop things from it as various
    # specs are measured to the satisfaction of the stopping criteria.
    accumulators = {}
    meas_specs_todo = []
    for max_setting, circuit_params in itertools.product(
        grouped_settings.keys(), circuit_sweep.param_tuples()
    ):
        # The type annotation for Param is just `Iterable`.
        # We make sure that it's a concrete dict.
        circuit_params = dict(circuit_params)
        meas_spec = _MeasurementSpec(max_setting=max_setting, circuit_params=circuit_params)
        accumulator = BitstringAccumulator(
            meas_spec=meas_spec,
            simul_settings=grouped_settings[max_setting],
            qubit_to_index=qubit_to_index,
            readout_calibration=readout_calibrations,
        )
        accumulators[meas_spec] = accumulator
        meas_specs_todo += [meas_spec]

    while True:
        meas_specs_todo, repetitions = _check_meas_specs_still_todo(
            meas_specs=meas_specs_todo,
            accumulators=accumulators,
            stopping_criteria=stopping_criteria,
        )
        if len(meas_specs_todo) == 0:
            break

        flippy_meas_specs, repetitions = _subdivide_meas_specs(
            meas_specs=meas_specs_todo,
            repetitions=repetitions,
            qubits=qubits,
            readout_symmetrization=readout_symmetrization,
        )

        resolved_params = [
            flippy_ms.param_tuples(needs_init_layer=needs_init_layer)
            for flippy_ms in flippy_meas_specs
        ]
        resolved_params = _to_sweep(resolved_params)

        results = sampler.run_sweep(
            program=measurement_param_circuit, params=resolved_params, repetitions=repetitions
        )
        assert len(results) == len(
            flippy_meas_specs
        ), 'Not as many results received as sweeps requested!'

        for flippy_ms, result in zip(flippy_meas_specs, results):
            accumulator = accumulators[flippy_ms.meas_spec]
            bitstrings = np.logical_xor(flippy_ms.flips, result.measurements['z'])
            accumulator.consume_results(bitstrings.astype(np.uint8, casting='safe'))

        if checkpoint:
            assert checkpoint_fn is not None, 'mypy'
            assert checkpoint_other_fn is not None, 'mypy'
            if os.path.exists(checkpoint_fn):
                os.replace(checkpoint_fn, checkpoint_other_fn)
            to_json(list(accumulators.values()), checkpoint_fn)

    return list(accumulators.values())
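# A minimal end-to-end usage sketch for `measure_grouped_settings`, assuming the
# public cirq names `cirq.InitObsSetting`, `cirq.group_settings_greedy`, and
# `cirq.RepetitionsStoppingCriteria`, as well as the `BitstringAccumulator`
# accessors `.simul_settings` and `.mean(setting)`; verify these names against
# the cirq version in use.
import cirq

q0, q1 = cirq.LineQubit.range(2)
bell_circuit = cirq.Circuit([cirq.H(q0), cirq.CNOT(q0, q1)])  # no measurement gates

# Estimate <ZZ> and <XX> of the Bell state, both prepared from |00>.
init_state = cirq.KET_ZERO(q0) * cirq.KET_ZERO(q1)
settings = [
    cirq.InitObsSetting(init_state=init_state, observable=cirq.Z(q0) * cirq.Z(q1)),
    cirq.InitObsSetting(init_state=init_state, observable=cirq.X(q0) * cirq.X(q1)),
]

accumulators = measure_grouped_settings(
    circuit=bell_circuit,
    grouped_settings=cirq.group_settings_greedy(settings),
    sampler=cirq.Simulator(),
    stopping_criteria=cirq.RepetitionsStoppingCriteria(10_000),
)
for acc in accumulators:
    for setting in acc.simul_settings:
        print(setting, acc.mean(setting))  # both means should be near +1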