def test_state(self):
    """Test that the state can be correctly set and returned, and that the
    correct error is raised when attempting to set it again."""
    result = Result(raw_results, samples_dict={0: [0, 1, 0], 2: [0]})
    result.state = base_gaussian_state
    assert result.state == base_gaussian_state

    with pytest.raises(TypeError, match="State already set and cannot be changed."):
        result.state = base_gaussian_state
def test_state_print(self, capfd):
    """Test that printing a result object with a state provides the correct output."""
    samples = {"output": [np.array([[1, 2], [3, 4], [5, 6]])]}
    samples_dict = {0: [1, 2, 3, 4, 5, 6]}
    result = Result(samples, samples_dict=samples_dict)
    result.state = base_gaussian_state
    print(result)

    captured = capfd.readouterr()
    assert "modes=2" in captured.out
    assert "shots=3" in captured.out
    assert "contains state=True" in captured.out
def test_stateless_print(self, capfd):
    """Test that printing a result object with no state provides the correct output."""
    result = Result({"output": [np.array([[1, 2], [3, 4], [5, 6]])]})
    print(result)

    captured = capfd.readouterr()
    assert "modes=2" in captured.out
    assert "shots=3" in captured.out
    assert "contains state=False" in captured.out
def test_tdm_print(self, capfd):
    """Test that printing a result object with TDM samples provides the correct output."""
    samples = np.ones((2, 3, 4))
    result = Result({"output": [samples]})
    print(result)

    captured = capfd.readouterr()
    assert "spatial_modes=3" in captured.out
    assert "shots=2" in captured.out
    assert "timebins=4" in captured.out
    assert "contains state=False" in captured.out
def test_metadata(self):
    """Test that metadata is correctly returned."""
    result = Result(raw_results)
    expected = {
        "meta_array": meta_array,
        "meta_matrix": meta_matrix,
    }

    assert result.metadata.keys() == expected.keys()
    for key, val in result.metadata.items():
        assert np.allclose(val, expected[key])
def test_unknown_shape_print(self, capfd):
    """Test that printing a result object with samples with an unknown shape
    provides the correct output."""
    samples = np.ones((2, 3, 4, 5))
    result = Result({"output": [samples]})
    print(result)

    captured = capfd.readouterr()
    assert "modes" not in captured.out
    assert "shots" not in captured.out
    assert "timebins" not in captured.out
    assert "contains state=False" in captured.out
def test_run_async(self, prog):
    """Tests that a non-blocking job execution can succeed."""
    engine = RemoteEngine("X8_01")
    job = engine.run_async(prog, shots=10)

    # job.status calls job.finished, incrementing the request counter
    assert job.status == "open"

    for _ in range(REQUESTS_BEFORE_COMPLETED - 1):
        assert job.finished is False

    assert job.finished is True
    assert job.status == "complete"
    assert np.array_equal(job.result["foo"], [np.array([5, 6])])
    assert np.array_equal(job.result["output"], [np.array([[1, 2], [3, 4]])])

    result = Result(job.result)
    assert result.state is None
def run(*args, **kwargs):
    return Result({"output": [MOCK_SAMPLES]})
def run(
    self, program: Program, *, compile_options=None, recompile=False, **kwargs
) -> Optional[Result]:
    """Runs a blocking job.

    In the blocking mode, the engine blocks until the job is completed, failed, or
    cancelled. A job in progress can be cancelled with a keyboard interrupt (`ctrl+c`).

    If the job completes successfully, the result is returned; if the job fails or is
    cancelled, ``None`` is returned.

    Args:
        program (strawberryfields.Program): the quantum circuit
        compile_options (None, Dict[str, Any]): keyword arguments for
            :meth:`.Program.compile`
        recompile (bool): Specifies if ``program`` should be recompiled using
            ``compile_options``, or if not provided, the default compilation options.

    Keyword Args:
        shots (Optional[int]): The number of shots for which to run the job. If this
            argument is not provided, the shots are derived from the given ``program``.
        integer_overflow_protection (Optional[bool]): Whether to enable the conversion
            of integral job results into ``np.int64`` objects. By default, integer
            overflow protection is enabled. For more information, see
            `xcc.Job.get_result <https://xanadu-cloud-client.readthedocs.io/en/stable/api/xcc.Job.html#xcc.Job.get_result>`_.

    Returns:
        strawberryfields.Result, None: the job result if successful, and ``None`` otherwise

    Raises:
        requests.exceptions.RequestException: if there was an issue fetching the
            device specifications from the Xanadu Cloud
        FailedJobError: if the remote job fails on the server side ("cancelled" or "failed")
    """
    job = self.run_async(program, compile_options=compile_options, recompile=recompile, **kwargs)
    try:
        while True:
            # TODO: needed to refresh connection; remove once xcc.Connection
            # is able to refresh config info dynamically
            job._connection = self.connection
            job.clear()

            if job.finished:
                break

            time.sleep(self.POLLING_INTERVAL_SECONDS)
    except KeyboardInterrupt as e:
        xcc.Job(id_=job.id, connection=self.connection).cancel()
        raise KeyboardInterrupt("The job has been cancelled.") from e

    if job.status == "failed":
        message = (
            f"The remote job {job.id} failed due to an internal "
            f"server error: {job.metadata}. Please try again."
        )
        self.log.error(message)
        raise FailedJobError(message)

    if job.status == "complete":
        self.log.info(f"The remote job {job.id} has been completed.")
        integer_overflow_protection = kwargs.get("integer_overflow_protection", True)
        result = job.get_result(integer_overflow_protection=integer_overflow_protection)

        output = result.get("output")

        # crop vacuum modes arriving at the detector before the first computational mode
        if output and isinstance(program, TDMProgram) and kwargs.get("crop", False):
            output[0] = output[0][:, :, program.get_crop_value():]

        return Result(result)

    message = f"The remote job {job.id} has failed with status {job.status}: {job.metadata}."
    self.log.info(message)
    raise FailedJobError(message)
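# Hedged usage sketch for the blocking remote ``run`` above, not part of the
# library source: it assumes a configured Xanadu Cloud account/connection and
# that a device named "X8" is available. The two-mode-squeezer circuit is
# illustrative only and may need to be adapted to the target device's compiler.
import strawberryfields as sf
from strawberryfields import ops

prog = sf.Program(8)
with prog.context as q:
    ops.S2gate(1.0) | (q[0], q[4])
    ops.S2gate(1.0) | (q[1], q[5])
    ops.MeasureFock() | q

eng = sf.RemoteEngine("X8")
result = eng.run(prog, shots=10)  # blocks until the job completes, fails, or is cancelled
if result is not None:
    print(result.samples)  # photon-number samples returned by the device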
def _run(self, program, *, args, compile_options, **kwargs):
    """Execute the given programs by sending them to the backend.

    If multiple Programs are given they will be executed sequentially as
    parts of a single computation.

    For each :class:`.Program` instance given as input, the following happens:

    * The Program instance is compiled for the target backend.
    * The compiled program is executed on the backend.
    * The measurement results of each subsystem (if any) are stored in the
      :class:`.RegRef` instances of the corresponding Program, as well as in
      :attr:`~BaseEngine.samples`.
    * The compiled program is appended to :attr:`~BaseEngine.run_progs`.

    Finally, the result of the computation is returned.

    Args:
        program (Program, Sequence[Program]): quantum programs to run
        args (Dict[str, Any]): values for the free parameters in the program(s) (if any)
        compile_options (Dict[str, Any]): keyword arguments for :meth:`.Program.compile`

    The ``kwargs`` keyword arguments are passed to the backend API calls via
    :meth:`Operation.apply`.

    Returns:
        Result: results of the computation
    """
    # pop modes so that it's not passed on to the backend API calls via 'op.apply'
    modes = kwargs.pop("modes")

    if not isinstance(program, collections.abc.Sequence):
        program = [program]

    kwargs.setdefault("shots", 1)
    # NOTE: by putting ``shots`` into keyword arguments, it allows for the
    # signatures of methods in Operations to remain cleaner, since only
    # Measurements need to know about shots

    prev = self.run_progs[-1] if self.run_progs else None  # previous program segment

    for p in program:
        if self.backend.compiler:
            default_compiler = getattr(
                compile_options.get("device"), "default_compiler", self.backend.compiler
            )
            compile_options.setdefault("compiler", default_compiler)

        # compile the program for the correct backend if a compiler or a device exists
        if "compiler" in compile_options or "device" in compile_options:
            p = p.compile(**compile_options)

        received_rolled = False  # whether a TDMProgram had a rolled circuit
        if isinstance(p, TDMProgram):
            tdm_options = self.get_tdm_options(p, **kwargs)

            # pop modes so that it's not passed on to the backend API calls via 'op.apply'
            modes = tdm_options.pop("modes")
            received_rolled = tdm_options.pop("received_rolled")
            kwargs.update(tdm_options)

        if prev is None:
            # initialize the backend
            self._init_backend(p.init_num_subsystems)
        else:
            # there was a previous program segment
            if not p.can_follow(prev):
                raise RuntimeError(
                    f"Register mismatch: program {len(self.run_progs)}, '{p.name}'."
                )

            # Copy the latest measured values in the RegRefs of p.
            # We cannot copy from prev directly because it could be used in more than one
            # engine.
            for k, v in enumerate(self.samples):
                p.reg_refs[k].val = v

        # bind free parameters to their values
        p.bind_params(args)
        p.lock()

        _, self.samples, self.samples_dict = self._run_program(p, **kwargs)
        self.run_progs.append(p)

        if isinstance(p, TDMProgram) and received_rolled:
            p.roll()

        prev = p

    ancillae_samples = None
    if isinstance(self.backend, BosonicBackend):
        ancillae_samples = self.backend.ancillae_samples_dict.copy()

    samples = {"output": [self.samples]}
    result = Result(samples, samples_dict=self.samples_dict, ancillae_samples=ancillae_samples)

    # if ``modes`` is empty (i.e. ``modes == []``) return no state, else return state with
    # selected modes (all if ``modes == None``)
    if modes is None or modes:
        # state object requested
        # session and feed_dict are needed by TF backend both during simulation (if program
        # contains measurements) and state object construction.
        result.state = self.backend.state(modes=modes, **kwargs)

    return result
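# Hedged usage sketch for the local execution path above, not part of the
# library source: a minimal local simulation on the Gaussian backend with
# illustrative gates. Unlike remote results, the Result returned here carries
# a state object, since ``_run`` attaches ``self.backend.state(...)`` whenever
# ``modes`` is None or non-empty (an empty ``modes`` list skips the state).
import strawberryfields as sf
from strawberryfields import ops

prog = sf.Program(2)
with prog.context as q:
    ops.Sgate(0.5) | q[0]
    ops.BSgate() | (q[0], q[1])
    ops.MeasureFock() | q

eng = sf.Engine("gaussian")
result = eng.run(prog, shots=1)
print(result.samples)  # one row of photon-number samples per shot
print(result.state)    # Gaussian state object of the simulated modes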
def run(
    self, program: Program, *, compile_options=None, recompile=False, **kwargs
) -> Optional[Result]:
    """Runs a blocking job.

    In the blocking mode, the engine blocks until the job is completed, failed, or
    cancelled. A job in progress can be cancelled with a keyboard interrupt (`ctrl+c`).

    If the job completes successfully, the result is returned; if the job fails or is
    cancelled, ``None`` is returned.

    Args:
        program (strawberryfields.Program): the quantum circuit
        compile_options (None, Dict[str, Any]): keyword arguments for
            :meth:`.Program.compile`
        recompile (bool): Specifies if ``program`` should be recompiled using
            ``compile_options``, or if not provided, the default compilation options.

    Keyword Args:
        shots (Optional[int]): The number of shots for which to run the job. If this
            argument is not provided, the shots are derived from the given ``program``.

    Returns:
        strawberryfields.Result, None: the job result if successful, and ``None`` otherwise

    Raises:
        requests.exceptions.RequestException: if there was an issue fetching the
            device specifications from the Xanadu Cloud
        FailedJobError: if the remote job fails on the server side ("cancelled" or "failed")
    """
    job = self.run_async(program, compile_options=compile_options, recompile=recompile, **kwargs)
    try:
        while True:
            # TODO: needed to refresh connection; remove once xcc.Connection
            # is able to refresh config info dynamically
            job._connection = self.connection
            job.clear()

            if job.finished:
                break

            time.sleep(self.POLLING_INTERVAL_SECONDS)
    except KeyboardInterrupt as e:
        xcc.Job(id_=job.id, connection=self.connection).cancel()
        raise KeyboardInterrupt("The job has been cancelled.") from e

    if job.status == "failed":
        message = (
            f"The remote job {job.id} failed due to an internal "
            f"server error: {job.metadata}. Please try again."
        )
        self.log.error(message)
        raise FailedJobError(message)

    if job.status == "complete":
        self.log.info(f"The remote job {job.id} has been completed.")
        return Result(job.result)

    message = f"The remote job {job.id} has failed with status {job.status}: {job.metadata}."
    self.log.info(message)
    raise FailedJobError(message)
def _run(self, program, *, args, compile_options, **kwargs):
    """Execute the given programs by sending them to the backend.

    If multiple Programs are given they will be executed sequentially as
    parts of a single computation.

    For each :class:`.Program` instance given as input, the following happens:

    * The Program instance is compiled for the target backend.
    * The compiled program is executed on the backend.
    * The measurement results of each subsystem (if any) are stored in the
      :class:`.RegRef` instances of the corresponding Program, as well as in
      :attr:`~BaseEngine.samples`.
    * The compiled program is appended to :attr:`~BaseEngine.run_progs`.

    Finally, the result of the computation is returned.

    Args:
        program (Program, Sequence[Program]): quantum programs to run
        args (Dict[str, Any]): values for the free parameters in the program(s) (if any)
        compile_options (Dict[str, Any]): keyword arguments for :meth:`.Program.compile`

    The ``kwargs`` keyword arguments are passed to the backend API calls via
    :meth:`Operation.apply`.

    Returns:
        Result: results of the computation
    """
    if not isinstance(program, collections.abc.Sequence):
        program = [program]

    kwargs.setdefault("shots", 1)
    # NOTE: by putting ``shots`` into keyword arguments, it allows for the
    # signatures of methods in Operations to remain cleaner, since only
    # Measurements need to know about shots

    prev = self.run_progs[-1] if self.run_progs else None  # previous program segment

    for p in program:
        if prev is None:
            # initialize the backend
            self._init_backend(p.init_num_subsystems)
        else:
            # there was a previous program segment
            if not p.can_follow(prev):
                raise RuntimeError(
                    f"Register mismatch: program {len(self.run_progs)}, '{p.name}'."
                )

            # Copy the latest measured values in the RegRefs of p.
            # We cannot copy from prev directly because it could be used in more than one
            # engine.
            for k, v in enumerate(self.samples):
                p.reg_refs[k].val = v

        # bind free parameters to their values
        p.bind_params(args)

        # compile the program for the correct backend
        target = self.backend.compiler
        if target is not None:
            p = p.compile(compiler=target, **compile_options)
        p.lock()

        _, self.samples, self.samples_dict = self._run_program(p, **kwargs)
        self.run_progs.append(p)

        prev = p

    ancillae_samples = None
    if isinstance(self.backend, BosonicBackend):
        ancillae_samples = self.backend.ancillae_samples_dict.copy()

    samples = {"output": [self.samples]}
    return Result(samples, samples_dict=self.samples_dict, ancillae_samples=ancillae_samples)
def test_state_no_modes(self):
    """Test that the correct error is raised when setting a state on a remote job result."""
    result = Result(raw_results)

    with pytest.raises(ValueError, match="State can only be set for local simulations."):
        result.state = base_gaussian_state
def test_ancillae_samples(self):
    """Test that ancilla samples are correctly returned."""
    ancillae_samples = {0: [0, 1], 2: [1, 3]}
    result = Result(raw_results, ancillae_samples=ancillae_samples)
    assert result.ancillae_samples == ancillae_samples
def test_samples_dict(self):
    """Test that ``samples_dict`` is correctly returned."""
    samples_dict = {0: [1, 2, 3], 1: [4, 5]}
    result = Result(raw_results, samples_dict=samples_dict)
    assert result.samples_dict == samples_dict
def test_samples(self):
    """Test that ``samples`` is correctly returned."""
    result = Result(raw_results)
    assert result.samples is not None
    assert np.array_equal(result.samples, test_samples)
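# A minimal sketch of the kind of raw results dictionary the tests in this file
# operate on; the actual ``raw_results``/``test_samples`` fixtures are defined
# elsewhere in the test module and are not reproduced here. The import path
# assumes the ``Result`` class lives in ``strawberryfields.result``.
import numpy as np
from strawberryfields.result import Result

raw = {
    "output": [np.array([[1, 2], [3, 4], [5, 6]])],  # samples, shape (shots, modes)
    "meta_array": np.array([1, 0, 1]),               # arbitrary metadata entries
    "meta_matrix": np.array([[1, 2], [3, 4]]),
}

res = Result(raw)
print(res.samples)   # the samples array taken from the "output" entry
print(res.metadata)  # every entry except "output" (see test_metadata above)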