Example #1
def _dags_2_qobj(dags, backend_name, config=None, shots=None,
                 max_credits=None, qobj_id=None, basis_gates=None, coupling_map=None,
                 seed=None):
    """Convert a list of dags into a qobj.

    Args:
        dags (list[DAGCircuit]): dags to compile
        backend_name (str): name of runner backend
        config (dict): dictionary of parameters (e.g. noise) used by runner
        shots (int): number of repetitions of each circuit, for sampling
        max_credits (int): maximum credits to use
        qobj_id (int): identifier for the generated qobj
        basis_gates (list[str]): basis gates for the experiment
        coupling_map (list): coupling map (perhaps custom) to target in mapping
        seed (int): random seed for simulators

    Returns:
        Qobj: the Qobj to be run on the backends
    """
    # TODO: the following will be removed from qobj and thus removed here:
    # `basis_gates`, `coupling_map`

    # Step 1: create the Qobj, with empty experiments.
    # Copy the configuration: the values in `config` have preference
    qobj_config = deepcopy(config or {})
    # TODO: "memory_slots" is required by the qobj schema in the top-level
    # qobj.config, and is user-defined. At the moment it is set to the maximum
    # number of *register* slots for the circuits, in order to have `measure`
    # behave properly until the transition is over; and each circuit stores
    # its memory_slots in its configuration.
    qobj_config.update({'shots': shots,
                        'max_credits': max_credits,
                        'memory_slots': 0})

    qobj = Qobj(qobj_id=qobj_id or str(uuid.uuid4()),
                config=QobjConfig(**qobj_config),
                experiments=[],
                header=QobjHeader(backend_name=backend_name))
    if seed:
        qobj.config.seed = seed

    qobj.experiments = parallel_map(_dags_2_qobj_parallel, dags,
                                    task_kwargs={'basis_gates': basis_gates,
                                                 'config': config,
                                                 'coupling_map': coupling_map})

    # Update the `memory_slots` value.
    # TODO: remove when `memory_slots` can be provided by the user.
    qobj.config.memory_slots = max(experiment.config.memory_slots for
                                   experiment in qobj.experiments)

    # Update the `n_qubits` global value.
    # TODO: n_qubits is not part of the qobj specification, but needed
    # for the simulator.
    qobj.config.n_qubits = max(experiment.config.n_qubits for
                               experiment in qobj.experiments)

    return qobj
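A minimal, plain-Python sketch of the aggregation step at the end of _dags_2_qobj: the top-level config takes the maximum memory_slots and n_qubits over the per-experiment configs. SimpleNamespace stands in for the qobj experiment objects and is not part of the original code.

from types import SimpleNamespace

# Stand-ins for qobj.experiments; only the two config fields used above are mocked.
experiments = [
    SimpleNamespace(config=SimpleNamespace(memory_slots=2, n_qubits=3)),
    SimpleNamespace(config=SimpleNamespace(memory_slots=5, n_qubits=4)),
]
memory_slots = max(exp.config.memory_slots for exp in experiments)
n_qubits = max(exp.config.n_qubits for exp in experiments)
print(memory_slots, n_qubits)  # 5 4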
Example #2
    def from_data(date=None, qobj=None, backend=None, job_id=None, noise_model=None, external_id=None, theta=None):
        # type: (str, dict, str, str, dict, str, list) -> FinishedExperiment
        """
        We expect a dict with a qobj, job_id, backend name and optionally a noise model.
        When we have a Aer backend the simulation is redone to have the results.
        If the backend is a IBMQ then it is retrieved from the API.

        Thus it can take some time until this call ends.
        :param date: a string
        :param qobj: a dictionary
        :param job_id: a string
        :param noise_model: a dictionary
        :return: the Finished Experiment
        """

        if theta is None:
            theta = []
        if 'ibmq' in backend and job_id is not None:
            backend_obj = provider().get_backend(backend)  # type: IBMQBackend
            job = backend_obj.retrieve_job(job_id)  # type: IBMQJob
            qobj = job.qobj().to_dict()
            qobj = Qobj.from_dict(qobj)
            date = job.creation_date()
        elif date is not None and qobj is not None and backend is not None:
            if isinstance(qobj, dict):
                qobj = Qobj.from_dict(qobj)
            backend_obj = qiskit.Aer.get_backend(backend)  # type: AerBackend
            job = backend_obj.run(qobj=qobj, noise_model=noise_model)  # type: AerJob
            job_id = job.job_id()
        else:
            raise ValueError("Either use a IBMQ backend with a job_id or provide a date, qobj, backend.")

        if noise_model is not None:
            noise_model = NoiseModel.from_dict(noise_model)
        if isinstance(date, str):
            date = dateutil.parser.parse(date)  # type: datetime.datetime

        external_id = 'job_{}'.format(date.strftime("%Y%m%dT%H%M%SZ")) if external_id is None else external_id
        running_experiment = RunningExperiment(date=date, qobj=qobj, noise_model=noise_model, job=job, external_id=external_id)

        while not running_experiment.is_done():
            time.sleep(10)
            LOG.info("Simulation job {} is not done yet.".format(job_id))

        fin_ex = FinishedExperiment.from_running_experiment(running_experiment)
        fin_ex.set_theta(theta)

        return fin_ex
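A hypothetical call site for from_data, assuming a qobj that was previously serialized to JSON (the file name is illustrative, not from the original code) and a local Aer 'qasm_simulator' backend.

import json

with open('experiment_qobj.json') as fp:   # hypothetical file produced from qobj.to_dict()
    qobj_dict = json.load(fp)

finished = FinishedExperiment.from_data(
    date='2019-07-01T12:00:00Z',
    qobj=qobj_dict,
    backend='qasm_simulator',
    noise_model=None,
)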
Example #3
 def run_with_api(self, api):
     """Creates a new `IBMQJob` instance running with the provided API
     object."""
     self._current_api = api
     self._current_qjob = IBMQJob(Qobj.from_dict(new_fake_qobj()), api,
                                  False)
     return self._current_qjob
Example #4
def update_qobj_config(qobj: Qobj,
                       backend_options: Optional[Dict] = None,
                       noise_model: Any = None) -> Qobj:
    """Update a ``Qobj`` configuration from backend options and a noise model.

    Args:
        qobj: Description of the job.
        backend_options: Backend options.
        noise_model: Noise model.

    Returns:
        The updated ``Qobj``.
    """
    config = qobj.config.to_dict()

    # Append backend options to configuration.
    if backend_options:
        for key, val in backend_options.items():
            config[key] = val

    # Append the noise model to the configuration. This overwrites any matching backend option.
    if noise_model:
        config['noise_model'] = noise_model

    # Look for noise_models in the config, and try to transform them
    config = _serialize_noise_model(config)

    # Update the Qobj configuration.
    qobj.config = QobjHeader.from_dict(config)

    return qobj
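A hedged usage sketch: my_qobj and my_noise_model are placeholders for an assembled qobj and an Aer noise model built elsewhere; only the keyword names shown in the signature above are assumed.

# Fold simulator options and a noise model into an already assembled qobj.
my_qobj = update_qobj_config(
    my_qobj,
    backend_options={'shots': 4096, 'memory': True},
    noise_model=my_noise_model,
)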
Example #5
    def _submit_job(
            self,
            qobj: Qobj,
            job_name: Optional[str] = None,
            job_share_level: Optional[ApiJobShareLevel] = None) -> IBMQJob:
        """Submit qobj job to IBM-Q.
        Args:
            qobj: description of job.
            job_name: custom name to be assigned to the job. This job
                name can subsequently be used as a filter in the
                ``jobs()`` function call. Job names do not need to be unique.
            job_share_level: level the job should be shared at.

        Returns:
            An instance derived from ``BaseJob``.

        Events:
            ibmq.job.start: The job has started.

        Raises:
            IBMQBackendApiError: If an unexpected error occurred while submitting
                the job.
            IBMQBackendError: If an unexpected error occurred after submitting
                the job.
            IBMQBackendApiProtocolError: If an unexpected value is received from
                the server.
        """
        try:
            qobj_dict = qobj.to_dict()
            submit_info = self._api.job_submit(
                backend_name=self.name(),
                qobj_dict=qobj_dict,
                use_object_storage=getattr(self.configuration(),
                                           'allow_object_storage', False),
                job_name=job_name,
                job_share_level=job_share_level)
        except ApiError as ex:
            raise IBMQBackendApiError('Error submitting job: {}'.format(
                str(ex)))

        # Error in the job after submission:
        # Transition to the `ERROR` final state.
        if 'error' in submit_info:
            raise IBMQBackendError('Error submitting job: {}'.format(
                str(submit_info['error'])))

        # Submission success.
        submit_info.update({
            '_backend': self,
            'api': self._api,
            'qObject': qobj_dict
        })
        try:
            job = IBMQJob.from_dict(submit_info)
        except ModelValidationError as err:
            raise IBMQBackendApiProtocolError(
                'Unexpected return value from the server '
                'when submitting job: {}'.format(str(err)))
        Publisher().publish("ibmq.job.start", job)
        return job
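A hypothetical caller-side sketch mirroring the exceptions raised above; backend and qobj are placeholders for an IBMQBackend instance and an assembled qobj, and the exception classes are the ones used by the method.

try:
    job = backend._submit_job(qobj, job_name='bell_test')
except IBMQBackendApiError as err:
    # The request failed at the API layer before a job was created.
    print('Submission failed:', err)
except IBMQBackendError as err:
    # The server accepted the request but reported an error for the job.
    print('Job rejected:', err)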
Example #6
 def test_qobj_to_circuits_with_nothing(self):
     """Verify that qobj_to_circuits returns None without any data."""
     qobj = Qobj(qobj_id='abc123',
                 config=QobjConfig(),
                 header=QobjHeader(),
                 experiments=[],
                 type='QASM')
     self.assertIsNone(qobj_to_circuits(qobj))
Example #7
 def try_loading_cache_from_file(self):
     if len(self.qobjs) == 0 and self.cache_file is not None and len(self.cache_file) > 0:
         cache_handler = open(self.cache_file, "rb")
         cache = pickle.load(cache_handler, encoding="ASCII")
         cache_handler.close()
         self.qobjs = [Qobj.from_dict(qob) for qob in cache['qobjs']]
         self.mappings = cache['mappings']
         logger.debug("Circuit cache loaded from file: {}".format(self.cache_file))
Example #8
 def try_loading_cache_from_file(self):
     if len(self.qobjs) == 0 and self.cache_file is not None and len(self.cache_file) > 0:
         with open(self.cache_file, "rb") as cache_handler:
             try:
                 cache = pickle.load(cache_handler, encoding="ASCII")
             except EOFError:
                 logger.debug("No cache found in file: {}".format(self.cache_file))
                 return
             self.qobjs = [Qobj.from_dict(qob) for qob in cache['qobjs']]
             self.mappings = cache['mappings']
             self.cache_transpiled_circuits = cache['transpile']
             logger.debug("Circuit cache loaded from file: {}".format(self.cache_file))
Example #9
def new_fake_qobj():
    """Create fake `Qobj` and backend instances."""
    backend = FakeQasmSimulator()
    return Qobj(qobj_id='test-id',
                config=QobjConfig(shots=1024, memory_slots=1, max_credits=100),
                header=QobjHeader(backend_name=backend.name()),
                experiments=[
                    QobjExperiment(instructions=[
                        QobjInstruction(name='barrier', qubits=[1])
                    ],
                                   header=QobjExperimentHeader(),
                                   config=QobjItem(seed=123456))
                ])
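A small sanity-check sketch for the fixture above; the attribute names follow the constructor arguments used in new_fake_qobj and the usual qobj model behaviour of exposing them as attributes.

qobj = new_fake_qobj()
assert qobj.qobj_id == 'test-id'
assert qobj.config.shots == 1024
assert qobj.experiments[0].instructions[0].name == 'barrier'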
Example #10
    def _result_from_job_response(self, job_response):
        # type: (AcQuantumResultResponse) -> Result

        backend = self.backend()  # type: BaseBackend
        config = backend.configuration()  # type: BackendConfiguration
        experiment = self._api.get_experiment(int(self.job_id()))  # type: AcQuantumExperiment

        result_details = {}
        job_results = job_response.get_results()
        if len(job_results) == 1:
            experiment_result = job_results[0]  # type: AcQuantumResult

            counts = dict((hex(int(k, 2)), int(v * experiment_result.shots)) for k, v in experiment_result.data.items())
            self._qobj = Qobj.from_dict(json.loads(experiment.code))
            self._job_name = self._qobj.experiments[0].header.name

            success = experiment_result.exception is None

            result_details = {
                "status": self._status.name,
                "success": success,
                "name": self._job_name,
                "seed": experiment_result.seed,
                "shots": experiment_result.shots,
                "data": {
                    "counts": counts
                },
                "start_time": experiment_result.start_time,
                "finish_time": experiment_result.finish_time,
                "header": self._qobj.experiments[0].header.as_dict()
            }

        from dateutil.parser import parser
        date = parser().parse(result_details['finish_time'])

        result_dict = {
            'results': [result_details],
            'backend_name': config.backend_name,
            'backend_version': config.backend_version,
            'qobj_id': self._qobj.qobj_id,
            'job_id': str(self.job_id()),
            'success': len(job_results) == 1,
            'header': {
                "backend_name": config.backend_name
            },
            "date": date.isoformat()
        }

        result = Result.from_dict(result_dict)

        return result
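A pure-Python sketch of the counts conversion performed above: the provider reports relative frequencies keyed by bitstrings, while Qiskit result dictionaries expect absolute counts keyed by hex strings.

shots = 1024
raw = {'00': 0.5, '11': 0.5}  # illustrative provider output
counts = {hex(int(bits, 2)): int(freq * shots) for bits, freq in raw.items()}
print(counts)  # {'0x0': 512, '0x3': 512}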
Example #11
 def from_dict(dict):
     return FinishedExperiment(
         backend_name=dict.get('backend_name', ''),
         backend_version=dict.get('backend_version', None),
         date=dateutil.parser.parse(dict['date']) if 'date' in dict else None,
         qobj=Qobj.from_dict(dict.get('qobj', {})),
         job_id=dict.get('job_id', ''),
         status=JobStatus[dict['job_status']] if 'job_status' in dict else JobStatus.INITIALIZING,
         results=[ExperimentResult.from_dict(d) for d in dict.get('results', [])],
         noise_model=NoiseModel.from_dict(dict['noise_model']) if 'noise_model' in dict and dict['noise_model'] is not None else None,
         external_id=dict.get('external_id', None),
         theta=dict.get('theta', np.arange(0, 2*np.pi, 0.1)),  # fixing a bug here.... argh!
         parameters=dict.get('parameters', [])
     )
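A hedged sketch of the dict shape consumed by from_dict above; all values are illustrative and some_qobj_dict is a placeholder for a dict produced by serializing a Qobj.

serialized = {
    'backend_name': 'qasm_simulator',
    'backend_version': '0.3.0',
    'date': '2019-07-01T12:00:00Z',
    'qobj': some_qobj_dict,
    'job_id': 'job_20190701T120000Z',
    'job_status': 'DONE',
    'results': [],
    'noise_model': None,
    'external_id': 'job_20190701T120000Z',
    'theta': [0.0, 0.1, 0.2],
    'parameters': [],
}
finished = FinishedExperiment.from_dict(serialized)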
Example #12
 def try_loading_cache_from_file(self):
     """ load cache from file """
     if not self.qobjs and self.cache_file:
         with open(self.cache_file, "rb") as cache_handler:
             try:
                 cache = pickle.load(cache_handler, encoding="ASCII")
             except EOFError:
                 logger.debug("No cache found in file: %s", self.cache_file)
                 return
             self.qobjs = [Qobj.from_dict(qob) for qob in cache['qobjs']]
             self.mappings = cache['mappings']
             self.cache_transpiled_circuits = cache['transpile']
             logger.debug("Circuit cache loaded from file: %s",
                          self.cache_file)
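A hypothetical save-side counterpart to the cache loaders above; it writes the three keys the loader expects. Depending on the Terra version, Qobj serialization is to_dict() or as_dict(); to_dict() is assumed here.

import pickle

def save_cache_to_file(cache_file, qobjs, mappings, transpiled_circuits):
    """Persist the circuit cache in the format expected by try_loading_cache_from_file."""
    cache = {
        'qobjs': [qobj.to_dict() for qobj in qobjs],
        'mappings': mappings,
        'transpile': transpiled_circuits,
    }
    with open(cache_file, 'wb') as cache_handler:
        pickle.dump(cache, cache_handler, protocol=pickle.HIGHEST_PROTOCOL)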
Example #13
    def test_saving_and_loading_one_circ(self):
        """ Saving and Loading one Circ test """
        with tempfile.NamedTemporaryFile(suffix='.inp',
                                         delete=True) as cache_tmp_file:
            cache_tmp_file_name = cache_tmp_file.name
            var_form = RYRZ(num_qubits=4, depth=5)
            backend = BasicAer.get_backend('statevector_simulator')

            params0 = aqua_globals.random.random_sample(
                var_form.num_parameters)
            circ0 = var_form.construct_circuit(params0)

            qi0 = QuantumInstance(backend,
                                  circuit_caching=True,
                                  cache_file=cache_tmp_file_name,
                                  skip_qobj_deepcopy=True,
                                  skip_qobj_validation=True,
                                  seed_simulator=self.seed,
                                  seed_transpiler=self.seed)

            _ = qi0.execute([circ0])
            with open(cache_tmp_file_name, "rb") as cache_handler:
                saved_cache = pickle.load(cache_handler, encoding="ASCII")
            self.assertIn('qobjs', saved_cache)
            self.assertIn('mappings', saved_cache)
            qobjs = [Qobj.from_dict(qob) for qob in saved_cache['qobjs']]
            self.assertTrue(isinstance(qobjs[0], Qobj))
            self.assertGreaterEqual(len(saved_cache['mappings'][0][0]), 50)

            qi1 = QuantumInstance(backend,
                                  circuit_caching=True,
                                  cache_file=cache_tmp_file_name,
                                  skip_qobj_deepcopy=True,
                                  skip_qobj_validation=True,
                                  seed_simulator=self.seed,
                                  seed_transpiler=self.seed)

            params1 = aqua_globals.random.random_sample(
                var_form.num_parameters)
            circ1 = var_form.construct_circuit(params1)

            qobj1 = qi1.circuit_cache.load_qobj_from_cache(
                [circ1], 0, run_config=qi1.run_config)
            self.assertTrue(isinstance(qobj1, Qobj))
            _ = qi1.execute([circ1])

            self.assertEqual(qi0.circuit_cache.mappings,
                             qi1.circuit_cache.mappings)
            self.assertLessEqual(qi1.circuit_cache.misses, 0)