Example 1
    def save_to_file(self, file_name):
        if file_name is None:
            raise AquaError('Missing file path')

        file_name = file_name.strip()
        if not file_name:
            raise AquaError('Missing file path')

        with open(file_name, 'w') as file:
            print(json.dumps(self.get_sections(), sort_keys=True, indent=4), file=file)
Example 2
    def _validate_algorithm_problem(self):
        algo_name = self.get_section_property(PluggableType.ALGORITHM.value, JSONSchema.NAME)
        if algo_name is None:
            return

        problem_name = self.get_section_property(JSONSchema.PROBLEM, JSONSchema.NAME)
        if problem_name is None:
            problem_name = self.get_property_default_value(JSONSchema.PROBLEM, JSONSchema.NAME)

        if problem_name is None:
            raise AquaError("No algorithm 'problem' section found on input.")

        problems = BaseParser.get_algorithm_problems(algo_name)
        if problem_name not in problems:
            raise AquaError("Problem: {} not in the list of problems: {} for algorithm: {}.".format(problem_name, problems, algo_name))
Example 3
    def _update_algorithm_problem(self):
        problem_name = self.get_section_property(JSONSchema.PROBLEM,
                                                 JSONSchema.NAME)
        if problem_name is None:
            problem_name = self.get_property_default_value(
                JSONSchema.PROBLEM, JSONSchema.NAME)

        if problem_name is None:
            raise AquaError("No algorithm 'problem' section found on input.")

        algo_name = self.get_section_property(PluggableType.ALGORITHM.value,
                                              JSONSchema.NAME)
        if algo_name is not None and problem_name in BaseParser.get_algorithm_problems(
                algo_name):
            return

        for algo_name in local_pluggables(PluggableType.ALGORITHM):
            if problem_name in self.get_algorithm_problems(algo_name):
                # set to the first algorithm to solve the problem
                self.set_section_property(PluggableType.ALGORITHM.value,
                                          JSONSchema.NAME, algo_name)
                return

        # no algorithm solves this problem; remove the section
        self.delete_section(PluggableType.ALGORITHM.value)
Example 4
    def _update_input_problem(self):
        problem_name = self.get_section_property(JSONSchema.PROBLEM,
                                                 JSONSchema.NAME)
        if problem_name is None:
            problem_name = self.get_property_default_value(
                JSONSchema.PROBLEM, JSONSchema.NAME)

        if problem_name is None:
            raise AquaError("No algorithm 'problem' section found on input.")

        input_name = self.get_section_property(PluggableType.INPUT.value,
                                               JSONSchema.NAME)
        if input_name is not None and problem_name in InputParser.get_input_problems(
                input_name):
            return

        for input_name in local_pluggables(PluggableType.INPUT):
            if problem_name in self.get_input_problems(input_name):
                # set to the first input to solve the problem
                self.set_section_property(PluggableType.INPUT.value,
                                          JSONSchema.NAME, input_name)
                return

        # no input solves this problem; remove the section
        self.delete_section(PluggableType.INPUT.value)
Example 5
    def _set_section_property(sections, section_name, property_name, value,
                              types):
        """
        Args:
            sections (dict): sections dictionary
            section_name (str): the name of the section, case insensitive
            property_name (str): the property name in the section
            value (Union(dict,list,int,float,str)): property value
            types (list): schema types
        Raises:
            AquaError: failed to set pluggable
        """
        section_name = JSONSchema.format_section_name(section_name)
        property_name = JSONSchema.format_property_name(property_name)
        value = JSONSchema.get_value(value, types)

        if JSONSchema.NAME == property_name and not value and \
           BaseParser.is_pluggable_section(section_name):
            raise AquaError(
                "Unable to set pluggable '{}' name: Missing name.".format(
                    section_name))

        if section_name not in sections:
            sections[section_name] = OrderedDict()

        # name should come first
        if JSONSchema.NAME == property_name and property_name not in sections[
                section_name]:
            new_dict = OrderedDict([(property_name, value)])
            new_dict.update(sections[section_name])
            sections[section_name] = new_dict
        else:
            sections[section_name][property_name] = value
Example 6
    def _set_section_property(sections, section_name, property_name, value, types):
        """
        Args:
            section_name (str): the name of the section, case insensitive
            property_name (str): the property name in the section
            value : property value
            types : schema types
        """
        section_name = JSONSchema.format_section_name(section_name)
        property_name = JSONSchema.format_property_name(property_name)
        value = JSONSchema.get_value(value, types)

        if JSONSchema.NAME == property_name and \
           (value is None or len(value) == 0) and \
           BaseParser.is_pluggable_section(section_name):
            raise AquaError("Unable to set pluggable '{}' name: Missing name.".format(section_name))

        if section_name not in sections:
            sections[section_name] = OrderedDict()

        # name should come first
        if JSONSchema.NAME == property_name and property_name not in sections[section_name]:
            new_dict = OrderedDict([(property_name, value)])
            new_dict.update(sections[section_name])
            sections[section_name] = new_dict
        else:
            sections[section_name][property_name] = value
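
The "name should come first" branch above rebuilds the section as a fresh OrderedDict so that the name key lands in front. A minimal standalone sketch of that trick (the key names and values are illustrative only):

    from collections import OrderedDict

    # Put a new key at the front of an OrderedDict by seeding a new dict
    # with that entry and merging the existing entries after it.
    section = OrderedDict([('shots', 1024), ('depth', 3)])
    new_dict = OrderedDict([('name', 'vqe')])  # 'name' placed first
    new_dict.update(section)                   # existing keys keep their order
    section = new_dict
    print(list(section))  # ['name', 'shots', 'depth']
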
Example 7
    def _validate_input_problem(self):
        input_name = self.get_section_property(PluggableType.INPUT.value, JSONSchema.NAME)
        if input_name is None:
            return

        problem_name = self.get_section_property(JSONSchema.PROBLEM, JSONSchema.NAME)
        if problem_name is None:
            problem_name = self.get_property_default_value(JSONSchema.PROBLEM, JSONSchema.NAME)

        if problem_name is None:
            raise AquaError("No algorithm 'problem' section found on input.")

        problems = InputParser.get_input_problems(input_name)
        if problem_name not in problems:
            raise AquaError(
                "Problem: {} not in the list of problems: {} for input: {}.".format(
                    problem_name, problems, input_name))
Example 8
    def set_section_property(self, section_name, property_name, value):
        section_name = JSONSchema.format_section_name(section_name).lower()
        property_name = JSONSchema.format_property_name(property_name)
        value = self._json_schema.check_property_value(section_name,
                                                       property_name, value)
        types = self.get_property_types(section_name, property_name)

        sections_temp = copy.deepcopy(self._sections)
        InputParser._set_section_property(sections_temp, section_name,
                                          property_name, value, types)
        msg = self._json_schema.validate_property(sections_temp, section_name,
                                                  property_name)
        if msg is not None:
            raise AquaError("{}.{}: Value '{}': '{}'".format(
                section_name, property_name, value, msg))

        # check if this provider is loadable and valid
        if JSONSchema.BACKEND == section_name and property_name == JSONSchema.PROVIDER:
            get_backends_from_provider(value)

        InputParser._set_section_property(self._sections, section_name,
                                          property_name, value, types)
        if property_name == JSONSchema.NAME:
            if PluggableType.INPUT.value == section_name:
                self._update_algorithm_input_schema()
                # remove properties that are not valid for this section
                default_properties = self.get_section_default_properties(
                    section_name)
                if isinstance(default_properties, dict):
                    properties = self.get_section_properties(section_name)
                    for p_name in list(properties.keys()):
                        if p_name != JSONSchema.NAME and p_name not in default_properties:
                            self.delete_section_property(
                                section_name, p_name)
            elif JSONSchema.PROBLEM == section_name:
                self._update_algorithm_problem()
                self._update_input_problem()
            elif JSONSchema.BACKEND == section_name:
                self._json_schema.update_backend_schema()
            elif InputParser.is_pluggable_section(section_name):
                self._json_schema.update_pluggable_input_schemas(self)
                # remove properties that are not valid for this section
                default_properties = self.get_section_default_properties(
                    section_name)
                if isinstance(default_properties, dict):
                    properties = self.get_section_properties(section_name)
                    for p_name in list(properties.keys()):
                        if p_name != JSONSchema.NAME and p_name not in default_properties:
                            self.delete_section_property(
                                section_name, p_name)

                if section_name == PluggableType.ALGORITHM.value:
                    self._update_dependency_sections()

        self._sections = self._order_sections(self._sections)
Example 9
    def _update_algorithm_input_schema(self):
        # find algorithm input
        default_name = self.get_property_default_value(PluggableType.INPUT.value, JSONSchema.NAME)
        input_name = self.get_section_property(PluggableType.INPUT.value,
                                               JSONSchema.NAME, default_name)
        if input_name is None:
            # find the first valid input for the problem
            problem_name = self.get_section_property(JSONSchema.PROBLEM, JSONSchema.NAME)
            if problem_name is None:
                problem_name = self.get_property_default_value(JSONSchema.PROBLEM,
                                                               JSONSchema.NAME)

            if problem_name is None:
                raise AquaError("No algorithm 'problem' section found on input.")

            for name in local_pluggables(PluggableType.INPUT):
                if problem_name in self.get_input_problems(name):
                    # set to the first input to solve the problem
                    input_name = name
                    break

        if input_name is None:
            # just remove from schema if none solves the problem
            if PluggableType.INPUT.value in self.json_schema.schema['properties']:
                del self.json_schema.schema['properties'][PluggableType.INPUT.value]
            return

        if default_name is None:
            default_name = input_name

        config = {}
        try:
            config = get_pluggable_configuration(PluggableType.INPUT, input_name)
        except Exception:  # pylint: disable=broad-except
            pass

        input_schema = config.get('input_schema', {})
        properties = input_schema.get('properties', {})
        properties[JSONSchema.NAME] = {'type': 'string'}
        required = input_schema.get('required', [])
        additional_properties = input_schema.get('additionalProperties', True)
        if default_name is not None:
            properties[JSONSchema.NAME]['default'] = default_name
            required.append(JSONSchema.NAME)

        if PluggableType.INPUT.value not in self.json_schema.schema['properties']:
            self.json_schema.schema['properties'][PluggableType.INPUT.value] = {'type': 'object'}

        self.json_schema.schema['properties'][PluggableType.INPUT.value]['properties'] = properties
        self.json_schema.schema['properties'][PluggableType.INPUT.value]['required'] = required
        self.json_schema.schema['properties'][PluggableType.INPUT.value]['additionalProperties'] = \
            additional_properties
Example 10
    def parse(self):
        """Parse the data."""
        if self._sections is None:
            if self._filename is None:
                raise AquaError("Missing input file")

            with open(self._filename) as json_file:
                self._sections = json.load(json_file)

        self.json_schema.update_backend_schema(self)
        self.json_schema.update_pluggable_schemas(self)
        self._update_algorithm_input_schema()
        self._sections = self._order_sections(self._sections)
        self._original_sections = copy.deepcopy(self._sections)
Example 11
    def get_section(self, section_name):
        """Return a Section by name.
        Args:
            section_name (str): the name of the section, case insensitive
        Returns:
            Section: The section with this name
        Raises:
            AquaError: if the section does not exist.
        """
        section_name = JSONSchema.format_section_name(section_name).lower()
        try:
            return self._sections[section_name]
        except KeyError:
            raise AquaError('No section "{0}"'.format(section_name))
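
Since the lookup is case insensitive and a missing section raises AquaError, a hedged usage sketch (assuming parser is an already-parsed instance of one of the parsers above; the AquaError import path may vary by Aqua version):

    from qiskit.aqua import AquaError

    section = parser.get_section('ALGORITHM')  # same result as 'algorithm'
    try:
        parser.get_section('no_such_section')
    except AquaError as ex:
        print(ex)  # No section "no_such_section"
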
Example 12
    def load_qobj_from_cache(self, circuits, chunk, run_config=None):
        self.try_loading_cache_from_file()

        if self.try_reusing_qobjs and self.qobjs is not None and len(self.qobjs) <= chunk:
            self.mappings.insert(chunk, self.mappings[0])
            self.qobjs.insert(chunk, copy.deepcopy(self.qobjs[0]))

        for circ_num, input_circuit in enumerate(circuits):

            # If there are too few experiments in the cache, try reusing the first experiment.
            # Only do this for the first chunk. Subsequent chunks should rely on these copies
            # through the deepcopy above.
            if self.try_reusing_qobjs and chunk == 0 and circ_num > 0 and len(self.qobjs[chunk].experiments) <= \
                    circ_num:
                self.qobjs[0].experiments.insert(circ_num, copy.deepcopy(self.qobjs[0].experiments[0]))
                self.mappings[0].insert(circ_num, self.mappings[0][0])

            # Unroll circuit in case of composite gates
            raw_gates = []
            for gate in input_circuit.data:
                if isinstance(gate, CompositeGate):
                    raw_gates += gate.instruction_list()
                else:
                    raw_gates += [gate]
            self.qobjs[chunk].experiments[circ_num].header.name = input_circuit.name
            for gate_num, compiled_gate in enumerate(self.qobjs[chunk].experiments[circ_num].instructions):
                if not hasattr(compiled_gate, 'params') or len(compiled_gate.params) < 1:
                    continue
                if compiled_gate.name == 'snapshot':
                    continue
                cache_index = self.mappings[chunk][circ_num][gate_num]
                uncompiled_gate = raw_gates[cache_index]

                # Need the 'getattr' wrapper because measure has no 'params' field and breaks this.
                if len(getattr(compiled_gate, 'params', [])) != len(getattr(uncompiled_gate, 'params', [])) or \
                        compiled_gate.name != uncompiled_gate.name:
                    raise AquaError('Gate mismatch at gate {0} ({1}, {2} params) of circuit against gate {3} ({4}, '
                                    '{5} params) of cached qobj'.format(cache_index,
                                                                 uncompiled_gate.name,
                                                                 len(uncompiled_gate.params),
                                                                 gate_num,
                                                                 compiled_gate.name,
                                                                 len(compiled_gate.params)))
                compiled_gate.params = np.array(uncompiled_gate.params, dtype=float).tolist()
        exec_qobj = copy.copy(self.qobjs[chunk])
        if self.skip_qobj_deepcopy:
            exec_qobj.experiments = self.qobjs[chunk].experiments[0:len(circuits)]
        else:
            exec_qobj.experiments = copy.deepcopy(self.qobjs[chunk].experiments[0:len(circuits)])

        if run_config is None:
            run_config = RunConfig(shots=1024, max_credits=10, memory=False)
        exec_qobj.config = QobjConfig(**run_config.to_dict())
        exec_qobj.config.memory_slots = max(experiment.config.memory_slots for experiment in exec_qobj.experiments)
        exec_qobj.config.n_qubits = max(experiment.config.n_qubits for experiment in exec_qobj.experiments)
        return exec_qobj
Example 13
def _safe_get_job_status(job, job_id):
    while True:
        try:
            job_status = job.status()
            break
        except JobError as ex:
            logger.warning("FAILURE: job id: %s, "
                           "status: 'FAIL_TO_GET_STATUS' "
                           "Terra job error: %s", job_id, ex)
            time.sleep(5)
        except Exception as ex:  # pylint: disable=broad-except
            raise AquaError("FAILURE: job id: {}, "
                            "status: 'FAIL_TO_GET_STATUS' "
                            "Unknown error: ({})".format(job_id, ex)) from ex
    return job_status
Example 14
def _safe_get_job_status(job, job_id):
    while True:
        try:
            job_status = job.status()
            break
        except JobError as e:
            logger.warning("FAILURE: job id: {}, "
                           "status: 'FAIL_TO_GET_STATUS' "
                           "Terra job error: {}".format(job_id, e))
            time.sleep(5)
        except Exception as e:
            raise AquaError("FAILURE: job id: {}, "
                            "status: 'FAIL_TO_GET_STATUS' "
                            "Unknown error: ({})".format(job_id, e)) from e
    return job_status
Example 15
def _split_qobj_to_qobjs(qobj, chunk_size):
    qobjs = []
    num_chunks = int(np.ceil(len(qobj.experiments) / chunk_size))
    if num_chunks == 1:
        qobjs = [qobj]
    else:
        if isinstance(qobj, QasmQobj):
            qobj_template = QasmQobj(qobj_id=qobj.qobj_id,
                                     config=qobj.config, experiments=[], header=qobj.header)
            for i in range(num_chunks):
                temp_qobj = copy.deepcopy(qobj_template)
                temp_qobj.qobj_id = str(uuid.uuid4())
                temp_qobj.experiments = qobj.experiments[i * chunk_size:(i + 1) * chunk_size]
                qobjs.append(temp_qobj)
        else:
            raise AquaError("Only support QasmQobj now.")

    return qobjs
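
The chunk count above is a plain ceiling division. A worked sketch of the arithmetic, reusing the 500-circuit/300-per-job figures from the caching example at the end of this section:

    import numpy as np

    num_experiments, chunk_size = 500, 300
    num_chunks = int(np.ceil(num_experiments / chunk_size))  # ceil(500/300) == 2
    sizes = [min(chunk_size, num_experiments - i * chunk_size)
             for i in range(num_chunks)]
    print(num_chunks, sizes)  # 2 [300, 200]
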
Example 16
    def __init__(self, input_value=None):
        """Create Parser object."""
        super().__init__(JSONSchema(os.path.join(os.path.dirname(__file__), 'input_schema.json')))
        if input_value is not None:
            if isinstance(input_value, dict):
                self._sections = input_value
            elif isinstance(input_value, str):
                self._filename = input_value
            else:
                raise AquaError("Invalid parser input type.")

        self._section_order = [JSONSchema.PROBLEM,
                               PluggableType.INPUT.value,
                               PluggableType.ALGORITHM.value]
        for pluggable_type in local_pluggables_types():
            if pluggable_type not in [PluggableType.INPUT, PluggableType.ALGORITHM]:
                self._section_order.append(pluggable_type.value)

        self._section_order.extend([JSONSchema.BACKEND, InputParser._UNKNOWN])
Example 17
def run_qobj(qobj,
             backend,
             qjob_config=None,
             backend_options=None,
             noise_config=None,
             skip_qobj_validation=False,
             job_callback=None):
    """
    An execution wrapper with Qiskit-Terra, with job auto-recovery capability.

    The auto-recovery feature is only applied to non-simulator backends.
    This wrapper will try to get the result no matter how long it takes.

    Args:
        qobj (QasmQobj): qobj to execute
        backend (BaseBackend): backend instance
        qjob_config (dict, optional): configuration for quantum job object
        backend_options (dict, optional): configuration for simulator
        noise_config (dict, optional): configuration for noise model
        skip_qobj_validation (bool, optional): Bypass Qobj validation to decrease submission time,
                                               only works for Aer and BasicAer providers
        job_callback (Callable, optional): callback invoked while querying info on the
                                           submitted job; it receives the following arguments:
                                           job_id, job_status, queue_position, job

    Returns:
        Result: Result object

    Raises:
        ValueError: invalid backend
        AquaError: Any error except for JobError raised by Qiskit Terra
    """
    qjob_config = qjob_config or {}
    backend_options = backend_options or {}
    noise_config = noise_config or {}

    if backend is None or not isinstance(backend, (Backend, BaseBackend)):
        raise ValueError(
            'Backend is missing or not an instance of Backend or BaseBackend')

    with_autorecover = not is_simulator_backend(backend)

    if MAX_CIRCUITS_PER_JOB is not None:
        max_circuits_per_job = int(MAX_CIRCUITS_PER_JOB)
    else:
        if is_local_backend(backend):
            max_circuits_per_job = sys.maxsize
        else:
            max_circuits_per_job = backend.configuration().max_experiments

    # split qobj if it exceeds the payload of the backend

    qobjs = _split_qobj_to_qobjs(qobj, max_circuits_per_job)
    jobs = []
    job_ids = []
    for qob in qobjs:
        job, job_id = _safe_submit_qobj(qob, backend, backend_options,
                                        noise_config, skip_qobj_validation)
        job_ids.append(job_id)
        jobs.append(job)

    results = []
    if with_autorecover:
        logger.info("Backend status: %s", backend.status())
        logger.info("There are %s jobs are submitted.", len(jobs))
        logger.info("All job ids:\n%s", job_ids)
        for idx, _ in enumerate(jobs):
            job = jobs[idx]
            job_id = job_ids[idx]
            while True:
                logger.info("Running %s-th qobj, job id: %s", idx, job_id)
                # try to get result if possible
                while True:
                    job_status = _safe_get_job_status(job, job_id)
                    queue_position = 0
                    if job_status in JOB_FINAL_STATES:
                        # do callback again after the job is in the final states
                        if job_callback is not None:
                            job_callback(job_id, job_status, queue_position,
                                         job)
                        break
                    if job_status == JobStatus.QUEUED:
                        queue_position = job.queue_position()
                        logger.info("Job id: %s is queued at position %s",
                                    job_id, queue_position)
                    else:
                        logger.info("Job id: %s, status: %s", job_id,
                                    job_status)
                    if job_callback is not None:
                        job_callback(job_id, job_status, queue_position, job)
                    time.sleep(qjob_config['wait'])

                # get result after the status is DONE
                if job_status == JobStatus.DONE:
                    while True:
                        result = job.result(**qjob_config)
                        if result.success:
                            results.append(result)
                            logger.info("COMPLETED the %s-th qobj, job id: %s",
                                        idx, job_id)
                            break

                        logger.warning("FAILURE: Job id: %s", job_id)
                        logger.warning(
                            "Job (%s) is completed anyway, retrieve result "
                            "from backend again.", job_id)
                        job = backend.retrieve_job(job_id)
                    break
                # for other cases, resubmit the qobj until the result is available,
                # since without a returned result the algorithm cannot proceed.
                # retrieve the qobj first, in case the job object is consumed
                qobj = job.qobj()
                if job_status == JobStatus.CANCELLED:
                    logger.warning(
                        "FAILURE: Job id: %s is cancelled. Re-submit the Qobj.",
                        job_id)
                elif job_status == JobStatus.ERROR:
                    logger.warning(
                        "FAILURE: Job id: %s encountered an error. "
                        "Error: %s. Re-submit the Qobj.", job_id,
                        job.error_message())
                else:
                    logger.warning(
                        "FAILURE: Job id: %s. Unknown status: %s. "
                        "Re-submit the Qobj.", job_id, job_status)

                job, job_id = _safe_submit_qobj(qobj, backend, backend_options,
                                                noise_config,
                                                skip_qobj_validation)
                jobs[idx] = job
                job_ids[idx] = job_id
    else:
        results = []
        for job in jobs:
            results.append(job.result(**qjob_config))

    result = _combine_result_objects(results) if results else None

    # If result was not successful then raise an exception with either the status msg or
    # extra information if this was an Aer partial result return
    if result is not None and not result.success:
        msg = result.status
        if result.status == 'PARTIAL COMPLETED':
            # Aer can return partial results which Aqua algorithms cannot process and signals
            # using partial completed status where each returned result has a success and status.
            # We use the status from the first result that was not successful
            for res in result.results:
                if not res.success:
                    msg += ', ' + res.status
                    break
        raise AquaError('Circuit execution failed: {}'.format(msg))

    return result
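
The job_callback documented above receives (job_id, job_status, queue_position, job). A minimal logging-only callback might look like the following sketch (my_job_callback and the module-level logger are assumptions, not part of the API):

    import logging

    logger = logging.getLogger(__name__)

    def my_job_callback(job_id, job_status, queue_position, job):
        # Invoked on every poll, and once more when the job reaches a final state.
        logger.info("job %s: status=%s, queue position=%s",
                    job_id, job_status, queue_position)

    # Hypothetical usage; note run_qobj reads qjob_config['wait'] between polls:
    # result = run_qobj(qobj, backend, qjob_config={'wait': 5},
    #                   job_callback=my_job_callback)
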
Example 18
def build_algorithm_from_dict(params, algo_input=None, backend=None):
    """
        Construct algorithm as named in params, using params and algo_input as input data
        and returning a QuantumAlgorithm and QuantumInstance instance

        Args:
            params (dict): Dictionary of params for algo and dependent objects
            algo_input (AlgorithmInput): Main input data for algorithm. Optional, an algo may run entirely from params
            backend (BaseBackend): Backend object to be used in place of backend name

        Returns:
            Ready-to-run QuantumAlgorithm and QuantumInstance as specified in input parameters. Note that
            no QuantumInstance will be returned if none is specified - None will be returned instead.
        """
    _discover_on_demand()

    inputparser = InputParser(params)
    inputparser.parse()
    # before merging defaults, attempt to find a provider for the backend
    # in case no provider was passed
    if backend is None and inputparser.get_section_property(
            JSONSchema.BACKEND, JSONSchema.PROVIDER) is None:
        backend_name = inputparser.get_section_property(
            JSONSchema.BACKEND, JSONSchema.NAME)
        if backend_name is not None:
            inputparser.set_section_property(
                JSONSchema.BACKEND, JSONSchema.PROVIDER,
                get_provider_from_backend(backend_name))

    inputparser.validate_merge_defaults()
    logger.debug('Algorithm Input: {}'.format(
        json.dumps(inputparser.get_sections(), sort_keys=True, indent=4)))

    algo_name = inputparser.get_section_property(PluggableType.ALGORITHM.value,
                                                 JSONSchema.NAME)
    if algo_name is None:
        raise AquaError('Missing algorithm name')

    if algo_name not in local_pluggables(PluggableType.ALGORITHM):
        raise AquaError(
            'Algorithm "{0}" missing in local algorithms'.format(algo_name))

    if algo_input is None:
        input_name = inputparser.get_section_property('input', JSONSchema.NAME)
        if input_name is not None:
            input_params = copy.deepcopy(
                inputparser.get_section_properties('input'))
            del input_params[JSONSchema.NAME]
            convert_json_to_dict(input_params)
            algo_input = get_pluggable_class(
                PluggableType.INPUT, input_name).from_params(input_params)

    algo_params = copy.deepcopy(inputparser.get_sections())
    algorithm = get_pluggable_class(PluggableType.ALGORITHM,
                                    algo_name).init_params(
                                        algo_params, algo_input)
    random_seed = inputparser.get_section_property(JSONSchema.PROBLEM,
                                                   'random_seed')
    algorithm.random_seed = random_seed
    quantum_instance = None
    # setup backend
    backend_provider = inputparser.get_section_property(
        JSONSchema.BACKEND, JSONSchema.PROVIDER)
    backend_name = inputparser.get_section_property(JSONSchema.BACKEND,
                                                    JSONSchema.NAME)
    if backend_provider is not None and backend_name is not None:  # quantum algorithm
        backend_cfg = {
            k: v
            for k, v in inputparser.get_section(JSONSchema.BACKEND).items()
            if k not in [JSONSchema.PROVIDER, JSONSchema.NAME]
        }
        # TODO, how to build the noise model from a dictionary?
        backend_cfg['seed_mapper'] = random_seed
        pass_manager = PassManager() if backend_cfg.pop(
            'skip_transpiler', False) else None
        if pass_manager is not None:
            backend_cfg['pass_manager'] = pass_manager

        if backend is None or not isinstance(backend, BaseBackend):
            backend = get_backend_from_provider(backend_provider, backend_name)
        backend_cfg['backend'] = backend

        # overwrite the basis_gates and coupling_map
        basis_gates = backend_cfg.pop('basis_gates', None)
        coupling_map = backend_cfg.pop('coupling_map', None)
        if backend.configuration().simulator:
            if basis_gates is not None:
                backend.configuration().basis_gates = basis_gates
            if coupling_map is not None:
                backend.configuration().coupling_map = coupling_map
        else:
            logger.warning(
                "Changing basis_gates and coupling_map on a real device is disallowed."
            )

        shots = backend_cfg.pop('shots', 1024)
        seed = random_seed
        max_credits = backend_cfg.pop('max_credits', 10)
        memory = backend_cfg.pop('memory', False)
        run_config = RunConfig(shots=shots,
                               max_credits=max_credits,
                               memory=memory)
        if seed is not None:
            run_config.seed = seed
        backend_cfg['run_config'] = run_config

        backend_cfg['skip_qobj_validation'] = inputparser.get_section_property(
            JSONSchema.PROBLEM, 'skip_qobj_validation')
        use_caching = inputparser.get_section_property(JSONSchema.PROBLEM,
                                                       'circuit_caching')
        if use_caching:
            deepcopy_qobj = inputparser.get_section_property(
                JSONSchema.PROBLEM, 'skip_qobj_deepcopy')
            cache_file = inputparser.get_section_property(
                JSONSchema.PROBLEM, 'circuit_cache_file')
            backend_cfg['circuit_cache'] = CircuitCache(
                skip_qobj_deepcopy=deepcopy_qobj, cache_file=cache_file)

        quantum_instance = QuantumInstance(**backend_cfg)

    # Note that quantum_instance can be None if none is specified
    return algorithm, quantum_instance
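
A hedged usage sketch of build_algorithm_from_dict. The section names (problem, algorithm, backend) follow the parser sections used above, but the concrete values and my_energy_input are illustrative assumptions, not a verified configuration:

    params = {
        'problem': {'name': 'energy', 'random_seed': 50},
        'algorithm': {'name': 'VQE'},
        'backend': {'provider': 'qiskit.BasicAer',
                    'name': 'statevector_simulator'},
    }
    # my_energy_input stands in for a hypothetical AlgorithmInput (e.g. an
    # operator for an 'energy' problem); defaults fill the remaining sections.
    algorithm, quantum_instance = build_algorithm_from_dict(params, my_energy_input)
    result = algorithm.run(quantum_instance)
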
Example 19
def compile_and_run_circuits(circuits,
                             backend,
                             backend_config=None,
                             compile_config=None,
                             run_config=None,
                             qjob_config=None,
                             backend_options=None,
                             noise_config=None,
                             show_circuit_summary=False,
                             has_shared_circuits=False,
                             circuit_cache=None,
                             skip_qobj_validation=False,
                             **kwargs):
    """
    An execution wrapper with Qiskit-Terra, with job auto-recovery capability.

    The auto-recovery feature is only applied to non-simulator backends.
    This wrapper will try to get the result no matter how long it takes.

    Args:
        circuits (QuantumCircuit or list[QuantumCircuit]): circuits to execute
        backend (BaseBackend): backend instance
        backend_config (dict, optional): configuration for backend
        compile_config (dict, optional): configuration for compilation
        run_config (RunConfig, optional): configuration for running a circuit
        qjob_config (dict, optional): configuration for quantum job object
        backend_options (dict, optional): configuration for simulator
        noise_config (dict, optional): configuration for noise model
        show_circuit_summary (bool, optional): whether to show a summary of the submitted circuits.
        has_shared_circuits (bool, optional): use the 0-th circuit as the initial state for the other circuits.
        circuit_cache (CircuitCache, optional): A CircuitCache to use when calling compile_and_run_circuits
        skip_qobj_validation (bool, optional): Bypass Qobj validation to decrease submission time

    Returns:
        Result: Result object

    Raises:
        AquaError: Any error except for JobError raised by Qiskit Terra
    """
    backend_config = backend_config or {}
    compile_config = compile_config or {}
    run_config = run_config or {}
    qjob_config = qjob_config or {}
    backend_options = backend_options or {}
    noise_config = noise_config or {}

    if backend is None or not isinstance(backend, BaseBackend):
        raise ValueError(
            'Backend is missing or not an instance of BaseBackend')

    if not isinstance(circuits, list):
        circuits = [circuits]

    if is_simulator_backend(backend):
        circuits = _avoid_empty_circuits(circuits)

    if has_shared_circuits:
        return _reuse_shared_circuits(circuits, backend, backend_config,
                                      compile_config, run_config, qjob_config,
                                      backend_options)

    with_autorecover = not is_simulator_backend(backend)

    if MAX_CIRCUITS_PER_JOB is not None:
        max_circuits_per_job = int(MAX_CIRCUITS_PER_JOB)
    else:
        if is_local_backend(backend):
            max_circuits_per_job = sys.maxsize
        else:
            max_circuits_per_job = backend.configuration().max_experiments

    if circuit_cache is not None and circuit_cache.try_reusing_qobjs:
        # Check if all circuits are the same length.
        # If not, don't try to use the same qobj.experiment for all of them.
        if len(set([len(circ.data) for circ in circuits])) > 1:
            circuit_cache.try_reusing_qobjs = False
        else:  # Try setting up the reusable qobj
            # Compile and cache the first circuit if the cache is empty; the load method will try to reuse it
            if circuit_cache.qobjs is None:
                qobj, _ = _compile_wrapper([circuits[0]], backend,
                                           backend_config, compile_config,
                                           run_config)
                if is_aer_provider(backend):
                    qobj = _maybe_add_aer_expectation_instruction(qobj, kwargs)
                circuit_cache.cache_circuit(qobj, [circuits[0]], 0)

    qobjs = []
    jobs = []
    job_ids = []
    transpiled_circuits = []
    chunks = int(np.ceil(len(circuits) / max_circuits_per_job))
    for i in range(chunks):
        sub_circuits = circuits[i * max_circuits_per_job:(i + 1) *
                                max_circuits_per_job]
        if circuit_cache is not None and circuit_cache.misses < circuit_cache.allowed_misses:
            try:
                if circuit_cache.cache_transpiled_circuits:
                    transpiled_sub_circuits = compiler.transpile(
                        sub_circuits, backend, **backend_config,
                        **compile_config)
                    qobj = circuit_cache.load_qobj_from_cache(
                        transpiled_sub_circuits, i, run_config=run_config)
                else:
                    qobj = circuit_cache.load_qobj_from_cache(
                        sub_circuits, i, run_config=run_config)
                if is_aer_provider(backend):
                    qobj = _maybe_add_aer_expectation_instruction(qobj, kwargs)
            # cache miss, fail gracefully
            except (TypeError, IndexError, FileNotFoundError, EOFError,
                    AquaError, AttributeError) as e:
                circuit_cache.try_reusing_qobjs = False  # Reusing Qobj didn't work
                if len(circuit_cache.qobjs) > 0:
                    logger.info(
                        'Circuit cache miss, recompiling. Cache miss reason: '
                        + repr(e))
                    circuit_cache.misses += 1
                else:
                    logger.info(
                        'Circuit cache is empty, compiling from scratch.')
                circuit_cache.clear_cache()
                qobj, transpiled_sub_circuits = _compile_wrapper(
                    sub_circuits, backend, backend_config, compile_config,
                    run_config)
                transpiled_circuits.extend(transpiled_sub_circuits)
                if is_aer_provider(backend):
                    qobj = _maybe_add_aer_expectation_instruction(qobj, kwargs)
                try:
                    circuit_cache.cache_circuit(qobj, sub_circuits, i)
                except (TypeError, IndexError, AquaError, AttributeError,
                        KeyError) as e:
                    try:
                        circuit_cache.cache_transpiled_circuits = True
                        circuit_cache.cache_circuit(qobj,
                                                    transpiled_sub_circuits, i)
                    except (TypeError, IndexError, AquaError, AttributeError,
                            KeyError) as e:
                        logger.info(
                            'Circuit could not be cached for reason: ' +
                            repr(e))
                        logger.info(
                            'Transpilation may be too aggressive. Try skipping transpiler.'
                        )

        else:
            qobj, transpiled_sub_circuits = _compile_wrapper(
                sub_circuits, backend, backend_config, compile_config,
                run_config)
            transpiled_circuits.extend(transpiled_sub_circuits)
            if is_aer_provider(backend):
                qobj = _maybe_add_aer_expectation_instruction(qobj, kwargs)

        # ensure we get a job id
        while True:
            job = run_on_backend(backend,
                                 qobj,
                                 backend_options=backend_options,
                                 noise_config=noise_config,
                                 skip_qobj_validation=skip_qobj_validation)
            try:
                job_id = job.job_id()
                break
            except JobError as e:
                logger.warning(
                    "FAILURE: the {}-th chunk of circuits, can not get job id, "
                    "Resubmit the qobj to get job id. "
                    "Terra job error: {} ".format(i, e))
            except Exception as e:
                logger.warning(
                    "FAILURE: the {}-th chunk of circuits, can not get job id, "
                    "Resubmit the qobj to get job id. "
                    "Error: {} ".format(i, e))
        job_ids.append(job_id)
        jobs.append(job)
        qobjs.append(qobj)

    if logger.isEnabledFor(logging.DEBUG) and show_circuit_summary:
        logger.debug("==== Before transpiler ====")
        logger.debug(summarize_circuits(circuits))
        logger.debug("====  After transpiler ====")
        logger.debug(summarize_circuits(transpiled_circuits))

    results = []
    if with_autorecover:
        logger.info("Backend status: {}".format(backend.status()))
        logger.info(
            "There are {} circuits and they are chunked into {} chunks, "
            "each with {} circutis (max.).".format(len(circuits), chunks,
                                                   max_circuits_per_job))
        logger.info("All job ids:\n{}".format(job_ids))
        for idx in range(len(jobs)):
            while True:
                job = jobs[idx]
                job_id = job_ids[idx]
                logger.info("Running {}-th chunk circuits, job id: {}".format(
                    idx, job_id))
                # try to get result if possible
                try:
                    result = job.result(**qjob_config)
                    if result.success:
                        results.append(result)
                        logger.info("COMPLETED the {}-th chunk of circuits, "
                                    "job id: {}".format(idx, job_id))
                        break
                    else:
                        logger.warning("FAILURE: the {}-th chunk of circuits, "
                                       "job id: {}".format(idx, job_id))
                except JobError as e:
                    # if terra raises any error, something went wrong; re-run it
                    logger.warning(
                        "FAILURE: the {}-th chunk of circuits, job id: {} "
                        "Terra job error: {} ".format(idx, job_id, e))
                except Exception as e:
                    raise AquaError(
                        "FAILURE: the {}-th chunk of circuits, job id: {} "
                        "Unknown error: {} ".format(idx, job_id, e)) from e

                # something went wrong here; query the status to decide how to handle it.
                # keep querying until the status is obtained.
                while True:
                    try:
                        job_status = job.status()
                        break
                    except JobError as e:
                        logger.warning("FAILURE: job id: {}, "
                                       "status: 'FAIL_TO_GET_STATUS' "
                                       "Terra job error: {}".format(job_id, e))
                        time.sleep(5)
                    except Exception as e:
                        raise AquaError("FAILURE: job id: {}, "
                                        "status: 'FAIL_TO_GET_STATUS' "
                                        "Unknown error: ({})".format(
                                            job_id, e)) from e

                logger.info("Job status: {}".format(job_status))

                # handle the failed job based on its status
                if job_status == JobStatus.DONE:
                    logger.info(
                        "Job ({}) is completed anyway, retrieve result "
                        "from backend.".format(job_id))
                    job = backend.retrieve_job(job_id)
                elif job_status == JobStatus.RUNNING or job_status == JobStatus.QUEUED:
                    logger.info("Job ({}) is {}, but encounter an exception, "
                                "recover it from backend.".format(
                                    job_id, job_status))
                    job = backend.retrieve_job(job_id)
                else:
                    logger.info(
                        "Failed to run Job ({}); resubmitting it.".format(job_id))
                    qobj = qobjs[idx]
                    # ensure the job gets its id
                    while True:
                        job = run_on_backend(
                            backend,
                            qobj,
                            backend_options=backend_options,
                            noise_config=noise_config,
                            skip_qobj_validation=skip_qobj_validation)
                        try:
                            job_id = job.job_id()
                            break
                        except JobError as e:
                            logger.warning(
                                "FAILURE: the {}-th chunk of circuits, "
                                "can not get job id. Resubmit the qobj to get job id. "
                                "Terra job error: {} ".format(idx, e))
                        except Exception as e:
                            logger.warning(
                                "FAILURE: the {}-th chunk of circuits, "
                                "can not get job id, Resubmit the qobj to get job id. "
                                "Unknown error: {} ".format(idx, e))
                    jobs[idx] = job
                    job_ids[idx] = job_id
    else:
        results = []
        for job in jobs:
            results.append(job.result(**qjob_config))

    result = _combine_result_objects(results) if results else None

    return result
Example 20
    def set_section_property(self, section_name, property_name, value):
        """
        Args:
            section_name (str): the name of the section, case insensitive
            property_name (str): the name of the property
            value (obj): the value of the property
        """
        section_name = JSONSchema.format_section_name(section_name).lower()
        property_name = JSONSchema.format_property_name(property_name)
        value = self._json_schema.check_property_value(section_name,
                                                       property_name, value)
        types = self.get_property_types(section_name, property_name)
        sections_temp = copy.deepcopy(self._sections)
        BaseParser._set_section_property(sections_temp, section_name,
                                         property_name, value, types)
        msg = self._json_schema.validate_property(sections_temp, section_name,
                                                  property_name)
        if msg is not None:
            raise AquaError("{}.{}: Value '{}': '{}'".format(
                section_name, property_name, value, msg))

        value_changed = False
        if section_name not in self._sections:
            value_changed = True
        elif property_name not in self._sections[section_name]:
            value_changed = True
        else:
            old_value = self.get_section_property(section_name, property_name)
            value_changed = (old_value != value)

        if not value_changed:
            # nothing changed
            return

        # check if this provider is loadable and valid
        if JSONSchema.BACKEND == section_name and property_name == JSONSchema.PROVIDER:
            get_backends_from_provider(value)

        BaseParser._set_section_property(self._sections, section_name,
                                         property_name, value, types)
        if property_name == JSONSchema.NAME:
            if JSONSchema.PROBLEM == section_name:
                self._update_algorithm_problem()
            elif JSONSchema.BACKEND == section_name:
                self._json_schema.update_backend_schema()
            elif BaseParser.is_pluggable_section(section_name):
                self._json_schema.update_pluggable_schemas(self)
                # remove properties that are not valid for this section
                default_properties = self.get_section_default_properties(
                    section_name)
                if isinstance(default_properties, dict):
                    properties = self.get_section_properties(section_name)
                    for p_name in list(properties.keys()):
                        if p_name != JSONSchema.NAME and p_name not in default_properties:
                            self.delete_section_property(section_name, p_name)

                self._update_dependency_sections(section_name)
            else:
                self.post_set_section_property(section_name, property_name)

        self._sections = self._order_sections(self._sections)
Example 21
    def _set_section_property_without_checking_defaults(self, section_name, property_name, value):
        """
        Args:
            section_name (str): the name of the section, case insensitive
            property_name (str): the name of the property
            value (obj): the value of the property
        Returns:
            Bool: True if value changed
        """
        section_name = JSONSchema.format_section_name(section_name).lower()
        property_name = JSONSchema.format_property_name(property_name)
        value = self._json_schema.check_property_value(section_name, property_name, value)
        types = self.get_property_types(section_name, property_name)
        sections_temp = copy.deepcopy(self._sections)
        BaseParser._set_section_property(sections_temp, section_name, property_name, value, types)
        msg = self._json_schema.validate_property(sections_temp, section_name, property_name)
        if msg is not None:
            raise AquaError("{}.{}: Value '{}': '{}'".format(section_name, property_name, value, msg))

        value_changed = False
        old_value = None
        if section_name not in self._sections:
            value_changed = True
        elif property_name not in self._sections[section_name]:
            value_changed = True
        else:
            old_value = self.get_section_property(section_name, property_name)
            value_changed = (old_value != value)

        if not value_changed:
            # nothing changed
            return False

        # check if the provider/backend is loadable and valid
        backend_names = []
        if JSONSchema.BACKEND == section_name and property_name in [JSONSchema.PROVIDER, JSONSchema.NAME]:
            provider_name = value if property_name == JSONSchema.PROVIDER else self.get_section_property(section_name, JSONSchema.PROVIDER)
            backend_names = get_backends_from_provider(provider_name)
            if property_name == JSONSchema.NAME and value not in backend_names:
                raise AquaError("Backend '{}' not valid for provider: '{}' backends: '{}'".format(value, provider_name, backend_names))

        # update value internally
        BaseParser._set_section_property(self._sections, section_name, property_name, value, types)

        if JSONSchema.BACKEND == section_name and property_name in [JSONSchema.PROVIDER, JSONSchema.NAME]:
            if property_name == JSONSchema.PROVIDER:
                backend_name = self.get_section_property(section_name, JSONSchema.NAME)
                if backend_name not in backend_names:
                    # use first backend available in provider
                    backend_name = backend_names[0] if len(backend_names) > 0 else ''
                    BaseParser._set_section_property(self._sections, section_name, JSONSchema.NAME, backend_name, ['string'])

            self._json_schema.update_backend_schema(self)
            return True

        if property_name == JSONSchema.NAME:
            if BaseParser.is_pluggable_section(section_name):
                self._json_schema.update_pluggable_schemas(self)
                self._update_dependency_sections(section_name)
                # remove any previous pluggable sections not in new dependency list
                old_dependencies = self._get_dependency_sections(section_name, old_value) if old_value is not None else set()
                new_dependencies = self._get_dependency_sections(section_name, value) if value is not None else set()
                for pluggable_name in old_dependencies.difference(new_dependencies):
                    if pluggable_name in self._sections:
                        del self._sections[pluggable_name]

                # reorder sections
                self._sections = self._order_sections(self._sections)
                return True

            if JSONSchema.PROBLEM == section_name:
                self._update_algorithm_problem()

            self.post_set_section_property(section_name, property_name)

        return True
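
The dependency cleanup above is a plain set difference: sections required only by the old pluggable, and not by the new one, are deleted. A standalone sketch (the pluggable names are hypothetical):

    old_dependencies = {'optimizer', 'variational_form', 'initial_state'}
    new_dependencies = {'optimizer', 'oracle'}
    for pluggable_name in old_dependencies.difference(new_dependencies):
        # drops 'variational_form' and 'initial_state' (set order is arbitrary)
        print('delete section:', pluggable_name)
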
Example 22
def run_qobj(qobj,
             backend,
             qjob_config=None,
             backend_options=None,
             noise_config=None,
             skip_qobj_validation=False):
    """
    An execution wrapper with Qiskit-Terra, with job auto-recovery capability.

    The auto-recovery feature is only applied to non-simulator backends.
    This wrapper will try to get the result no matter how long it takes.

    Args:
        qobj (QasmQobj): qobj to execute
        backend (BaseBackend): backend instance
        qjob_config (dict, optional): configuration for quantum job object
        backend_options (dict, optional): configuration for simulator
        noise_config (dict, optional): configuration for noise model
        skip_qobj_validation (bool, optional): Bypass Qobj validation to decrease submission time

    Returns:
        Result: Result object

    Raises:
        AquaError: Any error except for JobError raised by Qiskit Terra
    """
    qjob_config = qjob_config or {}
    backend_options = backend_options or {}
    noise_config = noise_config or {}

    if backend is None or not isinstance(backend, BaseBackend):
        raise ValueError(
            'Backend is missing or not an instance of BaseBackend')

    with_autorecover = not is_simulator_backend(backend)

    if MAX_CIRCUITS_PER_JOB is not None:
        max_circuits_per_job = int(MAX_CIRCUITS_PER_JOB)
    else:
        if is_local_backend(backend):
            max_circuits_per_job = sys.maxsize
        else:
            max_circuits_per_job = backend.configuration().max_experiments

    # split qobj if it exceeds the payload of the backend

    qobjs = _split_qobj_to_qobjs(qobj, max_circuits_per_job)
    jobs = []
    job_ids = []
    for qob in qobjs:
        job, job_id = _safe_submit_qobj(qob, backend, backend_options,
                                        noise_config, skip_qobj_validation)
        job_ids.append(job_id)
        jobs.append(job)

    results = []
    if with_autorecover:
        logger.info("Backend status: {}".format(backend.status()))
        logger.info("There are {} jobs are submitted.".format(len(jobs)))
        logger.info("All job ids:\n{}".format(job_ids))
        for idx in range(len(jobs)):
            job = jobs[idx]
            job_id = job_ids[idx]
            while True:
                logger.info("Running {}-th qobj, job id: {}".format(
                    idx, job_id))
                # try to get result if possible
                try:
                    result = job.result(**qjob_config)
                    if result.success:
                        results.append(result)
                        logger.info("COMPLETED the {}-th qobj, "
                                    "job id: {}".format(idx, job_id))
                        break
                    else:
                        logger.warning("FAILURE: the {}-th qobj, "
                                       "job id: {}".format(idx, job_id))
                except JobError as e:
                    # if terra raises any error, something went wrong; re-run it
                    logger.warning("FAILURE: the {}-th qobj, job id: {} "
                                   "Terra job error: {} ".format(
                                       idx, job_id, e))
                except Exception as e:
                    raise AquaError("FAILURE: the {}-th qobj, job id: {} "
                                    "Unknown error: {} ".format(
                                        idx, job_id, e)) from e

                # if we reach here, something went wrong; query the status to
                # decide how to handle it. Keep querying until we get the status.
                while True:
                    try:
                        job_status = job.status()
                        break
                    except JobError as e:
                        logger.warning("FAILURE: job id: {}, "
                                       "status: 'FAIL_TO_GET_STATUS' "
                                       "Terra job error: {}".format(job_id, e))
                        time.sleep(5)
                    except Exception as e:
                        raise AquaError("FAILURE: job id: {}, "
                                        "status: 'FAIL_TO_GET_STATUS' "
                                        "Unknown error: ({})".format(
                                            job_id, e)) from e

                logger.info("Job status: {}".format(job_status))

                # handle the failure job based on job status
                if job_status == JobStatus.DONE:
                    logger.info(
                        "Job ({}) completed after all; retrieving result "
                        "from backend.".format(job_id))
                    job = backend.retrieve_job(job_id)
                elif job_status in (JobStatus.RUNNING, JobStatus.QUEUED):
                    logger.info("Job ({}) is {}, but encountered an exception; "
                                "recovering it from backend.".format(
                                    job_id, job_status))
                    job = backend.retrieve_job(job_id)
                else:
                    logger.info(
                        "Failed to run job ({}); resubmitting it.".format(job_id))
                    qobj = qobjs[idx]
                    # ensure the job gets its id
                    job, job_id = _safe_submit_qobj(qobj, backend,
                                                    backend_options,
                                                    noise_config,
                                                    skip_qobj_validation)
                    jobs[idx] = job
                    job_ids[idx] = job_id
    else:
        results = []
        for job in jobs:
            results.append(job.result(**qjob_config))

    result = _combine_result_objects(results) if len(results) != 0 else None

    return result
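
A minimal usage sketch for run_qobj, assuming the same environment as the snippet (Qiskit Terra with compile aliased as q_compile, and a local Aer simulator; the backend name and shot count are illustrative assumptions):

    from qiskit import Aer, ClassicalRegister, QuantumCircuit, QuantumRegister
    from qiskit import compile as q_compile  # alias used by the snippets here

    q = QuantumRegister(2)
    c = ClassicalRegister(2)
    bell = QuantumCircuit(q, c)
    bell.h(q[0])
    bell.cx(q[0], q[1])
    bell.measure(q, c)

    backend = Aer.get_backend('qasm_simulator')  # assumed backend name
    qobj = q_compile([bell], backend, shots=1024)
    result = run_qobj(qobj, backend, qjob_config={'timeout': None})
    print(result.get_counts(bell))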
Example n. 23
    def cache_circuit(self, qobj, circuits, chunk):
        """
        A method for caching compiled qobjs by storing the compiled qobj
        and constructing a mapping array from the uncompiled operations in the circuit
        to the instructions in the qobj. Note that the "qobjs" list in the cache dict is a
        list of the cached chunks, each element of which contains a single qobj with as
        many experiments as is allowed by the execution backend. E.g. if the backend allows
        300 experiments per job and the user wants to run 500 circuits,
        len(circuit_cache['qobjs']) == 2,
        len(circuit_cache['qobjs'][0].experiments) == 300, and
        len(circuit_cache['qobjs'][1].experiments) == 200.

        This feature is only applied if 'circuit_caching' is True in the 'problem' Aqua
        dictionary section.

        Args:
            qobj (Qobj): A compiled qobj to be saved
            circuits (list): The original uncompiled QuantumCircuits
            chunk (int): the chunk number that `circuits` represents, when a
                larger list of circuits was broken into chunks by run_algorithm
                for separate runs
        """

        self.qobjs.insert(chunk, copy.deepcopy(qobj))

        self.mappings.insert(chunk, [{} for i in range(len(circuits))])
        for circ_num, input_circuit in enumerate(circuits):

            qreg_sizes = [
                reg.size for reg in input_circuit.qregs
                if isinstance(reg, QuantumRegister)
            ]
            qreg_indices = {
                reg.name: sum(qreg_sizes[0:i])
                for i, reg in enumerate(input_circuit.qregs)
            }
            op_graph = {}
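            # op_graph maps a "name + qubit-list" key to the indices of the
            # matching uncompiled gates; entries are consumed FIFO below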

            # Unroll circuit in case of composite gates
            raw_gates = list(input_circuit.data)

            for i, (uncompiled_gate, regs, _) in enumerate(raw_gates):
                if not hasattr(uncompiled_gate,
                               'params') or len(uncompiled_gate.params) < 1:
                    continue
                if uncompiled_gate.name == 'snapshot':
                    continue
                qubits = [
                    bit.index + qreg_indices[bit.register.name] for bit in regs
                    if isinstance(bit, Qubit)
                ]
                gate_type = uncompiled_gate.name
                type_and_qubits = gate_type + str(qubits)
                op_graph[type_and_qubits] = \
                    op_graph.get(type_and_qubits, []) + [i]
            mapping = {}
            for compiled_gate_index, compiled_gate in enumerate(
                    qobj.experiments[circ_num].instructions):
                if not hasattr(compiled_gate,
                               'params') or len(compiled_gate.params) < 1:
                    continue
                if compiled_gate.name == 'snapshot':
                    continue
                type_and_qubits = compiled_gate.name + str(compiled_gate.qubits)
                if len(op_graph[type_and_qubits]) > 0:
                    uncompiled_gate_index = op_graph[type_and_qubits].pop(0)
                    (uncompiled_gate, regs,
                     _) = raw_gates[uncompiled_gate_index]
                    qubits = [
                        bit.index + qreg_indices[bit.register.name]
                        for bit in regs if isinstance(bit, Qubit)
                    ]
                    if (compiled_gate.name == uncompiled_gate.name and
                            str(compiled_gate.qubits) == str(qubits)):
                        mapping[compiled_gate_index] = uncompiled_gate_index
                else:
                    raise AquaError(
                        "Circuit shape does not match qobj, found extra {} instruction in qobj"
                        .format(type_and_qubits))
            self.mappings[chunk][circ_num] = mapping
            for type_and_qubits, ops in op_graph.items():
                if len(ops) > 0:
                    raise AquaError(
                        "Circuit shape does not match qobj, found extra {} in circuit"
                        .format(type_and_qubits))
        if self.cache_file is not None and len(self.cache_file) > 0:
            with open(self.cache_file, 'wb') as cache_handler:
                qobj_dicts = [qob.to_dict() for qob in self.qobjs]
                pickle.dump(
                    {
                        'qobjs': qobj_dicts,
                        'mappings': self.mappings,
                        'transpile': self.cache_transpiled_circuits
                    },
                    cache_handler,
                    protocol=pickle.HIGHEST_PROTOCOL)
                logger.debug("Circuit cache saved to file: {}".format(
                    self.cache_file))
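
A hedged sketch of exercising this cache; the CircuitCache class name and constructor arguments are assumptions, since only cache_circuit, qobjs, mappings, and cache_file appear in the snippet above, and circuits/backend are as in the earlier sketch:

    # hypothetical illustration of the cache layout described in the docstring
    cache = CircuitCache(cache_file=None)   # constructor signature assumed
    qobj = q_compile(circuits, backend, shots=1024)
    cache.cache_circuit(qobj, circuits, chunk=0)

    # mappings[chunk][k] maps each parameterized instruction index in
    # qobj.experiments[k] back to the originating gate index in circuits[k]
    print(cache.mappings[0][0])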
Example n. 24
def compile_and_run_circuits(circuits,
                             backend,
                             backend_config,
                             compile_config,
                             run_config,
                             qjob_config=None,
                             backend_options=None,
                             noise_config=None,
                             show_circuit_summary=False,
                             has_shared_circuits=False,
                             circuit_cache=None,
                             skip_qobj_validation=False,
                             **kwargs):
    """
    An execution wrapper around Qiskit Terra, with job auto-recovery capability.

    The auto-recovery feature is only applied for non-simulator backends.
    This wrapper will try to get the result no matter how long it takes.

    Args:
        circuits (QuantumCircuit or list[QuantumCircuit]): circuits to execute
        backend (BaseBackend): backend instance
        backend_config (dict): configuration for backend
        compile_config (dict): configuration for compilation
        run_config (RunConfig): configuration for running a circuit
        qjob_config (dict): configuration for quantum job object
        backend_options (dict): configuration for simulator
        noise_config (dict): configuration for noise model
        show_circuit_summary (bool): whether to log a summary of the submitted circuits
        has_shared_circuits (bool): use the 0-th circuit as the initial state for the other circuits
        circuit_cache (CircuitCache, optional): cache of compiled qobjs, reused to avoid recompilation
        skip_qobj_validation (bool, optional): bypass Qobj validation to decrease submission time
        **kwargs: additional options; an 'expectation' entry injects expectation-value snapshot instructions

    Returns:
        Result: Result object

    Raises:
        AquaError: Any error except for JobError raised by Qiskit Terra
    """
    qjob_config = qjob_config or {}
    backend_options = backend_options or {}
    noise_config = noise_config or {}

    if backend is None or not isinstance(backend, BaseBackend):
        raise ValueError(
            'Backend is missing or not an instance of BaseBackend')

    if not isinstance(circuits, list):
        circuits = [circuits]

    if 'statevector' in backend.name():
        circuits = _avoid_empty_circuits(circuits)

    if has_shared_circuits:
        return _reuse_shared_circuits(circuits, backend, backend_config,
                                      compile_config, run_config, qjob_config,
                                      backend_options)

    with_autorecover = not backend.configuration().simulator

    if MAX_CIRCUITS_PER_JOB is not None:
        max_circuits_per_job = int(MAX_CIRCUITS_PER_JOB)
    else:
        if backend.configuration().local:
            max_circuits_per_job = sys.maxsize
        else:
            max_circuits_per_job = backend.configuration().max_experiments

    if circuit_cache is not None and circuit_cache.try_reusing_qobjs:
        # Check if all circuits are the same length. If not, don't try to
        # use the same qobj.experiment for all of them.
        if len(set([len(circ.data) for circ in circuits])) > 1:
            circuit_cache.try_reusing_qobjs = False
        else:  # Try setting up the reusable qobj
            # Compile and cache first circuit if cache is empty. The load method will try to reuse it
            if circuit_cache.try_reusing_qobjs and circuit_cache.qobjs is None:
                qobj = q_compile([circuits[0]], backend, **backend_config,
                                 **compile_config, **run_config.to_dict())
                circuit_cache.cache_circuit(qobj, [circuits[0]], 0)

    qobjs = []
    jobs = []
    job_ids = []
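    # number of chunks = ceil(len(circuits) / max_circuits_per_job); e.g.
    # 500 circuits with a 300-experiment limit give 2 chunks (300 + 200)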
    chunks = int(np.ceil(len(circuits) / max_circuits_per_job))
    for i in range(chunks):
        sub_circuits = circuits[i * max_circuits_per_job:(i + 1) *
                                max_circuits_per_job]
        if circuit_cache is not None and circuit_cache.misses < circuit_cache.allowed_misses:
            try:
                qobj = circuit_cache.load_qobj_from_cache(
                    sub_circuits, i, run_config=run_config)
            # cache miss, fail gracefully
            except (TypeError, IndexError, FileNotFoundError, EOFError,
                    AquaError, AttributeError) as e:
                circuit_cache.try_reusing_qobjs = False  # Reusing Qobj didn't work
                circuit_cache.clear_cache()
                logger.debug(
                    'Circuit cache miss, recompiling. Cache miss reason: ' +
                    repr(e))
                qobj = q_compile(sub_circuits, backend, **backend_config,
                                 **compile_config, **run_config.to_dict())
                circuit_cache.cache_circuit(qobj, sub_circuits, i)
                circuit_cache.misses += 1
        else:
            qobj = q_compile(sub_circuits, backend, **backend_config,
                             **compile_config, **run_config.to_dict())

        if 'expectation' in kwargs:
            from qiskit.providers.aer.utils.qobj_utils import snapshot_instr, append_instr
            # add others; how to derive the correct number of qubits used?
            # the compiled qobj could be wrong if a coupling map is used.
            # if multiple params are provided, we assume each circuit corresponds
            # to one param; otherwise, the params are used for all circuits.
            params = kwargs['expectation']['params']
            num_qubits = kwargs['expectation']['num_qubits']
            if len(params) == 1:
                new_ins = snapshot_instr('expectation_value_pauli',
                                         'test',
                                         range(num_qubits),
                                         params=params[0])
                for ii in range(len(sub_circuits)):
                    qobj = append_instr(qobj, ii, new_ins)
            else:
                for ii in range(len(sub_circuits)):
                    new_ins = snapshot_instr('expectation_value_pauli',
                                             'test',
                                             range(num_qubits),
                                             params=params[ii])
                    qobj = append_instr(qobj, ii, new_ins)
        # ensure we get a job id before proceeding
        while True:
            job = run_on_backend(backend,
                                 qobj,
                                 backend_options=backend_options,
                                 noise_config=noise_config,
                                 skip_qobj_validation=skip_qobj_validation)
            try:
                job_id = job.job_id()
                break
            except JobError as e:
                logger.warning(
                    "FAILURE: the {}-th chunk of circuits, cannot get job id, "
                    "resubmitting the qobj to get a job id. "
                    "Terra job error: {} ".format(i, e))
            except Exception as e:
                logger.warning(
                    "FAILURE: the {}-th chunk of circuits, cannot get job id, "
                    "resubmitting the qobj to get a job id. "
                    "Error: {} ".format(i, e))
        job_ids.append(job_id)
        jobs.append(job)
        qobjs.append(qobj)

    if logger.isEnabledFor(logging.DEBUG) and show_circuit_summary:
        logger.debug(summarize_circuits(circuits))

    results = []
    if with_autorecover:
        logger.info("Backend status: {}".format(backend.status()))
        logger.info(
            "There are {} circuits, chunked into {} chunks of at most "
            "{} circuits each.".format(len(circuits), chunks,
                                       max_circuits_per_job))
        logger.info("All job ids:\n{}".format(job_ids))
        for idx in range(len(jobs)):
            while True:
                job = jobs[idx]
                job_id = job_ids[idx]
                logger.info("Running {}-th chunk circuits, job id: {}".format(
                    idx, job_id))
                # try to get result if possible
                try:
                    result = job.result(**qjob_config)
                    if result.success:
                        results.append(result)
                        logger.info("COMPLETED the {}-th chunk of circuits, "
                                    "job id: {}".format(idx, job_id))
                        break
                    else:
                        logger.warning("FAILURE: the {}-th chunk of circuits, "
                                       "job id: {}".format(idx, job_id))
                except JobError as e:
                    # if Terra raises any error, something went wrong; re-run it
                    logger.warning(
                        "FAILURE: the {}-th chunk of circuits, job id: {} "
                        "Terra job error: {} ".format(idx, job_id, e))
                except Exception as e:
                    raise AquaError(
                        "FAILURE: the {}-th chunk of circuits, job id: {} "
                        "Unknown error: {} ".format(idx, job_id, e)) from e

                # something went wrong; query the status to decide how to
                # handle it. Keep querying until we get the status.
                while True:
                    try:
                        job_status = job.status()
                        break
                    except JobError as e:
                        logger.warning("FAILURE: job id: {}, "
                                       "status: 'FAIL_TO_GET_STATUS' "
                                       "Terra job error: {}".format(job_id, e))
                        time.sleep(5)
                    except Exception as e:
                        raise AquaError("FAILURE: job id: {}, "
                                        "status: 'FAIL_TO_GET_STATUS' "
                                        "Unknown error: ({})".format(
                                            job_id, e)) from e

                logger.info("Job status: {}".format(job_status))

                # handle the failure job based on job status
                if job_status == JobStatus.DONE:
                    logger.info(
                        "Job ({}) completed after all; retrieving result "
                        "from backend.".format(job_id))
                    job = backend.retrieve_job(job_id)
                elif job_status in (JobStatus.RUNNING, JobStatus.QUEUED):
                    logger.info("Job ({}) is {}, but encountered an exception; "
                                "recovering it from backend.".format(
                                    job_id, job_status))
                    job = backend.retrieve_job(job_id)
                else:
                    logger.info(
                        "Failed to run job ({}); resubmitting it.".format(job_id))
                    qobj = qobjs[idx]
                    # ensure the job gets its id
                    while True:
                        job = run_on_backend(
                            backend,
                            qobj,
                            backend_options=backend_options,
                            noise_config=noise_config,
                            skip_qobj_validation=skip_qobj_validation)
                        try:
                            job_id = job.job_id()
                            break
                        except JobError as e:
                            logger.warning(
                                "FAILURE: the {}-th chunk of circuits, "
                                "cannot get job id; resubmitting the qobj. "
                                "Terra job error: {} ".format(idx, e))
                        except Exception as e:
                            logger.warning(
                                "FAILURE: the {}-th chunk of circuits, "
                                "cannot get job id; resubmitting the qobj. "
                                "Unknown error: {} ".format(idx, e))
                    jobs[idx] = job
                    job_ids[idx] = job_id
    else:
        results = []
        for job in jobs:
            results.append(job.result(**qjob_config))

    result = _combine_result_objects(results) if len(results) != 0 else None

    return result
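
A minimal invocation sketch; the RunConfig import path and the contents of the config dictionaries are assumptions about this Terra era, not values taken from the snippet:

    from qiskit import Aer
    from qiskit.qobj import RunConfig  # import path assumed for this Terra era

    backend = Aer.get_backend('qasm_simulator')  # assumed backend name
    run_config = RunConfig(shots=1024)
    result = compile_and_run_circuits(
        circuits=[bell],                     # `bell` from the run_qobj sketch above
        backend=backend,
        backend_config={},                   # e.g. basis_gates / coupling_map
        compile_config={'seed_mapper': 42},  # assumed compile option
        run_config=run_config,
        qjob_config={'timeout': None},
        skip_qobj_validation=True)
    print(result.get_counts(bell))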