Example #1
    def save_to_file(self, file_name):
        if file_name is None:
            raise AquaError('Missing file path')

        file_name = file_name.strip()
        if len(file_name) == 0:
            raise AquaError('Missing file path')

        with open(file_name, 'w') as f:
            print(json.dumps(self.get_sections(), sort_keys=True, indent=4),
                  file=f)
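
A minimal usage sketch (the section contents and file name below are illustrative, not part of the original example; it assumes InputParser is importable from the surrounding module):

# Hypothetical sections and path, for illustration only.
parser = InputParser({'problem': {'name': 'energy'}})
parser.save_to_file('saved_input.json')   # raises AquaError on a None or empty path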
Example #2
    def _update_input_problem(self):
        problem_name = self.get_section_property(JSONSchema.PROBLEM,
                                                 JSONSchema.NAME)
        if problem_name is None:
            problem_name = self.get_property_default_value(
                JSONSchema.PROBLEM, JSONSchema.NAME)

        if problem_name is None:
            raise AquaError("No algorithm 'problem' section found on input.")

        input_name = self.get_section_property(PluggableType.INPUT.value,
                                               JSONSchema.NAME)
        if input_name is not None and problem_name in InputParser.get_input_problems(
                input_name):
            return

        for input_name in local_pluggables(PluggableType.INPUT):
            if problem_name in self.get_input_problems(input_name):
                # use the first input that solves the problem
                self.set_section_property(PluggableType.INPUT.value,
                                          JSONSchema.NAME, input_name)
                return

        # no input solves this problem; remove the section
        self.delete_section(PluggableType.INPUT.value)
Example #3
    def __init__(self, input=None):
        """Create InputParser object."""
        self._original_sections = None
        self._filename = None
        self._sections = None
        if input is not None:
            if isinstance(input, dict):
                self._sections = input
            elif isinstance(input, str):
                self._filename = input
            else:
                raise AquaError("Invalid parser input type.")

        self._section_order = [
            JSONSchema.PROBLEM, PluggableType.INPUT.value,
            PluggableType.ALGORITHM.value
        ]
        for pluggable_type in local_pluggables_types():
            if pluggable_type not in [
                    PluggableType.INPUT, PluggableType.ALGORITHM
            ]:
                self._section_order.append(pluggable_type.value)

        self._section_order.extend([JSONSchema.BACKEND, InputParser._UNKNOWN])

        self._json_schema = JSONSchema(
            os.path.join(os.path.dirname(__file__), 'input_schema.json'))
        self._json_schema.populate_problem_names()
        self._json_schema.commit_changes()
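
A construction sketch, assuming InputParser is importable from the surrounding module (the dict contents and file name are hypothetical):

# InputParser accepts either a dict of sections or a path to a JSON file.
parser_from_dict = InputParser({'problem': {'name': 'energy'}})
parser_from_file = InputParser('my_input.json')   # hypothetical path
# InputParser(42) would raise AquaError: invalid parser input type.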
Example #4
    def set_section_property(self, section_name, property_name, value):
        section_name = JSONSchema.format_section_name(section_name)
        property_name = JSONSchema.format_property_name(property_name)
        value = self._json_schema.check_property_value(section_name,
                                                       property_name, value)
        types = self.get_property_types(section_name, property_name)

        sections_temp = copy.deepcopy(self._sections)
        InputParser._set_section_property(sections_temp, section_name,
                                          property_name, value, types)
        msg = self._json_schema.validate_property(sections_temp, section_name,
                                                  property_name)
        if msg is not None:
            raise AquaError("{}.{}: Value '{}': '{}'".format(
                section_name, property_name, value, msg))

        # check if this provider is loadable and valid
        if JSONSchema.BACKEND == section_name and property_name == JSONSchema.PROVIDER:
            get_backends_from_provider(value)

        InputParser._set_section_property(self._sections, section_name,
                                          property_name, value, types)
        if property_name == JSONSchema.NAME:
            if PluggableType.INPUT.value == section_name:
                self._update_algorithm_input_schema()
                # remove properties that are not valid for this section
                default_properties = self.get_section_default_properties(
                    section_name)
                if isinstance(default_properties, dict):
                    properties = self.get_section_properties(section_name)
                    for property_name in list(properties.keys()):
                        if property_name != JSONSchema.NAME and property_name not in default_properties:
                            self.delete_section_property(
                                section_name, property_name)
            elif JSONSchema.PROBLEM == section_name:
                self._update_algorithm_problem()
                self._update_input_problem()
            elif JSONSchema.BACKEND == section_name:
                self._json_schema.update_backend_schema()
            elif InputParser.is_pluggable_section(section_name):
                self._json_schema.update_pluggable_input_schemas(self)
                # remove properties that are not valid for this section
                default_properties = self.get_section_default_properties(
                    section_name)
                if isinstance(default_properties, dict):
                    properties = self.get_section_properties(section_name)
                    for property_name in list(properties.keys()):
                        if property_name != JSONSchema.NAME and property_name not in default_properties:
                            self.delete_section_property(
                                section_name, property_name)

                if section_name == PluggableType.ALGORITHM.value:
                    self._update_dependency_sections()

        self._sections = self._order_sections(self._sections)
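
A hedged usage sketch (section, property, and value names are illustrative). An invalid value raises AquaError with the schema's validation message; setting a section's 'name' additionally triggers the schema updates and property pruning shown above:

parser = InputParser('my_input.json')   # hypothetical input file
parser.parse()
parser.set_section_property('problem', 'name', 'energy')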
Example #5
    def _validate_input_problem(self):
        input_name = self.get_section_property(PluggableType.INPUT.value,
                                               JSONSchema.NAME)
        if input_name is None:
            return

        problem_name = self.get_section_property(JSONSchema.PROBLEM,
                                                 JSONSchema.NAME)
        if problem_name is None:
            problem_name = self.get_property_default_value(
                JSONSchema.PROBLEM, JSONSchema.NAME)

        if problem_name is None:
            raise AquaError("No algorithm 'problem' section found on input.")

        problems = InputParser.get_input_problems(input_name)
        if problem_name not in problems:
            raise AquaError(
                "Problem: {} not in the list of problems: {} for input: {}.".
                format(problem_name, problems, input_name))
Example #6
    def parse(self):
        """Parse the data."""
        if self._sections is None:
            if self._filename is None:
                raise AquaError("Missing input file")

            with open(self._filename) as json_file:
                self._sections = json.load(json_file)

        self._json_schema.update_pluggable_input_schemas(self)
        self._update_algorithm_input_schema()
        self._sections = self._order_sections(self._sections)
        self._original_sections = copy.deepcopy(self._sections)
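
The typical call sequence, with a hypothetical file name:

parser = InputParser('input.json')   # path is illustrative
parser.parse()                       # loads the JSON, updates schemas, orders sections
sections = parser.get_sections()     # the original sections are kept for comparison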
Example #7
    def get_section(self, section_name):
        """Return a Section by name.

        Args:
            section_name (str): the name of the section, case insensitive
        Returns:
            Section: The section with this name
        Raises:
            AquaError: if the section does not exist.
        """
        section_name = JSONSchema.format_section_name(section_name)
        try:
            return self._sections[section_name]
        except KeyError:
            raise AquaError('No section "{0}"'.format(section_name))
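
A usage sketch, assuming a populated InputParser instance named parser; the lookup is case-insensitive because of JSONSchema.format_section_name:

backend_section = parser.get_section('BACKEND')   # same as 'backend'
# parser.get_section('no_such_section') would raise AquaError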
Example #8
def compile_and_run_circuits(circuits,
                             backend,
                             backend_config,
                             compile_config,
                             run_config,
                             qjob_config=None,
                             backend_options=None,
                             noise_config=None,
                             show_circuit_summary=False,
                             has_shared_circuits=False):
    """
    An execution wrapper around Qiskit Terra, with job auto-recovery capability.

    The auto-recovery feature is only applied to non-simulator backends.
    This wrapper will try to get the result no matter how long it takes.

    Args:
        circuits (QuantumCircuit or list[QuantumCircuit]): circuits to execute
        backend (BaseBackend): backend instance
        backend_config (dict): configuration for backend
        compile_config (dict): configuration for compilation
        run_config (dict): configuration for running a circuit
        qjob_config (dict): configuration for quantum job object
        backend_options (dict): configuration for simulator
        noise_config (dict): configuration for noise model
        show_circuit_summary (bool): whether to show a summary of the submitted circuits
        has_shared_circuits (bool): use the 0-th circuit as the initial state for the other circuits

    Returns:
        Result: Result object

    Raises:
        AquaError: Any error except for JobError raised by Qiskit Terra
    """
    qjob_config = qjob_config or {}
    backend_options = backend_options or {}
    noise_config = noise_config or {}

    if backend is None or not isinstance(backend, BaseBackend):
        raise ValueError(
            'Backend is missing or not an instance of BaseBackend')

    if not isinstance(circuits, list):
        circuits = [circuits]

    if 'statevector' in backend.name():
        circuits = _avoid_empty_circuits(circuits)

    if has_shared_circuits:
        return _reuse_shared_circuits(circuits, backend, backend_config,
                                      compile_config, run_config, qjob_config,
                                      backend_options)

    with_autorecover = not backend.configuration().simulator

    if MAX_CIRCUITS_PER_JOB is not None:
        max_circuits_per_job = int(MAX_CIRCUITS_PER_JOB)
    else:
        if backend.configuration().local:
            max_circuits_per_job = sys.maxsize
        else:
            max_circuits_per_job = backend.configuration().max_experiments

    qobjs = []
    jobs = []
    job_ids = []
    chunks = int(np.ceil(len(circuits) / max_circuits_per_job))
    for i in range(chunks):
        sub_circuits = circuits[i * max_circuits_per_job:(i + 1) *
                                max_circuits_per_job]
        qobj = q_compile(sub_circuits, backend, **backend_config,
                         **compile_config, **run_config)
        # make sure the job is assigned an id before proceeding
        while True:
            job = backend.run(qobj, **backend_options, **noise_config)
            try:
                job_id = job.job_id()
                break
            except JobError as e:
                logger.warning(
                    "FAILURE: chunk {} of circuits cannot get a job id; "
                    "resubmitting the qobj. "
                    "Terra job error: {}".format(i, e))
            except Exception as e:
                logger.warning(
                    "FAILURE: chunk {} of circuits cannot get a job id; "
                    "resubmitting the qobj. "
                    "Error: {}".format(i, e))
        job_ids.append(job_id)
        jobs.append(job)
        qobjs.append(qobj)

    if logger.isEnabledFor(logging.DEBUG) and show_circuit_summary:
        logger.debug(summarize_circuits(circuits))

    results = []
    if with_autorecover:
        logger.info("Backend status: {}".format(backend.status()))
        logger.info(
            "There are {} circuits, chunked into {} chunks of at most "
            "{} circuits each.".format(len(circuits), chunks,
                                       max_circuits_per_job))
        logger.info("All job ids:\n{}".format(job_ids))
        for idx in range(len(jobs)):
            while True:
                job = jobs[idx]
                job_id = job_ids[idx]
                logger.info("Running chunk {} of circuits, job id: {}".format(
                    idx, job_id))
                # try to get result if possible
                try:
                    result = job.result(**qjob_config)
                    if result.success:
                        results.append(result)
                        logger.info("COMPLETED the {}-th chunk of circuits, "
                                    "job id: {}".format(idx, job_id))
                        break
                    else:
                        logger.warning("FAILURE: the {}-th chunk of circuits, "
                                       "job id: {}".format(idx, job_id))
                except JobError as e:
                    # if Terra raises any error, something went wrong; re-run the job
                    logger.warning(
                        "FAILURE: the {}-th chunk of circuits, job id: {} "
                        "Terra job error: {} ".format(idx, job_id, e))
                except Exception as e:
                    raise AquaError(
                        "FAILURE: the {}-th chunk of circuits, job id: {} "
                        "Unknown error: {} ".format(idx, job_id, e)) from e

                # Something went wrong; query the status to decide how to
                # handle it. Keep querying until we get a status.
                while True:
                    try:
                        job_status = job.status()
                        break
                    except JobError as e:
                        logger.warning("FAILURE: job id: {}, "
                                       "status: 'FAIL_TO_GET_STATUS' "
                                       "Terra job error: {}".format(job_id, e))
                        time.sleep(5)
                    except Exception as e:
                        raise AquaError("FAILURE: job id: {}, "
                                        "status: 'FAIL_TO_GET_STATUS' "
                                        "Unknown error: ({})".format(
                                            job_id, e)) from e

                logger.info("Job status: {}".format(job_status))

                # handle the failed job based on its status
                if job_status == JobStatus.DONE:
                    logger.info(
                        "Job ({}) completed anyway; retrieving the result "
                        "from the backend.".format(job_id))
                    job = backend.retrieve_job(job_id)
                elif job_status == JobStatus.RUNNING or job_status == JobStatus.QUEUED:
                    logger.info("Job ({}) is {}, but an exception occurred; "
                                "recovering it from the backend.".format(
                                    job_id, job_status))
                    job = backend.retrieve_job(job_id)
                else:
                    logger.info(
                        "Failed to run job ({}); resubmitting it.".format(job_id))
                    qobj = qobjs[idx]
                    # make sure the resubmitted job is assigned an id
                    while True:
                        job = backend.run(qobj, **backend_options,
                                          **noise_config)
                        try:
                            job_id = job.job_id()
                            break
                        except JobError as e:
                            logger.warning(
                                "FAILURE: chunk {} of circuits cannot get a "
                                "job id; resubmitting the qobj. "
                                "Terra job error: {}".format(idx, e))
                        except Exception as e:
                            logger.warning(
                                "FAILURE: chunk {} of circuits cannot get a "
                                "job id; resubmitting the qobj. "
                                "Unknown error: {}".format(idx, e))
                    jobs[idx] = job
                    job_ids[idx] = job_id
    else:
        results = []
        for job in jobs:
            results.append(job.result(**qjob_config))

    result = _combine_result_objects(results) if len(results) != 0 else None

    return result
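
A minimal end-to-end sketch, assuming a contemporaneous Qiskit Terra with the Aer provider available; the circuit, backend name, and configs are illustrative:

from qiskit import Aer, ClassicalRegister, QuantumCircuit, QuantumRegister

# Build a Bell-state circuit to run through the wrapper.
q = QuantumRegister(2)
c = ClassicalRegister(2)
bell = QuantumCircuit(q, c)
bell.h(q[0])
bell.cx(q[0], q[1])
bell.measure(q, c)

backend = Aer.get_backend('qasm_simulator')
result = compile_and_run_circuits(bell, backend,
                                  backend_config={},
                                  compile_config={},
                                  run_config={'shots': 1024})
print(result.get_counts(bell))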
Example #9
    def _update_algorithm_input_schema(self):
        # find algorithm input
        default_name = self.get_property_default_value(
            PluggableType.INPUT.value, JSONSchema.NAME)
        input_name = self.get_section_property(PluggableType.INPUT.value,
                                               JSONSchema.NAME, default_name)
        if input_name is None:
            # find the first valid input for the problem
            problem_name = self.get_section_property(JSONSchema.PROBLEM,
                                                     JSONSchema.NAME)
            if problem_name is None:
                problem_name = self.get_property_default_value(
                    JSONSchema.PROBLEM, JSONSchema.NAME)

            if problem_name is None:
                raise AquaError(
                    "No algorithm 'problem' section found on input.")

            for name in local_pluggables(PluggableType.INPUT):
                if problem_name in self.get_input_problems(name):
                    # set to the first input to solve the problem
                    input_name = name
                    break

        if input_name is None:
            # just remove from the schema if no input solves the problem
            if PluggableType.INPUT.value in self._json_schema.schema[
                    'properties']:
                del self._json_schema.schema['properties'][
                    PluggableType.INPUT.value]
            return

        if default_name is None:
            default_name = input_name

        config = {}
        try:
            config = get_pluggable_configuration(PluggableType.INPUT,
                                                 input_name)
        except Exception:
            pass

        input_schema = config.get('input_schema', {})
        properties = input_schema.get('properties', {})
        properties[JSONSchema.NAME] = {'type': 'string'}
        required = input_schema.get('required', [])
        additionalProperties = input_schema.get('additionalProperties', True)
        if default_name is not None:
            properties[JSONSchema.NAME]['default'] = default_name
            required.append(JSONSchema.NAME)

        if PluggableType.INPUT.value not in self._json_schema.schema[
                'properties']:
            self._json_schema.schema['properties'][
                PluggableType.INPUT.value] = {
                    'type': 'object'
                }

        self._json_schema.schema['properties'][
            PluggableType.INPUT.value]['properties'] = properties
        self._json_schema.schema['properties'][
            PluggableType.INPUT.value]['required'] = required
        self._json_schema.schema['properties'][PluggableType.INPUT.value][
            'additionalProperties'] = additionalProperties
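
After this method runs, the schema's input entry looks roughly like the following fragment (a sketch, assuming a hypothetical input pluggable named 'EnergyInput' whose input_schema declares one 'qubit_op' property and additionalProperties: false):

# Schematic content of self._json_schema.schema['properties']['input']:
expected_input_entry = {
    'type': 'object',
    'properties': {
        'qubit_op': {'type': 'object'},   # from the pluggable's input_schema
        'name': {'type': 'string', 'default': 'EnergyInput'},
    },
    'required': ['name'],
    'additionalProperties': False,
}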
Example #10
def compile_and_run_circuits(circuits, backend, backend_config, compile_config, run_config, qjob_config=None,
                             show_circuit_summary=False, has_shared_circuits=False):
    """
    An execution wrapper around Qiskit Terra, with job auto-recovery capability.

    The auto-recovery feature is only applied to non-simulator backends.
    This wrapper will try to get the result no matter how long it takes.

    Args:
        circuits (QuantumCircuit or list[QuantumCircuit]): circuits to execute
        backend (BaseBackend): backend instance
        backend_config (dict): configuration for backend
        compile_config (dict): configuration for compilation
        run_config (dict): configuration for running a circuit
        qjob_config (dict): configuration for quantum job object
        show_circuit_summary (bool): whether to show a summary of the submitted circuits
        has_shared_circuits (bool): use the 0-th circuit as the initial state for the other circuits
    Returns:
        Result: Result object

    Raises:
        AquaError: Any error except for JobError raised by Qiskit Terra
    """

    qjob_config = qjob_config or {}

    if backend is None or not isinstance(backend, BaseBackend):
        raise ValueError('Backend is missing or not an instance of BaseBackend')

    if not isinstance(circuits, list):
        circuits = [circuits]

    if 'statevector' in backend.name():
        circuits = _avoid_empty_circuits(circuits)

    if has_shared_circuits:
        return _reuse_shared_circuits(circuits, backend, backend_config, compile_config, run_config, qjob_config)

    with_autorecover = not backend.configuration().simulator
    max_circuits_per_job = sys.maxsize if backend.configuration().local else MAX_CIRCUITS_PER_JOB

    qobjs = []
    jobs = []
    chunks = int(np.ceil(len(circuits) / max_circuits_per_job))

    for i in range(chunks):
        sub_circuits = circuits[i * max_circuits_per_job:
                                (i + 1) * max_circuits_per_job]
        qobj = q_compile(sub_circuits, backend, **backend_config,
                         **compile_config, **run_config)
        job = backend.run(qobj)
        jobs.append(job)
        qobjs.append(qobj)

    if logger.isEnabledFor(logging.DEBUG) and show_circuit_summary:
        logger.debug(summarize_circuits(circuits))

    results = []
    if with_autorecover:

        logger.debug("There are {} circuits, chunked into {} chunks of "
                     "{} circuits each.".format(len(circuits), chunks,
                                                max_circuits_per_job))

        for idx in range(len(jobs)):
            job = jobs[idx]
            job_id = job.job_id()
            logger.info("Running chunk {} of circuits, job id: {}".format(idx, job_id))
            while True:
                try:
                    result = job.result(**qjob_config)
                    if result.success:
                        results.append(result)
                        logger.info("COMPLETED the {}-th chunk of circuits, "
                                    "job id: {}".format(idx, job_id))
                        break
                    else:
                        logger.warning("FAILURE: the {}-th chunk of circuits, "
                                       "job id: {}".format(idx, job_id))
                except JobError as e:
                    # if Terra raises any error, something went wrong; re-run the job
                    logger.warning("FAILURE: the {}-th chunk of circuits, job id: {}, "
                                   "Terra job error: {} ".format(idx, job_id, e))
                except Exception as e:
                    raise AquaError("FAILURE: the {}-th chunk of circuits, job id: {}, "
                                    "Terra unknown error: {} ".format(idx, job_id, e)) from e

                # keep querying the status until it is okay.
                while True:
                    try:
                        job_status = job.status()
                        break
                    except JobError as e:
                        logger.warning("FAILURE: job id: {}, "
                                       "status: 'FAIL_TO_GET_STATUS' "
                                       "Terra job error: {}".format(job_id, e))
                        time.sleep(5)
                    except Exception as e:
                        raise AquaError("FAILURE: job id: {}, "
                                        "status: 'FAIL_TO_GET_STATUS' "
                                        "({})".format(job_id, e)) from e

                logger.info("Job status: {}".format(job_status))
                # reaching here means the job failed; check what kind of failure it was
                if job_status == JobStatus.DONE:
                    logger.info("Job ({}) completed anyway; retrieving the "
                                "result from the backend.".format(job_id))
                    job = backend.retrieve_job(job_id)
                elif job_status == JobStatus.RUNNING or job_status == JobStatus.QUEUED:
                    logger.info("Job ({}) is {}, but an exception occurred; "
                                "recovering it from the backend.".format(job_id, job_status))
                    job = backend.retrieve_job(job_id)
                else:
                    logger.info("Failed to run job ({}); resubmitting it.".format(job_id))
                    qobj = qobjs[idx]
                    job = backend.run(qobj)
    else:
        results = []
        for job in jobs:
            results.append(job.result(**qjob_config))

    if len(results) != 0:
        result = functools.reduce(lambda x, y: x + y, results)
    else:
        result = None
    return result
Example #11
    def load_qobj_from_cache(self, circuits, chunk, run_config=None):
        self.try_loading_cache_from_file()

        if self.try_reusing_qobjs and self.qobjs is not None and len(
                self.qobjs) <= chunk:
            self.mappings.insert(chunk, self.mappings[0])
            self.qobjs.insert(chunk, copy.deepcopy(self.qobjs[0]))

        for circ_num, input_circuit in enumerate(circuits):

            # If there are too few experiments in the cache, try reusing the
            # first experiment. Only do this for the first chunk; subsequent
            # chunks should rely on these copies through the deepcopy above.
            if self.try_reusing_qobjs and chunk == 0 and circ_num > 0 and len(self.qobjs[chunk].experiments) <= \
                    circ_num:
                self.qobjs[0].experiments.insert(
                    circ_num, copy.deepcopy(self.qobjs[0].experiments[0]))
                self.mappings[0].insert(circ_num, self.mappings[0][0])

            # Unroll circuit in case of composite gates
            raw_gates = []
            for gate in input_circuit.data:
                if isinstance(gate, CompositeGate):
                    raw_gates += gate.instruction_list()
                else:
                    raw_gates += [gate]
            self.qobjs[chunk].experiments[
                circ_num].header.name = input_circuit.name
            for gate_num, compiled_gate in enumerate(
                    self.qobjs[chunk].experiments[circ_num].instructions):
                if not hasattr(compiled_gate,
                               'params') or len(compiled_gate.params) < 1:
                    continue
                if compiled_gate.name == 'snapshot':
                    continue
                cache_index = self.mappings[chunk][circ_num][gate_num]
                uncompiled_gate = raw_gates[cache_index]

                # Need the 'getattr' wrapper because measure has no 'params' field and breaks this.
                if not len(getattr(compiled_gate, 'params', [])) == len(getattr(uncompiled_gate, 'params', [])) or \
                    not compiled_gate.name == uncompiled_gate.name:
                    raise AquaError(
                        'Gate mismatch at gate {0} ({1}, {2} params) of circuit against '
                        'gate {3} ({4}, {5} params) '
                        'of cached qobj'.format(cache_index,
                                                uncompiled_gate.name,
                                                len(uncompiled_gate.params),
                                                gate_num, compiled_gate.name,
                                                len(compiled_gate.params)))
                compiled_gate.params = np.array(uncompiled_gate.params,
                                                dtype=float).tolist()
        exec_qobj = copy.copy(self.qobjs[chunk])
        if self.skip_qobj_deepcopy:
            exec_qobj.experiments = self.qobjs[chunk].experiments[
                0:len(circuits)]
        else:
            exec_qobj.experiments = copy.deepcopy(
                self.qobjs[chunk].experiments[0:len(circuits)])

        if not run_config:
            run_config = {}
        exec_qobj.config.shots = run_config.get('shots', 1024)
        exec_qobj.config.max_credits = run_config.get('max_credits', 10)
        exec_qobj.config.memory = run_config.get('memory', False)
        exec_qobj.config.memory_slots = max(
            experiment.config.memory_slots
            for experiment in exec_qobj.experiments)
        exec_qobj.config.n_qubits = max(
            experiment.config.n_qubits for experiment in exec_qobj.experiments)
        return exec_qobj
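
A hedged sketch of how this might be called (the cache object, circuits, and backend are assumed to already exist; names are illustrative):

# Pull a cached qobj whose gate parameters have been patched from the new
# circuits, then run it directly, skipping recompilation.
qobj = cache.load_qobj_from_cache(circuits, chunk=0,
                                  run_config={'shots': 1024})
job = backend.run(qobj)
result = job.result()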
Example #12
def run_algorithm(params, algo_input=None, json_output=False, backend=None):
    """
    Run algorithm as named in params, using params and algo_input as input data
    and returning a result dictionary

    Args:
        params (dict): Dictionary of params for algo and dependent objects
        algo_input (AlgorithmInput): Main input data for the algorithm. Optional; an algorithm may run entirely from params
        json_output (bool): False to return a regular Python dictionary, True to convert the result to JSON
        backend (BaseBackend): Backend object to be used in place of backend name

    Returns:
        Result dictionary containing result of algorithm computation
    """
    _discover_on_demand()

    inputparser = InputParser(params)
    inputparser.parse()
    # before merging defaults, attempt to find a provider for the backend
    # in case no provider was passed
    if backend is None and inputparser.get_section_property(JSONSchema.BACKEND, JSONSchema.PROVIDER) is None:
        backend_name = inputparser.get_section_property(JSONSchema.BACKEND, JSONSchema.NAME)
        if backend_name is not None:
            inputparser.set_section_property(JSONSchema.BACKEND, JSONSchema.PROVIDER, get_provider_from_backend(backend_name))

    inputparser.validate_merge_defaults()
    logger.debug('Algorithm Input: {}'.format(json.dumps(inputparser.get_sections(), sort_keys=True, indent=4)))

    algo_name = inputparser.get_section_property(PluggableType.ALGORITHM.value, JSONSchema.NAME)
    if algo_name is None:
        raise AquaError('Missing algorithm name')

    if algo_name not in local_pluggables(PluggableType.ALGORITHM):
        raise AquaError('Algorithm "{0}" missing in local algorithms'.format(algo_name))

    if algo_input is None:
        input_name = inputparser.get_section_property('input', JSONSchema.NAME)
        if input_name is not None:
            input_params = copy.deepcopy(inputparser.get_section_properties('input'))
            del input_params[JSONSchema.NAME]
            convert_json_to_dict(input_params)
            algo_input = get_pluggable_class(PluggableType.INPUT, input_name).from_params(input_params)

    algo_params = copy.deepcopy(inputparser.get_sections())
    algorithm = get_pluggable_class(PluggableType.ALGORITHM,
                                    algo_name).init_params(algo_params, algo_input)
    random_seed = inputparser.get_section_property(JSONSchema.PROBLEM, 'random_seed')
    algorithm.random_seed = random_seed
    quantum_instance = None
    # setup backend
    backend_provider = inputparser.get_section_property(JSONSchema.BACKEND, JSONSchema.PROVIDER)
    backend_name = inputparser.get_section_property(JSONSchema.BACKEND, JSONSchema.NAME)
    if backend_provider is not None and backend_name is not None:  # quantum algorithm
        backend_cfg = {k: v for k, v in inputparser.get_section(JSONSchema.BACKEND).items() if k not in [JSONSchema.PROVIDER, JSONSchema.NAME]}
        noise_params = backend_cfg.pop('noise_params', None)
        backend_cfg['config'] = {}
        backend_cfg['config']['noise_params'] = noise_params
        backend_cfg['seed'] = random_seed
        backend_cfg['seed_mapper'] = random_seed
        pass_manager = PassManager() if backend_cfg.pop('skip_transpiler', False) else None
        if pass_manager is not None:
            backend_cfg['pass_manager'] = pass_manager

        if backend is not None and isinstance(backend, BaseBackend):
            backend_cfg['backend'] = backend
        else:
            backend_cfg['backend'] = get_backend_from_provider(backend_provider, backend_name)

        quantum_instance = QuantumInstance(**backend_cfg)

    value = algorithm.run(quantum_instance)
    if isinstance(value, dict) and json_output:
        convert_dict_to_json(value)

    return value
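
A minimal params sketch (the problem and algorithm names are illustrative; valid values depend on the pluggables registered at the time):

params = {
    'problem': {'name': 'energy', 'random_seed': 50},
    'algorithm': {'name': 'ExactEigensolver'},
}
result = run_algorithm(params, algo_input=my_energy_input)   # hypothetical input
print(result)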