def set_section_property(self, section_name, property_name, value):
    """Set a section property after validating it against the JSON schema.

    Args:
        section_name (str): the name of the section, case insensitive
        property_name (str): the name of the property
        value: value to set; converted to one of the schema-declared types

    Raises:
        AlgorithmError: if the value does not match any schema type, or if
            schema validation of the resulting sections fails.
    """
    section_name = InputParser._format_section_name(section_name)
    property_name = InputParser._format_property_name(property_name)
    types = self.get_property_types(section_name, property_name)
    value = InputParser._get_value(value, types)
    if len(types) > 0:
        validator = jsonschema.Draft4Validator(self._schema)
        valid = False
        # 'type_' avoids shadowing the 'type' builtin.
        for type_ in types:
            valid = validator.is_type(value, type_)
            if valid:
                break

        if not valid:
            raise AlgorithmError(
                "{}.{}: Value '{}' is not of types: '{}'".format(
                    section_name, property_name, value, types))

    # Validate on a deep copy first so a failed validation leaves the
    # parser state untouched.
    sections_temp = copy.deepcopy(self._sections)
    InputParser._set_section_property(sections_temp, section_name, property_name, value, types)
    msg = self._validate(sections_temp, section_name, property_name)
    if msg is not None:
        raise AlgorithmError("{}.{}: Value '{}': '{}'".format(
            section_name, property_name, value, msg))

    InputParser._set_section_property(self._sections, section_name, property_name, value, types)
    if property_name == InputParser.NAME:
        if InputParser.INPUT == section_name:
            self._update_algorithm_input_schema()
            # remove properties that are not valid for this section;
            # loop variable renamed so the 'property_name' parameter is not rebound
            default_properties = self.get_section_default_properties(section_name)
            if isinstance(default_properties, dict):
                properties = self.get_section_properties(section_name)
                for prop in list(properties.keys()):
                    if prop != InputParser.NAME and prop not in default_properties:
                        self.delete_section_property(section_name, prop)
        elif InputParser.PROBLEM == section_name:
            self._update_algorithm_problem()
            self._update_input_problem()
        elif InputParser.is_pluggable_section(section_name):
            self._update_pluggable_input_schemas()
            # remove properties that are not valid for this section
            default_properties = self.get_section_default_properties(section_name)
            if isinstance(default_properties, dict):
                properties = self.get_section_properties(section_name)
                for prop in list(properties.keys()):
                    if prop != InputParser.NAME and prop not in default_properties:
                        self.delete_section_property(section_name, prop)

        if section_name == InputParser.ALGORITHM:
            self._update_dependency_sections()

    self._sections = self._order_sections(self._sections)
def save_to_file(self, file_name):
    """Persist the parsed sections to *file_name* as pretty-printed JSON.

    Args:
        file_name (str): destination path

    Raises:
        AlgorithmError: if the path is missing or blank.
    """
    if file_name is None:
        raise AlgorithmError('Missing file path')

    file_name = file_name.strip()
    if not file_name:
        raise AlgorithmError('Missing file path')

    contents = json.dumps(self.get_sections(), sort_keys=True, indent=4)
    with open(file_name, 'w') as out_file:
        print(contents, file=out_file)
def run_algorithm(params, algo_input=None, json_output=False):
    """
    Run algorithm as named in params, using params and algo_input as input data
    and returning a result dictionary

    Args:
        params (dict): Dictionary of params for algo and dependent objects
        algo_input (AlgorithmInput): Main input data for algorithm. Optional,
            an algo may run entirely from params
        json_output (bool): False for regular python dictionary return,
            True for json conversion

    Returns:
        Result dictionary containing result of algorithm computation
    """
    _discover_on_demand()

    parser = InputParser(params)
    parser.parse()
    parser.validate_merge_defaults()
    logger.debug('Algorithm Input: {}'.format(
        json.dumps(parser.get_sections(), sort_keys=True, indent=4)))

    algo_name = parser.get_section_property(JSONSchema.ALGORITHM, JSONSchema.NAME)
    if algo_name is None:
        raise AlgorithmError('Missing algorithm name')
    if algo_name not in local_algorithms():
        raise AlgorithmError('Algorithm "{0}" missing in local algorithms'.format(algo_name))

    # Build the backend configuration from the backend section, if present.
    backend_cfg = None
    backend = parser.get_section_property(JSONSchema.BACKEND, JSONSchema.NAME)
    if backend is not None:
        backend_cfg = {k: v for k, v in parser.get_section(JSONSchema.BACKEND).items()
                       if k != 'name'}
        backend_cfg['backend'] = backend

    algorithm = get_algorithm_instance(algo_name)
    algorithm.random_seed = parser.get_section_property(JSONSchema.PROBLEM, 'random_seed')
    if backend_cfg is not None:
        algorithm.setup_quantum_backend(**backend_cfg)

    algo_params = copy.deepcopy(parser.get_sections())

    # When no input object was supplied, build one from the 'input' section.
    if algo_input is None:
        input_name = parser.get_section_property('input', JSONSchema.NAME)
        if input_name is not None:
            algo_input = get_input_instance(input_name)
            input_params = copy.deepcopy(parser.get_section_properties('input'))
            del input_params[JSONSchema.NAME]
            convert_json_to_dict(input_params)
            algo_input.from_params(input_params)

    algorithm.init_params(algo_params, algo_input)
    result = algorithm.run()
    if isinstance(result, dict) and json_output:
        convert_dict_to_json(result)

    return result
def _validate_input_problem(self):
    """Check that the configured input section supports the selected problem.

    Raises:
        AlgorithmError: if no problem is configured, or the input does not
            support the configured problem.
    """
    input_name = self.get_section_property(InputParser.INPUT, JSONSchema.NAME)
    if input_name is None:
        return

    # Fall back to the schema default when no problem was set explicitly.
    problem_name = self.get_section_property(JSONSchema.PROBLEM, JSONSchema.NAME)
    if problem_name is None:
        problem_name = self.get_property_default_value(JSONSchema.PROBLEM, JSONSchema.NAME)

    if problem_name is None:
        raise AlgorithmError("No algorithm 'problem' section found on input.")

    supported = InputParser.get_input_problems(input_name)
    if problem_name not in supported:
        raise AlgorithmError(
            "Problem: {} not in the list of problems: {} for input: {}.".format(
                problem_name, supported, input_name))
def _update_input_problem(self):
    """Make the input section consistent with the configured problem.

    Keeps the current input when it supports the problem; otherwise switches
    to the first registered input that does, or deletes the section when
    none can solve it.
    """
    problem_name = self.get_section_property(JSONSchema.PROBLEM, JSONSchema.NAME)
    if problem_name is None:
        problem_name = self.get_property_default_value(JSONSchema.PROBLEM, JSONSchema.NAME)
    if problem_name is None:
        raise AlgorithmError("No algorithm 'problem' section found on input.")

    current_input = self.get_section_property(InputParser.INPUT, JSONSchema.NAME)
    if current_input is not None and problem_name in InputParser.get_input_problems(current_input):
        # Current input already supports the problem; nothing to do.
        return

    for candidate in local_inputs():
        if problem_name in self.get_input_problems(candidate):
            # set to the first input to solve the problem
            self.set_section_property(InputParser.INPUT, JSONSchema.NAME, candidate)
            return

    # no input solve this problem, remove section
    self.delete_section(InputParser.INPUT)
def _update_algorithm_problem(self):
    """Make the algorithm section consistent with the configured problem.

    Keeps the current algorithm when it supports the problem; otherwise
    switches to the first registered algorithm that does, or deletes the
    section when none can solve it.
    """
    problem_name = self.get_section_property(JSONSchema.PROBLEM, JSONSchema.NAME)
    if problem_name is None:
        problem_name = self.get_property_default_value(JSONSchema.PROBLEM, JSONSchema.NAME)
    if problem_name is None:
        raise AlgorithmError("No algorithm 'problem' section found on input.")

    current_algo = self.get_section_property(JSONSchema.ALGORITHM, JSONSchema.NAME)
    if current_algo is not None and problem_name in InputParser.get_algorithm_problems(current_algo):
        # Current algorithm already supports the problem; nothing to do.
        return

    for candidate in local_algorithms():
        if problem_name in self.get_algorithm_problems(candidate):
            # set to the first algorithm to solve the problem
            self.set_section_property(JSONSchema.ALGORITHM, JSONSchema.NAME, candidate)
            return

    # no algorithm solve this problem, remove section
    self.delete_section(JSONSchema.ALGORITHM)
def __init__(self, input=None):
    """Create InputParser object.

    Args:
        input (dict or str): parsed sections dictionary, or a path to a
            JSON file to parse later. Optional.

    Raises:
        AlgorithmError: if *input* is neither a dict nor a str.
    """
    self._original_sections = None
    self._filename = None
    self._sections = None
    if input is not None:
        if isinstance(input, dict):
            self._sections = input
        elif isinstance(input, str):
            self._filename = input
        else:
            raise AlgorithmError("Invalid parser input type.")

    # Canonical section ordering: problem, input, algorithm, the remaining
    # pluggable types, then backend and anything unknown.
    self._section_order = [JSONSchema.PROBLEM, InputParser.INPUT, JSONSchema.ALGORITHM]
    self._section_order.extend(
        pluggable_type for pluggable_type in local_pluggables_types()
        if pluggable_type != JSONSchema.ALGORITHM)
    self._section_order.extend([JSONSchema.BACKEND, InputParser._UNKNOWN])

    self._json_schema = JSONSchema(
        os.path.join(os.path.dirname(__file__), 'input_schema.json'))
    self._json_schema.populate_problem_names()
    self._json_schema.commit_changes()
def __init__(self, input=None):
    """Create InputParser object.

    Args:
        input (dict or str): parsed sections dictionary, or a path to a
            JSON file to parse later. Optional.

    Raises:
        AlgorithmError: if *input* is neither a dict nor a str.
    """
    self._original_sections = None
    self._filename = None
    self._sections = None
    if input is not None:
        if isinstance(input, dict):
            self._sections = input
        elif isinstance(input, str):
            self._filename = input
        else:
            raise AlgorithmError("Invalid parser input type.")

    # Canonical section ordering: problem, input, algorithm, the remaining
    # pluggable types, then backend and anything unknown.
    self._section_order = [InputParser.PROBLEM, InputParser.INPUT, InputParser.ALGORITHM]
    self._section_order.extend(
        pluggable_type for pluggable_type in local_pluggables_types()
        if pluggable_type != InputParser.ALGORITHM)
    self._section_order.extend([InputParser.BACKEND, InputParser._UNKNOWN])

    # Collect every problem any registered algorithm can solve, keeping
    # first-seen order and dropping duplicates.
    problems_dict = OrderedDict()
    for algo_name in local_algorithms():
        for problem in InputParser.get_algorithm_problems(algo_name):
            problems_dict[problem] = None
    problems_enum = {'enum': list(problems_dict.keys())}

    # Load the base schema and constrain the problem name to the known set.
    jsonfile = os.path.join(os.path.dirname(__file__), 'input_schema.json')
    with open(jsonfile) as json_file:
        self._schema = json.load(json_file)

    self._schema['definitions'][InputParser.PROBLEM]['properties'][
        InputParser.NAME]['oneOf'] = [problems_enum]
    # Keep a pristine copy so schema mutations can be reverted later.
    self._original_schema = copy.deepcopy(self._schema)
def _format_property_name(property_name):
    """Strip surrounding whitespace from a property name.

    Args:
        property_name (str): raw property name; None is treated as empty.

    Returns:
        str: the trimmed name

    Raises:
        AlgorithmError: if the trimmed name is empty.
    """
    if property_name is None:
        property_name = ''

    name = property_name.strip()
    if not name:
        raise AlgorithmError("Empty property name.")

    return name
def _format_section_name(section_name):
    """Strip surrounding whitespace from a section name.

    Args:
        section_name (str): raw section name; None is treated as empty.

    Returns:
        str: the trimmed name

    Raises:
        AlgorithmError: if the trimmed name is empty.
    """
    if section_name is None:
        section_name = ''

    name = section_name.strip()
    if not name:
        raise AlgorithmError("Empty section name.")

    return name
def _update_algorithm_input_schema(self):
    """Rebuild the 'input' entry of the JSON schema from the selected input.

    Chooses the configured input (or the schema default, or the first input
    able to solve the configured problem) and copies that input's declared
    schema into the parser's schema. Removes the 'input' schema entry when
    no input solves the problem.

    Raises:
        AlgorithmError: if no problem is configured anywhere.
    """
    # Find the algorithm input: explicit setting first, else the default.
    default_name = self.get_property_default_value(InputParser.INPUT, JSONSchema.NAME)
    input_name = self.get_section_property(InputParser.INPUT, JSONSchema.NAME, default_name)
    if input_name is None:
        # find the first valid input for the problem
        problem_name = self.get_section_property(JSONSchema.PROBLEM, JSONSchema.NAME)
        if problem_name is None:
            problem_name = self.get_property_default_value(JSONSchema.PROBLEM, JSONSchema.NAME)
        if problem_name is None:
            raise AlgorithmError("No algorithm 'problem' section found on input.")

        for name in local_inputs():
            if problem_name in self.get_input_problems(name):
                # set to the first input to solve the problem
                input_name = name
                break

    if input_name is None:
        # just remove from schema if none solves the problem
        if InputParser.INPUT in self._json_schema.schema['properties']:
            del self._json_schema.schema['properties'][InputParser.INPUT]
        return

    if default_name is None:
        default_name = input_name

    config = {}
    try:
        config = get_input_configuration(input_name)
    except Exception:  # was a bare 'except:'; keep the best-effort fallback
        pass

    input_schema = config['input_schema'] if 'input_schema' in config else {}
    properties = input_schema['properties'] if 'properties' in input_schema else {}
    properties[JSONSchema.NAME] = {'type': 'string'}
    required = input_schema['required'] if 'required' in input_schema else []
    additional_properties = input_schema['additionalProperties'] \
        if 'additionalProperties' in input_schema else True
    if default_name is not None:
        properties[JSONSchema.NAME]['default'] = default_name
        required.append(JSONSchema.NAME)

    if InputParser.INPUT not in self._json_schema.schema['properties']:
        self._json_schema.schema['properties'][InputParser.INPUT] = {'type': 'object'}

    self._json_schema.schema['properties'][InputParser.INPUT]['properties'] = properties
    self._json_schema.schema['properties'][InputParser.INPUT]['required'] = required
    self._json_schema.schema['properties'][InputParser.INPUT]['additionalProperties'] = \
        additional_properties
def _validate_algorithm_problem(self):
    """Check that the configured algorithm supports the selected problem.

    Raises:
        AlgorithmError: if no problem is configured, or the algorithm does
            not support the configured problem.
    """
    algo_name = self.get_section_property(InputParser.ALGORITHM, InputParser.NAME)
    if algo_name is None:
        return

    # Fall back to the schema default when no problem was set explicitly.
    problem_name = self.get_section_property(InputParser.PROBLEM, InputParser.NAME)
    if problem_name is None:
        problem_name = self.get_property_default_value(InputParser.PROBLEM, InputParser.NAME)

    if problem_name is None:
        raise AlgorithmError("No algorithm 'problem' section found on input.")

    supported = InputParser.get_algorithm_problems(algo_name)
    if problem_name not in supported:
        raise AlgorithmError(
            "Problem: {} not in the list of problems: {} for algorithm: {}."
            .format(problem_name, supported, algo_name))
def parse(self):
    """Parse the data.

    Loads sections from the file when not supplied directly, refreshes the
    pluggable/input schemas, orders the sections canonically and snapshots
    the result for later change detection.

    Raises:
        AlgorithmError: if neither sections nor an input file are available.
    """
    if self._sections is None:
        if self._filename is None:
            raise AlgorithmError("Missing input file")

        with open(self._filename) as json_file:
            self._sections = json.load(json_file)

    self._json_schema.update_pluggable_input_schemas(self)
    self._update_algorithm_input_schema()
    self._sections = self._order_sections(self._sections)
    # Snapshot so callers can detect modifications relative to the original.
    self._original_sections = copy.deepcopy(self._sections)
def _avoid_empty_circuits(circuits):
    """Pad instruction-less circuits with an identity gate.

    Args:
        circuits (list[QuantumCircuit]): circuits to inspect

    Returns:
        list[QuantumCircuit]: the same circuits; empty ones gain an identity
        gate on the first qubit of their first quantum register.

    Raises:
        AlgorithmError: if an empty circuit has no quantum register at all.
    """
    patched = []
    for circuit in circuits:
        if len(circuit) == 0:
            # Grab the first quantum register, if any.
            first_qreg = next(iter(circuit.get_qregs().values()), None)
            if first_qreg is None:
                raise AlgorithmError("A QASM without any quantum register is invalid.")
            circuit.iden(first_qreg[0])
        patched.append(circuit)

    return patched
def get_section(self, section_name):
    """Return a Section by name.

    Args:
        section_name (str): the name of the section, case insensitive

    Returns:
        Section: The section with this name

    Raises:
        AlgorithmError: if the section does not exist.
    """
    section_name = JSONSchema.format_section_name(section_name)
    if section_name not in self._sections:
        raise AlgorithmError('No section "{0}"'.format(section_name))

    return self._sections[section_name]
def set_section_property(self, section_name, property_name, value):
    """Set and validate a section property, then refresh dependent state.

    Args:
        section_name (str): the name of the section, case insensitive
        property_name (str): the name of the property
        value: value to set; checked via the JSON schema

    Raises:
        AlgorithmError: if schema validation of the resulting sections fails.
    """
    section_name = JSONSchema.format_section_name(section_name)
    property_name = JSONSchema.format_property_name(property_name)
    value = self._json_schema.check_property_value(section_name, property_name, value)
    types = self.get_property_types(section_name, property_name)

    # Validate on a deep copy first so a failed validation leaves the
    # parser state untouched.
    sections_temp = copy.deepcopy(self._sections)
    InputParser._set_section_property(sections_temp, section_name, property_name, value, types)
    msg = self._json_schema.validate_property(sections_temp, section_name, property_name)
    if msg is not None:
        raise AlgorithmError("{}.{}: Value '{}': '{}'".format(
            section_name, property_name, value, msg))

    InputParser._set_section_property(self._sections, section_name, property_name, value, types)
    if property_name == JSONSchema.NAME:
        if InputParser.INPUT == section_name:
            self._update_algorithm_input_schema()
            # remove properties that are not valid for this section;
            # loop variable renamed so the 'property_name' parameter is not rebound
            default_properties = self.get_section_default_properties(section_name)
            if isinstance(default_properties, dict):
                properties = self.get_section_properties(section_name)
                for prop in list(properties.keys()):
                    if prop != JSONSchema.NAME and prop not in default_properties:
                        self.delete_section_property(section_name, prop)
        elif JSONSchema.PROBLEM == section_name:
            self._update_algorithm_problem()
            self._update_input_problem()
        elif InputParser.is_pluggable_section(section_name):
            self._json_schema.update_pluggable_input_schemas(self)
            # remove properties that are not valid for this section
            default_properties = self.get_section_default_properties(section_name)
            if isinstance(default_properties, dict):
                properties = self.get_section_properties(section_name)
                for prop in list(properties.keys()):
                    if prop != JSONSchema.NAME and prop not in default_properties:
                        self.delete_section_property(section_name, prop)

        if section_name == JSONSchema.ALGORITHM:
            self._update_dependency_sections()

    self._sections = self._order_sections(self._sections)
def validate_merge_defaults(self):
    """Merge schema defaults into the sections and validate the result.

    Raises:
        AlgorithmError: if JSON-schema validation fails, or if the problem
            is inconsistent with the algorithm/input sections.
    """
    try:
        self._merge_default_values()
        json_dict = self.get_sections()
        logger.debug('Algorithm Input: {}'.format(
            json.dumps(json_dict, sort_keys=True, indent=4)))
        logger.debug('Algorithm Input Schema: {}'.format(
            json.dumps(self._schema, sort_keys=True, indent=4)))
        jsonschema.validate(json_dict, self._schema)
    except jsonschema.exceptions.ValidationError as validation_error:
        # Re-raise as the project's own error type with the schema message.
        logger.info('JSON Validation error: {}'.format(str(validation_error)))
        raise AlgorithmError(validation_error.message)

    # Cross-section consistency checks beyond what the schema expresses.
    self._validate_algorithm_problem()
    self._validate_input_problem()
def set_section_data(self, section_name, value):
    """
    Sets a section data.
    Args:
        section_name (str): the name of the section, case insensitive
        value : value to set; converted to one of the schema-declared types

    Raises:
        AlgorithmError: if the value does not match any of the section's
            allowed schema types.
    """
    section_name = InputParser._format_section_name(section_name)
    types = self.get_section_types(section_name)
    value = InputParser._get_value(value, types)
    if len(types) > 0:
        validator = jsonschema.Draft4Validator(self._schema)
        # 'type_' avoids shadowing the 'type' builtin.
        if not any(validator.is_type(value, type_) for type_ in types):
            raise AlgorithmError(
                "{}: Value '{}' is not of types: '{}'".format(
                    section_name, value, types))

    self._sections[section_name] = value
def run_circuits(circuits, backend, execute_config, qjob_config=None,
                 show_circuit_summary=False, has_shared_circuits=False):
    """An execution wrapper with Qiskit-Terra, with job auto recover capability.

    The autorecovery feature is only applied for non-simulator backend.
    This wrapper will try to get the result no matter how long it takes.

    Args:
        circuits (QuantumCircuit or list[QuantumCircuit]): circuits to execute
        backend (BaseBackend): backend instance
        execute_config (dict): settings for qiskit execute (or compile)
        qjob_config (dict): settings for job object, like timeout and wait
        show_circuit_summary (bool): showing the summary of submitted circuits.
        has_shared_circuits (bool): use the 0-th circuits as initial state for other circuits.

    Returns:
        Result: Result object

    Raises:
        AlgorithmError: Any error except for JobError raised by Qiskit Terra
    """
    # Fixed: the original default was a mutable dict ({}), which Python
    # shares across calls; use None as sentinel instead.
    if qjob_config is None:
        qjob_config = {}

    if backend is None or not isinstance(backend, BaseBackend):
        raise AlgorithmError('Backend is missing or not an instance of BaseBackend')

    if not isinstance(circuits, list):
        circuits = [circuits]

    # Statevector simulators reject empty circuits; pad them first.
    if backend.configuration().get('name', '').startswith('statevector'):
        circuits = _avoid_empty_circuits(circuits)

    if has_shared_circuits and version.parse(qiskit.__version__) > version.parse('0.6.1'):
        return _reuse_shared_circuits(circuits, backend, execute_config, qjob_config)

    # Auto-recovery only makes sense for real (non-simulator) backends.
    with_autorecover = not backend.configuration()['simulator']
    max_circuits_per_job = sys.maxsize if backend.configuration()['local'] \
        else MAX_CIRCUITS_PER_JOB

    # Split circuits into chunks and submit one job per chunk.
    qobjs = []
    jobs = []
    chunks = int(np.ceil(len(circuits) / max_circuits_per_job))
    for i in range(chunks):
        sub_circuits = circuits[i * max_circuits_per_job:(i + 1) * max_circuits_per_job]
        qobj = q_compile(sub_circuits, backend, **execute_config)
        job = backend.run(qobj)
        jobs.append(job)
        qobjs.append(qobj)

    if logger.isEnabledFor(logging.DEBUG) and show_circuit_summary:
        logger.debug(summarize_circuits(circuits))

    results = []
    if with_autorecover:
        logger.debug("There are {} circuits and they are chunked into {} chunks, "
                     "each with {} circutis.".format(len(circuits), chunks,
                                                     max_circuits_per_job))
        for idx in range(len(jobs)):
            job = jobs[idx]
            job_id = job.job_id()
            logger.info("Running {}-th chunk circuits, job id: {}".format(idx, job_id))
            while True:
                try:
                    result = job.result(**qjob_config)
                    if result.status == 'COMPLETED':
                        results.append(result)
                        logger.info("COMPLETED the {}-th chunk of circuits, "
                                    "job id: {}".format(idx, job_id))
                        break
                    else:
                        logger.warning("FAILURE: the {}-th chunk of circuits, "
                                       "job id: {}".format(idx, job_id))
                except JobError as e:
                    # if terra raise any error, which means something wrong, re-run it
                    logger.warning("FAILURE: the {}-th chunk of circuits, job id: {}, "
                                   "Terra job error: {} ".format(idx, job_id, e))
                except Exception as e:
                    raise AlgorithmError("FAILURE: the {}-th chunk of circuits, job id: {}, "
                                         "Terra unknown error: {} ".format(idx, job_id, e)) from e

                # keep querying the status until it is okay.
                while True:
                    try:
                        job_status = job.status()
                        break
                    except JobError as e:
                        logger.warning("FAILURE: job id: {}, "
                                       "status: 'FAIL_TO_GET_STATUS' "
                                       "Terra job error: {}".format(job_id, e))
                        time.sleep(5)
                    except Exception as e:
                        raise AlgorithmError("FAILURE: job id: {}, "
                                             "status: 'FAIL_TO_GET_STATUS' "
                                             "({})".format(job_id, e)) from e

                logger.info("Job status: {}".format(job_status))

                # when reach here, it means the job fails. let's check what kinds of failure it is.
                if job_status == JobStatus.DONE:
                    logger.info("Job ({}) is completed anyway, retrieve result "
                                "from backend.".format(job_id))
                    job = backend.retrieve_job(job_id)
                elif job_status == JobStatus.RUNNING or job_status == JobStatus.QUEUED:
                    logger.info("Job ({}) is {}, but encounter an exception, "
                                "recover it from backend.".format(job_id, job_status))
                    job = backend.retrieve_job(job_id)
                else:
                    logger.info("Fail to run Job ({}), resubmit it.".format(job_id))
                    qobj = qobjs[idx]
                    job = backend.run(qobj)
    else:
        results = []
        for job in jobs:
            results.append(job.result(**qjob_config))

    # Combine chunk results into a single Result (Result supports '+').
    if len(results) != 0:
        result = functools.reduce(lambda x, y: x + y, results)
    else:
        result = None

    return result
def run_circuits(circuits, backend, execute_config, qjob_config=None,
                 max_circuits_per_job=sys.maxsize, show_circuit_summary=False):
    """
    An execution wrapper with Qiskit-Terra, with job auto recover capability.

    The autorecovery feature is only applied for non-simulator backend.
    This wrapper will try to get the result no matter how long it takes.

    Args:
        circuits (QuantumCircuit or list[QuantumCircuit]): circuits to execute
        backend (str): name of backend
        execute_config (dict): settings for qiskit execute (or compile)
        qjob_config (dict): settings for job object, like timeout and wait
        max_circuits_per_job (int): the maximum number of job, default is
            unlimited but 300 is limited if you submit to a remote backend
        show_circuit_summary (bool): showing the summary of submitted circuits.

    Returns:
        Result: Result object

    Raises:
        AlgorithmError: Any error except for JobError raised by Qiskit Terra
    """
    # Fixed: the original default was a mutable dict ({}), which Python
    # shares across calls; use None as sentinel instead.
    if qjob_config is None:
        qjob_config = {}

    if not isinstance(circuits, list):
        circuits = [circuits]

    my_backend = get_backend(backend)
    # Auto-recovery only makes sense for real (non-simulator) backends.
    with_autorecover = not my_backend.configuration()['simulator']

    # Split circuits into chunks and submit one job per chunk.
    qobjs = []
    jobs = []
    chunks = int(np.ceil(len(circuits) / max_circuits_per_job))
    for i in range(chunks):
        sub_circuits = circuits[i * max_circuits_per_job:(i + 1) * max_circuits_per_job]
        qobj = q_compile(sub_circuits, my_backend, **execute_config)
        job = my_backend.run(qobj)
        jobs.append(job)
        qobjs.append(qobj)

    if logger.isEnabledFor(logging.DEBUG) and show_circuit_summary:
        logger.debug(summarize_circuits(circuits))

    results = []
    if with_autorecover:
        logger.info("There are {} circuits and they are chunked into "
                    "{} chunks, each with {} circutis.".format(
                        len(circuits), chunks, max_circuits_per_job))
        for idx in range(len(jobs)):
            job = jobs[idx]
            job_id = job.id()
            logger.info("Running {}-th chunk circuits, job id: {}".format(idx, job_id))
            while True:
                try:
                    result = job.result(**qjob_config)
                    if result.status == 'COMPLETED':
                        results.append(result)
                        logger.info(
                            "COMPLETED the {}-th chunk of circuits, job id: {}"
                            .format(idx, job_id))
                        break
                    else:
                        logger.warning(
                            "FAILURE: the {}-th chunk of circuits, job id: {}".
                            format(idx, job_id))
                except JobError as e:
                    # if terra raise any error, which means something wrong, re-run it
                    logger.warning(
                        "FAILURE: the {}-th chunk of circuits, job id: {}, "
                        "Terra job error: {} ".format(idx, job_id, e))
                except Exception as e:
                    raise AlgorithmError(
                        "FAILURE: the {}-th chunk of circuits, job id: {}, "
                        "Terra unknown error: {} ".format(idx, job_id, e)) from e

                # keep querying the status until it is okay.
                while True:
                    try:
                        job_status = job.status()
                        break
                    except JobError as e:
                        logger.warning(
                            "FAILURE: job id: {}, "
                            "status: 'FAIL_TO_GET_STATUS' Terra job error: {}".
                            format(job_id, e))
                        time.sleep(5)
                    except Exception as e:
                        raise AlgorithmError(
                            "FAILURE: job id: {}, "
                            "status: 'FAIL_TO_GET_STATUS' ({})".format(job_id, e)) from e

                logger.info("Job status: {}".format(job_status))

                # when reach here, it means the job fails. let's check what kinds of failure it is.
                if job_status == JobStatus.DONE:
                    logger.info(
                        "Job ({}) is completed anyway, retrieve result from backend."
                        .format(job_id))
                    job = my_backend.retrieve_job(job_id)
                elif job_status == JobStatus.RUNNING or job_status == JobStatus.QUEUED:
                    logger.info("Job ({}) is {}, but encounter an exception, "
                                "recover it from backend.".format(job_id, job_status))
                    job = my_backend.retrieve_job(job_id)
                else:
                    logger.info("Fail to run Job ({}), resubmit it.".format(job_id))
                    qobj = qobjs[idx]
                    job = my_backend.run(qobj)
    else:
        results = []
        for job in jobs:
            results.append(job.result(**qjob_config))

    # Combine chunk results into a single Result (Result supports '+').
    if len(results) != 0:
        result = functools.reduce(lambda x, y: x + y, results)
    else:
        result = None

    return result