def test_assemble_no_run_config(self):
    """Assembling without a run_config should fall back to the defaults."""
    qreg = QuantumRegister(2, name='q')
    creg = ClassicalRegister(2, name='c')
    bell = QuantumCircuit(qreg, creg, name='circ')
    bell.h(qreg[0])
    bell.cx(qreg[0], qreg[1])
    bell.measure(qreg, creg)

    qobj = assemble(bell)
    validate_qobj_against_schema(qobj)

    self.assertIsInstance(qobj, QasmQobj)
    # Default shot count is 1024 when nothing is specified.
    self.assertEqual(qobj.config.shots, 1024)
def test_assemble_with_delay(self):
    """Delay instructions must not change the assembled experiment."""
    backend = FakeOpenPulse2Q()

    base_schedule = self.schedule
    delayed_schedule = base_schedule + pulse.Delay(10)(
        self.backend_config.drive(0))

    base_qobj = assemble(base_schedule, backend)
    validate_qobj_against_schema(base_qobj)
    delayed_qobj = assemble(delayed_schedule, backend)
    validate_qobj_against_schema(delayed_qobj)

    # The delay should be ignored, so both experiments serialize identically.
    self.assertEqual(base_qobj.experiments[0].to_dict(),
                     delayed_qobj.experiments[0].to_dict())
def test_missing_rep_time(self):
    """Assembly must still succeed when rep_time is absent.

    Simulators legitimately report no rep_time.
    """
    qobj = assemble(
        self.schedule,
        qubit_lo_freq=self.qubit_lo_freq,
        meas_lo_freq=self.meas_lo_freq,
        qubit_lo_range=self.qubit_lo_range,
        meas_lo_range=self.meas_lo_range,
        schedule_los=self.schedule_los,
        meas_map=self.meas_map,
        memory_slots=self.memory_slots,
        rep_time=None,
    )
    validate_qobj_against_schema(qobj)
def submit(self):
    """Submit the job to the backend for execution.

    Raises:
        QobjValidationError: if the JSON serialization of the Qobj passed
            during construction does not validate against the Qobj schema.
        JobError: if trying to re-submit the job.
    """
    # A non-None future means this job was already handed to the executor.
    if self._future is not None:
        raise JobError("We have already submitted the job!")

    validate_qobj_against_schema(self._qobj)
    self._future = self._executor.submit(
        self._fn, self._job_id, self._qobj, *self._args)
def test_missing_rep_time_and_delay(self):
    """Test qobj is valid if rep_time and rep_delay are missing."""
    qobj = assemble(self.schedule,
                    qubit_lo_freq=self.qubit_lo_freq,
                    meas_lo_freq=self.meas_lo_freq,
                    qubit_lo_range=self.qubit_lo_range,
                    meas_lo_range=self.meas_lo_range,
                    schedule_los=self.schedule_los,
                    meas_map=self.meas_map,
                    memory_slots=None,
                    rep_time=None,
                    rep_delay=None)
    validate_qobj_against_schema(qobj)
    # assertFalse is the idiomatic form of assertEqual(hasattr(...), False)
    # and produces a clearer failure message.
    self.assertFalse(hasattr(qobj, 'rep_time'))
    self.assertFalse(hasattr(qobj, 'rep_delay'))
def run(self, qobj: Qobj,
        job_name: Optional[str] = None,
        job_share_level: Optional[str] = None) -> IBMQJob:
    """Run a Qobj asynchronously.

    Args:
        qobj: Description of the job to execute.
        job_name: Custom name for the job, usable later as a filter in
            ``jobs()``. Names need not be unique.
        job_share_level: Visibility of the job: one of ``global``, ``hub``,
            ``group``, ``project``, or ``none`` (the default when omitted).

    Returns:
        An instance derived from BaseJob.

    Raises:
        SchemaValidationError: If the job validation fails.
        IBMQBackendApiError: If an unexpected error occurred while submitting
            the job.
        IBMQBackendApiProtocolError: If an unexpected value received when
            the server.
        IBMQBackendValueError: If the specified job share level is not valid.
    """
    # pylint: disable=arguments-differ
    api_job_share_level = None
    if job_share_level:
        try:
            api_job_share_level = ApiJobShareLevel(job_share_level)
        except ValueError:
            valid_levels = ', '.join(
                level.value for level in ApiJobShareLevel)
            raise IBMQBackendValueError(
                '"{}" is not a valid job share level. '
                'Valid job share levels are: {}'.format(
                    job_share_level, valid_levels))

    validate_qobj_against_schema(qobj)
    return self._submit_job(qobj, job_name, api_job_share_level)
def test_assemble_multi_schedules_with_multi_lo_configs(self):
    """Test assembling schedules, with the same number of lo configs (n:n setup)."""
    qobj = assemble([self.schedule, self.schedule],
                    qobj_header=self.header,
                    qubit_lo_freq=self.default_qubit_lo_freq,
                    meas_lo_freq=self.default_meas_lo_freq,
                    schedule_los=[self.user_lo_config, self.user_lo_config],
                    **self.config)
    validate_qobj_against_schema(qobj)

    qobj_dict = qobj.to_dict()
    experiments = qobj_dict['experiments']

    self.assertListEqual(qobj_dict['config']['qubit_lo_freq'], [4.9, 5.0])
    self.assertEqual(len(experiments), 2)
    self.assertEqual(len(experiments[0]['instructions']), 2)
    # The per-experiment config carries the user LO override.
    self.assertDictEqual(experiments[0]['config'],
                         {'qubit_lo_freq': [4.91, 5.0]})
def test_assemble_initialize(self):
    """Assembling a circuit containing an initialize instruction."""
    qreg = QuantumRegister(2, name='q')
    circuit = QuantumCircuit(qreg, name='circ')
    amp = 1 / np.sqrt(2)
    circuit.initialize([amp, 0, 0, amp], qreg[:])

    qobj = assemble(circuit)
    validate_qobj_against_schema(qobj)

    self.assertIsInstance(qobj, QasmQobj)
    first_inst = qobj.experiments[0].instructions[0]
    self.assertEqual(first_inst.name, 'initialize')
    np.testing.assert_almost_equal(
        first_inst.params, [0.7071067811865, 0, 0, 0.707106781186])
def test_assemble_meas_map(self):
    """Test assembling a single schedule, no lo config."""
    schedule = pulse.Acquire(5)([AcquireChannel(0), AcquireChannel(1)],
                                [MemorySlot(0), MemorySlot(1)])

    # A meas_map covering each qubit separately is compatible.
    qobj = assemble(schedule,
                    qubit_lo_freq=self.default_qubit_lo_freq,
                    meas_lo_freq=self.default_meas_lo_freq,
                    meas_map=[[0], [1]])
    validate_qobj_against_schema(qobj)

    # A meas_map requiring qubit 2 conflicts with the schedule.
    with self.assertRaises(QiskitError):
        assemble(schedule,
                 qubit_lo_freq=self.default_qubit_lo_freq,
                 meas_lo_freq=self.default_meas_lo_freq,
                 meas_map=[[0, 1, 2]])
def __init__(self, backend, job_id, api=None, qobj=None):
    """HoneywellJob init function.

    We can instantiate jobs from two sources: A QObj, and an already
    submitted job returned by the API servers.

    Args:
        backend (BaseBackend): The backend instance used to run this job.
        job_id (str or None): The job ID of an already submitted job.
            Pass `None` if you are creating a new job.
        api (HoneywellClient): Honeywell api client.
        qobj (Qobj): The Quantum Object. See notes below

    Notes:
        It is mandatory to pass either ``qobj`` or ``job_id``. Passing a
        ``qobj`` will ignore ``job_id`` and will create an instance to be
        submitted to the API server for job creation. Passing only a
        `job_id` will create an instance representing an already-created
        job retrieved from the API server.
    """
    super().__init__(backend, job_id)

    # Fall back to a client built from the backend's credentials.
    # NOTE: removed a stray debug print of the credentials API URL that
    # leaked configuration details to stdout.
    if api:
        self._api = api
    else:
        self._api = HoneywellClient(backend.provider().credentials)

    self._creation_date = datetime.utcnow().replace(tzinfo=timezone.utc).isoformat()

    # Properties used for caching.
    self._cancelled = False
    self._api_error_msg = None
    self._result = None
    self._job_ids = []
    self._experiment_results = []

    # Both construction paths start in INITIALIZING; set it once.
    self._status = JobStatus.INITIALIZING
    if qobj:
        validate_qobj_against_schema(qobj)
        self._qobj_payload = qobj.to_dict()
        # Extract individual experiments
        # if we want user qobj headers, the third argument contains it
        self._experiments, self._qobj_config, _ = disassemble(qobj)
    else:
        self._qobj_payload = {}
        self._job_ids.append(job_id)
def test_assemble_meas_map(self):
    """Test assembling a single schedule, no lo config."""
    sched = Schedule(name='fake_experiment')
    sched = sched.insert(5, Acquire(5, AcquireChannel(0), MemorySlot(0)))
    sched = sched.insert(5, Acquire(5, AcquireChannel(1), MemorySlot(1)))

    # Per-qubit meas_map groups are compatible with this schedule.
    qobj = assemble(sched,
                    qubit_lo_freq=self.default_qubit_lo_freq,
                    meas_lo_freq=self.default_meas_lo_freq,
                    meas_map=[[0], [1]])
    validate_qobj_against_schema(qobj)

    # A group demanding qubit 2 must be rejected.
    with self.assertRaises(QiskitError):
        assemble(sched,
                 qubit_lo_freq=self.default_qubit_lo_freq,
                 meas_lo_freq=self.default_meas_lo_freq,
                 meas_map=[[0, 1, 2]])
def test_default_shots_greater_than_max_shots(self):
    """Shots should be capped at the backend's max_shots."""
    qreg = QuantumRegister(2, name='q')
    creg = ClassicalRegister(2, name='c')
    bell = QuantumCircuit(qreg, creg, name='circ')
    bell.h(qreg[0])
    bell.cx(qreg[0], qreg[1])
    bell.measure(qreg, creg)

    backend = FakeYorktown()
    backend._configuration.max_shots = 5

    qobj = assemble(bell, backend)
    validate_qobj_against_schema(qobj)

    self.assertIsInstance(qobj, QasmQobj)
    # The default (1024) exceeds max_shots, so the cap wins.
    self.assertEqual(qobj.config.shots, 5)
def test_pulse_name_conflicts(self):
    """Test that pulse name conflicts can be resolved."""
    samples = np.array([0.02, 0.05, 0.05, 0.05, 0.02], dtype=np.complex128)
    # Deliberately reuse the name 'pulse0' to force a collision.
    conflicting_pulse = pulse.SamplePulse(samples=samples, name='pulse0')
    self.schedule = self.schedule.insert(
        1, Play(conflicting_pulse, self.backend_config.drive(1)))

    qobj = assemble(self.schedule,
                    qobj_header=self.header,
                    qubit_lo_freq=self.default_qubit_lo_freq,
                    meas_lo_freq=self.default_meas_lo_freq,
                    schedule_los=[],
                    **self.config)
    validate_qobj_against_schema(qobj)

    # Assembly must have renamed one of the pulses.
    library = qobj.config.pulse_library
    self.assertNotEqual(library[0].name, library[1].name)
def test_assemble_opaque_inst(self):
    """Test opaque instruction is assembled as-is"""
    opaque = Instruction(name='my_inst', num_qubits=4,
                         num_clbits=2, params=[0.5, 0.4])

    qreg = QuantumRegister(6, name='q')
    creg = ClassicalRegister(4, name='c')
    circuit = QuantumCircuit(qreg, creg, name='circ')
    circuit.append(opaque, [qreg[0], qreg[2], qreg[5], qreg[3]],
                   [creg[3], creg[0]])

    qobj = assemble(circuit)
    validate_qobj_against_schema(qobj)
    self.assertIsInstance(qobj, QasmQobj)

    instructions = qobj.experiments[0].instructions
    self.assertEqual(len(instructions), 1)
    # The opaque instruction passes through untouched.
    self.assertEqual(instructions[0].name, 'my_inst')
    self.assertEqual(instructions[0].qubits, [0, 2, 5, 3])
    self.assertEqual(instructions[0].memory, [3, 0])
    self.assertEqual(instructions[0].params, [0.5, 0.4])
def test_assemble_single_circuit(self):
    """Assembling one circuit with explicit shots and memory settings."""
    qreg = QuantumRegister(2, name='q')
    creg = ClassicalRegister(2, name='c')
    bell = QuantumCircuit(qreg, creg, name='circ')
    bell.h(qreg[0])
    bell.cx(qreg[0], qreg[1])
    bell.measure(qreg, creg)

    qobj = assemble(bell, shots=2000, memory=True)
    validate_qobj_against_schema(qobj)

    self.assertIsInstance(qobj, QasmQobj)
    self.assertEqual(qobj.config.shots, 2000)
    self.assertEqual(qobj.config.memory, True)
    self.assertEqual(len(qobj.experiments), 1)
    # Instruction order: h at index 0, cx at index 1.
    self.assertEqual(qobj.experiments[0].instructions[1].name, 'cx')
def test_assemble_with_single_kernels(self):
    """Test that assembly works with both a single kernel."""
    kernel = Kernel('disc_one', test_params=True)

    sched = Schedule()
    # Only the first acquire carries a discriminator kernel.
    sched = sched.append(
        Acquire(5, AcquireChannel(0), MemorySlot(0), kernel=kernel))
    sched = sched.append(Acquire(5, AcquireChannel(1), MemorySlot(1)))

    qobj = assemble(sched,
                    qubit_lo_freq=self.default_qubit_lo_freq,
                    meas_lo_freq=self.default_meas_lo_freq,
                    meas_map=[[0, 1]])
    validate_qobj_against_schema(qobj)

    assembled_kernels = qobj.experiments[0].instructions[0].kernels
    self.assertEqual(len(assembled_kernels), 1)
    self.assertEqual(assembled_kernels[0].name, 'disc_one')
    self.assertEqual(assembled_kernels[0].params['test_params'], True)
def test_assemble_memory_slots_for_schedules(self):
    """Test assembling schedules with different memory slots."""
    slot_counts = [10, 5, 7]
    schedules = [
        Acquire(5, self.backend_config.acquire(0),
                mem_slot=pulse.MemorySlot(count - 1))
        for count in slot_counts
    ]

    qobj = assemble(schedules,
                    qubit_lo_freq=self.default_qubit_lo_freq,
                    meas_lo_freq=self.default_meas_lo_freq,
                    meas_map=[[0], [1]])
    validate_qobj_against_schema(qobj)

    # Global config takes the maximum over all experiments.
    self.assertEqual(qobj.config.memory_slots, max(slot_counts))
    # Each experiment header records its own requirement.
    for index, expected in enumerate(slot_counts):
        self.assertEqual(qobj.experiments[index].header.memory_slots,
                         expected)
def test_convert_to_bfunc_plus_conditional(self):
    """Verify assemble_circuits converts conditionals from QASM to Qobj."""
    qreg = QuantumRegister(1)
    creg = ClassicalRegister(1)
    circuit = QuantumCircuit(qreg, creg)
    circuit.h(qreg[0]).c_if(creg, 1)

    qobj = assemble(circuit)
    validate_qobj_against_schema(qobj)

    bfunc_op, h_op = qobj.experiments[0].instructions
    # The conditional is lowered to an explicit bfunc comparison...
    self.assertEqual(bfunc_op.name, 'bfunc')
    self.assertEqual(bfunc_op.mask, '0x1')
    self.assertEqual(bfunc_op.val, '0x1')
    self.assertEqual(bfunc_op.relation, '==')
    # ...whose output register gates the h instruction.
    self.assertTrue(hasattr(h_op, 'conditional'))
    self.assertEqual(bfunc_op.register, h_op.conditional)
def run(self, qobj: Qobj, job_name: Optional[str] = None) -> IBMQJob:
    """Run a Qobj asynchronously.

    Args:
        qobj: Description of the job to execute.
        job_name: Custom name for the job, usable later as a filter in the
            ``jobs()`` function call. Names need not be unique.

    Returns:
        An instance derived from BaseJob.

    Raises:
        SchemaValidationError: If the job validation fails.
        IBMQBackendError: If an unexpected error occurred while submitting
            the job.
    """
    # pylint: disable=arguments-differ
    validate_qobj_against_schema(qobj)
    return self._submit_job(qobj, job_name)
def test_assemble_memory_slots(self):
    """Test assembling a schedule and inferring number of memoryslots."""
    slot_count = 10

    # Case 1: a single acquisition into the highest slot.
    sched = Acquire(5, self.backend_config.acquire(0),
                    mem_slot=pulse.MemorySlot(slot_count - 1))
    qobj = assemble(sched,
                    qubit_lo_freq=self.default_qubit_lo_freq,
                    meas_lo_freq=self.default_meas_lo_freq,
                    meas_map=[[0], [1]])
    validate_qobj_against_schema(qobj)
    self.assertEqual(qobj.config.memory_slots, slot_count)
    # The experiment header mirrors the inferred slot count.
    self.assertEqual(qobj.experiments[0].header.memory_slots, slot_count)

    # Case 2: two acquisitions targeting the same slot — count unchanged.
    sched = Acquire(5, self.backend_config.acquire(0),
                    mem_slot=pulse.MemorySlot(slot_count - 1))
    sched = sched.insert(
        10, Acquire(5, self.backend_config.acquire(0),
                    mem_slot=pulse.MemorySlot(slot_count - 1)))
    qobj = assemble(sched,
                    qubit_lo_freq=self.default_qubit_lo_freq,
                    meas_lo_freq=self.default_meas_lo_freq,
                    meas_map=[[0], [1]])
    validate_qobj_against_schema(qobj)
    self.assertEqual(qobj.config.memory_slots, slot_count)
    self.assertEqual(qobj.experiments[0].header.memory_slots, slot_count)
def test_measure_to_registers_when_conditionals(self):
    """Verify assemble_circuits maps all measure ops on to a register slot
    for a circuit containing conditionals."""
    qreg = QuantumRegister(2)
    creg1 = ClassicalRegister(1)
    creg2 = ClassicalRegister(2)
    circuit = QuantumCircuit(qreg, creg1, creg2)

    circuit.measure(qreg[0], creg1)  # Measure not required for a later conditional
    circuit.measure(qreg[1], creg2[1])  # Measure required for a later conditional
    circuit.h(qreg[1]).c_if(creg2, 3)

    qobj = assemble(circuit)
    validate_qobj_against_schema(qobj)

    measures = [op for op in qobj.experiments[0].instructions
                if op.name == 'measure']
    first_measure, second_measure = measures

    # Both measures must carry a register slot that matches their memory slot.
    for measure_op in (first_measure, second_measure):
        self.assertTrue(hasattr(measure_op, 'register'))
        self.assertEqual(measure_op.register, measure_op.memory)
def _run_job(self, job_id, qobj, backend_options, noise_model, validate):
    """Run a qobj job.

    Args:
        job_id: identifier attached to the formatted result.
        qobj: the Qobj describing the experiments to run.
        backend_options: backend-specific options forwarded to the controller.
        noise_model: noise model forwarded to the controller.
        validate (bool): if True, validate the qobj against the schema and
            backend constraints before running.

    Returns:
        The formatted result with the statevector attached to the first
        experiment's data.
    """
    import numpy as np

    start = time.time()
    if validate:
        validate_qobj_against_schema(qobj)
        self._validate(qobj, backend_options, noise_model)

    qobj_str = self._format_qobj_str(qobj, backend_options, noise_model)
    # The controller returns [result_json_str, statevector_list].
    reslist = self._controller(qobj_str)
    result_str = reslist[0]
    sv_list = reslist[1]

    output = json.loads(result_str)
    self._validate_controller_output(output)
    end = time.time()

    result_formatted = self._format_results(job_id, output, end - start)
    # Attach the raw statevector from the controller to the first experiment.
    result_formatted.results[0].data.statevector = np.array(sv_list)
    return result_formatted
def test_resize_value_to_register(self):
    """Verify assemble_circuits converts the value provided on the classical
    creg to its mapped location on the device register."""
    qreg = QuantumRegister(1)
    creg1 = ClassicalRegister(2)
    creg2 = ClassicalRegister(2)
    creg3 = ClassicalRegister(1)
    circuit = QuantumCircuit(qreg, creg1, creg2, creg3)
    circuit.h(qreg[0]).c_if(creg2, 2)

    qobj = assemble(circuit)
    validate_qobj_against_schema(qobj)

    bfunc_op, h_op = qobj.experiments[0].instructions
    self.assertEqual(bfunc_op.name, 'bfunc')
    # creg2 occupies device bits 2-3, so mask 0b1100 and value 2 << 2.
    self.assertEqual(bfunc_op.mask, '0xC')
    self.assertEqual(bfunc_op.val, '0x8')
    self.assertEqual(bfunc_op.relation, '==')
    self.assertTrue(hasattr(h_op, 'conditional'))
    self.assertEqual(bfunc_op.register, h_op.conditional)
def _run_job(self, job_id, qobj, backend_options, noise_model, validate):
    """Run a qobj job.

    Args:
        job_id: identifier attached to the formatted result.
        qobj: the Qobj describing the experiments to run.
        backend_options (dict or None): may carry a "method" entry choosing
            between "statevector" and "counts"/"count" modes.
        noise_model: noise model forwarded to validation.
        validate (bool): if True, validate the qobj before running.

    Returns:
        The formatted result from the controller output.

    Raises:
        ALComError: if the requested backend method is not supported.
    """
    import ast

    start = time.time()
    if validate:
        validate_qobj_against_schema(qobj)
        self._validate(qobj, backend_options, noise_model)

    # convert to format that can run on our simulator
    qasm_str = self._get_qasm_str_from_qobj(qobj)

    # get shots from qobj config; default to a single shot
    shots = qobj.config.shots if "shots" in qobj.config.__dir__() else 1

    # Determine the simulation method; default is `Counts Mode` where
    # BBDBackend excels. Using .get() fixes a KeyError when the caller
    # passes backend_options without a "method" entry.
    method = backend_options.get("method") if backend_options else None
    if method == "statevector":
        use_statevector = True
        self._logger.warning(msg="The simulator is using Statevector Mode")
    elif method in ("counts", "count", None):
        use_statevector = False
        self._logger.warning(msg="The simulator is using Counts Mode")
    else:
        raise ALComError("the backend method is not supported")

    # Now the output is of type TrimResult
    # TODO after hackathon prototype: use QOBJ, Result and ExperimentResult together
    output = self._controller(qasm_str, use_statevector, shots)
    # The controller returns a Python-literal string; parse it safely.
    output = ast.literal_eval(output)

    self._validate_controller_output(output)
    end = time.time()
    return self._format_results(job_id, output, end - start)
def __init__(self, backend, job_id, api, is_device, qobj=None,
             creation_date=None, api_status=None, **kwargs):
    """IBMQJob init function.

    We can instantiate jobs from two sources: A QObj, and an already
    submitted job returned by the API servers.

    Args:
        backend (str): The backend instance used to run this job.
        job_id (str): The job ID of an already submitted job.
            Pass `None` if you are creating a new one.
        api (IBMQuantumExperience): IBM Q API
        is_device (bool): whether backend is a real device  # TODO: remove this after Qobj
        qobj (Qobj): The Quantum Object. See notes below
        creation_date (str): When the job was run.
        api_status (str): `status` field directly from the API response.
        kwargs (dict): You can pass `backend_name` to this function although
            it has been deprecated.

    Notes:
        It is mandatory to pass either ``qobj`` or ``job_id``. Passing a
        ``qobj`` will ignore ``job_id`` and will create an instance
        representing an already-created job retrieved from the API server.
    """
    if 'backend_name' in kwargs:
        warnings.warn('Passing the parameter `backend_name` is deprecated, '
                      'pass the `backend` parameter with the instance of '
                      'the backend running the job.', DeprecationWarning)

    super().__init__(backend, job_id)
    self._job_data = None

    if qobj is not None:
        validate_qobj_against_schema(qobj)
        self._qobj_payload = qobj_to_dict(qobj, version='1.0.0')
        # TODO: No need for this conversion, just use the new equivalent members above
        old_qobj = qobj_to_dict(qobj, version='0.0.1')
        self._job_data = {
            'circuits': old_qobj['circuits'],
            'hpc': old_qobj['config'].get('hpc'),
            'seed': old_qobj['circuits'][0]['config']['seed'],
            'shots': old_qobj['config']['shots'],
            'max_credits': old_qobj['config']['max_credits']
        }

    self._future_captured_exception = None
    self._api = api
    self._backend = backend
    self._cancelled = False
    self._status = JobStatus.INITIALIZING
    # In case of not providing a `qobj`, it is assumed the job already
    # exists in the API (with `job_id`).
    if qobj is None:
        # Some API calls (`get_status_jobs`, `get_status_job`) provide
        # enough information to recreate the `Job`. If that is the case, try
        # to make use of that information during instantiation, as
        # `self.status()` involves an extra call to the API.
        if api_status == 'VALIDATING':
            self._status = JobStatus.VALIDATING
        elif api_status == 'COMPLETED':
            self._status = JobStatus.DONE
        elif api_status == 'CANCELLED':
            self._status = JobStatus.CANCELLED
            self._cancelled = True
        else:
            self.status()
    self._queue_position = None
    self._is_device = is_device

    def current_utc_time():
        """Return the current time in UTC format."""
        # BUG FIX: the original computed the timestamp but never returned
        # it, so `self._creation_date` silently became None.
        return datetime.datetime.utcnow().replace(
            tzinfo=datetime.timezone.utc).isoformat()

    self._creation_date = creation_date or current_utc_time()
    self._future = None
    self._api_error_msg = None
def test_as_dict_against_schema(self):
    """Test dictionary representation of Qobj against its schema."""
    try:
        validate_qobj_against_schema(self.valid_qobj)
    except jsonschema.ValidationError as err:
        # Surface the schema error message as the test failure reason.
        self.fail(str(err))
def __init__(self, backend, job_id, api, qobj=None,
             creation_date=None, api_status=None,
             use_object_storage=False, use_websockets=False):
    """IBMQJob init function.

    We can instantiate jobs from two sources: A QObj, and an already
    submitted job returned by the API servers.

    Args:
        backend (BaseBackend): The backend instance used to run this job.
        job_id (str or None): The job ID of an already submitted job.
            Pass `None` if you are creating a new job.
        api (IBMQConnector or BaseClient): object for connecting to the API.
        qobj (Qobj): The Quantum Object. See notes below
        creation_date (str): When the job was run.
        api_status (str): `status` field directly from the API response.
        use_object_storage (bool): if `True`, signals that the Job will
            _attempt_ to use object storage for submitting jobs and
            retrieving results.
        use_websockets (bool): if `True`, signals that the Job will
            _attempt_ to use websockets when pooling for final status.

    Notes:
        It is mandatory to pass either ``qobj`` or ``job_id``. Passing a
        ``qobj`` will ignore ``job_id`` and will create an instance to be
        submitted to the API server for job creation. Passing only a
        `job_id` will create an instance representing an already-created
        job retrieved from the API server.
    """
    # pylint: disable=unused-argument
    super().__init__(backend, job_id)

    # Properties common to all Jobs.
    self._api = api
    self._backend = backend
    self._creation_date = creation_date or current_utc_time()
    self._future = None
    self._future_captured_exception = None

    # Properties used for caching.
    self._cancelled = False
    self._api_error_msg = None
    self._result = None
    self._queue_position = None

    # Properties used for deciding the underlying API features to use.
    self._use_object_storage = use_object_storage
    self._use_websockets = use_websockets

    if qobj:
        validate_qobj_against_schema(qobj)
        self._qobj_payload = qobj.to_dict()
        self._status = JobStatus.INITIALIZING
        return

    # In case of not providing a `qobj`, it is assumed the job already
    # exists in the API (with `job_id`).
    self._qobj_payload = {}

    # Some API calls (`get_status_jobs`, `get_status_job`) provide
    # enough information to recreate the `Job`. If that is the case, try
    # to make use of that information during instantiation, as
    # `self.status()` involves an extra call to the API.
    error_statuses = (ApiJobStatus.ERROR_CREATING_JOB.value,
                      ApiJobStatus.ERROR_VALIDATING_JOB.value,
                      ApiJobStatus.ERROR_RUNNING_JOB.value)
    if api_status == ApiJobStatus.VALIDATING.value:
        self._status = JobStatus.VALIDATING
    elif api_status == ApiJobStatus.COMPLETED.value:
        self._status = JobStatus.DONE
    elif api_status == ApiJobStatus.CANCELLED.value:
        self._status = JobStatus.CANCELLED
        self._cancelled = True
    elif api_status in error_statuses:
        self._status = JobStatus.ERROR
    else:
        self._status = JobStatus.INITIALIZING
        self.status()
def run(
        self,
        circuits: Union[QasmQobj, PulseQobj, QuantumCircuit, Schedule,
                        List[Union[QuantumCircuit, Schedule]]],
        job_name: Optional[str] = None,
        job_share_level: Optional[str] = None,
        job_tags: Optional[List[str]] = None,
        experiment_id: Optional[str] = None,
        validate_qobj: bool = None,
        header: Optional[Dict] = None,
        shots: Optional[int] = None,
        memory: Optional[bool] = None,
        qubit_lo_freq: Optional[List[int]] = None,
        meas_lo_freq: Optional[List[int]] = None,
        schedule_los: Optional[Union[List[Union[Dict[PulseChannel, float], LoConfig]],
                                     Union[Dict[PulseChannel, float], LoConfig]]] = None,
        meas_level: Optional[Union[int, MeasLevel]] = None,
        meas_return: Optional[Union[str, MeasReturnType]] = None,
        memory_slots: Optional[int] = None,
        memory_slot_size: Optional[int] = None,
        rep_time: Optional[int] = None,
        rep_delay: Optional[float] = None,
        init_qubits: Optional[bool] = None,
        parameter_binds: Optional[List[Dict[Parameter, float]]] = None,
        **run_config: Dict
) -> IBMQJob:
    """Run on the backend.

    If a keyword specified here is also present in the ``options``
    attribute/object, the value specified here will be used for this run.

    Args:
        circuits: An individual or a list of
            :class:`~qiskit.circuits.QuantumCircuit` or
            :class:`~qiskit.pulse.Schedule` objects to run on the backend.
            A :class:`~qiskit.qobj.QasmQobj` or a
            :class:`~qiskit.qobj.PulseQobj` object is also supported but
            is deprecated.
        job_name: Custom name to be assigned to the job. This job
            name can subsequently be used as a filter in the
            :meth:`jobs()` method. Job names do not need to be unique.
        job_share_level: Allows sharing a job at the hub, group, project,
            or global level. One of ``global``, ``hub``, ``group``,
            ``project``, ``none``. If unspecified, the job is not shared
            at any level.
        job_tags: Tags to be assigned to the job, usable later as a filter
            in :meth:`jobs()`.
        experiment_id: Used to add a job to an "experiment", which is a
            collection of jobs and additional metadata.
        validate_qobj: DEPRECATED. If ``True``, run JSON schema validation
            against the submitted payload. Only applicable if a Qobj is
            passed in.

        The following arguments are NOT applicable if a Qobj is passed in.

        header: User input attached to the job and copied to the
            corresponding result header. Headers do not affect the run.
        shots: Number of repetitions of each circuit, for sampling.
            Default: 1024 or ``max_shots``, whichever is smaller.
        memory: If ``True``, per-shot measurement bitstrings are returned
            (when supported by the backend).
        qubit_lo_freq: List of default qubit LO frequencies in Hz.
            Overridden by ``schedule_los`` if set.
        meas_lo_freq: List of default measurement LO frequencies in Hz.
            Overridden by ``schedule_los`` if set.
        schedule_los: Experiment LO configurations, frequencies in Hz.
        meas_level: Level of the measurement output for pulse experiments.
        meas_return: For ``meas_level`` 0 and 1, ``single`` returns every
            shot, ``avg`` returns the average over shots.
        memory_slots: Number of classical memory slots to use.
        memory_slot_size: Size of each memory slot if the output is Level 0.
        rep_time: Time per program execution in seconds. Must be from
            ``backend.configuration().rep_times``. Defaults to the first
            entry.
        rep_delay: Delay between programs in seconds. Only supported when
            ``backend.configuration().dynamic_reprate_enabled`` is True;
            then used instead of ``rep_time`` and must lie in
            ``backend.configuration().rep_delay_range``. Default is
            ``backend.configuration().default_rep_delay``.
        init_qubits: Whether to reset the qubits to the ground state for
            each shot. Default: ``True``.
        parameter_binds: List of Parameter bindings over which the set of
            experiments will be executed; m experiments with an n-element
            list yield m x n runs.
        **run_config: Extra arguments used to configure the run.

    Returns:
        The job to be executed.

    Raises:
        IBMQBackendApiError: If an unexpected error occurred while
            submitting the job.
        IBMQBackendApiProtocolError: If an unexpected value received from
            the server.
        IBMQBackendValueError: If an input parameter value is not valid.
    """
    # pylint: disable=arguments-differ
    if job_share_level:
        try:
            api_job_share_level = ApiJobShareLevel(job_share_level.lower())
        except ValueError:
            valid_job_share_levels_str = ', '.join(
                level.value for level in ApiJobShareLevel)
            raise IBMQBackendValueError(
                '"{}" is not a valid job share level. '
                'Valid job share levels are: {}.'
                .format(job_share_level, valid_job_share_levels_str)) from None
    else:
        api_job_share_level = ApiJobShareLevel.NONE

    validate_job_tags(job_tags, IBMQBackendValueError)

    # Simulators may declare a preferred simulation method.
    sim_method = None
    if self.configuration().simulator:
        sim_method = getattr(self.configuration(), 'simulation_method', None)

    if isinstance(circuits, (QasmQobj, PulseQobj)):
        # Deprecated path: caller handed us a pre-assembled Qobj.
        warnings.warn("Passing a Qobj to Backend.run is deprecated and will "
                      "be removed in a future release. Please pass in circuits "
                      "or pulse schedules instead.", DeprecationWarning,
                      stacklevel=3)  # need level 3 because of decorator
        qobj = circuits
        if sim_method and not hasattr(qobj.config, 'method'):
            qobj.config.method = sim_method
    else:
        # Assemble the circuits/schedules with the merged run configuration.
        qobj_header = run_config.pop('qobj_header', None)
        header = header or qobj_header
        run_config_dict = self._get_run_config(
            qobj_header=header,
            shots=shots,
            memory=memory,
            qubit_lo_freq=qubit_lo_freq,
            meas_lo_freq=meas_lo_freq,
            schedule_los=schedule_los,
            meas_level=meas_level,
            meas_return=meas_return,
            memory_slots=memory_slots,
            memory_slot_size=memory_slot_size,
            rep_time=rep_time,
            rep_delay=rep_delay,
            init_qubits=init_qubits,
            **run_config)
        if parameter_binds:
            run_config_dict['parameter_binds'] = parameter_binds
        if sim_method and 'method' not in run_config_dict:
            run_config_dict['method'] = sim_method
        qobj = assemble(circuits, self, **run_config_dict)

    if validate_qobj is not None:
        warnings.warn("The `validate_qobj` keyword is deprecated and will "
                      "be removed in a future release. "
                      "You can pull the schemas from the Qiskit/ibmq-schemas "
                      "repo and directly validate your payloads with that.",
                      DeprecationWarning, stacklevel=3)
        if validate_qobj:
            validate_qobj_against_schema(qobj)
    return self._submit_job(qobj, job_name, api_job_share_level,
                            job_tags, experiment_id)
def test_assemble_single_instruction(self):
    """Test assembling schedules, no lo config."""
    play = pulse.Play(pulse.Constant(100, 1.0), pulse.DriveChannel(0))
    # A bare instruction is accepted directly by assemble().
    validate_qobj_against_schema(assemble(play, self.backend))
def __init__(self, backend, job_id, api, qobj=None,
             creation_date=None, api_status=None):
    """IBMQJob init function.

    We can instantiate jobs from two sources: A QObj, and an already
    submitted job returned by the API servers.

    Args:
        backend (BaseBackend): The backend instance used to run this job.
        job_id (str or None): The job ID of an already submitted job.
            Pass `None` if you are creating a new job.
        api (IBMQConnector): IBMQ connector.
        qobj (Qobj): The Quantum Object. See notes below
        creation_date (str): When the job was run.
        api_status (str): `status` field directly from the API response.

    Notes:
        It is mandatory to pass either ``qobj`` or ``job_id``. Passing a
        ``qobj`` will ignore ``job_id`` and will create an instance to be
        submitted to the API server for job creation. Passing only a
        `job_id` will create an instance representing an already-created
        job retrieved from the API server.
    """
    # pylint: disable=unused-argument
    super().__init__(backend, job_id)

    if qobj:
        validate_qobj_against_schema(qobj)
        self._qobj_payload = qobj.as_dict()
    else:
        self._qobj_payload = {}

    self._future_captured_exception = None
    self._api = api
    self._backend = backend
    self._cancelled = False
    self._status = JobStatus.INITIALIZING
    # In case of not providing a `qobj`, it is assumed the job already
    # exists in the API (with `job_id`).
    if qobj is None:
        # Some API calls (`get_status_jobs`, `get_status_job`) provide
        # enough information to recreate the `Job`. If that is the case, try
        # to make use of that information during instantiation, as
        # `self.status()` involves an extra call to the API.
        if api_status == ApiJobStatus.VALIDATING.value:
            self._status = JobStatus.VALIDATING
        elif api_status == ApiJobStatus.COMPLETED.value:
            self._status = JobStatus.DONE
        elif api_status == ApiJobStatus.CANCELLED.value:
            self._status = JobStatus.CANCELLED
            self._cancelled = True
        else:
            self.status()
    self._queue_position = None

    def current_utc_time():
        """Return the current time in UTC format."""
        # BUG FIX: the original computed the timestamp but never returned
        # it, so `self._creation_date` silently became None.
        return datetime.datetime.utcnow().replace(
            tzinfo=datetime.timezone.utc).isoformat()

    self._creation_date = creation_date or current_utc_time()
    self._future = None
    self._api_error_msg = None
    self._result = None