def _submit_job(
            self,
            qobj: Qobj,
            job_name: Optional[str] = None,
            job_share_level: Optional[ApiJobShareLevel] = None) -> IBMQJob:
        """Submit qobj job to IBM-Q.
        Args:
            qobj: description of job.
            job_name: custom name to be assigned to the job. This job
                name can subsequently be used as a filter in the
                ``jobs()`` function call. Job names do not need to be unique.
            job_share_level: level the job should be shared at.

        Returns:
            An instance derived from ``BaseJob``.

        Events:
            ibmq.job.start: The job has started.

        Raises:
            IBMQBackendApiError: If an unexpected error occurred while submitting
                the job.
            IBMQBackendError: If an unexpected error occurred after submitting
                the job.
            IBMQBackendApiProtocolError: If an unexpected value is received from
                the server.
        """
        try:
            qobj_dict = qobj.to_dict()
            submit_info = self._api.job_submit(
                backend_name=self.name(),
                qobj_dict=qobj_dict,
                use_object_storage=getattr(self.configuration(),
                                           'allow_object_storage', False),
                job_name=job_name,
                job_share_level=job_share_level)
        except ApiError as ex:
            raise IBMQBackendApiError('Error submitting job: {}'.format(
                str(ex)))

        # Error in the job after submission:
        # Transition to the `ERROR` final state.
        if 'error' in submit_info:
            raise IBMQBackendError('Error submitting job: {}'.format(
                str(submit_info['error'])))

        # Submission success.
        submit_info.update({
            '_backend': self,
            'api': self._api,
            'qObject': qobj_dict
        })
        try:
            job = IBMQJob.from_dict(submit_info)
        except ModelValidationError as err:
            raise IBMQBackendApiProtocolError(
                'Unexpected return value from the server '
                'when submitting job: {}'.format(str(err)))
        Publisher().publish("ibmq.job.start", job)
        return job
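The method above publishes an ``ibmq.job.start`` event through the ``Publisher`` singleton once the job object has been created. As a minimal sketch of how client code might listen for that event, assuming the ``Subscriber`` base class is importable from ``qiskit.tools.events.pubsub`` (the same pub/sub machinery exercised by the test examples below); ``JobStartListener`` is an illustrative name only:

from qiskit.tools.events.pubsub import Subscriber


class JobStartListener(Subscriber):
    """Illustrative listener that reports every job announced via ibmq.job.start."""

    def __init__(self):
        super().__init__()
        # Register the callback for the event name used in _submit_job above.
        self.subscribe("ibmq.job.start", self._on_job_start)

    def _on_job_start(self, job):
        # The publisher passes the freshly created job instance as the only argument.
        print("Job started:", job.job_id())


# The listener stays subscribed for as long as the instance is alive.
listener = JobStartListener()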
Example #2
    def test_single_broker(self):
        """ Testing a single broker is instantiated no matter how many
        Publishers or Subscribers we have """

        publishers = [Publisher() for _ in range(10)]
        subscribers = [DummySubscriber() for _ in range(10)]

        for pub, sub in zip(publishers, subscribers):
            self.assertEqual(id(pub._broker), id(sub._broker))
Example #3
    def test_pusbsub(self):
        """ Test subscribing works"""
        sub = DummySubscriber()

        def action_callback(test):
            """ Callback called when 'publisher.action` event occurs """
            test.assertTrue(True)

        sub.subscribe("publisher.action", action_callback)
        Publisher().publish("publisher.action", self)
Example #4
    def test_unsubscribe_simple(self):
        """ Testing a simple unsubscribe works """
        sub = DummySubscriber()

        def callback(_who, test):
            """ This should have ever been called """
            test.fail("We shouldn't have reach this code!")

        sub.subscribe("publisher.action", callback)
        sub.unsubscribe("publisher.action", callback)
        Publisher().publish("publisher.action", self)
Example #5
    def submit(self):
        """Submit job to IBM-Q.

        Events:
            ibmq.job.start: The job has started.

        Raises:
            JobError: If we have already submitted the job.
        """
        # TODO: Validation against the schema should be done here and not
        # during initialization. Once done, we should document that the method
        # can raise QobjValidationError.
        if self._future is not None or self._job_id is not None:
            raise JobError("We have already submitted the job!")
        self._future = self._executor.submit(self._submit_callback)
        Publisher().publish("ibmq.job.start", self)
Example #6
    def test_unsubscribe_multiple(self):
        """ Testing unsubscribe works with many other subscribed event works """

        sub = DummySubscriber()

        def callback(test):
            """ This should have ever been called """
            test.fail("We shouldn't have reach this code!")

        def dummy_callback(_test):
            """ Just a dummy callback, it won't be executed"""
            pass

        sub.subscribe("publisher.action", callback)
        sub.subscribe("publisher.action", dummy_callback)
        sub.unsubscribe("publisher.action", callback)
        Publisher().publish("publisher.action", self)
Example #7
def parallel_map(  # pylint: disable=dangerous-default-value
        task,
        values,
        task_args=tuple(),
        task_kwargs={},
        num_processes=CPU_COUNT):
    """
    Parallel execution of a mapping of `values` to the function `task`. This
    is functionally equivalent to::

        result = [task(value, *task_args, **task_kwargs) for value in values]

    On Windows this function defaults to a serial implementation to avoid the
    overhead from spawning processes in Windows.

    Args:
        task (func): Function that is to be called for each value in ``values``.
        values (array_like): List or array of values for which the ``task``
                            function is to be evaluated.
        task_args (list): Optional additional arguments to the ``task`` function.
        task_kwargs (dict): Optional additional keyword arguments to the ``task`` function.
        num_processes (int): Number of processes to spawn.

    Returns:
        result: The result list contains the value of
                ``task(value, *task_args, **task_kwargs)`` for
                each value in ``values``.

    Raises:
        QiskitError: If user interrupts via keyboard.

    Events:
        terra.parallel.start: The collection of parallel tasks are about to start.
        terra.parallel.done: One of the parallel tasks has finished.
        terra.parallel.finish: All the parallel tasks have finished.
    """
    if len(values) == 0:
        return []
    if len(values) == 1:
        return [task(values[0], *task_args, **task_kwargs)]

    Publisher().publish("terra.parallel.start", len(values))
    nfinished = [0]

    def _callback(_):
        nfinished[0] += 1
        Publisher().publish("terra.parallel.done", nfinished[0])

    # Run in parallel if parallel execution is enabled and we are not already running in parallel
    if (num_processes > 1 and os.getenv("QISKIT_IN_PARALLEL") == "FALSE"
            and CONFIG.get("parallel_enabled", PARALLEL_DEFAULT)):
        os.environ["QISKIT_IN_PARALLEL"] = "TRUE"
        try:
            results = []
            with ProcessPoolExecutor(max_workers=num_processes) as executor:
                param = map(
                    lambda value: (task, value, task_args, task_kwargs),
                    values)
                future = executor.map(_task_wrapper, param)

            results = list(future)
            Publisher().publish("terra.parallel.done", len(results))

        except (KeyboardInterrupt, Exception) as error:
            if isinstance(error, KeyboardInterrupt):
                Publisher().publish("terra.parallel.finish")
                os.environ["QISKIT_IN_PARALLEL"] = "FALSE"
                raise QiskitError(
                    "Keyboard interrupt in parallel_map.") from error
            # Otherwise just reset parallel flag and error
            os.environ["QISKIT_IN_PARALLEL"] = "FALSE"
            raise error

        Publisher().publish("terra.parallel.finish")
        os.environ["QISKIT_IN_PARALLEL"] = "FALSE"
        return results

    # Fall back to serial execution if parallelism is disabled or another
    # parallel_map is already running.
    results = []
    for value in values:
        result = task(value, *task_args, **task_kwargs)
        results.append(result)
        _callback(0)
    Publisher().publish("terra.parallel.finish")
    return results
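Because ``parallel_map`` publishes ``terra.parallel.start``, ``terra.parallel.done`` and ``terra.parallel.finish``, a caller can track progress by subscribing to those events. A minimal sketch, assuming ``Subscriber`` comes from ``qiskit.tools.events.pubsub`` and ``parallel_map`` from ``qiskit.tools``; ``ParallelProgress`` and ``square`` are illustrative names only:

from qiskit.tools import parallel_map
from qiskit.tools.events.pubsub import Subscriber


class ParallelProgress(Subscriber):
    """Illustrative progress reporter driven by the terra.parallel.* events."""

    def __init__(self):
        super().__init__()
        self._total = 0
        self.subscribe("terra.parallel.start", self._on_start)
        self.subscribe("terra.parallel.done", self._on_done)
        self.subscribe("terra.parallel.finish", self._on_finish)

    def _on_start(self, num_tasks):
        # parallel_map publishes the number of values it is about to process.
        self._total = num_tasks
        print("starting", num_tasks, "tasks")

    def _on_done(self, nfinished):
        # Published with the running count of finished tasks.
        print("finished {}/{}".format(nfinished, self._total))

    def _on_finish(self):
        # Published once, with no arguments, when all tasks are done.
        print("all tasks finished")


def square(value):
    """Trivial task used only to demonstrate the events."""
    return value * value


if __name__ == "__main__":
    progress = ParallelProgress()
    results = parallel_map(square, list(range(8)))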
Example #8
    def _callback(_):
        nfinished[0] += 1
        Publisher().publish("terra.parallel.done", nfinished[0])
Example #9
    def _submit_job(
            self,
            qobj: Union[QasmQobj, PulseQobj],
            job_name: Optional[str] = None,
            job_share_level: Optional[ApiJobShareLevel] = None,
            job_tags: Optional[List[str]] = None,
            experiment_id: Optional[str] = None
    ) -> IBMQJob:
        """Submit the Qobj to the backend.

        Args:
            qobj: The Qobj to be executed.
            job_name: Custom name to be assigned to the job. This job
                name can subsequently be used as a filter in the
                ``jobs()`` method. Job names do not need to be unique.
            job_share_level: Level the job should be shared at.
            job_tags: Tags to be assigned to the job.
            experiment_id: Used to add a job to an experiment.

        Returns:
            The job to be executed.

        Events:
            ibmq.job.start: The job has started.

        Raises:
            IBMQBackendApiError: If an unexpected error occurred while submitting
                the job.
            IBMQBackendError: If an unexpected error occurred after submitting
                the job.
            IBMQBackendApiProtocolError: If an unexpected value is received from
                 the server.
            IBMQBackendJobLimitError: If the job could not be submitted because
                the job limit has been reached.
        """
        try:
            qobj_dict = qobj.to_dict()
            submit_info = self._api_client.job_submit(
                backend_name=self.name(),
                qobj_dict=qobj_dict,
                job_name=job_name,
                job_share_level=job_share_level,
                job_tags=job_tags,
                experiment_id=experiment_id)
        except ApiError as ex:
            if 'Error code: 3458' in str(ex):
                raise IBMQBackendJobLimitError('Error submitting job: {}'.format(str(ex))) from ex
            raise IBMQBackendApiError('Error submitting job: {}'.format(str(ex))) from ex

        # Error in the job after submission:
        # Transition to the `ERROR` final state.
        if 'error' in submit_info:
            raise IBMQBackendError(
                'Error submitting job: {}'.format(str(submit_info['error'])))

        # Submission success.
        try:
            job = IBMQJob(backend=self, api_client=self._api_client, qobj=qobj, **submit_info)
            logger.debug('Job %s was successfully submitted.', job.job_id())
        except TypeError as err:
            logger.debug("Invalid job data received: %s", submit_info)
            raise IBMQBackendApiProtocolError('Unexpected return value received from the server '
                                              'when submitting job: {}'.format(str(err))) from err
        Publisher().publish("ibmq.job.start", job)
        return job
Example #10
def parallel_map(task, values, task_args=tuple(), task_kwargs={},  # pylint: disable=W0102
                 num_processes=CPU_COUNT):
    """
    Parallel execution of a mapping of `values` to the function `task`. This
    is functionally equivalent to::

        result = [task(value, *task_args, **task_kwargs) for value in values]

    On Windows this function defaults to a serial implementation to avoid the
    overhead from spawning processes in Windows.

    Args:
        task (func): Function that is to be called for each value in ``values``.
        values (array_like): List or array of values for which the ``task``
                            function is to be evaluated.
        task_args (list): Optional additional arguments to the ``task`` function.
        task_kwargs (dict): Optional additional keyword arguments to the ``task`` function.
        num_processes (int): Number of processes to spawn.

    Returns:
        result: The result list contains the value of
                ``task(value, *task_args, **task_kwargs)`` for
                each value in ``values``.

    Raises:
        QiskitError: If user interrupts via keyboard.

    Events:
        terra.parallel.start: The collection of parallel tasks are about to start.
        terra.parallel.done: One of the parallel tasks has finished.
        terra.parallel.finish: All the parallel tasks have finished.
    """
    if len(values) == 1:
        return [task(values[0], *task_args, **task_kwargs)]

    Publisher().publish("terra.parallel.start", len(values))
    nfinished = [0]

    def _callback(_):
        nfinished[0] += 1
        Publisher().publish("terra.parallel.done", nfinished[0])

    # Run in parallel if not Win and not in parallel already
    if platform.system() != 'Windows' and num_processes > 1 \
       and os.getenv('QISKIT_IN_PARALLEL') == 'FALSE':
        os.environ['QISKIT_IN_PARALLEL'] = 'TRUE'
        try:
            pool = Pool(processes=num_processes)

            async_res = [pool.apply_async(task, (value,) + task_args, task_kwargs,
                                          _callback) for value in values]

            while not all([item.ready() for item in async_res]):
                for item in async_res:
                    item.wait(timeout=0.1)

            pool.terminate()
            pool.join()

        except KeyboardInterrupt:
            pool.terminate()
            pool.join()
            Publisher().publish("terra.parallel.finish")
            # Reset the parallel flag before propagating the interrupt.
            os.environ['QISKIT_IN_PARALLEL'] = 'FALSE'
            raise QiskitError('Keyboard interrupt in parallel_map.')

        Publisher().publish("terra.parallel.finish")
        os.environ['QISKIT_IN_PARALLEL'] = 'FALSE'
        return [ar.get() for ar in async_res]

    # Cannot run in parallel on Windows, or if another parallel_map is
    # already running.
    results = []
    for value in values:
        result = task(value, *task_args, **task_kwargs)
        results.append(result)
        _callback(0)
    Publisher().publish("terra.parallel.finish")
    return results
Example #11
    def _submit_job(
        self,
        qobj: Union[QasmQobj, PulseQobj],
        job_name: Optional[str] = None,
        job_tags: Optional[List[str]] = None,
        composite_job_id: Optional[str] = None,
        live_data_enabled: Optional[bool] = None,
    ) -> IBMJob:
        """Submit the Qobj to the backend.

        Args:
            qobj: The Qobj to be executed.
            job_name: Custom name to be assigned to the job. This job
                name can subsequently be used as a filter in the
                ``jobs()`` method. Job names do not need to be unique.
            job_tags: Tags to be assigned to the job.
            composite_job_id: Composite job ID, if this Qobj belongs to a composite job.
            live_data_enabled: Used to activate/deactivate live data on the backend.

        Returns:
            The job to be executed.

        Events:
            ibm.job.start: The job has started.

        Raises:
            IBMBackendApiError: If an unexpected error occurred while submitting
                the job.
            IBMBackendError: If an unexpected error occurred after submitting
                the job.
            IBMBackendApiProtocolError: If an unexpected value is received from
                 the server.
            IBMBackendJobLimitError: If the job could not be submitted because
                the job limit has been reached.
        """
        try:
            qobj_dict = qobj.to_dict()
            submit_info = self._api_client.job_submit(
                backend_name=self.name,
                qobj_dict=qobj_dict,
                job_name=job_name,
                job_tags=job_tags,
                experiment_id=composite_job_id,
                live_data_enabled=live_data_enabled,
            )
        except ApiError as ex:
            if "Error code: 3458" in str(ex):
                raise IBMBackendJobLimitError(
                    "Error submitting job: {}".format(str(ex))) from ex
            raise IBMBackendApiError("Error submitting job: {}".format(
                str(ex))) from ex

        # Error in the job after submission:
        # Transition to the `ERROR` final state.
        if "error" in submit_info:
            raise IBMBackendError("Error submitting job: {}".format(
                str(submit_info["error"])))

        # Submission success.
        try:
            job = IBMCircuitJob(backend=self,
                                api_client=self._api_client,
                                qobj=qobj,
                                **submit_info)
            logger.debug("Job %s was successfully submitted.", job.job_id())
        except TypeError as err:
            logger.debug("Invalid job data received: %s", submit_info)
            raise IBMBackendApiProtocolError(
                "Unexpected return value received from the server "
                "when submitting job: {}".format(str(err))) from err
        Publisher().publish("ibm.job.start", job)
        return job
Example #12
    def _submit_job(self,
                    qobj: Qobj,
                    job_name: Optional[str] = None,
                    job_share_level: Optional[ApiJobShareLevel] = None,
                    job_tags: Optional[List[str]] = None) -> IBMQJob:
        """Submit the Qobj to the backend.

        Args:
            qobj: The Qobj to be executed.
            job_name: Custom name to be assigned to the job. This job
                name can subsequently be used as a filter in the
                ``jobs()`` method. Job names do not need to be unique.
            job_share_level: Level the job should be shared at.
            job_tags: Tags to be assigned to the job.

        Returns:
            The job to be executed, an instance derived from BaseJob.

        Events:
            ibmq.job.start: The job has started.

        Raises:
            IBMQBackendApiError: If an unexpected error occurred while submitting
                the job.
            IBMQBackendError: If an unexpected error occurred after submitting
                the job.
            IBMQBackendApiProtocolError: If an unexpected value is received from
                 the server.
        """
        try:
            qobj_dict = qobj.to_dict()
            submit_info = self._api.job_submit(backend_name=self.name(),
                                               qobj_dict=qobj_dict,
                                               job_name=job_name,
                                               job_share_level=job_share_level,
                                               job_tags=job_tags)
        except ApiError as ex:
            raise IBMQBackendApiError('Error submitting job: {}'.format(
                str(ex))) from ex

        # Error in the job after submission:
        # Transition to the `ERROR` final state.
        if 'error' in submit_info:
            raise IBMQBackendError('Error submitting job: {}'.format(
                str(submit_info['error'])))

        # Submission success.
        submit_info.update({
            '_backend': self,
            'api': self._api,
            'qObject': qobj
        })
        try:
            job = IBMQJob.from_dict(submit_info)
            logger.debug('Job %s was successfully submitted.', job.job_id())
        except ModelValidationError as err:
            raise IBMQBackendApiProtocolError(
                'Unexpected return value received from the server '
                'when submitting job: {}'.format(str(err))) from err
        Publisher().publish("ibmq.job.start", job)
        return job