예제 #1
0
    def __init__(
        self,
        maxiter: int = 1000,
        maxfail: int = 10,
        maxmp: int = None,
        verbose: bool = False,
    ) -> None:
        """
        Args:
            maxiter: Maximum number of function evaluations.
            maxmp: Maximum number of  model points requested for the local fit.
                 Default = 2 * number of parameters + 6 set to this value when None.
            maxfail: Maximum number of failures to improve the solution. Stops the algorithm
                    after maxfail is reached.
            verbose: Provide verbose (debugging) output.

        Raises:
            MissingOptionalLibraryError: scikit-quant or SQSnobFit not installed
        """
        # NOTE(review): maxmp is annotated `int` but defaults to None, i.e. it is
        # effectively Optional[int]; left as-is since `Optional` may not be imported.
        # Both optional packages are required: scikit-quant and SQSnobFit.
        if not _HAS_SKQUANT:
            raise MissingOptionalLibraryError(
                libname='scikit-quant',
                name='SNOBFIT',
                pip_install="pip install 'qiskit-aqua[skquant]'")
        if not _HAS_SKSNOBFIT:
            raise MissingOptionalLibraryError(
                libname='SQSnobFit',
                name='SNOBFIT',
                pip_install='pip install SQSnobFit')
        super().__init__()
        # Stash the configuration; consumed later by the optimization routine.
        self._maxiter = maxiter
        self._maxfail = maxfail
        self._maxmp = maxmp
        self._verbose = verbose
예제 #2
0
    def __init__(
        self,
        token: Optional[str] = None,
        tickers: Optional[Union[str, List[str]]] = None,
        start: datetime.datetime = datetime.datetime(2016, 1, 1),
        end: datetime.datetime = datetime.datetime(2016, 1, 30)
    ) -> None:
        """
        Initializer
        Args:
            token: quandl access token, which is not needed, strictly speaking
            tickers: tickers, either a list or a newline-/semicolon-separated string
            start: start time
            end: end time
         Raises:
            MissingOptionalLibraryError: Quandl not installed
        """
        super().__init__()
        if not _HAS_QUANDL:
            raise MissingOptionalLibraryError(libname='Quandl',
                                              name='WikipediaDataProvider',
                                              pip_install='pip install quandl')
        self._tickers = None  # type: Optional[Union[str, List[str]]]
        tickers = tickers if tickers is not None else []
        if isinstance(tickers, list):
            self._tickers = tickers
        else:
            # Accept a newline- or semicolon-separated string of tickers.
            self._tickers = tickers.replace('\n', ';').split(";")
        self._n = len(self._tickers)

        self._token = token
        # BUG FIX: `self._tickers = tickers` was re-assigned here with the raw
        # argument, discarding the parsed list whenever a string was passed.
        self._start = start.strftime('%Y-%m-%d')
        self._end = end.strftime('%Y-%m-%d')
        self._data = []
예제 #3
0
    def __init__(self, n_features: int = 1, n_out: int = 1) -> None:
        """
        Args:
            n_features: Dimension of input data vector.
            n_out: Dimension of the discriminator's output vector.

        Raises:
            MissingOptionalLibraryError: Pytorch not installed
        """
        super().__init__()
        if not _HAS_TORCH:
            raise MissingOptionalLibraryError(
                libname='Pytorch',
                name='PyTorchDiscriminator',
                pip_install="pip install 'qiskit-aqua[torch]'")

        self._n_features = n_features
        self._n_out = n_out
        # discriminator_net: torch.nn.Module or None, Discriminator network.
        # Imported lazily so this module can be imported without torch present.
        # pylint: disable=import-outside-toplevel
        from ._pytorch_discriminator_net import DiscriminatorNet
        self._discriminator = DiscriminatorNet(self._n_features, self._n_out)
        # optimizer: torch.optim.Optimizer or None, Optimizer initialized w.r.t
        # discriminator network parameters.
        # ADAM with AMSGrad and a small fixed learning rate; `optim` is assumed
        # to be torch.optim imported at module level — TODO confirm.
        self._optimizer = optim.Adam(self._discriminator.parameters(),
                                     lr=1e-5,
                                     amsgrad=True)

        # Accumulator for training results/metrics.
        self._ret = {}  # type: Dict[str, Any]
    def __init__(
        self,
        tickers: Optional[Union[str, List[str]]] = None,
        start: datetime.datetime = datetime.datetime(2016, 1, 1),
        end: datetime.datetime = datetime.datetime(2016, 1, 30)
    ) -> None:
        """
        Initializer
        Args:
            tickers: tickers, either a list or a newline-/semicolon-separated string
            start: start time
            end: end time
        Raises:
            MissingOptionalLibraryError: YFinance not installed
        """
        super().__init__()
        if not _HAS_YFINANCE:
            raise MissingOptionalLibraryError(
                libname='YFinance',
                name='YahooDataProvider',
                pip_install='pip install yfinance')
        self._tickers = None  # type: Optional[Union[str, List[str]]]
        tickers = tickers if tickers is not None else []
        if isinstance(tickers, list):
            self._tickers = tickers
        else:
            # Accept a newline- or semicolon-separated string of tickers.
            self._tickers = tickers.replace('\n', ';').split(";")
        self._n = len(self._tickers)

        # BUG FIX: `self._tickers = tickers` was re-assigned here with the raw
        # argument, discarding the parsed list whenever a string was passed.
        self._start = start.strftime('%Y-%m-%d')
        self._end = end.strftime('%Y-%m-%d')
        self._data = []
    def __init__(self, max_evals: int = 1000) -> None:  # pylint: disable=unused-argument
        """
        Args:
            max_evals: Maximum allowed number of function evaluations.

        Raises:
            MissingOptionalLibraryError: NLopt library not installed.
        """
        if not _HAS_NLOPT:
            raise MissingOptionalLibraryError(
                libname='nlopt',
                name='NLoptOptimizer',
                msg='See https://qiskit.org/documentation/apidoc/'
                    'qiskit.aqua.components.optimizers.nlopts.html'
                    ' for installation information')

        super().__init__()
        # Record every constructor parameter whose name is listed in _OPTIONS.
        # At this point locals() holds only the parameters (self, max_evals),
        # so this is a generic "copy supported args into options" idiom.
        for k, v in list(locals().items()):
            if k in self._OPTIONS:
                self._options[k] = v

        # Map this wrapper's enum members onto the concrete nlopt algorithm ids.
        self._optimizer_names = {
            NLoptOptimizerType.GN_CRS2_LM: nlopt.GN_CRS2_LM,
            NLoptOptimizerType.GN_DIRECT_L_RAND: nlopt.GN_DIRECT_L_RAND,
            NLoptOptimizerType.GN_DIRECT_L: nlopt.GN_DIRECT_L,
            NLoptOptimizerType.GN_ESCH: nlopt.GN_ESCH,
            NLoptOptimizerType.GN_ISRES: nlopt.GN_ISRES,
        }
예제 #6
0
    def __init__(self,
                 tickers: Optional[Union[str, List[str]]] = None,
                 start: datetime.datetime = datetime.datetime(2016, 1, 1),
                 end: datetime.datetime = datetime.datetime(2016, 1, 30),
                 seed: Optional[int] = None) -> None:
        """
        Initializer
        Args:
            tickers: tickers
            start: first data point
            end: last data point precedes this date
            seed: shall a seed be used?
        Raises:
            MissingOptionalLibraryError: Pandas not installed
        """
        super().__init__()
        if not _HAS_PANDAS:
            raise MissingOptionalLibraryError(
                libname='Pandas',
                name='RandomDataProvider',
                pip_install='pip install pandas')
        # Fall back to two demo tickers when none were supplied.
        if tickers is None:
            tickers = ["TICKER1", "TICKER2"]
        if isinstance(tickers, list):
            parsed = tickers
        else:
            # A string may separate tickers by newlines or semicolons.
            parsed = tickers.replace('\n', ';').split(";")
        self._tickers = parsed
        self._n = len(parsed)

        self._start = start
        self._end = end
        self._seed = seed
예제 #7
0
def _safe_submit_qobj(qobj: QasmQobj, backend: Union[Backend, BaseBackend],
                      backend_options: Dict, noise_config: Dict,
                      skip_qobj_validation: bool) -> Tuple[BaseJob, str]:
    """Submit ``qobj`` to ``backend``, retrying until a job id is obtained.

    Loops indefinitely on submission failure.  On an IBMQ job-limit error it
    blocks on the oldest active job before retrying, so a queue slot frees up.

    Returns:
        The submitted job and its job id.

    Raises:
        MissingOptionalLibraryError: IBMQ backend but qiskit-ibmq-provider absent.
    """
    # assure get job ids
    while True:
        try:
            job = run_on_backend(backend,
                                 qobj,
                                 backend_options=backend_options,
                                 noise_config=noise_config,
                                 skip_qobj_validation=skip_qobj_validation)
            job_id = job.job_id()
            break
        except QiskitError as ex:
            failure_warn = True
            if is_ibmq_provider(backend):
                # Imported lazily: only needed when the backend is IBMQ.
                try:
                    from qiskit.providers.ibmq import IBMQBackendJobLimitError
                except ImportError as ex1:
                    raise MissingOptionalLibraryError(
                        libname='qiskit-ibmq-provider',
                        name='_safe_submit_qobj',
                        pip_install='pip install qiskit-ibmq-provider'
                    ) from ex1
                if isinstance(ex, IBMQBackendJobLimitError):

                    # Find the oldest still-active job and wait on it so the
                    # resubmission below has a slot available.
                    oldest_running = backend.jobs(
                        limit=1,
                        descending=False,
                        status=['QUEUED', 'VALIDATING', 'RUNNING'])
                    if oldest_running:
                        oldest_running = oldest_running[0]
                        logger.warning(
                            "Job limit reached, waiting for job %s to finish "
                            "before submitting the next one.",
                            oldest_running.job_id())
                        failure_warn = False  # Don't issue a second warning.
                        try:
                            oldest_running.wait_for_final_state(timeout=300)
                        except Exception:  # pylint: disable=broad-except
                            # If the wait somehow fails or times out, we'll just re-try
                            # the job submit and see if it works now.
                            pass
            if failure_warn:
                logger.warning(
                    "FAILURE: Can not get job id, Resubmit the qobj to get job id. "
                    "Terra job error: %s ", ex)
        except Exception as ex:  # pylint: disable=broad-except
            # Non-Qiskit errors are also logged and retried.
            logger.warning(
                "FAILURE: Can not get job id, Resubmit the qobj to get job id."
                "Error: %s ", ex)

    return job, job_id
예제 #8
0
def digits(training_size, test_size, n, plot_data=False):
    """ returns digits dataset """
    class_labels = [r'A', r'B', r'C', r'D', r'E', r'F', r'G', r'H', r'I', r'J']
    data = datasets.load_digits()
    # pylint: disable=no-member
    sample_train, sample_test, label_train, label_test = train_test_split(
        data.data, data.target, test_size=0.3, random_state=22)

    # Standardize features to zero mean and unit variance (fit on train only).
    scaler = StandardScaler().fit(sample_train)
    sample_train = scaler.transform(sample_train)
    sample_test = scaler.transform(sample_test)

    # Project down to n principal components (one feature per qubit).
    reducer = PCA(n_components=n).fit(sample_train)
    sample_train = reducer.transform(sample_train)
    sample_test = reducer.transform(sample_test)

    # Rescale everything jointly into the interval (-1, +1).
    combined = np.append(sample_train, sample_test, axis=0)
    rescaler = MinMaxScaler((-1, 1)).fit(combined)
    sample_train = rescaler.transform(sample_train)
    sample_test = rescaler.transform(sample_test)

    # Group samples by class label, keeping the requested count of each.
    training_input = {}
    test_input = {}
    for k, key in enumerate(class_labels):
        training_input[key] = (sample_train[label_train == k, :])[:training_size]
        test_input[key] = (sample_test[label_test == k, :])[:test_size]

    if plot_data:
        # Matplotlib is only required when plotting, hence the lazy import.
        try:
            import matplotlib.pyplot as plt
        except ImportError as ex:
            raise MissingOptionalLibraryError(
                libname='Matplotlib',
                name='digits',
                pip_install='pip install matplotlib') from ex
        # NOTE(review): range(0, 9) plots classes 0-8 only; class 9 is skipped
        # in the scatter plot — preserved as-is, confirm intent.
        for k in range(0, 9):
            plt.scatter(sample_train[label_train == k, 0][:training_size],
                        sample_train[label_train == k, 1][:training_size])

        plt.title("PCA dim. reduced Digits dataset")
        plt.show()

    return sample_train, training_input, test_input, class_labels
예제 #9
0
def iris(training_size, test_size, n, plot_data=False):
    """Return a PCA-reduced Iris dataset split into training and test inputs.

    Args:
        training_size: number of samples kept per class for training
        test_size: number of samples kept per class for testing
        n: number of PCA components (qubits) to keep
        plot_data: whether to scatter-plot the training samples

    Returns:
        tuple: (sample_train, training_input, test_input, class_labels)

    Raises:
        MissingOptionalLibraryError: matplotlib not installed when plotting
    """
    class_labels = [r'A', r'B', r'C']
    data, target = datasets.load_iris(return_X_y=True)
    # NOTE(review): test_size=1 keeps a single absolute test sample in this
    # split — preserved as-is, confirm intent.
    sample_train, sample_test, label_train, label_test = \
        train_test_split(data, target, test_size=1, random_state=42)

    # Now we standardize for gaussian around 0 with unit variance
    std_scale = StandardScaler().fit(sample_train)
    sample_train = std_scale.transform(sample_train)
    sample_test = std_scale.transform(sample_test)

    # Now reduce number of features to number of qubits
    pca = PCA(n_components=n).fit(sample_train)
    sample_train = pca.transform(sample_train)
    sample_test = pca.transform(sample_test)

    # Scale to the range (-1,+1)
    samples = np.append(sample_train, sample_test, axis=0)
    minmax_scale = MinMaxScaler((-1, 1)).fit(samples)
    sample_train = minmax_scale.transform(sample_train)
    sample_test = minmax_scale.transform(sample_test)

    # Pick training size number of samples from each distro
    training_input = {
        key: (sample_train[label_train == k, :])[:training_size]
        for k, key in enumerate(class_labels)
    }
    test_input = {
        key: (sample_test[label_test == k, :])[:test_size]
        for k, key in enumerate(class_labels)
    }

    if plot_data:
        try:
            import matplotlib.pyplot as plt
        except ImportError as ex:
            # BUG FIX: chain the original ImportError (`from ex`) so the real
            # cause is preserved, matching digits() and ad_hoc_data().
            raise MissingOptionalLibraryError(
                libname='Matplotlib',
                name='iris',
                pip_install='pip install matplotlib') from ex
        for k in range(0, 3):
            plt.scatter(sample_train[label_train == k, 0][:training_size],
                        sample_train[label_train == k, 1][:training_size])

        plt.title("Iris dataset")
        plt.show()

    return sample_train, training_input, test_input, class_labels
예제 #10
0
    def __init__(self, disp: Optional[bool] = False) -> None:
        """Create a CplexOptimizer instance.

        Args:
            disp: Whether to print CPLEX output or not.

        Raises:
            MissingOptionalLibraryError: CPLEX is not installed.
        """
        if _HAS_CPLEX:
            self._disp = disp
        else:
            # CPLEX is an optional dependency; fail loudly with install advice.
            raise MissingOptionalLibraryError(
                libname='CPLEX',
                name='CplexOptimizer',
                pip_install='pip install qiskit-aqua[cplex]')
예제 #11
0
    def __init__(self, cplex=None):
        """Wrap a CPLEX model, optionally copying another SimpleCPLEX's model.

        Args:
            cplex: an existing SimpleCPLEX whose underlying model is copied;
                a fresh empty Cplex model is created when falsy.

        Raises:
            MissingOptionalLibraryError: CPLEX is not installed.
        """
        if not _HAS_CPLEX:
            raise MissingOptionalLibraryError(
                libname='CPLEX',
                name='SimpleCPLEX',
                pip_install='pip install qiskit-aqua[cplex]')

        if cplex:
            # Copy-construct from the other instance's wrapped Cplex model.
            self._model = Cplex(cplex._model)
        else:
            self._model = Cplex()

        self._init_lin()
        # to avoid a variable with index 0
        self._model.variables.add(names=['_dummy_'], types=[self._model.variables.type.continuous])
        self._var_id = {'_dummy_': 0}
예제 #12
0
    def __init__(
        self,
        token: str,
        tickers: Union[str, List[str]],
        stockmarket: StockMarket = StockMarket.LONDON,
        start: datetime.datetime = datetime.datetime(2016, 1, 1),
        end: datetime.datetime = datetime.datetime(2016, 1, 30)
    ) -> None:
        """
        Initializer
        Args:
            token: quandl access token
            tickers: tickers, either a list or a newline-/semicolon-separated string
            stockmarket: LONDON, EURONEXT, or SINGAPORE
            start: first data point
            end: last data point precedes this date
        Raises:
            MissingOptionalLibraryError: Quandl not installed
            QiskitFinanceError: provider doesn't support given stock market
        """
        super().__init__()
        if not _HAS_QUANDL:
            raise MissingOptionalLibraryError(libname='Quandl',
                                              name='ExchangeDataProvider',
                                              pip_install='pip install quandl')
        self._tickers = []  # type: Union[str, List[str]]
        if isinstance(tickers, list):
            self._tickers = tickers
        else:
            # Accept a newline- or semicolon-separated string of tickers.
            self._tickers = tickers.replace('\n', ';').split(";")
        self._n = len(self._tickers)

        if stockmarket not in [
                StockMarket.LONDON, StockMarket.EURONEXT, StockMarket.SINGAPORE
        ]:
            msg = "ExchangeDataProvider does not support "
            msg += stockmarket.value
            msg += " as a stock market."
            raise QiskitFinanceError(msg)

        # This is to aid serialization; string is ok to serialize
        self._stockmarket = str(stockmarket.value)

        self._token = token
        # BUG FIX: `self._tickers = tickers` was re-assigned here with the raw
        # argument, discarding the parsed list whenever a string was passed.
        self._start = start.strftime('%Y-%m-%d')
        self._end = end.strftime('%Y-%m-%d')
예제 #13
0
def run_on_backend(backend,
                   qobj,
                   backend_options=None,
                   noise_config=None,
                   skip_qobj_validation=False):
    """ run on backend """
    if skip_qobj_validation:
        # Bypass terra's qobj validation by driving provider-specific job
        # machinery directly; only possible for Aer and BasicAer backends.
        job_id = str(uuid.uuid4())
        if is_aer_provider(backend):
            # pylint: disable=import-outside-toplevel
            try:
                from qiskit.providers.aer.aerjob import AerJob
            except ImportError as ex:
                raise MissingOptionalLibraryError(
                    libname='qiskit-aer',
                    name='run_on_backend',
                    pip_install='pip install qiskit-aer') from ex
            # NOTE(review): assumes backend_options/noise_config are {} or dicts
            # keyed by 'backend_options'/'noise_model' respectively — confirm.
            temp_backend_options = \
                backend_options['backend_options'] if backend_options != {} else None
            temp_noise_config = noise_config[
                'noise_model'] if noise_config != {} else None

            # Add new options
            if temp_backend_options is not None or temp_noise_config is not None:
                config = qobj.config.to_dict()
                if temp_backend_options is not None:
                    for key, val in temp_backend_options.items():
                        # Serialize option values that expose to_dict().
                        config[key] = val if not hasattr(
                            val, 'to_dict') else val.to_dict()
                if temp_noise_config is not None:
                    config['noise_model'] = temp_noise_config
                qobj.config = QasmQobjConfig.from_dict(config)

            # Build and submit the Aer job directly against the private _run.
            job = AerJob(backend, job_id, backend._run, qobj)
            job.submit()
        elif is_basicaer_provider(backend):
            # BasicAer path: configure the backend, then submit on its executor.
            backend._set_options(qobj_config=qobj.config, **backend_options)
            job = BasicAerJob(backend, job_id, backend._run_job, qobj)
            job._future = job._executor.submit(job._fn, job._job_id, job._qobj)
        else:
            logger.info("Can't skip qobj validation for the %s provider.",
                        backend.provider().__class__.__name__)
            job = backend.run(qobj, **backend_options, **noise_config)
        return job
    else:
        # Normal path: let the backend validate and run the qobj itself.
        job = backend.run(qobj, **backend_options, **noise_config)
        return job
예제 #14
0
    def __init__(self,
                 maxiter: int = 1000,
                 ) -> None:
        """Create an IMFIL optimizer wrapper.

        Args:
            maxiter: Maximum number of function evaluations.

        Raises:
            MissingOptionalLibraryError: scikit-quant not installed
        """
        if _HAS_SKQUANT:
            super().__init__()
            self._maxiter = maxiter
        else:
            # scikit-quant provides the underlying IMFIL implementation.
            raise MissingOptionalLibraryError(
                libname='scikit-quant',
                name='IMFIL',
                pip_install="pip install 'qiskit-aqua[skquant]'")
예제 #15
0
    def _replace_pauli_sums(cls, operator):
        """Replace Pauli measurements in ``operator`` by Aer expectation snapshots.

        SummedOp/PauliOp inputs become a CircuitStateFn wrapping a
        SnapshotExpectationValue instruction; ListOp is traversed recursively.
        Any other operator type implicitly returns None.

        Raises:
            MissingOptionalLibraryError: qiskit-aer is not installed.
        """
        try:
            from qiskit.providers.aer.extensions import SnapshotExpectationValue
        except ImportError as ex:
            raise MissingOptionalLibraryError(
                libname='qiskit-aer',
                name='AerPauliExpectation',
                pip_install='pip install qiskit-aer') from ex
        # The 'expval_measurement' label on the snapshot instruction is special - the
        # CircuitSampler will look for it to know that the circuit is a Expectation
        # measurement, and not simply a
        # circuit to replace with a DictStateFn

        # Change to Pauli representation if necessary
        if not {'Pauli'} == operator.primitive_strings():
            logger.warning(
                'Measured Observable is not composed of only Paulis, converting to '
                'Pauli representation, which can be expensive.')
            # Setting massive=False because this conversion is implicit. User can perform this
            # action on the Observable with massive=True explicitly if they so choose.
            operator = operator.to_pauli_op(massive=False)

        if isinstance(operator, SummedOp):
            # One snapshot covering every (coeff, Pauli) term in the sum.
            paulis = [[meas.coeff, meas.primitive] for meas in operator.oplist]
            snapshot_instruction = SnapshotExpectationValue(
                'expval_measurement', paulis)
            snapshot_op = CircuitStateFn(snapshot_instruction,
                                         is_measurement=True)
            return snapshot_op
        if isinstance(operator, PauliOp):
            # Single Pauli term: same snapshot form with one entry.
            paulis = [[operator.coeff, operator.primitive]]
            snapshot_instruction = SnapshotExpectationValue(
                'expval_measurement', paulis)
            snapshot_op = CircuitStateFn(snapshot_instruction,
                                         is_measurement=True)
            return snapshot_op
        if isinstance(operator, ListOp):
            # Recurse into composite operators.
            return operator.traverse(cls._replace_pauli_sums)
예제 #16
0
"""
Discriminator
"""

import logging
from qiskit.aqua import MissingOptionalLibraryError

logger = logging.getLogger(__name__)

try:
    import torch
    from torch import nn
except ImportError:
    # Torch is optional: log an informative message here (when INFO logging is
    # enabled) instead of failing at import time.
    # NOTE(review): if the import fails, `torch` stays undefined and the
    # `torch.nn.Module` base-class reference below will raise NameError —
    # confirm this module is only imported when torch is present.
    if logger.isEnabledFor(logging.INFO):
        EXC = MissingOptionalLibraryError(
            libname='Pytorch',
            name='DiscriminatorNet',
            pip_install='pip install qiskit-aqua[torch]')
        logger.info(str(EXC))

# torch 1.6.0 fixed a mypy error about not applying contravariance rules
# to inputs by defining forward as a value, rather than a function.  See also
# https://github.com/python/mypy/issues/8795
# The fix introduced an error on Module class about '_forward_unimplemented'
# not being implemented.
# The pylint disable=abstract-method fixes it.


class DiscriminatorNet(torch.nn.Module):  # pylint: disable=abstract-method
    """
    Discriminator
    """
예제 #17
0
def ad_hoc_data(training_size, test_size, n, gap, plot_data=False):
    """ returns ad hoc dataset """
    # Builds the synthetic "ad hoc" classification dataset: grid points in an
    # n-dimensional angle space, labelled +1/-1 by thresholding an expectation
    # value <psi(x)|M|psi(x)> at +/- gap (0 = unlabelled "void" region).
    # NOTE(review): only n == 2 and n == 3 are supported; any other n leaves
    # count == 0 and the `steps` division below raises ZeroDivisionError.
    class_labels = [r'A', r'B']
    count = 0
    if n == 2:
        count = 100
    elif n == 3:
        count = 20   # coarseness of data separation

    label_train = np.zeros(2 * (training_size + test_size))
    sample_train = []
    sample_a = [[0 for x in range(n)] for y in range(training_size + test_size)]
    sample_b = [[0 for x in range(n)] for y in range(training_size + test_size)]

    # Label grid over the discretized angle space (+1 / -1 / 0 per cell).
    sample_total = [[[0 for x in range(count)] for y in range(count)] for z in range(count)]

    # interactions = np.transpose(np.array([[1, 0], [0, 1], [1, 1]]))

    steps = 2 * np.pi / count

    # Single-qubit building blocks: Pauli-Z, identity and Hadamard matrices.
    # sx = np.array([[0, 1], [1, 0]])
    # X = np.asmatrix(sx)
    # sy = np.array([[0, -1j], [1j, 0]])
    # Y = np.asmatrix(sy)
    s_z = np.array([[1, 0], [0, -1]])
    z_m = np.asmatrix(s_z)
    j_m = np.array([[1, 0], [0, 1]])
    j_m = np.asmatrix(j_m)
    h_m = np.array([[1, 1], [1, -1]]) / np.sqrt(2)
    h_2 = np.kron(h_m, h_m)
    h_3 = np.kron(h_m, h_2)
    h_m = np.asmatrix(h_m)
    h_2 = np.asmatrix(h_2)
    h_3 = np.asmatrix(h_3)

    f_a = np.arange(2**n)

    # After the loop and transpose, columns of my_array are all n-bit strings.
    my_array = [[0 for x in range(n)] for y in range(2 ** n)]

    for arindex, _ in enumerate(my_array):
        temp_f = bin(f_a[arindex])[2:].zfill(n)
        for findex in range(n):
            my_array[arindex][findex] = int(temp_f[findex])

    my_array = np.asarray(my_array)
    my_array = np.transpose(my_array)

    # Define decision functions
    maj = (-1) ** (2 * my_array.sum(axis=0) > n)
    parity = (-1) ** (my_array.sum(axis=0))
    # dict1 = (-1) ** (my_array[0])
    d_m = None
    if n == 2:
        d_m = np.diag(parity)
    elif n == 3:
        d_m = np.diag(maj)

    # Random positive matrix; its eigenbasis rotates the decision observable.
    basis = aqua_globals.random.random((2 ** n, 2 ** n)) + \
        1j * aqua_globals.random.random((2 ** n, 2 ** n))
    basis = np.asmatrix(basis).getH() * np.asmatrix(basis)

    [s_a, u_a] = np.linalg.eig(basis)

    idx = s_a.argsort()[::-1]
    s_a = s_a[idx]
    u_a = u_a[:, idx]

    # Observable M = U^H D U, with D the diagonal decision function.
    m_m = (np.asmatrix(u_a)).getH() * np.asmatrix(d_m) * np.asmatrix(u_a)

    # |psi_0> = |+>^n, the uniform superposition over n qubits.
    psi_plus = np.transpose(np.ones(2)) / np.sqrt(2)
    psi_0 = 1
    for k in range(n):
        psi_0 = np.kron(np.asmatrix(psi_0), np.asmatrix(psi_plus))

    sample_total_a = []
    sample_total_b = []
    sample_total_void = []
    if n == 2:
        # Label every 2-D grid point by thresholding <psi|M|psi> at +/- gap.
        for n_1 in range(count):
            for n_2 in range(count):
                x_1 = steps * n_1
                x_2 = steps * n_2
                phi = x_1 * np.kron(z_m, j_m) + x_2 * np.kron(j_m, z_m) + \
                    (np.pi - x_1) * (np.pi - x_2) * np.kron(z_m, z_m)
                u_u = scipy.linalg.expm(1j * phi)  # pylint: disable=no-member
                psi = np.asmatrix(u_u) * h_2 * np.asmatrix(u_u) * np.transpose(psi_0)
                temp = np.real(psi.getH() * m_m * psi).item()
                if temp > gap:
                    sample_total[n_1][n_2] = +1
                elif temp < -gap:
                    sample_total[n_1][n_2] = -1
                else:
                    sample_total[n_1][n_2] = 0

        # Now sample randomly from sample_Total a number of times training_size+testing_size
        # Rejection-sample grid points whose label is +1 (class A).
        t_r = 0
        while t_r < (training_size + test_size):
            draw1 = aqua_globals.random.choice(count)
            draw2 = aqua_globals.random.choice(count)
            if sample_total[draw1][draw2] == +1:
                sample_a[t_r] = [2 * np.pi * draw1 / count, 2 * np.pi * draw2 / count]
                t_r += 1

        # Rejection-sample grid points whose label is -1 (class B).
        t_r = 0
        while t_r < (training_size + test_size):
            draw1 = aqua_globals.random.choice(count)
            draw2 = aqua_globals.random.choice(count)
            if sample_total[draw1][draw2] == -1:
                sample_b[t_r] = [2 * np.pi * draw1 / count, 2 * np.pi * draw2 / count]
                t_r += 1

        sample_train = [sample_a, sample_b]

        # First half of the labels is class 0 (A), second half class 1 (B).
        for lindex in range(training_size + test_size):
            label_train[lindex] = 0
        for lindex in range(training_size + test_size):
            label_train[training_size + test_size + lindex] = 1
        label_train = label_train.astype(int)
        sample_train = np.reshape(sample_train, (2 * (training_size + test_size), n))
        training_input = {key: (sample_train[label_train == k, :])[:training_size]
                          for k, key in enumerate(class_labels)}
        test_input = {key: (sample_train[label_train == k, :])[training_size:(
            training_size + test_size)] for k, key in enumerate(class_labels)}

        if plot_data:
            # Matplotlib is only required when plotting, hence the lazy import.
            try:
                import matplotlib.pyplot as plt
            except ImportError as ex:
                raise MissingOptionalLibraryError(
                    libname='Matplotlib',
                    name='ad_hoc_data',
                    pip_install='pip install matplotlib') from ex

            plt.show()
            fig2 = plt.figure()
            for k in range(0, 2):
                plt.scatter(sample_train[label_train == k, 0][:training_size],
                            sample_train[label_train == k, 1][:training_size])

            plt.title("Ad-hoc Data")
            plt.show()

    elif n == 3:
        # Same labelling as the n == 2 case on a 3-D grid; also collect the
        # grid coordinates of each class for the optional 3-D plots below.
        for n_1 in range(count):
            for n_2 in range(count):
                for n_3 in range(count):
                    x_1 = steps * n_1
                    x_2 = steps * n_2
                    x_3 = steps * n_3
                    phi = x_1 * np.kron(np.kron(z_m, j_m), j_m) + \
                        x_2 * np.kron(np.kron(j_m, z_m), j_m) + \
                        x_3 * np.kron(np.kron(j_m, j_m), z_m) + \
                        (np.pi - x_1) * (np.pi - x_2) * np.kron(np.kron(z_m, z_m), j_m) + \
                        (np.pi - x_2) * (np.pi - x_3) * np.kron(np.kron(j_m, z_m), z_m) + \
                        (np.pi - x_1) * (np.pi - x_3) * np.kron(np.kron(z_m, j_m), z_m)
                    u_u = scipy.linalg.expm(1j * phi)  # pylint: disable=no-member
                    psi = np.asmatrix(u_u) * h_3 * np.asmatrix(u_u) * np.transpose(psi_0)
                    temp = np.real(psi.getH() * m_m * psi).item()
                    if temp > gap:
                        sample_total[n_1][n_2][n_3] = +1
                        sample_total_a.append([n_1, n_2, n_3])
                    elif temp < -gap:
                        sample_total[n_1][n_2][n_3] = -1
                        sample_total_b.append([n_1, n_2, n_3])
                    else:
                        sample_total[n_1][n_2][n_3] = 0
                        sample_total_void.append([n_1, n_2, n_3])

        # Now sample randomly from sample_Total a number of times training_size+testing_size
        # Rejection-sample grid points whose label is +1 (class A).
        t_r = 0
        while t_r < (training_size + test_size):
            draw1 = aqua_globals.random.choice(count)
            draw2 = aqua_globals.random.choice(count)
            draw3 = aqua_globals.random.choice(count)
            if sample_total[draw1][draw2][draw3] == +1:
                sample_a[t_r] = [2 * np.pi * draw1 / count,
                                 2 * np.pi * draw2 / count, 2 * np.pi * draw3 / count]
                t_r += 1

        # Rejection-sample grid points whose label is -1 (class B).
        t_r = 0
        while t_r < (training_size + test_size):
            draw1 = aqua_globals.random.choice(count)
            draw2 = aqua_globals.random.choice(count)
            draw3 = aqua_globals.random.choice(count)
            if sample_total[draw1][draw2][draw3] == -1:
                sample_b[t_r] = [2 * np.pi * draw1 / count,
                                 2 * np.pi * draw2 / count, 2 * np.pi * draw3 / count]
                t_r += 1

        sample_train = [sample_a, sample_b]

        # First half of the labels is class 0 (A), second half class 1 (B).
        for lindex in range(training_size + test_size):
            label_train[lindex] = 0
        for lindex in range(training_size + test_size):
            label_train[training_size + test_size + lindex] = 1
        label_train = label_train.astype(int)
        sample_train = np.reshape(sample_train, (2 * (training_size + test_size), n))
        training_input = {key: (sample_train[label_train == k, :])[:training_size]
                          for k, key in enumerate(class_labels)}
        test_input = {key: (sample_train[label_train == k, :])[training_size:(
            training_size + test_size)] for k, key in enumerate(class_labels)}

        if plot_data:
            try:
                import matplotlib.pyplot as plt
            except ImportError as ex:
                raise MissingOptionalLibraryError(
                    libname='Matplotlib',
                    name='ad_hoc_data',
                    pip_install='pip install matplotlib') from ex
            sample_total_a = np.asarray(sample_total_a)
            sample_total_b = np.asarray(sample_total_b)
            x_1 = sample_total_a[:, 0]
            y_1 = sample_total_a[:, 1]
            z_1 = sample_total_a[:, 2]

            x_2 = sample_total_b[:, 0]
            y_2 = sample_total_b[:, 1]
            z_2 = sample_total_b[:, 2]

            # 3-D scatter of the full labelled grids, one figure per class.
            fig1 = plt.figure()
            ax_1 = fig1.add_subplot(1, 1, 1, projection='3d')
            ax_1.scatter(x_1, y_1, z_1, c='#8A360F')
            plt.show()

            fig2 = plt.figure()
            ax_2 = fig2.add_subplot(1, 1, 1, projection='3d')
            ax_2.scatter(x_2, y_2, z_2, c='#683FC8')
            plt.show()

            sample_training_a = training_input['A']
            sample_training_b = training_input['B']

            x_1 = sample_training_a[:, 0]
            y_1 = sample_training_a[:, 1]
            z_1 = sample_training_a[:, 2]

            x_2 = sample_training_b[:, 0]
            y_2 = sample_training_b[:, 1]
            z_2 = sample_training_b[:, 2]

            # Combined scatter of the actual training samples for both classes.
            fig1 = plt.figure()
            ax_1 = fig1.add_subplot(1, 1, 1, projection='3d')
            ax_1.scatter(x_1, y_1, z_1, c='#8A360F')
            ax_1.scatter(x_2, y_2, z_2, c='#683FC8')
            plt.show()

    return sample_total, training_input, test_input, class_labels
예제 #18
0
def optimize_svm(
        kernel_matrix: np.ndarray,
        y: np.ndarray,
        scaling: Optional[float] = None,
        maxiter: int = 500,
        show_progress: bool = False,
        max_iters: Optional[int] = None,
        lambda2: float = 0.001) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
    """
    Solve the SVM dual problem as a quadratic program via CVXPY.

    The constraints are the fixed, standard SVM dual ones: non-negative
    multipliers subject to the label equality constraint.

    Args:
        kernel_matrix: NxN kernel (Gram) matrix
        y: Nx1 label array
        scaling: factor used to renormalize `y`; when None, it is derived
                 from `y` (sum of absolute values of the unit objective)
        maxiter: number of iterations for QP solver
        show_progress: showing the progress of QP solver
        max_iters: Deprecated, use maxiter.
        lambda2: L2 Norm regularization factor

    Returns:
        np.ndarray: length-N array of (rescaled) dual multipliers
        np.ndarray: length-1 array holding the bias term
        np.ndarray: length-N boolean mask marking the support vectors

    Raises:
        MissingOptionalLibraryError: If cvxpy is not installed
    """
    # pylint: disable=invalid-name, unused-argument
    try:
        import cvxpy
    except ImportError as ex:
        raise MissingOptionalLibraryError(
            libname='CVXPY',
            name='optimize_svm',
            pip_install="pip install 'qiskit-aqua[cvx]'",
            msg=str(ex)) from ex

    if max_iters is not None:
        warnings.warn(
            'The max_iters parameter is deprecated as of '
            '0.8.0 and will be removed no sooner than 3 months after the release. '
            'You should use maxiter instead.', DeprecationWarning)
        maxiter = max_iters
    # NOTE(review): maxiter is accepted (and overridden by max_iters) but is
    # never forwarded to prob.solve() below — confirm whether the solver's
    # iteration limit should be wired through.
    if y.ndim == 1:
        y = y[:, np.newaxis]

    # Quadratic and linear parts of the dual objective.
    hessian = np.outer(y, y) * kernel_matrix
    objective_lin = -np.ones(y.shape)
    if scaling is None:
        scaling = np.sum(np.sqrt(objective_lin * objective_lin))
    objective_lin = objective_lin / scaling

    support_tol = 1e-2
    num_points = kernel_matrix.shape[1]

    quad_term = np.array(hessian)
    lin_term = np.array(objective_lin)
    neg_identity = -np.eye(num_points)
    identity = np.eye(num_points)
    zeros_vec = np.zeros(num_points)
    equality_lhs = y.reshape(y.T.shape)
    equality_rhs = np.zeros((1, 1))

    multipliers = cvxpy.Variable(num_points)
    problem = cvxpy.Problem(
        cvxpy.Minimize((1 / 2) * cvxpy.quad_form(multipliers, quad_term)
                       + lin_term.T @ multipliers
                       + lambda2 * cvxpy.quad_form(multipliers, identity)),
        [neg_identity @ multipliers <= zeros_vec,
         equality_lhs @ multipliers == equality_rhs])
    problem.solve(verbose=show_progress, qcp=True)

    # Undo the objective scaling, then recover the bias from the averaged
    # decision-function residual.
    raw_solution = np.asarray(multipliers.value).reshape((num_points, 1))
    alpha = raw_solution * scaling
    label_sum = np.sum(y)
    weighted_sum = (alpha * y).T.dot(kernel_matrix.dot(np.ones(y.shape)))
    bias = (label_sum - weighted_sum) / num_points

    support = alpha > support_tol
    logger.debug('Solving QP problem is completed.')
    return alpha.flatten(), bias.flatten(), support.flatten()
# ---- Example 19 ----
def gaussian(training_size, test_size, n, plot_data=False):
    """Return the 'gaussian' classification dataset.

    Each class draws every feature from a Gaussian whose mean is selected by
    a random per-feature assignment vector; the classes' assignment vectors
    are cyclic shifts of one shared random base vector, so the class means
    differ feature-wise.

    Args:
        training_size (int): number of training samples per class.
        test_size (int): number of test samples per class.
        n (int): feature dimension; only 2 or 3 is supported.
        plot_data (bool): if True, scatter-plot the first two features of the
            training samples (requires matplotlib).

    Returns:
        tuple: ``(sample_train, training_input, test_input, class_labels)``
            where ``sample_train`` is an
            ``(num_classes * (training_size + test_size), n)`` array and the
            two dicts map class label -> per-class sample arrays.

    Raises:
        ValueError: if ``n`` is not 2 or 3.
        MissingOptionalLibraryError: if ``plot_data`` is True and matplotlib
            is not installed.
    """
    sigma = 1
    if n == 2:
        class_labels = [r'A', r'B']
        # assignment value v -> mean (-1/2 or +1/2)
        means = [-1 / 2, 1 / 2]
    elif n == 3:
        class_labels = [r'A', r'B', r'C']
        # assignment value v -> mean 2*(2v+1)*pi/6
        means = [2 * 1 * np.pi / 6, 2 * 3 * np.pi / 6, 2 * 5 * np.pi / 6]
    else:
        raise ValueError("Gaussian presently only supports 2 or 3 qubits")

    num_classes = len(class_labels)
    per_class = training_size + test_size
    samples = _gaussian_class_samples(num_classes, per_class, n, means, sigma)

    # Class k occupies rows [k * per_class, (k + 1) * per_class).
    label_train = np.repeat(np.arange(num_classes), per_class).astype(int)
    sample_train = np.reshape(samples, (num_classes * per_class, n))
    training_input = {key: (sample_train[label_train == k, :])[:training_size]
                      for k, key in enumerate(class_labels)}
    test_input = {key: (sample_train[label_train == k, :])[training_size:per_class]
                  for k, key in enumerate(class_labels)}

    if plot_data:
        _plot_gaussian_training(sample_train, label_train,
                                training_size, num_classes)

    return sample_train, training_input, test_input, class_labels


def _gaussian_class_samples(num_classes, per_class, n, means, sigma):
    """Draw ``per_class`` samples for each class; class ``c`` uses the shared
    base assignment vector cyclically shifted by ``c`` to pick each feature's
    Gaussian mean from ``means``."""
    base_vector = aqua_globals.random.integers(num_classes, size=n)
    vectors = [(base_vector + shift) % num_classes
               for shift in range(num_classes)]
    samples = [[[0 for _ in range(n)] for _ in range(per_class)]
               for _ in range(num_classes)]
    # Draw order (row, feature, class) matches the original duplicated
    # branches, so the aqua_globals RNG stream — and thus the generated
    # dataset — is unchanged for a given seed.
    for row in range(per_class):
        for feat in range(n):
            for cls in range(num_classes):
                samples[cls][row][feat] = aqua_globals.random.normal(
                    means[vectors[cls][feat]], sigma, None)
    return samples


def _plot_gaussian_training(sample_train, label_train, training_size,
                            num_classes):
    """Scatter-plot the first two features of each class's training samples.

    Raises:
        MissingOptionalLibraryError: if matplotlib is not installed.
    """
    try:
        import matplotlib.pyplot as plt
    except ImportError as ex:
        raise MissingOptionalLibraryError(
            libname='Matplotlib',
            name='gaussian',
            pip_install='pip install matplotlib') from ex

    for k in range(0, num_classes):
        plt.scatter(sample_train[label_train == k, 0][:training_size],
                    sample_train[label_train == k, 1][:training_size])

    plt.title("Gaussians")
    plt.show()