Code example #1
File: aqgd.py — Project: tamiya-onodera/qiskit-terra
    def __init__(self,
                 maxiter: Union[int, List[int]] = 1000,
                 eta: Union[float, List[float]] = 1.0,
                 tol: float = 1e-6,
                 disp: bool = False,
                 momentum: Union[float, List[float]] = 0.25,
                 param_tol: float = 1e-6,
                 averaging: int = 10) -> None:
        """Analytical Quantum Gradient Descent (AQGD) optimizer with epoch support.

        Args:
            maxiter: Maximum number of iterations (i.e. full gradient steps).
            eta: Coefficient applied to the gradient in each update:
                param = previous_param - eta * deriv. Larger values give
                larger step sizes.
            tol: Tolerance on the change of the windowed average of objective
                values. Convergence occurs when either this objective
                tolerance or the parameter tolerance is met.
            disp: Set to True to display convergence messages (deprecated;
                the information is available via INFO level logging).
            momentum: Bias towards the previous gradient momentum in the
                current update. Must lie within the bounds [0, 1).
            param_tol: Tolerance for the change in the norm of the parameters.
            averaging: Length of the window over which objective values are
                averaged for the objective convergence criterion.

        Raises:
            AlgorithmError: If ``maxiter``, ``momentum`` and ``eta`` do not
                all have the same length.
        """
        super().__init__()

        # A scalar describes a single epoch; promote it to a one-element list
        # so the per-epoch bookkeeping below is uniform.
        maxiter = [maxiter] if isinstance(maxiter, int) else maxiter
        eta = [eta] if isinstance(eta, (int, float)) else eta
        momentum = [momentum] if isinstance(momentum, (int, float)) else momentum

        if not len(maxiter) == len(eta) == len(momentum):
            raise AlgorithmError("AQGD input parameter length mismatch. Parameters `maxiter`, "
                                 "`eta`, and `momentum` must have the same length.")
        for coeff in momentum:
            validate_range_exclusive_max('momentum', coeff, 0, 1)

        self._eta = eta
        self._maxiter = maxiter
        self._momenta_coeff = momentum
        self._param_tol = param_tol
        self._tol = tol
        self._averaging = averaging

        if disp:
            warnings.warn('The disp parameter is deprecated as of '
                          '0.8.0 and will be removed no sooner than 3 months after the release. '
                          'The information is now available if you enable INFO level logging.',
                          DeprecationWarning, stacklevel=2)
        self._disp = disp

        # Optimizer state.
        self._avg_objval = None   # windowed average of objective values
        self._prev_param = None   # parameters from the previous step
        self._eval_count = 0      # number of objective function evaluations
        self._prev_loss: List[float] = []
        self._prev_grad: List[List[float]] = []
Code example #2
    def __init__(
        self,
        maxiter: Union[int, List[int]] = 1000,
        eta: Union[float, List[float]] = 1.0,
        tol: float = 1e-6,
        momentum: Union[float, List[float]] = 0.25,
        param_tol: float = 1e-6,
        averaging: int = 10,
    ) -> None:
        """Set up an Analytical Quantum Gradient Descent (AQGD) run with epochs.

        Args:
            maxiter: Maximum number of iterations (i.e. full gradient steps).
            eta: Coefficient of the gradient update; larger values give
                larger step sizes: param = previous_param - eta * deriv.
            tol: Tolerance on the change of the windowed average of objective
                values. Convergence occurs when either this objective
                tolerance or the parameter tolerance is met.
            momentum: Bias towards the previous gradient momentum in the
                current update. Must lie within the bounds [0, 1).
            param_tol: Tolerance for the change in the norm of the parameters.
            averaging: Length of the window over which objective values are
                averaged for the objective convergence criterion.

        Raises:
            AlgorithmError: If ``maxiter``, ``momentum`` and ``eta`` do not
                all have the same length.
        """
        super().__init__()

        # A plain scalar means "one epoch": wrap it in a list.
        if isinstance(maxiter, int):
            maxiter = [maxiter]
        if isinstance(eta, (int, float)):
            eta = [eta]
        if isinstance(momentum, (int, float)):
            momentum = [momentum]

        # All three per-epoch lists must agree in length.
        if len({len(maxiter), len(eta), len(momentum)}) > 1:
            raise AlgorithmError(
                "AQGD input parameter length mismatch. Parameters `maxiter`, "
                "`eta`, and `momentum` must have the same length.")
        for momentum_coeff in momentum:
            validate_range_exclusive_max("momentum", momentum_coeff, 0, 1)

        self._maxiter = maxiter
        self._eta = eta
        self._momenta_coeff = momentum
        self._tol = tol
        self._param_tol = param_tol
        self._averaging = averaging

        # Mutable optimizer state.
        self._avg_objval = None   # windowed average of objective values
        self._prev_param = None   # parameters from the previous step
        self._eval_count = 0      # function evaluations
        self._prev_loss: List[float] = []
        self._prev_grad: List[List[float]] = []
Code example #3
 def test_validate_range(self):
     """Check the range-validation helpers for inclusive and exclusive bounds."""
     test_value = 2.5
     # Inclusive bounds: 2.5 is outside [0, 2] and [3, 4] but inside [2.5, 3].
     with self.assertRaises(ValueError):
         validate_range("test_value", test_value, 0, 2)
     with self.assertRaises(ValueError):
         validate_range("test_value", test_value, 3, 4)
     validate_range("test_value", test_value, 2.5, 3)
     # Both bounds exclusive: 2.5 must be rejected whenever it equals a bound.
     validate_range_exclusive("test_value", test_value, 0, 3)
     with self.assertRaises(ValueError):
         validate_range_exclusive("test_value", test_value, 0, 2.5)
     # Bug fix: this call was previously a second statement inside the
     # assertRaises block above, so it never executed (the first statement
     # already raised). Each expected failure needs its own context manager.
     with self.assertRaises(ValueError):
         validate_range_exclusive("test_value", test_value, 2.5, 3)
     # Exclusive minimum only: the min bound must be strictly below the value.
     validate_range_exclusive_min("test_value", test_value, 0, 3)
     with self.assertRaises(ValueError):
         validate_range_exclusive_min("test_value", test_value, 2.5, 3)
     validate_range_exclusive_min("test_value", test_value, 0, 2.5)
     # Exclusive maximum only: the max bound must be strictly above the value.
     validate_range_exclusive_max("test_value", test_value, 2.5, 3)
     with self.assertRaises(ValueError):
         validate_range_exclusive_max("test_value", test_value, 0, 2.5)
     validate_range_exclusive_max("test_value", test_value, 2.5, 3)
Code example #4
    def __init__(
        self,
        data: Union[str, Tuple[str, complex], List[Tuple[str, complex]]],
        register_length: Optional[int] = None,
    ):
        """
        Args:
            data: Input data for FermionicOp. The allowed data is label str,
                  tuple (label, coeff), or list [(label, coeff)].
            register_length: positive integer that represents the length of
                  registers. When ``None``, ``data`` is interpreted as dense
                  labels and the register length is taken from the label length.

        Raises:
            ValueError: given data is invalid value.
            TypeError: given data has invalid type.
        """
        self._register_length: int
        self._coeffs: np.ndarray
        self._labels: List[str]

        if not isinstance(data, (tuple, list, str)):
            raise TypeError(f"Type of data must be str, tuple, or list, not {type(data)}.")

        # Normalize every accepted input form into a list of (label, coeff) tuples.
        if isinstance(data, tuple):
            if not isinstance(data[0], str) or not isinstance(data[1], (int, float, complex)):
                raise TypeError(
                    f"Data tuple must be (str, number), not ({type(data[0])}, {type(data[1])})."
                )
            data = [data]

        if isinstance(data, str):
            data = [(data, 1)]

        # Robustness fix: an empty list would make the zip(*data) unpacking
        # below fail with an obscure "not enough values" error.
        if not data:
            raise ValueError("Data must contain at least one (label, coeff) entry.")

        if not all(
            isinstance(label, str) and isinstance(coeff, (int, float, complex))
            for label, coeff in data
        ):
            raise TypeError("Data list must be [(str, number)].")

        labels, coeffs = zip(*data)
        self._coeffs = np.array(coeffs, np.complex128)

        if register_length is None:  # Dense label, e.g. "I+-N"
            # All dense labels must have one character per register position.
            self._register_length = len(labels[0])
            if not all(len(label) == self._register_length for label in labels):
                raise ValueError("Lengths of strings of label are different.")
            label_pattern = re.compile(r"^[I\+\-NE]+$")
            invalid_labels = [label for label in labels if not label_pattern.match(label)]
            if invalid_labels:
                raise ValueError(f"Invalid labels for dense labels are given: {invalid_labels}")
            self._labels = list(labels)
        else:  # Sparse label, e.g. "+_0 -_2"
            validate_min("register_length", register_length, 1)
            self._register_length = register_length
            label_pattern = re.compile(r"^[I\+\-NE]_\d+$")
            invalid_labels = [
                label for label in labels if not all(label_pattern.match(l) for l in label.split())
            ]
            if invalid_labels:
                raise ValueError(f"Invalid labels for sparse labels are given: {invalid_labels}")
            # Expand each sparse label into a dense character list.
            list_label = [["I"] * self._register_length for _ in labels]
            for term, label in enumerate(labels):
                # Bug fix: the ascending-order tracker must restart for every
                # term. Previously it was initialized once before this loop and
                # persisted across terms, so a valid label whose first index was
                # lower than the previous label's last index was wrongly rejected.
                prev_index: Optional[int] = None
                for split_label in label.split():
                    op_label, index_str = split_label.split("_", 1)
                    index = int(index_str)
                    validate_range_exclusive_max("index", index, 0, self._register_length)
                    if prev_index is not None and prev_index > index:
                        raise ValueError("Indices of labels must be in ascending order.")
                    if list_label[term][index] != "I":
                        raise ValueError(f"Duplicate index {index} is given.")
                    list_label[term][index] = op_label
                    prev_index = index

            self._labels = ["".join(l) for l in list_label]