Example 1
    def __init__(self,
                 maxiter: int = 1000,
                 offset: float = 10,
                 initialEta: float = 1,
                 tol: float = 1e-6,
                 disp: bool = False,
                 momentum: float = 0.25) -> None:
        """
        Args:
            maxiter: Maximum number of iterations, each iteration evaluation gradient.
            offset: Number of iterations before eta starts decreasing
            initialEta: The initial coefficient of the gradient update before the
                        decay schedule (controlled by ``offset``) takes effect.
            tol: The convergence criteria that must be reached before stopping.
                 Optimization stops when: absolute(loss - previous_loss) < tol
            disp: Set to True to display convergence messages.
            momentum: Bias towards the previous gradient momentum in current update.
                      Must be within the bounds: [0,1)

        """
        # Momentum must lie in [0, 1): raises ValueError otherwise.
        validate_range_exclusive_max('momentum', momentum, 0, 1)
        super().__init__()

        self._initialEta = initialEta
        self._offset = offset
        self._maxiter = maxiter
        # Guard against an explicit tol=None being passed by a caller.
        self._tol = tol if tol is not None else 1e-6
        self._disp = disp
        self._momentum_coeff = momentum
        # Loss from the previous iteration; None until the first evaluation.
        self._previous_loss = None
Example 2
    def __init__(self,
                 maxiter: int = 1000,
                 eta: float = 3.0,
                 tol: float = 1e-6,
                 disp: bool = False,
                 momentum: float = 0.25) -> None:
        """
        Args:
            maxiter: Maximum number of iterations, each iteration evaluation gradient.
            eta: The coefficient of the gradient update. Increasing this value
                 results in larger step sizes: param = previous_param - eta * deriv
            tol: The convergence criteria that must be reached before stopping.
                 Optimization stops when: absolute(loss - previous_loss) < tol
            disp: Set to True to display convergence messages.
            momentum: Bias towards the previous gradient momentum in current update.
                      Must be within the bounds: [0,1)

        """
        # Reject any momentum outside the half-open interval [0, 1).
        validate_range_exclusive_max('momentum', momentum, 0, 1)
        super().__init__()

        self._maxiter = maxiter
        self._eta = eta
        self._disp = disp
        self._momentum_coeff = momentum
        # Fall back to the default tolerance if the caller explicitly passed None.
        self._tol = 1e-6 if tol is None else tol
        # No loss recorded yet; set on the first objective evaluation.
        self._previous_loss = None
Example 3
    def __init__(self,
                 maxiter: Union[int, List[int]] = 1000,
                 eta: Union[float, List[float]] = 1.0,
                 tol: float = 1e-6,
                 disp: bool = False,
                 momentum: Union[float, List[float]] = 0.25,
                 param_tol: float = 1e-6,
                 averaging: int = 10) -> None:
        """
        Performs Analytical Quantum Gradient Descent (AQGD) with Epochs.

        Args:
            maxiter: Maximum number of iterations (full gradient steps)
            eta: The coefficient of the gradient update. Increasing this value
                results in larger step sizes: param = previous_param - eta * deriv
            tol: Tolerance for change in windowed average of objective values.
                Convergence occurs when either objective tolerance is met OR parameter
                tolerance is met.
            disp: Set to True to display convergence messages.
            momentum: Bias towards the previous gradient momentum in current
                update. Must be within the bounds: [0,1)
            param_tol: Tolerance for change in norm of parameters.
            averaging: Length of window over which to average objective values for objective
                convergence criterion

        Raises:
            AquaError: If the length of ``maxiter``, ``momentum``, and ``eta`` is not the same.
        """
        super().__init__()
        # Normalize scalar arguments to single-epoch lists. Accept ints as well
        # as floats for eta/momentum so e.g. eta=3 is not silently mishandled.
        if isinstance(maxiter, int):
            maxiter = [maxiter]
        if isinstance(eta, (int, float)):
            eta = [eta]
        if isinstance(momentum, (int, float)):
            momentum = [momentum]
        # Each epoch needs one entry of each parameter.
        if len(maxiter) != len(eta) or len(maxiter) != len(momentum):
            raise AquaError("AQGD input parameter length mismatch. Parameters `maxiter`, `eta`, "
                            "and `momentum` must have the same length.")
        # Every per-epoch momentum must lie in [0, 1).
        for m in momentum:
            validate_range_exclusive_max('momentum', m, 0, 1)

        self._eta = eta
        self._maxiter = maxiter
        self._momenta_coeff = momentum
        self._param_tol = param_tol
        self._tol = tol
        self._averaging = averaging
        if disp:
            warnings.warn('The disp parameter is deprecated as of '
                          '0.8.0 and will be removed no sooner than 3 months after the release. '
                          'The information is now available if you enable INFO level logging.',
                          DeprecationWarning, stacklevel=2)
        self._disp = disp

        # state
        self._avg_objval = None         # windowed average of objective values
        self._prev_param = None         # parameters from the previous step
        self._eval_count = 0    # function evaluations
        self._prev_loss = []    # type: List[float]
        self._prev_grad = []    # type: List[List[float]]
 def test_validate_range(self):
     """Exercise the range-validation helpers on both passing and failing bounds."""
     test_value = 2.5
     # Inclusive range: value outside [min, max] must raise.
     with self.assertRaises(ValueError):
         validate_range('test_value', test_value, 0, 2)
     with self.assertRaises(ValueError):
         validate_range('test_value', test_value, 3, 4)
     validate_range('test_value', test_value, 2.5, 3)
     # Exclusive range: value must be strictly inside (min, max).
     validate_range_exclusive('test_value', test_value, 0, 3)
     # BUG FIX: these two failing cases were previously inside a single
     # assertRaises block, so the second call never executed.
     with self.assertRaises(ValueError):
         validate_range_exclusive('test_value', test_value, 0, 2.5)
     with self.assertRaises(ValueError):
         validate_range_exclusive('test_value', test_value, 2.5, 3)
     # Exclusive-min range: value must satisfy min < value <= max.
     validate_range_exclusive_min('test_value', test_value, 0, 3)
     with self.assertRaises(ValueError):
         validate_range_exclusive_min('test_value', test_value, 2.5, 3)
     validate_range_exclusive_min('test_value', test_value, 0, 2.5)
     # Exclusive-max range: value must satisfy min <= value < max.
     validate_range_exclusive_max('test_value', test_value, 2.5, 3)
     with self.assertRaises(ValueError):
         validate_range_exclusive_max('test_value', test_value, 0, 2.5)
     validate_range_exclusive_max('test_value', test_value, 2.5, 3)