def test_logic(types):
    for option_set in [
            types["scalar"],
            types["vector"],
            types["matrix"],
    ]:
        for x in option_set:
            for y in option_set:
                ### Comparisons
                """
                Note: if warnings appear here, they're from `np.array(1) == cas.MX(1)` - 
                sensitive to order, as `cas.MX(1) == np.array(1)` is fine.
                
                However, checking the outputs, these seem to be yielding correct results despite
                the warning sooo...
                """
                x == y  # Warnings coming from here
                x != y  # Warnings coming from here
                x > y
                x >= y
                x < y
                x <= y

                ### Conditionals
                np.where(x > 1, x**2, 0)

                ### Elementwise min/max
                np.fmax(x, y)
                np.fmin(x, y)

    for x in types["all"]:
        np.fabs(x)
        np.floor(x)
        np.ceil(x)
        np.clip(x, 0, 1)
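# Hypothetical, minimal example of the `types` fixture that test_logic expects; the
# real suite presumably also mixes CasADi MX/SX objects into these lists (which is
# where the `cas.MX` warnings noted above come from).
import numpy as onp  # plain NumPy, used here only to build sample inputs

example_types = {
    "scalar": [1.0, onp.array(1.0)],
    "vector": [onp.ones(3)],
    "matrix": [onp.eye(3)],
}
example_types["all"] = example_types["scalar"] + example_types["vector"] + example_types["matrix"]

test_logic(example_types)  # every comparison/conditional should run without raising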
def airfoil_CL(alpha, Re, Ma):
    # Simple lift-coefficient model: a linear lift curve with a Prandtl-Glauert
    # compressibility correction, clipped at the stall limits.
    alpha_rad = alpha * np.pi / 180  # angle of attack [deg] -> [rad]
    beta = (1 - Ma ** 2) ** 0.5  # Prandtl-Glauert factor
    cl_0 = 0.5  # lift coefficient at zero angle of attack
    cl_alpha = 5.8  # lift-curve slope [1/rad]
    cl_min = -0.3  # negative-stall limit
    cl_max = 1.2  # positive-stall limit
    cl = (alpha_rad * cl_alpha + cl_0) / beta
    Cl = np.fmin(np.fmax(cl, cl_min), cl_max)
    return Cl
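# Quick sanity check of airfoil_CL with hypothetical inputs (note that `Re` is
# accepted but not used by this simple model):
airfoil_CL(alpha=5, Re=1e6, Ma=0.2)   # ≈ 1.03 with the constants above
airfoil_CL(alpha=20, Re=1e6, Ma=0.2)  # clipped to cl_max = 1.2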
Example #3
def smoothmax(value1, value2, hardness):
    """
    A smooth maximum between two functions. Also referred to as the logsumexp() function.
    Useful because it's differentiable and preserves convexity!
    Great writeup by John D Cook here:
        https://www.johndcook.com/soft_maximum.pdf
    :param value1: Value of function 1.
    :param value2: Value of function 2.
    :param hardness: Hardness parameter. Higher values make this closer to max(value1, value2).
    :return: Soft maximum of the two supplied values.
    """
    value1 = value1 * hardness
    value2 = value2 * hardness
    value_max = np.fmax(value1, value2)
    value_min = np.fmin(value1, value2)
    out = value_max + np.log(1 + np.exp(value_min - value_max))  # numerically stable log-sum-exp
    out /= hardness
    return out
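# Example with hypothetical values: as `hardness` grows, smoothmax approaches the true max.
smoothmax(1.0, 2.0, hardness=1)   # ≈ 2.3133  (= 2 + log(1 + exp(-1)))
smoothmax(1.0, 2.0, hardness=10)  # ≈ 2.0000045, i.e. very close to max(1, 2)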
Example #4
def softmax(*args, hardness=1):
    """
    An element-wise softmax between two or more arrays. Also referred to as the logsumexp() function.

    Useful for optimization because it's differentiable and preserves convexity!

    Great writeup by John D Cook here:
        https://www.johndcook.com/soft_maximum.pdf

    Args:
        Provide any number of arguments as values to take the softmax of.

        hardness: Hardness parameter. Higher values make this closer to the element-wise max().

    Returns:
        Soft maximum of the supplied values.
    """
    if hardness <= 0:
        raise ValueError("The value of `hardness` must be positive.")

    if len(args) <= 1:
        raise ValueError("You must call softmax with the value of two or more arrays that you'd like to take the "
                         "element-wise softmax of.")

    ### Scale the args by hardness
    args = [arg * hardness for arg in args]

    ### Find the element-wise max of the arrays (keeps the log-sum-exp numerically stable):
    max_arg = args[0]
    for arg in args[1:]:
        max_arg = _np.fmax(max_arg, arg)

    out = max_arg + _np.log(sum(
            [_np.exp(array - max_arg) for array in args]
        )
    )
    out = out / hardness
    return out
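# Example usage with hypothetical arrays, assuming `_np` is NumPy (or a
# NumPy-compatible array module) in this file:
a = _np.array([0.0, 1.0, 3.0])
b = _np.array([2.0, 1.0, 0.0])
softmax(a, b, hardness=10)  # ≈ [2.0, 1.069, 3.0], an element-wise smooth maximum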