Example #1
def test_owa():

    def almost(a, b, prec=0.00001):
        return np.abs(a - b) < prec

    X = np.array([1.0, 1.0, 1.0])
    w = [0.5, 0.3, 0.2]

    owa = fl.owa(w)

    r = owa(X)

    assert 1.0 == r

    owa = fl.owa(0.5, 0.3, 0.2)
    r2 = owa(X)

    assert r == r2

    with pytest.raises(ValueError) as v:
        owa = fl.owa(0.5, 0.3, 0.2)
        owa(np.array([0, 1, 0.5, 0.2, 0.3]))

    assert "len(X) != len(v)" in str(v.value)

    with pytest.raises(ValueError) as v:
        owa = fl.owa(0.5, 0.3, 0.2, 0.4)
        owa(np.array([0, 1, 0.5]))

    assert "len(X) != len(v)" in str(v.value)
Example #2
def test_owa_matrix():

    X = np.array([[1.0, 1.0, 1.0], [0.5, 0.5, 0.5]])
    w = [0.5, 0.3, 0.2]

    owa = fl.owa(w)

    r = owa(X)

    assert 1.0 == r[0]
    assert 0.5 == r[1]
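The matrix test relies on the operator aggregating a 2-D input along the last axis, one result per row. A row-wise variant of the sketch above (again our own helper, not fylearn's implementation) reproduces that behaviour:

import numpy as np

def owa_rowwise(w, X):
    # Apply the OWA along the last axis: each row is sorted in
    # descending order and dotted with the weight vector.
    w = np.asarray(w, dtype=float)
    X = np.sort(np.atleast_2d(X), axis=-1)[..., ::-1]
    return X.dot(w)

print(owa_rowwise([0.5, 0.3, 0.2], [[1.0, 1.0, 1.0], [0.5, 0.5, 0.5]]))  # [1.  0.5]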
Example #3
def build_owa_operator(andness, m):
    beta = andness / (1.0 - andness)
    v = np.array(range(m)) + 1.0
    w = ((v / m)**beta) - (((v - 1.0) / m)**beta)
    return fl.owa(w)
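A quick sanity check of the weight construction (the small driver below is ours, not part of the library): for andness = 0.5 the exponent beta is 1 and the weights collapse to the arithmetic mean, while a larger andness pushes weight towards the later positions of the ordered input, i.e. towards min-like (and-like) behaviour under the usual descending-sort OWA convention. The weights always sum to 1 because the terms telescope.

import numpy as np

def owa_weights(andness, m):
    # Same weight construction as build_owa_operator above, returned
    # as a plain array so the weights can be inspected directly.
    beta = andness / (1.0 - andness)
    v = np.arange(1, m + 1, dtype=float)
    return ((v / m) ** beta) - (((v - 1.0) / m) ** beta)

print(owa_weights(0.5, 3))   # [0.333... 0.333... 0.333...]  (arithmetic mean)
print(owa_weights(0.75, 3))  # [0.037... 0.259... 0.703...]  (sums to 1)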
Example #4
    Trans. Fuzzy Syst., vol. 19, no. 2, pp. 241-252, Apr. 2011.

[3] R. Senge, and E. Hüllermeier, "Pattern trees for regression and fuzzy systems
    modeling," in Proc. IEEE Int. Conf. on Fuzzy Syst., 2010.
"""

import numpy as np
import heapq
from sklearn.metrics import mean_squared_error
from sklearn.base import BaseEstimator, ClassifierMixin
from sklearn.utils.validation import check_array
import fylearn.fuzzylogic as fl

# aggregation operators to use
OPERATORS = (fl.min, fl.einstein_i, fl.lukasiewicz_i, fl.prod,
             fl.owa([0.2, 0.8]), fl.owa([0.4, 0.6]), fl.mean,
             fl.owa([0.6, 0.4]), fl.owa([0.8, 0.2]), fl.algebraic_sum,
             fl.lukasiewicz_u, fl.einstein_u, fl.max)


def _tree_iterator(root):
    Q = [root]
    while Q:
        tree = Q.pop(0)
        if isinstance(tree, Inner):
            Q.extend(tree.branches_)
        yield tree


def _tree_leaves(root):
Example #5
def build_owa_operator(andness, m):
    beta = andness / (1.0 - andness)
    v = np.array(range(m)) + 1.0
    w = ((v / m) ** beta) - (((v - 1.0) / m) ** beta)
    return fl.owa(w)
"""

import numpy as np
import heapq
from sklearn.metrics import mean_squared_error
from sklearn.base import BaseEstimator, ClassifierMixin
from sklearn.utils.validation import check_array
import fylearn.fuzzylogic as fl

# aggregation operators to use
OPERATORS = (
    fl.min,
    fl.einstein_i,
    fl.lukasiewicz_i,
    fl.prod,
    fl.owa([0.2, 0.8]),
    fl.owa([0.4, 0.6]),
    fl.mean,
    fl.owa([0.6, 0.4]),
    fl.owa([0.8, 0.2]),
    fl.algebraic_sum,
    fl.lukasiewicz_u,
    fl.einstein_u,
    fl.max
)

def _tree_iterator(root):
    Q = [ root ]
    while Q:
        tree = Q.pop(0)
        if isinstance(tree, Inner):