Example #1
    def stand(self):
        global X_standardized

        try:
            X = np.array(file_array_bis)
            standardscaler = StandardScaler(epsilon=1e-2)
            X_standardized = standardscaler.transform(X)
            QMessageBox.information(None, 'Information',
                                    'Data_set Standardized With Success',
                                    QMessageBox.Ok)
        except ValueError:
            QMessageBox.warning(None, 'ERROR', 'Please Load Your File',
                                QMessageBox.Ok)
Example #2
 def __init__(self,
              nu=0.5,
              gamma=0.1,
              tol=1e-3,
              degree=3,
              kernel='lcs',
              sax_size=4,
              quantiles='gaussian',
              paa_size=8):
     """
          Constructor accepts some arguments for sklearn.svm.OneClassSVM and the SAX transform used inside.
          Default parameters are chosen as the most appropriate for the flight-anomaly-detection problem,
          according to the original article.
     """
     self.nu = nu
     self.gamma = gamma
     self.tol = tol
     self.degree = degree
     self.kernel = kernel
     self.stand_scaler = StandardScaler(epsilon=1e-2)
     self.paa = PAA(window_size=None,
                    output_size=paa_size,
                    overlapping=True)
     self.sax = SAX(n_bins=sax_size, quantiles=quantiles)
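To make the constructor's moving parts concrete, here is a hedged sketch of the StandardScaler -> PAA -> SAX pipeline that get_sax in Example #4 applies to each window. The series and parameter values are illustrative, and the import path for PAA and SAX is an assumption based on where StandardScaler lives in these examples; adjust it to your pyts version.

import numpy as np
# Assumed import path: StandardScaler comes from pyts.transformation in Example #5,
# and PAA and SAX are assumed to live there as well in the same pyts 0.7-era release.
from pyts.transformation import StandardScaler, PAA, SAX

# One illustrative series, shaped (1, n_timestamps) as get_sax() in Example #4 expects.
series = np.sin(np.linspace(0, 4 * np.pi, 64)).reshape(1, -1)

stand_scaler = StandardScaler(epsilon=1e-2)
paa = PAA(window_size=None, output_size=8, overlapping=True)  # paa_size=8
sax = SAX(n_bins=4, quantiles='gaussian')                     # sax_size=4

# Standardize, compress to 8 segments, then discretize into a SAX word (a short string).
word = sax.transform(paa.transform(stand_scaler.transform(series)))[0]
print(word)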
Example #3
import numpy as np
from pyts.transformation import StandardScaler  # import path assumed from the other examples (pyts 0.7-era API)


def test_StandardScaler():
    """Testing 'StandardScaler'."""
    # Parameter
    X = np.arange(0, 7)

    # Test 1
    standardscaler = StandardScaler(epsilon=0.)
    arr_actual = standardscaler.transform(X[np.newaxis, :])[0]
    arr_desired = np.arange(-1.5, 2., 0.5)
    np.testing.assert_allclose(arr_actual, arr_desired, atol=1e-5, rtol=0.)

    # Test 2
    standardscaler = StandardScaler(epsilon=2.)
    arr_actual = standardscaler.transform(X[np.newaxis, :])[0]
    arr_desired = np.arange(-.75, 1., 0.25)
    np.testing.assert_allclose(arr_actual, arr_desired, atol=1e-5, rtol=0.)

    # Test 3
    standardscaler = StandardScaler(epsilon=2.)
    arr_actual = standardscaler.fit_transform(X[np.newaxis, :])[0]
    arr_desired = np.arange(-.75, 1., 0.25)
    np.testing.assert_allclose(arr_actual, arr_desired, atol=1e-5, rtol=0.)
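The expected arrays follow directly from the scaling rule these tests pin down: for X = [0, 1, ..., 6] the mean is 3 and the population standard deviation is 2, so the transform is (x - 3) / (2 + epsilon). A quick check with plain NumPy, added here only to make the arithmetic explicit:

import numpy as np

X = np.arange(0, 7, dtype=float)  # mean 3.0, population std 2.0
for epsilon in (0., 2.):
    manual = (X - X.mean()) / (X.std() + epsilon)
    print(epsilon, manual)
# epsilon=0. -> [-1.5, -1.0, ..., 1.5]   (matches Test 1)
# epsilon=2. -> [-0.75, -0.5, ..., 0.75] (matches Tests 2 and 3)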
Example #4
class MultipleKernelAnomalyDetector:
    """
        Multiple Kernel anomaly-detection method implementation
    """
    def __init__(self,
                 nu=0.5,
                 gamma=0.1,
                 tol=1e-3,
                 degree=3,
                 kernel='lcs',
                 sax_size=4,
                 quantiles='gaussian',
                 paa_size=8):
        """
            Constructor accepts some arguments for sklearn.svm.OneClassSVM and the SAX transform used inside.
            Default parameters are chosen as the most appropriate for the flight-anomaly-detection problem,
            according to the original article.
        """
        self.nu = nu
        self.gamma = gamma
        self.tol = tol
        self.degree = degree
        self.kernel = kernel
        self.stand_scaler = StandardScaler(epsilon=1e-2)
        self.paa = PAA(window_size=None,
                       output_size=paa_size,
                       overlapping=True)
        self.sax = SAX(n_bins=sax_size, quantiles=quantiles)

    def compute_matrix_of_equals(self, sequence1, sequence2):
        """
            Computes the matrix where entry (i + 1, j + 1) holds the length of the LCS of sequence1[:i + 1] and sequence2[:j + 1]
        """
        lengths = np.zeros((len(sequence1) + 1, len(sequence2) + 1))
        for i, element1 in enumerate(sequence1):
            for j, element2 in enumerate(sequence2):
                if element1 == element2:
                    lengths[i + 1][j + 1] = lengths[i][j] + 1
                else:
                    lengths[i + 1][j + 1] = max(lengths[i + 1][j],
                                                lengths[i][j + 1])
        return lengths

    def lcs(self, sequence1, sequence2):
        """
            Computes the longest common subsequence (LCS) of sequence1 and sequence2
        """
        lengths = self.compute_matrix_of_equals(sequence1, sequence2)
        result = ""
        i, j = len(sequence1), len(sequence2)
        while i != 0 and j != 0:
            if lengths[i][j] == lengths[i - 1][j]:
                i -= 1
            elif lengths[i][j] == lengths[i][j - 1]:
                j -= 1
            else:
                assert sequence1[i - 1] == sequence2[j - 1]
                result = sequence1[i - 1] + result
                i -= 1
                j -= 1
        return result

    def nlcs(self, sequence1, sequence2):
        """
            Computes the normalized LCS similarity of sequence1 and sequence2
        """
        return len(self.lcs(
            sequence1, sequence2)) / (len(sequence1) * len(sequence2))**0.5

    def get_sax(self, sequence):
        """
            Standardizes the series, compresses it with PAA and converts it to a SAX word.
        """
        sequence = np.reshape(sequence, (1, len(sequence)))
        return self.sax.transform(
            self.paa.transform(self.stand_scaler.transform(sequence)))[0]

    def lcs_kernel_function(self, x1, x2):
        """
            LCS kernel for the Multiple Kernel Anomaly Detector
        """
        res = np.zeros((x1.shape[0], x2.shape[0]))
        for ind1 in tqdm(range(x1.shape[0])):
            for ind2 in range(ind1, x2.shape[0]):
                if len(Counter(x1[ind1])) > 0.3 and len(Counter(x2[ind2])):
                    for i in range(0, len(x1[ind1]), self.x_shape[-1]):
                        res[ind1][ind2] += self.nlcs(
                            self.get_sax(x1[ind1][i:i + self.x_shape[-1]]),
                            self.get_sax(x2[ind2][i:i + self.x_shape[-1]]))
                        res[ind2][ind1] = res[ind1][ind2]
                else:
                    for i in range(0, len(x1[ind1]), self.x_shape[-1]):
                        res[ind1][ind2] += self.nlcs(
                            x1[ind1][i:i + self.x_shape[-1]],
                            x2[ind2][i:i + self.x_shape[-1]])
                        res[ind2][ind1] = res[ind1][ind2]
        return res

    def transformation(self, x):
        """
            Transforms X from 3D to 2D array for OneClassSVM
        """
        return x.transpose(0, 1, 2).reshape(x.shape[0], -1)

    def gaussian_kernel(self, x, y):
        return np.exp((euclidean_distances(x, y)**2) * (-1 / (0.5**2)))

    def fit(self, x):
        """
            With lcs kernel X must have shape (n, d, l),
            where n - number of samples, d - number of dimensions, l - feature length.
            With rbf kernel X must have shape (n, l)
            where n - number of samples, l - feature length.
        """
        self.x_shape = x.shape
        if self.kernel == 'lcs':
            x_transformed = self.transformation(x)
            kernel = lambda x, y: self.lcs_kernel_function(x, y)
            self.one_class_svm = OneClassSVM(kernel=kernel,
                                             nu=self.nu,
                                             gamma='auto',
                                             degree=self.degree)
            self.one_class_svm.fit(x_transformed)
        else:
            x_transformed = x
            self.one_class_svm = OneClassSVM(kernel='rbf',
                                             nu=self.nu,
                                             gamma=self.gamma,
                                             degree=self.degree)
            self.one_class_svm.fit(x_transformed)

    def predict(self, x):
        """
            With lcs kernel X must have shape (n, d, l),
            where n - number of samples, d - number of dimensions, l - feature length.
            With rbf kernel X must have shape (n, l)
            where n - number of samples, l - feature length.
            Function returns y-array with +1;-1
        """
        if len(x.shape) > 2:
            x = self.transformation(x)
        return self.one_class_svm.predict(x)
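A hedged usage sketch for the class above, assuming it is in scope together with the imports its methods rely on (numpy, collections.Counter, tqdm, sklearn's OneClassSVM and euclidean_distances, and the pyts StandardScaler/PAA/SAX). It exercises the string-based nlcs helper on two toy SAX words and the simpler 'rbf' code path on random 2D data; every name and value below is illustrative.

import numpy as np

detector = MultipleKernelAnomalyDetector(kernel='rbf', nu=0.1, gamma=0.1)

# nlcs on two toy SAX words: LCS('abcd', 'abd') is 'abd', so the score is 3 / sqrt(4 * 3).
print(detector.nlcs('abcd', 'abd'))  # ~0.866

# rbf path: X must have shape (n_samples, feature_length), per the fit/predict docstrings.
X_train = np.random.randn(20, 16)
detector.fit(X_train)
labels = detector.predict(X_train)  # array of +1 (inlier) / -1 (anomaly) labels
print(labels.shape)                 # (20,)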
Example #5
# NOTE: this snippet starts mid-script; the `test` DataFrame and the `xtrain` array
# used below are assumed to be defined earlier in the original source.
ytrain = test.iloc[:, 1]
ytrain = ytrain.values

xtest = test.iloc[:, 3:64]
xtest = xtest.values
ytest = test.iloc[:, 2]
ytest = ytest.values

from keras.utils import to_categorical

y_train = to_categorical(ytrain)
y_test = to_categorical(ytest)

from pyts.transformation import StandardScaler

standardscaler = StandardScaler(epsilon=1e-2)
X_standardized = standardscaler.transform(xtrain)
Xt_standardized = standardscaler.transform(xtest)

from pyts.transformation import GASF, GADF
from pyts import transformation, classification, visualization

gasf = GASF(image_size=61, overlapping=False, scale='-1')
X_gasf = gasf.transform(X_standardized)
X_gasf.ndim  # 3
Xt_gasf = gasf.transform(Xt_standardized)

gadf = GADF(image_size=61, overlapping=False, scale='-1')
X_gadf = gadf.transform(X_standardized)
Xt_gadf = gadf.transform(Xt_standardized)
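Since the labels were already one-hot encoded with to_categorical, a common next step is to stack the GASF and GADF images along a channel axis so they can feed an image classifier. A minimal hedged continuation of the arrays computed above, assuming each GAF transform yields one image_size x image_size image per sample (consistent with X_gasf.ndim being 3):

import numpy as np

# Channels-last stacking: arrays of shape (n_samples, 61, 61, 2).
X_images = np.stack([X_gasf, X_gadf], axis=-1)
Xt_images = np.stack([Xt_gasf, Xt_gadf], axis=-1)
print(X_images.shape, y_train.shape)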
Example #6
# NOTE: this snippet assumes earlier definitions of n_samples, n_features, n_classes
# and delta, plus `import numpy as np` and `from scipy.stats import norm`.
dt = 1
x = 0.

X = np.zeros((n_samples, n_features))
X[:, 0] = x

for i in range(n_samples):
    start = x
    for k in range(1, n_features):
        start += norm.rvs(scale=delta**2 * dt)
        X[i][k] = start

y = np.random.randint(n_classes, size=n_samples)
from pyts.transformation import StandardScaler

standardscaler = StandardScaler(epsilon=1e-2)
X_standardized = standardscaler.transform(X)

from pyts.transformation import MTF

mtf = MTF(image_size=61, n_bins=1, quantiles='empirical', overlapping=False)
X_mtf = mtf.transform(X_standardized)

from pyts.visualization import plot_mtf
plot_mtf(X_standardized[0],
         image_size=61,
         n_bins=4,
         quantiles='empirical',
         overlapping=False)

plot_mtf(X_standardized[0],