def PcaCustomFor2Variables(matrix):
    # center the data: subtract each variable's average from its samples
    xAv = _math.AverageList(matrix[0])
    yAv = _math.AverageList(matrix[1])
    for i in range(len(matrix[0])):
        matrix[0][i] = matrix[0][i] - xAv
    for i in range(len(matrix[1])):
        matrix[1][i] = matrix[1][i] - yAv

    matrix = _matrix.Transpose(matrix)
    covMatrix = _matrix.Covariance(matrix)
    eValues = EigenValues(covMatrix)
    eVector = EigenVector(covMatrix, eValues)

    return eValues, _matrix.Transpose(eVector)
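
# Illustrative sketch (hypothetical helper, not used elsewhere in the module;
# the sample values are made up): shows the expected input layout for
# PcaCustomFor2Variables. The two variables go in as rows of a nested list,
# and the function centers those rows in place, so a copy is passed here.
def _example_pca_custom_2d():
    data = [[2.5, 0.5, 2.2, 1.9, 3.1],   # x samples
            [2.4, 0.7, 2.9, 2.2, 3.0]]   # y samples
    values, vectors = PcaCustomFor2Variables(_matrix.Copy(data))
    # `values` are the eigenvalues of the 2x2 covariance matrix and
    # `vectors` the corresponding principal directions
    return values, vectors
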
def LinearLeastSquares(matrix, y):  # B = (X^T * X)^-1 * X^T * y
    matrix = _matrix.AddBeginColumn(matrix, 1)  # prepend the intercept column of ones
    matrixtranspose = _matrix.Transpose(matrix)
    section_1 = _matrix.Inverse(_matrix.Multiplication(matrixtranspose,
                                                       matrix),
                                1)  # (X^T * X)^-1
    section_2 = _matrix.Multiplication(section_1,
                                       matrixtranspose)  # (X^T * X)^-1 * X^T
    section_3 = _matrix.Multiplication(section_2, y)  # (X^T * X)^-1 * X^T * y

    return section_3
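
# Illustrative sketch (hypothetical helper, made-up points): `matrix` holds one
# observation per row and `y` is a column vector, both as nested lists. Because
# a column of ones is prepended for the intercept, points lying exactly on
# y = 1 + 2x should give a coefficient column close to [[1], [2]].
def _example_linear_fit():
    X = [[1], [2], [3], [4]]
    y = [[3], [5], [7], [9]]
    return LinearLeastSquares(X, y)   # approximately [[1.0], [2.0]]
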
def QuadraticLeastSquares(matrix, y):  # B = (X^T * X)^-1 * X^T * y
    # prepend the intercept column of ones and append the last column raised
    # to the power 2, giving the quadratic design matrix [1, x, x^2]
    matrix = _matrix.AddEndColumnPotentialLast(
        _matrix.AddBeginColumn(matrix, 1), 2)
    matrixtranspose = _matrix.Transpose(matrix)
    section_1 = _matrix.Inverse(_matrix.Multiplication(matrixtranspose,
                                                       matrix),
                                1)  # (X^T * X)^-1
    section_2 = _matrix.Multiplication(section_1,
                                       matrixtranspose)  # (X^T * X)^-1 * X^T
    section_3 = _matrix.Multiplication(section_2, y)  # (X^T * X)^-1 * X^T * y

    return section_3
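
# Illustrative sketch (hypothetical helper, made-up points; assumes
# AddEndColumnPotentialLast appends the last column raised to the given power,
# so the design matrix becomes [1, x, x^2]). Points lying on y = x^2 should
# give coefficients close to [[0], [0], [1]] (intercept, linear, quadratic).
def _example_quadratic_fit():
    X = [[1], [2], [3], [4]]
    y = [[1], [4], [9], [16]]
    return QuadraticLeastSquares(X, y)
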
def PcaNumpy(matrix):
    covMatrix = _matrix.Covariance(_matrix.Transpose(matrix))
    eigenValues, eigenVectors = np.linalg.eig(covMatrix)

    # numpy returns the eigenvectors as columns; pair each eigenvalue with its
    # vector and sort by eigenvalue, largest first (the key avoids comparing
    # the vectors themselves when two eigenvalues are equal)
    auxZip = sorted(zip(eigenValues, eigenVectors.T),
                    key=lambda pair: pair[0], reverse=True)

    eValues = [pair[0] for pair in auxZip]
    eVector = np.asarray([pair[1] for pair in auxZip])
    
    return eValues, eVector
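
# Illustrative sketch (hypothetical helper, made-up samples; assumes the same
# layout as the custom PCA, variables as rows): np.linalg.eig diagonalises the
# full covariance matrix, so this version is not limited to two variables.
# Eigenvalues come back in descending order, with one eigenvector per row of
# eVector in the matching order.
def _example_pca_numpy():
    data = [[2.5, 0.5, 2.2, 1.9],
            [2.4, 0.7, 2.9, 2.2],
            [1.1, 0.3, 1.4, 1.0]]
    return PcaNumpy(data)
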
def RobustLeastSquares(matrix, y):  # B = (X^T * W * X)^-1 * X^T * W * y
    matrixlinear = LinearLeastSquares(matrix, y)
    newy = LinearPredictMatrix(matrix, matrixlinear)
    w = _matrix.Copy(newy)

    # calculating w: inverse absolute residuals of the ordinary fit, clamped
    # to avoid division by zero when a point is fitted exactly
    for i in range(len(w)):
        residual = abs(y[i][0] - newy[i][0])
        w[i][0] = 1 / max(residual, 1e-8)

    y = _matrix.MultiplicationEscalarMatrix(y, w)  # weight the targets
    matrix = _matrix.AddBeginColumn(matrix, 1)  # prepend the intercept column of ones

    matrix = _matrix.MultiplicationEscalarMatrix(matrix, w)  # weight the design matrix
    matrixtranspose = _matrix.Transpose(matrix)
    section_1 = _matrix.Inverse(_matrix.Multiplication(matrixtranspose,
                                                       matrix),
                                1)  # (X^T * X)^-1
    section_2 = _matrix.Multiplication(section_1,
                                       matrixtranspose)  # (X^T * X)^-1 * X^T
    section_3 = _matrix.Multiplication(section_2, y)  # (X^T * X)^-1 * X^T * y

    return section_3
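
# Illustrative sketch (hypothetical helper, made-up points): RobustLeastSquares
# is a single reweighting pass. An ordinary fit supplies residuals, each
# observation is scaled by 1/|residual| so that outliers receive small weights,
# and the normal equations are solved again on the weighted X and y.
def _example_robust_fit():
    X = [[1], [2], [3], [4], [5]]
    y = [[2], [4], [6], [8], [40]]   # last point is an outlier
    return RobustLeastSquares(X, y)  # expected to stay closer to y = 2x than the plain fit
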
    def dataset_test(self, path, datatype="float"):
        print(">>>>>> " + path + " <<<<<<")
        originalMatrix = _matrix.ReadCsv(path, ";", datatype)
        originalMatrix = _matrix.Transpose(originalMatrix)
        auxMatrix = _matrix.Copy(originalMatrix)
        eValues, eVectors = pca.Pca(auxMatrix)

        _matrix.matrix_print("EigenVector", eVectors)
        _matrix.matrix_print("EigenValue", [eValues])

        # plot the relevance of each principal component
        pca.PlotRelevance(eValues, "Relevance Components " + path)

        # plot the original data
        plot.SimplePointData2D(originalMatrix, "Original " + path, "PC1",
                               "PC2")

        # plot data transformed in the new space
        transformedData = pca.Transformation(originalMatrix, eVectors)
        plot.SimplePointData2D(transformedData, "Transformed " + path, "PC1",
                               "PC2")

        print("\n--------------------------------------------------")