Example No. 1
def getLatentSemantic(k, decompType, dataMatrix, modelType, label,
                      imageDirName, imagePaths):
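    """Return the latent semantics cached for this (model, decomposition, k, label)
    combination, computing and saving them first if no cached copy exists."""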
    folderName = "{}_{}_{}_{}_{}".format(imageDirName, modelType.name,
                                         decompType.name, k, label)
    lsPath = getLatentSemanticPath(os.path.basename(imageDirName), modelType,
                                   decompType, k, label)
    latent_semantic = latentSemanticsHelper.getSemanticsFromFolder(lsPath)
    if latent_semantic is None:
        if decompType == reduction.ReductionType.SVD:
            u, v = SVD(dataMatrix, k).getDecomposition()
            latent_semantic = u, v
        elif decompType == reduction.ReductionType.PCA:
            latent_semantic = PCA(dataMatrix, k).getDecomposition()
        elif decompType == reduction.ReductionType.NMF:
            latent_semantic = NMF(dataMatrix, k).getDecomposition()
        elif decompType == reduction.ReductionType.LDA:
            latent_semantic = LDA(dataMatrix, k).getDecomposition()
        else:
            print("Check later")
            return None
        print("Image path example ", imagePaths[0])
        latentSemanticsHelper.saveSemantics(os.path.basename(imageDirName),
                                            modelType,
                                            label,
                                            decompType,
                                            k,
                                            latent_semantic[0],
                                            latent_semantic[1],
                                            imagePaths=imagePaths)
    return latent_semantic
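A minimal sketch of the kind of factor pair the SVD branch produces, assuming scikit-learn's TruncatedSVD in place of the project's SVD/PCA/NMF/LDA wrappers and a random matrix in place of dataMatrix:

import numpy as np
from sklearn.decomposition import TruncatedSVD

dataMatrix = np.random.rand(40, 256)   # hypothetical object-by-feature matrix
k = 10
svd = TruncatedSVD(n_components=k)
u = svd.fit_transform(dataMatrix)      # object-to-latent-semantic matrix, like latent_semantic[0]
v = svd.components_                    # latent-semantic-to-feature matrix, like latent_semantic[1]
print(u.shape, v.shape)                # (40, 10) (10, 256)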
Example No. 2
def initTask5_2(folderPath, imagePath):
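    """Scale the latent representations, print the range of distances between class
    members and their mean vector, and print the query image's distance to that mean."""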
    classificatonMeta = {
        "Dorsal": "palmar",
        "left": "Right-Handed",
        "With-Accessories": "Without-Accessories",
        "male": "female",
        "palmar": "Dorsal",
        "Right-Handed": "left",
        "Without-Accessories": "With-Accessories",
        "female": "male"
    }

    _, modelType, dimRedType, k, label = getParams(folderPath)
    # print(folderPath)
    u, vt, imagePaths = getSemanticsFromFolder(folderPath)
    u = preprocessing.scale(u)
    uMean = np.mean(u, axis=0)
    maxdis, mindis = float("-inf"), float("inf")
    for item in u:
        d = np.linalg.norm(item - uMean)
        mindis = min(mindis, d)
        maxdis = max(maxdis, d)
    print(mindis, maxdis, label)

    queryImage = getQueryImageRep(vt, imagePath, modelType)
    queryImageNormalised = preprocessing.scale(queryImage)
    qdis = np.linalg.norm(queryImageNormalised - uMean)
    print(qdis)
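A minimal sketch of the distance-to-centroid check above, with random vectors standing in for the project's latent semantics and query representation:

import numpy as np
from sklearn import preprocessing

u = preprocessing.scale(np.random.rand(40, 10))   # scaled latent representations of one class
uMean = np.mean(u, axis=0)
distances = np.linalg.norm(u - uMean, axis=1)
print(distances.min(), distances.max())           # range of distances within the class

query = preprocessing.scale(np.random.rand(10))   # hypothetical query representation
print(np.linalg.norm(query - uMean))              # distance of the query from the class mean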
Example No. 3
def task4(foldername, folderPath, imagePath, m):
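    """Find the m images most similar to the query image in the given latent-semantic
    space and plot them."""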
    print(" EXECUTING TASK 4 ")
    # print(folderPath)
    # print("Image Path:" + imagePath)
    U, V, imagePaths = getSemanticsFromFolder(folderPath)
    dirName, modelType, dimRedTechnique, K, label = getParams(foldername)
    query_image_features = getQueryImageRep(V, imagePath, modelType)
    similarImages = comparisonHelper.getMSimilarImages(
        U, query_image_features, m, imagePaths)
    plotFigures(similarImages, 3)
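A minimal sketch of what comparisonHelper.getMSimilarImages is presumably doing, assuming a Euclidean-distance ranking over the rows of U; the data and image paths here are hypothetical:

import numpy as np

U = np.random.rand(40, 10)                                # latent representations of the database images
imagePaths = ["img_{}.jpg".format(i) for i in range(40)]  # hypothetical image paths
query = np.random.rand(10)                                # query image in the same latent space
m = 5

distances = np.linalg.norm(U - query, axis=1)             # Euclidean distance to every image
order = np.argsort(distances)[:m]                         # indices of the m closest images
similarImages = [(imagePaths[i], distances[i]) for i in order]
print(similarImages)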
Example No. 4
def initTask5(folderPath, imagePath):
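    """Fit a one-class SVM on the latent representations of the labelled class and
    predict whether the query image belongs to that class or to its opposite."""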
    classificatonMeta = {
        "Dorsal": "Palmer",
        "Left-Handed": "Right-Handed",
        "With-Accessories": "Without-Accessories",
        "Male": "Female",
        "Palmer": "Dorsal",
        "Right-Handed": "Left-Handed",
        "Without-Accessories": "With-Accessories",
        "Female": "Male"
    }

    labelInfo = {
        "1": "Left-Handed",
        "2": "Right-Handed",
        "3": "Dorsal",
        "4": "Palmer",
        "5": "With-Accessories",
        "6": "Without-Accessories",
        "7": "Male",
        "8": "Female"
    }

    _, modelType, dimRedType, k, label = getParams(folderPath)
    u, vt, imagePaths = getSemanticsFromFolder(folderPath)
    uNomalised = preprocessing.scale(u)

    oc_svm_clf = svm.OneClassSVM(gamma=0.01, kernel='rbf', nu=0.1)
    oc_svm_clf.fit(uNomalised)

    queryImage = getQueryImageRep(vt, imagePath, modelType)
    queryImageNormalised = preprocessing.scale(queryImage)
    queryPrediction = oc_svm_clf.predict(
        queryImageNormalised.reshape((1, queryImageNormalised.shape[0])))

    if len(queryPrediction) < 1:
        raise ValueError("Query prediction not available")

    if queryPrediction[0] == 1:
        print("Predicted the image as '{}'".format(labelInfo[label]))
    else:
        print("Predicted the image as '{}'".format(
            classificatonMeta[labelInfo[label]]))
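A minimal sketch of the one-class SVM step on synthetic data; random vectors replace the project's latent semantics and query representation:

import numpy as np
from sklearn import preprocessing, svm

u = preprocessing.scale(np.random.rand(40, 10))    # latent vectors of the labelled class
clf = svm.OneClassSVM(gamma=0.01, kernel='rbf', nu=0.1)
clf.fit(u)

query = preprocessing.scale(np.random.rand(10))    # hypothetical query representation
prediction = clf.predict(query.reshape(1, -1))     # +1 -> same class, -1 -> opposite class
print(prediction[0])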