Example 1
def get_scaledLaplacian(coord, neighborNum, pointNumber):
    coord = coord[0]
    tree = cKDTree(coord)
    dd, ii = tree.query(coord, k=neighborNum)
    A = adjacency(dd, ii)
    scaledLaplacian = scaled_laplacian(A)
    flattenLaplacian = scaledLaplacian.tolil().reshape((1, pointNumber * pointNumber))
    return flattenLaplacian
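These examples assume `from scipy.spatial import cKDTree`, `scipy.sparse`, `pickle`, and `os`, plus two project helpers, `adjacency` and `scaled_laplacian`, that are not shown on this page. The sketch below is a hypothetical reconstruction of those helpers, assuming a Gaussian-kernel kNN adjacency and the ChebNet-style rescaled Laplacian 2L/lambda_max - I; the project's actual implementation may differ.

import numpy as np
import scipy.sparse
from scipy.sparse.linalg import eigsh

def adjacency(dist, idx):
    # Hypothetical helper: weighted kNN adjacency built from the distances and
    # indices returned by cKDTree.query (each row lists a point's k nearest neighbors).
    M, k = dist.shape
    sigma2 = np.mean(dist[:, -1]) ** 2                      # kernel width from the k-th neighbor distance
    weights = np.exp(-dist ** 2 / sigma2).flatten()         # Gaussian edge weights
    rows = np.repeat(np.arange(M), k)
    W = scipy.sparse.csr_matrix((weights, (rows, idx.flatten())), shape=(M, M))
    W.setdiag(0)                                            # query returns each point itself; drop self-loops
    return (W + W.T) / 2                                    # symmetrize

def scaled_laplacian(W):
    # Hypothetical helper: symmetric normalized Laplacian rescaled to roughly [-1, 1],
    # i.e. 2 L / lambda_max - I, as used for Chebyshev graph convolutions.
    d = np.asarray(W.sum(axis=1)).flatten()
    d_inv_sqrt = np.zeros_like(d)
    d_inv_sqrt[d > 0] = d[d > 0] ** -0.5
    D = scipy.sparse.diags(d_inv_sqrt)
    I = scipy.sparse.identity(W.shape[0], format='csr')
    L = I - D @ W @ D
    lambda_max = eigsh(L, k=1, which='LM', return_eigenvectors=False)[0]
    return (2.0 / lambda_max) * L - I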
def prepareGraph(inputData, neighborNumber, pointNumber, dataType):
    scaledLaplacianDict = dict()
    #baseDir = os.path.dirname(os.path.abspath(__file__))
    # baseDir ='/raid60/yingxue.zhang2/ICASSP_code'
    baseDir = os.path.abspath(os.path.dirname(os.getcwd()))
    if para.dataset == 'ModelNet40':
        fileDir = baseDir + '/graph/' + dataType + '_pn_' + str(
            pointNumber) + '_nn_' + str(neighborNumber)
    elif para.dataset == 'ModelNet10':
        fileDir = baseDir + '/graph_ModelNet10/' + dataType + '_pn_' + str(
            pointNumber) + '_nn_' + str(neighborNumber)
    else:
        raise ValueError("Please enter a valid dataset")

    if (not os.path.isdir(fileDir)):
        print("calculating the graph data")
        os.makedirs(fileDir)
        print('>> Prepare graph. Batches count: {}'.format(len(inputData)))
        for batchIndex in range(len(inputData)):
            batchInput = inputData[batchIndex]
            for i in range(len(batchInput)):
                print(i)
                pcCoordinates = batchInput[i]
                tree = cKDTree(pcCoordinates)
                dd, ii = tree.query(pcCoordinates, k=neighborNumber)
                A = adjacency(dd, ii)
                scaledLaplacian = scaled_laplacian(A)
                flattenLaplacian = scaledLaplacian.tolil().reshape(
                    (1, pointNumber * pointNumber))
                if i == 0:
                    batchFlattenLaplacian = flattenLaplacian
                else:
                    batchFlattenLaplacian = scipy.sparse.vstack(
                        [batchFlattenLaplacian, flattenLaplacian])
            scaledLaplacianDict.update({batchIndex: batchFlattenLaplacian})
            with open(fileDir + '/batchGraph_' + str(batchIndex),
                      'wb') as handle:
                pickle.dump(batchFlattenLaplacian, handle)
            # files.download(handle)
            print("Saving the graph data batch" + str(batchIndex))

    else:
        print("Loading the graph data from " + dataType + 'Data')
        scaledLaplacianDict = loadGraph(inputData, neighborNumber, pointNumber,
                                        fileDir)
    return scaledLaplacianDict
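The `else` branch above relies on a `loadGraph` function that is not shown on this page. Presumably it reads the pickled per-batch matrices back from `fileDir`; a minimal, hypothetical sketch:

import os
import pickle

def loadGraph(inputData, neighborNumber, pointNumber, fileDir):
    # Hypothetical counterpart of the saving loop above: load each pickled batch of
    # flattened scaled Laplacians into a dict keyed by batch index.
    # (neighborNumber and pointNumber are kept only to match the call site.)
    scaledLaplacianDict = dict()
    for batchIndex in range(len(inputData)):
        with open(os.path.join(fileDir, 'batchGraph_' + str(batchIndex)), 'rb') as handle:
            scaledLaplacianDict[batchIndex] = pickle.load(handle)
    return scaledLaplacianDict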
Example 3
def prepareGraph(inputData, neighborNumber, pointNumber, dataType):
    """Generate the graph structure for each object and store it on disk for reuse.

    Input: (1) inputData: input point coordinates; (2) neighborNumber: number of neighbors
           used to construct the nearest-neighbor graph; (3) pointNumber: number of points
           per object; (4) dataType: training set or testing set.
    Return: the scaled Laplacian matrix of each object, keyed by batch index.
    """

    scaledLaplacianDict = dict()
    #baseDir = os.path.dirname(os.path.abspath(__file__))
    #baseDir ='/raid60/yingxue.zhang2/ICASSP_code'

    baseDir = os.path.abspath(os.path.dirname(os.getcwd()))
    fileDir = baseDir + '/graph/' + dataType + '_pn_' + str(
        pointNumber) + '_nn_' + str(neighborNumber)
    if (not os.path.isdir(fileDir)):
        print "calculating the graph data"
        os.makedirs(fileDir)
        for batchIndex in range(len(inputData)):
            batchInput = inputData[batchIndex]
            for i in range(len(batchInput)):
                print(i)
                pcCoordinates = batchInput[i]
                tree = cKDTree(pcCoordinates)
                dd, ii = tree.query(pcCoordinates, k=neighborNumber)
                A = adjacency(dd, ii)
                scaledLaplacian = scaled_laplacian(A)
                flattenLaplacian = scaledLaplacian.tolil().reshape(
                    (1, pointNumber * pointNumber))
                if i == 0:
                    batchFlattenLaplacian = flattenLaplacian
                else:
                    batchFlattenLaplacian = scipy.sparse.vstack(
                        [batchFlattenLaplacian, flattenLaplacian])
            scaledLaplacianDict.update({batchIndex: batchFlattenLaplacian})
            with open(fileDir + '/batchGraph_' + str(batchIndex),
                      'wb') as handle:
                pickle.dump(batchFlattenLaplacian, handle)
            print "Saving the graph data batch" + str(batchIndex)

    else:
        print("Loading the graph data from " + dataType + 'Data')
        scaledLaplacianDict = loadGraph(inputData, neighborNumber, pointNumber,
                                        fileDir)
    return scaledLaplacianDict
def prepareGraph(inputData, neighborNumber, pointNumber, dataType):
    scaledLaplacianDict = dict()
    #baseDir = os.path.dirname(os.path.abspath(__file__))
    baseDir = '/raid60/yingxue.zhang2/ICASSP_code'
    #baseDir= os.path.abspath(os.path.dirname(os.getcwd()))
    if para.dataset == 'ModelNet40':
        fileDir = baseDir + '/graph/' + dataType + '_pn_' + str(pointNumber) + '_nn_' + str(neighborNumber)
    elif para.dataset == 'ModelNet10':
        fileDir = baseDir + '/graph_ModelNet10/' + dataType + '_pn_' + str(pointNumber) + '_nn_' + str(neighborNumber)
    else:
        raise ValueError("Please enter a valid dataset")
        
    if (not os.path.isdir(fileDir)):
        print "calculating the graph data"
        os.makedirs(fileDir)
        for batchIndex in range(len(inputData)):
            batchInput = inputData[batchIndex]
            for i in range(len(batchInput)):
                print(i)
                pcCoordinates = batchInput[i]
                tree = cKDTree(pcCoordinates)
                dd, ii = tree.query(pcCoordinates, k=neighborNumber)
                A = adjacency(dd, ii)
                scaledLaplacian = scaled_laplacian(A)
                flattenLaplacian = scaledLaplacian.tolil().reshape((1, pointNumber * pointNumber))
                if i == 0:
                    batchFlattenLaplacian = flattenLaplacian
                else:
                    batchFlattenLaplacian = scipy.sparse.vstack([batchFlattenLaplacian, flattenLaplacian])
            scaledLaplacianDict.update({batchIndex: batchFlattenLaplacian})
            with open(fileDir+'/batchGraph_'+str(batchIndex), 'wb') as handle:
                pickle.dump(batchFlattenLaplacian, handle)
            print "Saving the graph data batch"+str(batchIndex)
        
    else:
        print("Loading the graph data from "+dataType+'Data')
        scaledLaplacianDict = loadGraph(inputData, neighborNumber, pointNumber, fileDir)
    return scaledLaplacianDict
def prepareGraph(inputData, neighborNumber, pointNumber, dataType):
    """Generate the graph structure for each object and store it on disk for reuse.

    Input: (1) inputData: input point coordinates; (2) neighborNumber: number of neighbors
           used to construct the nearest-neighbor graph; (3) pointNumber: number of points
           per object; (4) dataType: training set or testing set.
    Return: the scaled Laplacian matrix of each object, keyed by batch index.
    """

    scaledLaplacianDict = dict()
    #baseDir = os.path.dirname(os.path.abspath(__file__))
    baseDir = '/raid60/yingxue.zhang2/ICASSP_code'
    #baseDir= os.path.abspath(os.path.dirname(os.getcwd()))
    fileDir = baseDir + '/graph/' + dataType + '_pn_' + str(pointNumber) + '_nn_' + str(neighborNumber)
    if (not os.path.isdir(fileDir)):
        print "calculating the graph data"
        os.makedirs(fileDir)
        for batchIndex in range(len(inputData)):
            batchInput = inputData[batchIndex]
            for i in range(len(batchInput)):
                print(i)
                pcCoordinates = batchInput[i]
                tree = cKDTree(pcCoordinates)
                dd, ii = tree.query(pcCoordinates, k=neighborNumber)
                A = adjacency(dd, ii)
                scaledLaplacian = scaled_laplacian(A)
                flattenLaplacian = scaledLaplacian.tolil().reshape((1, pointNumber * pointNumber))
                if i == 0:
                    batchFlattenLaplacian = flattenLaplacian
                else:
                    batchFlattenLaplacian = scipy.sparse.vstack([batchFlattenLaplacian, flattenLaplacian])
            scaledLaplacianDict.update({batchIndex: batchFlattenLaplacian})
            with open(fileDir+'/batchGraph_'+str(batchIndex), 'wb') as handle:
                pickle.dump(batchFlattenLaplacian, handle)
            print "Saving the graph data batch"+str(batchIndex)
        
    else:
        print("Loading the graph data from "+dataType+'Data')
        scaledLaplacianDict = loadGraph(inputData, neighborNumber, pointNumber, fileDir)
    return scaledLaplacianDict
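For reference, a hedged usage sketch; the batch structure and the parameter values (40 neighbors, 1024 points per object, dataType 'train') are placeholders for illustration, not values taken from the original project:

# trainData is assumed to be a list of batches, each batch an array of point clouds
# of shape (pointNumber, 3); graphs are cached under <baseDir>/graph/train_pn_1024_nn_40.
trainGraphDict = prepareGraph(trainData, neighborNumber=40, pointNumber=1024, dataType='train')
firstBatch = trainGraphDict[0]   # sparse matrix: one flattened (pointNumber x pointNumber) Laplacian per row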