Example 1
def calImgHisFeatures(imgFile,
                      gridSize,
                      sizeRange,
                      imResize=None,
                      gridList=None,
                      norm=True,
                      HisFeaDim=64):
    # print imgFile
    if imResize:
        gridPatchData, positions, im = esg.generateGridPatchData(
            imgFile,
            gridSize,
            sizeRange,
            imResize=imResize,
            gridList=gridList,
            imNorm=False)
    else:
        gridPatchData, positions, im = esg.generateGridPatchData(
            imgFile, gridSize, sizeRange, gridList=gridList, imNorm=False)
    feaVecs = np.zeros((len(gridPatchData), HisFeaDim))
    for i in range(len(gridPatchData)):

        feaVec, _ = histogramOfPatch(gridPatchData[i], HisFeaDim)

        feaVecs[i, :] = feaVec
    if norm:
        feaVecs = nv.normalizeVecs(feaVecs)
    return feaVecs, np.array(positions)
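
# Usage sketch (hypothetical imgFile; esg and nv are assumed to be the project's
# grid-sampling and vector-normalization helpers):
feaVecs, positions = calImgHisFeatures(imgFile,
                                       gridSize=np.array([10, 10]),
                                       sizeRange=(16, 16))
# feaVecs is (numPatches, 64); positions holds one grid entry per patch
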
def intensityFeature(imgFile=None,
                     gridSize=None,
                     sizeRange=None,
                     gridPatchData=None,
                     imResize=None,
                     gridList=None,
                     diffResolution=True):
    if gridPatchData is None:
        if imResize:
            gridPatchData, positions, im = esg.generateGridPatchData(
                imgFile,
                gridSize,
                sizeRange,
                imResize=imResize,
                gridList=gridList)
        else:
            gridPatchData, positions, im = esg.generateGridPatchData(
                imgFile,
                gridSize,
                sizeRange,
                gridList=gridList,
                diffResolution=diffResolution)

    if not diffResolution:
        gridPatchData_v = np.array(gridPatchData)
        patch_num = len(gridPatchData)
        mean_v = np.mean(np.mean(gridPatchData_v, axis=1), axis=1).reshape(
            (patch_num, 1))

        # i = 16
        # patch_i = gridPatchData[i]
        # mean_i = patch_i.mean()
        shape = gridPatchData_v.shape
        gridPatchData_reshape = gridPatchData_v.reshape(
            (shape[0], shape[1] * shape[2]))
        min_v = np.min(gridPatchData_reshape, axis=1).reshape((patch_num, 1))
        max_v = np.max(gridPatchData_reshape, axis=1).reshape((patch_num, 1))
        # min_i = patch_i.min()
        # max_i = patch_i.max()
        intensityFeas = np.hstack((min_v, mean_v, max_v))
    else:
        # patches may have different shapes, so compute [min, mean, max] per patch
        feas_list = [[np.min(x), np.mean(x), np.max(x)] for x in gridPatchData]
        intensityFeas = np.array(feas_list)
    # print min_v.shape
    # print gridPatchData_v.shape
    # print mean_i, mean_v[i], min_i, min_v[i], max_i, max_v[i]
    # print intensityFeas.min(), intensityFeas.max()
    return intensityFeas
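
# Usage sketch (hypothetical imgFile): 3-D [min, mean, max] intensity per grid
# patch, computed straight from an image file when no pre-cut patch list is given.
intensityFeas = intensityFeature(imgFile=imgFile,
                                 gridSize=np.array([10, 10]),
                                 sizeRange=(16, 16),
                                 diffResolution=True)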
Example 3
def selectSpecialPatch(imgFile,
                       wordsFile,
                       feaType,
                       gridSize,
                       sizeRange,
                       nk,
                       filter_radius=3,
                       spaceSize=10):
    feaVecs, posVecs = glf.genImgLocalFeas(imgFile, feaType, gridSize,
                                           sizeRange)
    # print feaVecs.shape, posVecs.shape
    labelVecs = chm.calPatchLabels2(wordsFile,
                                    feaVecs,
                                    k=nk,
                                    two_classes=['1', '2'],
                                    isH1=True)
    posVecs_f, labelVecs_f = filterPos(posVecs,
                                       labelVecs,
                                       radius=filter_radius,
                                       spaceSize=spaceSize)
    specialIDs = list(np.argwhere(labelVecs_f == 0)[:, 0])
    specialPos = list(posVecs_f[specialIDs, :])
    patchData, _, _ = esg.generateGridPatchData(imgFile,
                                                gridSize,
                                                sizeRange,
                                                gridList=specialPos)
    # patchData_arr = np.array(patchData)
    return patchData, specialPos
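
# Usage sketch (hypothetical words file and parameters, mirroring the later
# examples): keep only patches whose filtered bag-of-words label is 0, i.e. the
# "special" class.
patchData, specialPos = selectSpecialPatch(imgFile, lbp_wordsFile_s1, 'LBP',
                                           np.array([10, 10]), (16, 16), nk=19)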
Example 4
def threshHoldFilterPatch(im, gridSize, sizeRange):
    patchData, gridList, _ = esg.generateGridPatchData(im, gridSize, sizeRange)
    patchData_arr = np.array(patchData)
    mm = np.mean(np.mean(patchData_arr, axis=2), axis=1)
    ths = mm - 26
    ids = list(np.where(ths > 0))
    print ids
    gridList = [gridList[x] for x in ids[0]]

    return gridList
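
# Usage sketch (hypothetical im, gridSize and sizeRange): drop grid cells whose
# mean intensity is 26 or below (treated as dark background here) and resample
# patches only at the kept positions.
keptGrid = threshHoldFilterPatch(im, gridSize, sizeRange)
patchData, _, _ = esg.generateGridPatchData(im, gridSize, sizeRange,
                                            gridList=keptGrid)
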
def calImgSDAEFea(imgFile,
                  model,
                  gridSize,
                  sizeRange,
                  channels,
                  patch_mean,
                  gridList=None,
                  imResize=None,
                  patchMean=True,
                  norm=True):
    patchSize = sizeRange[0]
    if imResize:
        gridPatchData, gridList, im = esg.generateGridPatchData(
            imgFile, gridSize, sizeRange, imResize=imResize, gridList=gridList)
    else:
        gridPatchData, gridList, im = esg.generateGridPatchData(
            imgFile, gridSize, sizeRange, gridList=gridList)
    # gridPatchData, gridList, im = esg.generateGridPatchData(imgFile, gridSize, sizeRange, gridList=gridList)
    patchData = [
        p.reshape(channels, patchSize, patchSize) for p in gridPatchData
    ]
    patchData = np.array(patchData) - patch_mean
    if patchMean:
        means = np.mean(np.mean(patchData, axis=-1), axis=-1)
        means = means.reshape(means.shape[0], means.shape[1], 1, 1)
        means = np.tile(means, (1, 1, patchSize, patchSize))
        patchData -= means
    labelData = np.full((len(gridList), ), int(0), dtype='float32')

    model.set_input_arrays(patchData, labelData)
    out = model.forward()
    out_name = model.blobs.items()[-1][0]
    feaVec = out[out_name]
    posVec = np.array(gridList)
    if norm:
        feaVec = nv.normalizeVecs(feaVec)
    return feaVec, posVec
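
# Usage sketch, assuming the Caffe test net has been written and loaded as in the
# SDAE examples further down (net / weight paths shown there; patch_mean is read
# from the saved mean file):
caffe.set_mode_gpu()
model = caffe.Net(net, weight, caffe.TEST)
feaVec, posVec = calImgSDAEFea(imgFile, model, gridSize, (16, 16),
                               channels=1, patch_mean=patch_mean)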
Example 6
def calImgDSift(imgFile,
                gridSize,
                sizeRange,
                gridList=None,
                imResize=None,
                withIntensity=None,
                diffResolution=True):
    # print imgFile
    siftFeaDim = 128
    # if withIntensity:
    #     siftFeaDim += 3
    if imResize:
        patches, positions, im = esg.generateGridPatchData(
            imgFile,
            gridSize,
            sizeRange,
            imResize=imResize,
            gridList=gridList,
            diffResolution=diffResolution)
    else:
        patches, positions, im = esg.generateGridPatchData(
            imgFile,
            gridSize,
            sizeRange,
            gridList=gridList,
            diffResolution=diffResolution)
    feaVecs = np.zeros((len(patches), siftFeaDim))
    for i in range(len(patches)):
        patchSize = int(positions[i][-1])
        extractor = dsift.SingleSiftExtractor(patchSize)
        feaVec = extractor.process_image(patches[i])
        feaVecs[i, :] = feaVec
    if withIntensity is True:
        intensityFeas = intensityFeature(gridPatchData=patches,
                                         diffResolution=diffResolution)
        feaVecs = np.hstack((feaVecs, intensityFeas))
    return feaVecs, np.array(positions)
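
# Usage sketch (hypothetical imgFile): 128-D dense SIFT per grid patch, with the
# 3-D intensity feature appended when withIntensity is True (dsift.SingleSiftExtractor
# is assumed to be the project's dense-SIFT wrapper).
feaVecs, positions = calImgDSift(imgFile, np.array([10, 10]), (28, 28),
                                 withIntensity=True)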
Example 7
def saveImgHeatMaps(imgFile, isReduce=False):
    sdae_wordsFile_h1 = '../../Data/Features/type4_SDAEWords_h1.hdf5'
    sdae_wordsFile_h2 = '../../Data/Features/type4_SDAEWords_h2.hdf5'
    sdae_wordsFile_h1_diff_mean = '../../Data/Features/type4_SDAEWords_h1_diff_mean.hdf5'
    sdae_wordsFile_h2_diff_mean = '../../Data/Features/type4_SDAEWords_h2_diff_mean.hdf5'
    sdae_wordsFile_h1_reduce = '../../Data/Features/type4_SDAEWords_h1_reduce_sameRatio.hdf5'
    sdae_wordsFile_h2_reduce = '../../Data/Features/type4_SDAEWords_h2_reduce_sameRatio.hdf5'
    sift_wordsFile_h1 = '../../Data/Features/type4_SIFTWords_h1.hdf5'
    sift_wordsFile_h2 = '../../Data/Features/type4_SIFTWords_h2.hdf5'
    sift_wordsFile_h1_reduce = '../../Data/Features/type4_SIFTWords_h1_reduce.hdf5'
    sift_wordsFile_h2_reduce = '../../Data/Features/type4_SIFTWords_h2_reduce.hdf5'
    lbp_wordsFile_h1_reduce = '../../Data/Features/type4_LBPWords_h1_reduce_sameRatio.hdf5'
    lbp_wordsFile_h2_reduce = '../../Data/Features/type4_LBPWords_h2_reduce_sameRatio.hdf5'
    lbp_wordsFile_h1 = '../../Data/Features/type4_LBPWords_h1.hdf5'
    lbp_wordsFile_h2 = '../../Data/Features/type4_LBPWords_h2.hdf5'
    SIFTFeaFile = '../../Data/Features/type4_SIFTFeatures.hdf5'
    SDAEFeaFile = '../../Data/Features/type4_SDAEFeas.hdf5'
    LBPFeaFile = '../../Data/Features/type4_LBPFeatures.hdf5'

    sift_saveName_h1 = '../../Data/Features/type4_SIFTWords_h1_s16_600_300_300_300.hdf5'
    sift_saveName_h2 = '../../Data/Features/type4_SIFTWords_h2_s16_600_300_300_300.hdf5'
    sdae_saveName_h1 = '../../Data/Features/type4_SDAEWords_h1_diff_mean_s16_600_300_300_300.hdf5'
    sdae_saveName_h2 = '../../Data/Features/type4_SDAEWords_h2_diff_mean_s16_600_300_300_300.hdf5'
    sdae_saveName_h1_s = '../../Data/Features/type4_SDAEWords_h1_same_mean_s16_600_300_300_300.hdf5'
    sdae_saveName_h2_s = '../../Data/Features/type4_SDAEWords_h2_same_mean_s16_600_300_300_300.hdf5'
    lbp_saveName_h1 = '../../Data/Features/type4_LBPWords_h1_s16_600_300_300_300.hdf5'
    lbp_saveName_h2 = '../../Data/Features/type4_LBPWords_h2_s16_600_300_300_300.hdf5'

    sizeRange = (16, 16)
    imResize = (256, 256)
    imgSize = (440, 440)
    nk = 19
    resolution = 5
    gridSize = np.array([resolution, resolution])
    im = np.array(imread(imgFile), dtype='f') / 255
    th1 = 0.5
    th2 = 0.5
    im_name = imgFile[-20:-4]

    # ----------------save sift------------------
    feaVectors, posVectors = glf.genImgLocalFeas(imgFile,
                                                 'SIFT',
                                                 gridSize,
                                                 sizeRange,
                                                 imResize=None)
    if isReduce:
        feaVectors = pca.reduceVecFeasDim(SIFTFeaFile, feaVectors, 64)
        heats = generateHeatMaps2by2(sift_wordsFile_h1_reduce,
                                     sift_wordsFile_h2_reduce, feaVectors,
                                     posVectors, gridSize, nk, th1, th2)
    else:
        heats = generateHeatMaps2by2(sift_saveName_h1, sift_saveName_h2,
                                     feaVectors, posVectors, gridSize, nk, th1,
                                     th2)

    for c, m in heats.iteritems():
        map3 = np.transpose(m, (1, 0, 2)).reshape(440, 440 * 3)
        map3 = np.append(map3, im, axis=1)
        if isReduce:
            imsave(im_name + '_SIFT_reduce_' + c + '_th' + str(th1) + '.jpg',
                   map3)
        else:
            imsave(im_name + '_SIFT_' + c + '_th' + str(th1) + '.jpg', map3)

    # ----------------save lbp------------------
    feaVectors, posVectors = glf.genImgLocalFeas(imgFile,
                                                 'LBP',
                                                 gridSize,
                                                 sizeRange,
                                                 imResize=None)
    if isReduce:
        feaVectors = pca.reduceVecFeasDim(LBPFeaFile, feaVectors, 8)
        heats = generateHeatMaps2by2(lbp_wordsFile_h1_reduce,
                                     lbp_wordsFile_h2_reduce, feaVectors,
                                     posVectors, gridSize, nk, th1, th2)
    else:
        heats = generateHeatMaps2by2(lbp_saveName_h1, lbp_saveName_h2,
                                     feaVectors, posVectors, gridSize, nk, th1,
                                     th2)

    for c, m in heats.iteritems():
        map3 = np.transpose(m, (1, 0, 2)).reshape(440, 440 * 3)
        map3 = np.append(map3, im, axis=1)
        if isReduce:
            imsave(im_name + '_LBP_reduce_' + c + '_th' + str(th1) + '.jpg',
                   map3)
        else:
            imsave(im_name + '_LBP_' + c + '_th' + str(th1) + '.jpg', map3)

    # ---------------show SDAE local results--------------
    # define SDAE parameters
    sdaePara = {}
    # sdaePara['weight'] = '../../Data/autoEncoder/final_0.01.caffemodel'
    # sdaePara['weight'] = '../../Data/autoEncoder/layer_diff_mean_final.caffemodel'
    sdaePara['weight'] = '../../Data/autoEncoder/layer_diff_mean_s16_final.caffemodel'
    sdaePara['net'] = '../../Data/autoEncoder/test_net.prototxt'
    sdaePara['meanFile'] = '../../Data/patchData_mean_s16.txt'
    sdaePara['patchMean'] = True
    channels = 1
    # layerNeuronNum = [28 * 28, 2000, 1000, 500, 128]
    layerNeuronNum = [16 * 16, 1000, 1000, 500, 64]
    sdaePara['layerNeuronNum'] = layerNeuronNum
    _, gl, _ = esg.generateGridPatchData(imgFile, gridSize, sizeRange)
    batchSize = len(gl)
    inputShape = (batchSize, channels, 16, 16)
    sdaePara['inputShape'] = inputShape

    feaVectors, posVectors = glf.genImgLocalFeas(imgFile,
                                                 'SDAE',
                                                 gridSize,
                                                 sizeRange,
                                                 sdaePara=sdaePara)
    if isReduce:
        feaVectors = pca.reduceVecFeasDim(SDAEFeaFile, feaVectors, 9)
        heats = generateHeatMaps2by2(sdae_wordsFile_h1_reduce,
                                     sdae_wordsFile_h2_reduce, feaVectors,
                                     posVectors, gridSize, nk, th1, th2)
    else:
        heats = generateHeatMaps2by2(sdae_saveName_h1, sdae_saveName_h2,
                                     feaVectors, posVectors, gridSize, nk, th1,
                                     th2)
    for c, m in heats.iteritems():
        map3 = np.transpose(m, (1, 0, 2)).reshape(440, 440 * 3)
        map3 = np.append(map3, im, axis=1)
        if isReduce:
            imsave(im_name + '_SDAE_reduce_' + c + '_th' + str(th1) + '.jpg',
                   map3)
        else:
            imsave(im_name + '_SDAE_' + c + '_th' + str(th1) + '.jpg', map3)
    return 0
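
# Usage sketch (hypothetical; dataFolder is assumed to point at the labeled image
# folder): write the SIFT, LBP and SDAE heat maps for one image next to the original.
saveImgHeatMaps(dataFolder + 'N20031221G030001.bmp', isReduce=False)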
Example 8
 # # print feaVecs.shape, posVecs.shape
 # labelVecs = chm.calPatchLabels2(w, feaVecs, k=nk, two_classes=['1', '2'], isH1=True)
 # posVecs_f, labelVecs_f = filterPos(posVecs, labelVecs, radius=3, spaceSize=10)
 # specialIDs = list(np.argwhere(labelVecs_f == 0)[:, 0])
 # specialPos = list(posVecs_f[specialIDs, :])
 wordsFile_s = [
     lbp_wordsFile_s1, lbp_wordsFile_s2, lbp_wordsFile_s3, lbp_wordsFile_s4
 ]
 specialType = 2
 w = wordsFile_s[specialType]
 patchData_a, specialPos = selectSpecialPatch(imgFile, w, feaType, gridSize,
                                              sizeRange, nk)
 # print labelVecs.shape
 print len(specialPos)
 print len(patchData_a)
 im = imread(imgFile)
 patchData, _, _ = esg.generateGridPatchData(im,
                                             gridSize,
                                             sizeRange,
                                             gridList=specialPos)
 print len(patchData)
 pp = np.array(patchData)
 background_mean = pp.mean()
 print background_mean
 mm = np.mean(np.mean(pp, axis=2), axis=1)
 print mm.shape
 plf.showGrid(im, specialPos)
 plt.show()
 specialList = threshHoldFilterPatch(im, gridSize, sizeRange)
 plf.showGrid(im, specialList)
 plt.show()
Example 9
    # ---------------show SDAE local results--------------
    sdae_wordsFile_h1 = '../../Data/Features/SDAEWords_h1.hdf5'
    sdae_wordsFile_h2 = '../../Data/Features/SDAEWords_h2.hdf5'

    # define sdae model
    weight = '../../Data/autoEncoder/final_0.01.caffemodel'
    net = '../../Data/autoEncoder/test_net.prototxt'
    meanFile = '../../Data/patchData_mean.txt'
    f_mean = open(meanFile, 'r')
    patch_mean = float(f_mean.readline().split(' ')[1])
    f_mean.close()
    channels = 1
    layerNeuronNum = [28 * 28, 2000, 1000, 500, 128]

    _, gl, _ = esg.generateGridPatchData(imgFile, gridSize, sizeRange)
    batchSize = len(gl)

    inputShape = (batchSize, channels, 28, 28)
    with open(net, 'w') as f1:
        f1.write(str(AE.defineTestNet(inputShape, layerNeuronNum)))

    caffe.set_mode_gpu()
    model = caffe.Net(net, weight, caffe.TEST)

    feaVec, posVec = extSDAE.calImgSDAEFea(imgFile, model, gridSize, sizeRange, channels, patch_mean)
    labelVectors_h = calPatchLabelHierarchy(sdae_wordsFile_h1, sdae_wordsFile_h2, feaVec)
    showLocalLabel(imgFile, labelVectors_h, posVec, imResize=None, feaType='SDAE_')

    filtered_pos, filtered_label = filterPos(posVec, labelVectors_h, 1, 10)
    showLocalLabel(imgFile, filtered_label, filtered_pos, imResize=None, feaType='SDAE_filtered_')
    gridSize = np.array([10, 10])
    sizeRange = (28, 28)
    patchSize = sizeRange[0]
    channels = 1
    posParaNum = 4
    layerNeuronNum = [28 * 28, 1000, 1000, 500, 64]
    SDAEFeaDim = layerNeuronNum[-1]

    # weight = '../../Data/autoEncoder/final_0.01.caffemodel'
    # weight = '../../Data/autoEncoder/layer_diff_mean_final.caffemodel'
    # weight = '../../Data/autoEncoder/layer_same_mean_s16_final.caffemodel'
    # weight = '../../Data/autoEncoder/layer_same_mean_s28_special_final.caffemodel'
    weight = '../../Data/autoEncoder/layer_same_mean_s28_special_final.caffemodel'
    net = '../../Data/autoEncoder/test_net.prototxt'
    img_test = dataFolder + 'N20031221G030001.bmp'
    gridPatchData, gridList, im = esg.generateGridPatchData(
        img_test, gridSize, sizeRange)
    batchSize = len(gridList)

    inputShape = (batchSize, 1, 28, 28)
    with open(net, 'w') as f1:
        f1.write(str(AE.defineTestNet(inputShape, layerNeuronNum)))

    caffe.set_mode_gpu()
    model = caffe.Net(net, weight, caffe.TEST)

    labelTruth = '../../Data/Alllabel2003_38044.txt'
    # labelFile = '../../Data/type3_1000_500_500.txt'
    # labelFile = '../../Data/type4_1500_500_500_500.txt'
    # labelFile = '../../Data/type4_600_300_300_300.txt'
    labelFile = '../../Data/type4_b500.txt'
    print plf.compareLabeledFile(labelTruth, labelFile)
Example 11
def makePatchData(labelFile,
                  patchSize,
                  gridSize=np.array([10, 10]),
                  imgType='.bmp',
                  channels=1,
                  savePath='../../Data/one_in_minute_patch_diff_mean.hdf5',
                  same_mean_file='../../Data/patchData_mean_s16.txt',
                  imagesFolder='../../Data/labeled2003_38044/',
                  patchMean=True,
                  saveList='../../Data/patchList_diff_mean.txt',
                  subtract_same_mean=False):
    sizeRange = (patchSize, patchSize)
    [images, labels] = plf.parseNL(labelFile)
    # arragedImages = plf.arrangeToClasses(images, labels, classNum)

    f = h5py.File(savePath, 'w')
    data = f.create_dataset('data', (0, channels, patchSize, patchSize),
                            dtype='f',
                            maxshape=(None, channels, patchSize, patchSize))
    label = f.create_dataset('label', (0, ), dtype='i', maxshape=(None, ))

    if subtract_same_mean:
        patches_mean = 0
        for i in range(len(images)):
            imf = imagesFolder + images[i] + imgType

            gridPatchData, gridList, _ = esg.generateGridPatchData(
                imf, gridSize, sizeRange)

            patchData = np.array(gridPatchData)
            patches_mean += patchData.mean()
        patch_mean = patches_mean / len(images)
        print 'patch number: ' + str(data.shape[0])
        print 'patch mean: ' + str(patch_mean)
        with open(same_mean_file, 'w') as f2:
            f2.write('patch_mean: ' + str(patch_mean))
    else:
        patch_mean = 0

    print 'patch_mean: ', patch_mean
    for i in range(len(images)):
        imf = imagesFolder + images[i] + imgType
        print imf

        gridPatchData, gridList, _ = esg.generateGridPatchData(
            imf, gridSize, sizeRange)

        patchData = [
            p.reshape(channels, patchSize, patchSize) for p in gridPatchData
        ]
        patchData = np.array(patchData) - patch_mean
        if patchMean:
            means = np.mean(np.mean(patchData, axis=-1), axis=-1)
            means = means.reshape(means.shape[0], means.shape[1], 1, 1)
            means = np.tile(means, (1, 1, patchSize, patchSize))
            patchData -= means
        labelData = np.full((len(gridList), ), int(labels[i]), dtype='i')

        oldNum = data.shape[0]
        newNum = oldNum + patchData.shape[0]
        data.resize(newNum, axis=0)
        data[oldNum:newNum, :, :, :] = patchData
        label.resize(newNum, axis=0)
        label[oldNum:newNum, ] = labelData

    f.close()
    print 'make patch data done!'

    with open(saveList, 'w') as f1:
        f1.write(savePath)
    print saveList + ' saved!'

    return 0
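
# Usage sketch (hypothetical label file from the other examples): cut 28x28 patches
# on a 10x10 grid from every listed image and store them, with per-patch mean
# subtraction, in a single HDF5 file.
makePatchData('../../Data/type4_b500.txt',
              patchSize=28,
              patchMean=True,
              subtract_same_mean=False)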
Example 12
def calImgLBPFeatures(imgFile,
                      gridSize,
                      sizeRange,
                      imResize=None,
                      gridList=None,
                      norm=True,
                      withIntensity=None,
                      diffResolution=False):
    # print imgFile
    P1 = 8
    P2 = 16
    P3 = 24
    R1 = 1
    R2 = 2
    R3 = 3
    if imResize:
        gridPatchData, positions, im = esg.generateGridPatchData(
            imgFile, gridSize, sizeRange, imResize=imResize, gridList=gridList)
    else:
        gridPatchData, positions, im = esg.generateGridPatchData(
            imgFile, gridSize, sizeRange, gridList=gridList)
    LBPFeaDim = 10 + 18 + 26
    feaVecs = np.zeros((len(gridPatchData), LBPFeaDim))
    for i in range(len(gridPatchData)):
        LBP_img_R1P8 = feature.local_binary_pattern(gridPatchData[i],
                                                    P1,
                                                    R1,
                                                    method='uniform')
        LBP_img_R2P16 = feature.local_binary_pattern(gridPatchData[i],
                                                     P2,
                                                     R2,
                                                     method='uniform')
        LBP_img_R3P24 = feature.local_binary_pattern(gridPatchData[i],
                                                     P3,
                                                     R3,
                                                     method='uniform')

        lbp_bin_num_R1P8 = P1 + 2
        lbp_hist_R1P8, lbp_bins_R1P8 = np.histogram(
            LBP_img_R1P8.flatten(), bins=range(lbp_bin_num_R1P8 + 1))

        lbp_bin_num_R2P16 = P2 + 2
        lbp_hist_R2P16, lbp_bins_R2P16 = np.histogram(
            LBP_img_R2P16.flatten(), bins=range(lbp_bin_num_R2P16 + 1))

        lbp_bin_num_R3P24 = P3 + 2
        lbp_hist_R3P24, lbp_bins_R3P24 = np.histogram(
            LBP_img_R3P24.flatten(), bins=range(lbp_bin_num_R3P24 + 1))

        feaVec = np.array(
            list(lbp_hist_R1P8) + list(lbp_hist_R2P16) + list(lbp_hist_R3P24))

        feaVecs[i, :] = feaVec
    if norm:
        feaVecs = nv.normalizeVecs(feaVecs)
    # print withIntensity
    # print feaVecs.shape
    if withIntensity is True:
        intensityFeas = intensityFeature(gridPatchData=gridPatchData,
                                         diffResolution=diffResolution)
        feaVecs = np.hstack((feaVecs, intensityFeas))
    return feaVecs, np.array(positions)
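
# Usage sketch (hypothetical imgFile): multi-scale uniform LBP histograms. With P
# sampling points the 'uniform' LBP image has P + 2 distinct codes, so the
# descriptor is 10 + 18 + 26 = 54-D (plus 3 when withIntensity is True).
feaVecs, positions = calImgLBPFeatures(imgFile,
                                       np.array([10, 10]),
                                       (16, 16),
                                       withIntensity=True,
                                       diffResolution=False)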
Example 13
def calCascadeFeaSet(dataFolder,
                     labelFile,
                     siftFeaFile_reduce,
                     lbpFeaFile_reduce,
                     sdaeFeaFile_reduce_d,
                     sdaeFeaFile_reduce_s,
                     classNum,
                     imgType,
                     gridSize,
                     sizeRange,
                     classLabel,
                     sdaePara,
                     saveName,
                     saveFolder='../../Data/Features/'):
    sift_f = h5py.File(siftFeaFile_reduce, 'r')
    sdae_f_d = h5py.File(sdaeFeaFile_reduce_d, 'r')
    sdae_f_s = h5py.File(sdaeFeaFile_reduce_s, 'r')
    lbp_f = h5py.File(lbpFeaFile_reduce, 'r')

    names, labels = plf.parseNL(labelFile)
    if classNum == 4:
        auroraData = plf.arrangeToClasses(names, labels, classNum, classLabel)
    else:
        auroraData, _ = plf.arrangeToClasses(names, labels, classNum,
                                             classLabel)

    f = h5py.File(saveFolder + saveName, 'w')
    f.attrs['dataFolder'] = dataFolder
    ad = f.create_group('auroraData')
    for c, imgs in auroraData.iteritems():
        ascii_imgs = [n.encode('ascii', 'ignore') for n in imgs]
        ad.create_dataset(c, (len(ascii_imgs), ), 'S10', ascii_imgs)

    feaSet = f.create_group('feaSet')
    posSet = f.create_group('posSet')
    for c, imgs in auroraData.iteritems():
        # sift_u = np.array(sift_f.get('uSet/'+c))
        # lbp_u = np.array(lbp_f.get('uSet/'+c))
        # sdae_u_d = np.array(sdae_f_d.get('uSet/'+c))
        # sdae_u_s = np.array(sdae_f_s.get('uSet/'+c))
        sift_u = np.array(sift_f.get('uSet/u'))
        lbp_u = np.array(lbp_f.get('uSet/u'))
        sdae_u_d = np.array(sdae_f_d.get('uSet/u'))
        sdae_u_s = np.array(sdae_f_s.get('uSet/u'))
        imgFile = dataFolder + imgs[0] + imgType
        _, gl, _ = esg.generateGridPatchData(imgFile, gridSize, sizeRange)
        feaVec, posVec = extractCascadeFeatures(imgFile, sift_u, lbp_u,
                                                sdae_u_d, sdae_u_s, gl,
                                                gridSize, sizeRange, sdaePara)
        feaArr = np.empty((0, feaVec.shape[1]))
        posArr = np.empty((0, posVec.shape[1]))
        for name in imgs:
            imgFile = dataFolder + name + imgType
            batchSize = len(gl)
            inputShape = (batchSize, 1, sizeRange[0], sizeRange[0])
            sdaePara['inputShape'] = inputShape
            feaVec, posVec = extractCascadeFeatures(imgFile, sift_u, lbp_u,
                                                    sdae_u_d, sdae_u_s, gl,
                                                    gridSize, sizeRange,
                                                    sdaePara)
            feaArr = np.append(feaArr, feaVec, axis=0)
            posArr = np.append(posArr, posVec, axis=0)
        feaSet.create_dataset(c, feaArr.shape, 'f', feaArr)
        posSet.create_dataset(c, posArr.shape, 'i', posArr)
    f.close()
    print saveFolder + saveName + ' saved'
    return 0