Code example #1
File: 5_2_4_43.py  Project: ItzelXu/EE569-2020Spring
class Cal_ce:
    """Thin wrapper around Cross_Entropy so the cross entropy of a single
    feature column can be computed per call (the commented-out joblib block
    in code example #5 below shows the intended use)."""

    def __init__(self, output_train_sub):
        # Note: the constructor argument is not used; the wrapper builds its
        # own Cross_Entropy instance with 10 classes and 5 bins.
        self.ce = Cross_Entropy(num_class=10, num_bin=5)

    def ce_cal(self, each_out, label):
        # Reshape a 1-D feature column to (numSamples, 1), as compute() expects
        each_out = each_out.reshape(-1, 1)
        return self.ce.compute(each_out, label)
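A minimal sketch of the joblib pattern this wrapper is built for, assuming the course's Cross_Entropy class and the definition above are available; the arrays `pooled` and `labels` are placeholders, not from the source:

import multiprocessing as mp
import numpy as np
from joblib import Parallel, delayed

pooled = np.random.rand(100, 50).astype(np.float32)  # placeholder (numImages, numFeatures) matrix
labels = np.random.randint(0, 10, size=100)          # placeholder class labels

cal_ce = Cal_ce(pooled)  # the constructor argument is ignored (see note above)
feat_ce = Parallel(n_jobs=mp.cpu_count(), backend='multiprocessing')(
    delayed(cal_ce.ce_cal)(pooled[:, k], labels) for k in range(pooled.shape[-1]))
feat_ce = np.array(feat_ce)  # one cross-entropy value per feature column
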
def featureSelector_binMethod(X,Y):
    '''
    Select the 50% of features with the lowest cross entropy.
    input:
    X: output from max-pooling. Typically of shape (numOfImages, pooled dimension, pooled dimension, K), e.g. (50000, 14, 14, 41) at unit 1
    Y: labels corresponding to X
    output:
    selectedFeaturesIndex: indices of the selected features
    reducedFeatureImgs: values of the selected features for every image, shape (numOfImages, numOfNs)
    numOfNs: number of selected features
    '''
    
    reshapedPooledX = X.reshape((len(X),-1))
    
    crossEntropyObj = Cross_Entropy(num_class = 10, num_bin = 10) 
    crossEntropyValStorage = np.zeros(reshapedPooledX.shape[-1])
    for storageIndex in range(reshapedPooledX.shape[-1]): #12 minutes for 100 images and 8036(14*14*41) dimension
        crossEntropyValStorage[storageIndex] = crossEntropyObj.compute(reshapedPooledX[:,storageIndex].reshape(-1,1), Y)
    print("calculation is done \n")
    
    sortedIndex = np.argsort(crossEntropyValStorage)  # indices that would sort the cross-entropy values in ascending order
    numOfNs = int(sortedIndex.shape[0] / 2)  # keep the lowest 50 percent
    # numOfNs = int(1000)  # alternative: keep the 1000 lowest-cross-entropy features
    selectedFeaturesIndex = sortedIndex[0:numOfNs]
    
    
    # Keep only the selected feature columns for every image
    reducedFeatureImgs = reshapedPooledX[:, selectedFeaturesIndex].astype(np.float64)
        
    return selectedFeaturesIndex, reducedFeatureImgs, numOfNs
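A hedged usage sketch for featureSelector_binMethod with a small placeholder batch; the array names and sizes below are assumptions, not from the source:

import numpy as np

pooledOutput = np.random.rand(100, 14, 14, 41).astype(np.float32)  # placeholder max-pooled output
trainLabels = np.random.randint(0, 10, size=100)                   # placeholder labels

selectedIdx, reducedFeats, numSelected = featureSelector_binMethod(pooledOutput, trainLabels)
print(reducedFeats.shape)  # (100, numSelected); numSelected == (14 * 14 * 41) // 2
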
Code example #3
def cal_CE_layer(features, trainLabel):
    kernelNum = features.shape[1]
    # Compute the cross entropy of each kernel (feature channel) in the layer
    CE = []
    for i in range(kernelNum):
        tempData = features[:, i]
        tempData = tempData.reshape((len(tempData), -1))
        ce = Cross_Entropy(num_class=10, num_bin=10)
        ce_value = ce.compute(tempData, trainLabel)
        CE.append(ce_value)
    return CE
def extract_feature(re_output, re_output_test, N, train_label):
    my_CrossEntropy = Cross_Entropy(num_class=10, num_bin=5)
    feat_ce = np.zeros(re_output.shape[-1])
    print("re_output.shape[-1]", re_output.shape[-1])
    print("re_output_test.shape[-1]", re_output_test.shape[-1])

    for k in range(re_output.shape[-1]):
        feat_ce[k] = my_CrossEntropy.compute(re_output[:, k].reshape(-1, 1), train_label)
        # feat_ce[k] = my_CrossEntropy.KMeans_Cross_Entropy(re_output[:, k].reshape(-1, 1), train_label)
        # print(" --> KMeans cross entropy: %s" % str(feat_ce[k]))

    sorted_index = np.argsort(feat_ce)  # ascending: lowest cross entropy first
    final_output = np.zeros([re_output.shape[0], N], np.float32)
    print("re_output.shape[0]", re_output.shape[0])
    final_output_test = np.zeros([re_output_test.shape[0], N], np.float32)
    print("re_output_test.shape[0]", re_output_test.shape[0])

    for i in range(0, N):
        final_output[:, i] = re_output[:, sorted_index[i]]
        final_output_test[:, i] = re_output_test[:, sorted_index[i]]

    return final_output, final_output_test
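A hedged usage sketch for extract_feature with placeholder train/test matrices; the names, sizes, and the value of N are assumptions, not from the source:

import numpy as np

re_output = np.random.rand(1000, 400).astype(np.float32)      # placeholder (numTrain, numFeatures)
re_output_test = np.random.rand(200, 400).astype(np.float32)  # placeholder (numTest, numFeatures)
train_label = np.random.randint(0, 10, size=1000)

# Keep the N features with the lowest training cross entropy in both splits
train_feat, test_feat = extract_feature(re_output, re_output_test, N=100, train_label=train_label)
print(train_feat.shape, test_feat.shape)  # (1000, 100) (200, 100)
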
Code example #5
File: 5_2_4_43.py  Project: ItzelXu/EE569-2020Spring
# parallel
# for each in output_train_avg:
#     each = each.reshape(each.shape[0], -1)
#     ce = Cross_Entropy(num_class=10, num_bin=5)
#     cal_ce = Cal_ce(ce)
#     feat_ce = Parallel(n_jobs=mp.cpu_count(), backend='multiprocessing')(delayed(cal_ce.ce_cal)(each[:,k], y_train_later) for k in range(each.shape[-1]))
#     feat_ce = np.array(feat_ce)
#     feature_set.append(np.argpartition(feat_ce, np.min( (Ns, each.shape[-1] - 1) ))[:np.min( (Ns, each.shape[-1]) )])

for each in output_train_avg:
    each = each.reshape(each.shape[0], -1)
    ce = Cross_Entropy(num_class=10, num_bin=10)
    feat_ce = np.zeros(each.shape[-1])
    for k in range(each.shape[-1]):
        feat_ce[k] = ce.compute(each[:, k].reshape(-1, 1), y_train_later)
        # print(" --> KMeans ce: %s"%str(feat_ce[k]))
    # Keep up to Ns feature indices with the lowest cross entropy
    # (np.argpartition does not order the kept indices among themselves)
    feature_set.append(
        np.argpartition(feat_ce, np.min(
            (Ns, each.shape[-1] - 1)))[:np.min((Ns, each.shape[-1]))])
    print("------- DONE -------\n")

# for each in output_train_max:
#     each = each.reshape(each.shape[0], -1)
#     ce = Cross_Entropy(num_class=10, num_bin=5)
#     cal_ce = Cal_ce(ce)
#     feat_ce = Parallel(n_jobs=mp.cpu_count(), backend='multiprocessing')(delayed(cal_ce.ce_cal)(each[:,k], y_train_later) for k in range(each.shape[-1]))
#     feat_ce = np.array(feat_ce)
#     feature_set.append(np.argpartition(feat_ce, np.min( (Ns, each.shape[-1] - 1) ))[:np.min( (Ns, each.shape[-1]) )])

f = open('feature_set_avg_max_single.pkl', 'wb')
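The listing breaks off after opening the pickle file; presumably feature_set is serialized to it next, along these lines (an assumption, not shown in the source):

import pickle

pickle.dump(feature_set, f)  # assumption: persist the selected feature indices
f.close()
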
Code example #6
    # Flatten each of the three outputs to (train_num, numFeatures)
    for i in range(0, 3):
        output[i] = output[i].reshape(
            train_num,
            output[i].shape[1] * output[i].shape[2] * output[i].shape[3])

    # ------------------ cross entropy ------------------ #
    ce = Cross_Entropy(num_class=10, num_bin=5)

    print('Ns_1 - %d Ns_2 - %d Ns_3 - %d' % (Ns_1, Ns_2, Ns_3))

    entropy = np.zeros(output[0].shape[1])
    for j in range(0, output[0].shape[1]):
        entropy[j] = ce.compute(output[0][:, j].reshape(-1, 1),
                                y_train[0:train_num])
    # Rank features by cross entropy (descending) and keep the top Ns_1
    rank_1 = np.argsort(-entropy)
    output[0] = output[0][:, rank_1[0:Ns_1]]

    # ------------------------------------ #

    entropy = np.zeros(output[1].shape[1])
    for j in range(0, output[1].shape[1]):
        entropy[j] = ce.compute(output[1][:, j].reshape(-1, 1),
                                y_train[0:train_num])
    # Rank features by cross entropy (descending) and keep the top Ns_2
    rank_2 = np.argsort(-entropy)
    output[1] = output[1][:, rank_2[0:Ns_2]]

    # ------------------------------------ #