Code Example #1
    def __init__(self, batch_size):
        BaseDataIter.__init__(self, batch_size)
        self.num_train_batch = 0
        self.num_test_batch = 0

        with open('./data/nuswide/img_train_id_feats.pkl', 'rb') as f:
            self.train_img_feats = cPickle.load(f)  # load training image features
        with open('./data/nuswide/train_id_bow.pkl', 'rb') as f:
            self.train_txt_vecs = cPickle.load(f)  # load training text features
        with open('./data/nuswide/train_id_label_map.pkl', 'rb') as f:
            self.train_labels = cPickle.load(f)  # load training labels
        with open('./data/nuswide/img_test_id_feats.pkl', 'rb') as f:
            self.test_img_feats = cPickle.load(f)  # load test image features
        with open('./data/nuswide/test_id_bow.pkl', 'rb') as f:
            self.test_txt_vecs = cPickle.load(f)  # load test text features
        with open('./data/nuswide/test_id_label_map.pkl', 'rb') as f:
            self.test_labels = cPickle.load(f)  # load test labels
        with open('data/nuswide/train_id_label_single.pkl', 'rb') as f:
            self.train_labels_single = cPickle.load(f)
        with open('data/nuswide/test_id_label_single.pkl', 'rb') as f:
            self.test_labels_single = cPickle.load(f)

        self.num_train_batch = len(self.train_img_feats) / self.batch_size  # compute the required number of batches
        self.num_test_batch = len(self.test_img_feats) / self.batch_size
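The example above loads the features with the Python 2 `cPickle` module. Below is a minimal sketch of the same loading step under Python 3, assuming the pickle files were produced by Python 2 (the `encoding='iso-8859-1'` argument mirrors the pattern used in Code Example #6 below); the helper name `load_pkl` and the batch size are only illustrative:

import pickle

def load_pkl(path):
    # Hypothetical helper: read a pickle file written by Python 2 cPickle.
    # encoding='iso-8859-1' decodes the legacy byte strings, the same trick
    # used in Code Example #6.
    with open(path, 'rb') as f:
        return pickle.load(f, encoding='iso-8859-1')

batch_size = 64  # example value
train_img_feats = load_pkl('./data/nuswide/img_train_id_feats.pkl')
# Integer division keeps the batch count an int under Python 3.
num_train_batch = len(train_img_feats) // batch_size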
Code Example #2
    def __init__(self, batch_size):
        BaseDataIter.__init__(self, batch_size)
        self.num_train_batch = 0
        self.num_test_batch = 0

        with open('./data/nuswide/img_train_id_feats.pkl', 'rb') as f:
            self.train_img_feats = cPickle.load(f)
        with open('./data/nuswide/train_id_bow.pkl', 'rb') as f:
            self.train_txt_vecs = cPickle.load(f)
        with open('./data/nuswide/train_id_label_map.pkl', 'rb') as f:
            self.train_labels = cPickle.load(f)
        with open('./data/nuswide/img_test_id_feats.pkl', 'rb') as f:
            self.test_img_feats = cPickle.load(f)
        with open('./data/nuswide/test_id_bow.pkl', 'rb') as f:
            self.test_txt_vecs = cPickle.load(f)
        with open('./data/nuswide/test_id_label_map.pkl', 'rb') as f:
            self.test_labels = cPickle.load(f)
        with open('data/nuswide/train_ids.pkl', 'rb') as f:
            self.train_ids = cPickle.load(f)
        with open('data/nuswide/test_ids.pkl', 'rb') as f:
            self.test_ids = cPickle.load(f)
        with open('data/nuswide/train_id_label_single.pkl', 'rb') as f:
            self.train_labels_single = cPickle.load(f)
        with open('data/nuswide/test_id_label_single.pkl', 'rb') as f:
            self.test_labels_single = cPickle.load(f)

        np.random.shuffle(self.train_ids)
        np.random.shuffle(self.test_ids)
        self.num_train_batch = len(self.train_ids) / self.batch_size
        self.num_test_batch = len(self.test_ids) / self.batch_size
Code Example #3
    def __init__(self, batch_size, data):
        BaseDataIter.__init__(self, batch_size)
        self.num_train_batch = 0
        self.num_test_batch = 0

        self.train_img_feats = data.x_train['img_train']
        self.train_txt_vecs = data.x_train['txt_train']
        self.train_labels = data.y_train['train_labels']
        self.test_img_feats = data.x_test['img_test']
        self.test_txt_vecs = data.x_test['txt_test']
        self.test_labels = data.y_test['test_labels']
        self.num_train_batch = len(self.train_img_feats) / self.batch_size
        self.num_test_batch = len(self.test_img_feats) / self.batch_size
Code Example #4
    def __init__(self, batch_size):
        BaseDataIter.__init__(self, batch_size)
        self.num_train_batch = 0
        self.num_test_batch = 0

        with open('./data/wikipedia_dataset/train_img_feats.pkl', 'rb') as f:
            self.train_img_feats = cPickle.load(f)
        with open('./data/wikipedia_dataset/train_txt_vecs.pkl', 'rb') as f:
            self.train_txt_vecs = cPickle.load(f)
        with open('./data/wikipedia_dataset/train_labels.pkl', 'rb') as f:
            self.train_labels = cPickle.load(f)
        with open('./data/wikipedia_dataset/test_img_feats.pkl', 'rb') as f:
            self.test_img_feats = cPickle.load(f)
        with open('./data/wikipedia_dataset/test_txt_vecs.pkl', 'rb') as f:
            self.test_txt_vecs = cPickle.load(f)
        with open('./data/wikipedia_dataset/test_labels.pkl', 'rb') as f:
            self.test_labels = cPickle.load(f)

        self.num_train_batch = len(self.train_img_feats) / self.batch_size
        self.num_test_batch = len(self.test_img_feats) / self.batch_size
Code Example #5
    def __init__(self, batch_size, data):
        BaseDataIter.__init__(self, batch_size)
        self.data = data  # keep a reference to the data container (the original `data = data` was a no-op)
        self.num_train_batch = 0
        self.num_test_batch = 0
        self.train_img_feats = data.x_train['img_train']
        self.train_txt_vecs = data.x_train['txt_train']
        self.train_labels = data.y_train['train_labels']
        self.test_img_feats = data.x_test['img_test']
        self.test_txt_vecs = data.x_test['txt_test']
        self.test_labels = data.y_test['test_labels']
        self.train_ids = data.y_train['train_ids']
        self.test_ids = data.y_test['test_ids']
        self.train_labels_single = data.y_train['train_labels_single']
        self.test_labels_single = data.y_test['test_labels_single']

        np.random.shuffle(self.train_ids)
        # np.random.shuffle(self.test_ids)
        self.num_train_batch = len(self.train_ids) / self.batch_size
        self.num_test_batch = len(self.test_ids) / self.batch_size
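The iterator classes in Examples #2 and #5 only show `__init__`; the sketch below illustrates how the shuffled `train_ids` might be walked to produce mini-batches. The function name `train_batches` and the per-id lookups are assumptions for illustration, not the actual iterator interface:

import numpy as np

def train_batches(data_iter):
    # Hypothetical helper: slice the shuffled train_ids into batch_size-sized
    # chunks and look up the matching image features, text vectors and labels.
    for b in range(data_iter.num_train_batch):
        ids = data_iter.train_ids[b * data_iter.batch_size:(b + 1) * data_iter.batch_size]
        img_feats = np.array([data_iter.train_img_feats[i] for i in ids])
        txt_vecs = np.array([data_iter.train_txt_vecs[i] for i in ids])
        labels = [data_iter.train_labels[i] for i in ids]
        yield img_feats, txt_vecs, labels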
Code Example #6
    def __init__(self, batch_size):
        BaseDataIter.__init__(self, batch_size)
        self.num_train_batch = 0
        self.num_test_batch = 0

        with open('./data/xmn/train_img_files.pkl', 'rb') as f:
            self.train_img_feats = pickle.load(f, encoding='iso-8859-1')
        with open('./data/xmn/train_txt_files.pkl', 'rb') as f:
            self.train_txt_vecs = pickle.load(f, encoding='iso-8859-1')
        with open('./data/xmn/train_labels.pkl', 'rb') as f:
            self.train_labels = pickle.load(f, encoding='iso-8859-1')
        with open('./data/xmn/train_attribute.pkl', 'rb') as f:
            self.train_attributes = pickle.load(f, encoding='iso-8859-1')

        with open('./data/xmn/test_img_files.pkl', 'rb') as f:
            self.test_img_feats = pickle.load(f, encoding='iso-8859-1')
        with open('./data/xmn/test_txt_files.pkl', 'rb') as f:
            self.test_txt_vecs = pickle.load(f, encoding='iso-8859-1')
        with open('./data/xmn/test_labels.pkl', 'rb') as f:
            self.test_labels = pickle.load(f, encoding='iso-8859-1')

        self.num_train_batch = len(self.train_img_feats) // self.batch_size  # integer division keeps the batch count an int under Python 3
Code Example #7
    def __init__(self, batch_size):
        BaseDataIter.__init__(self, batch_size)
        self.num_train_batch = 0
        self.num_test_batch = 0