Exemplo n.º 1
0
    def loadImage(self, image_path):
        """Load and preprocess one image, then wait for the companion lua
        process to emit the corresponding feature file and read it back.

        Handshake with the lua side (as visible here): lua writes a file
        matching ``data_demo_*`` under ``self.dataPipePath`` and keeps a
        ``writing_block_<suffix>`` marker alive while the write is in
        progress; once the marker is gone the data file is complete.

        Args:
            image_path: path to the input image, forwarded to
                ``self.preprocessImage``.

        Returns:
            Tuple ``(image, box_scores, box_coords, box_feats, global_feat)``
            where ``image`` is the preprocessed image and the rest are
            arrays read from the torch (.t7) data file.
        """
        import time  # local import: only needed for the polling loop

        # load and preprocess image, save to npy
        image = self.preprocessImage(image_path)
        # todo: emmit lua sub process

        # wait lua to generate corresponding data file, and read
        data_file = self.dataPipePath + 'data_demo_*'

        while True:
            filename = glob.glob(data_file)
            if len(filename) >= 1:
                # lua holds a writing_block_<suffix> marker while the data
                # file is being written; its absence means the write is done.
                if not os.path.isfile(self.dataPipePath + 'writing_block_' +
                                      filename[0].split('_')[-1]):
                    # lua writing file finished.
                    break
            # Brief sleep so the poll does not spin at 100% CPU
            # (the original loop busy-waited).
            time.sleep(0.01)
        assert (len(filename) == 1)
        filename = filename[0]

        # Deserialize the torch7 data file (json-like dict of tensors).
        with open(filename, 'rb') as f:
            reader = torchfile.T7Reader(f, utf8_decode_strings=True)
            data = reader.read_obj()

        box_scores = np.squeeze(data['box_scores'])
        box_coords = data['boxes_pred']
        box_feats = data['box_feats']  #np.expand_dims(,axis=0)
        global_feat = data['glob_feat']
        return image, box_scores, box_coords, box_feats, global_feat
Exemplo n.º 2
0
def from_t7(t, b64=False):
    """Deserialize a Torch7-serialized (.t7) object from raw bytes.

    Args:
        t: the serialized payload as bytes (or base64 text if ``b64``).
        b64: when True, ``t`` is base64-decoded before deserialization.

    Returns:
        The object produced by ``torchfile.T7Reader.read_obj()``.
    """
    if b64:
        t = base64.b64decode(t)

    # NOTE(review): fixed scratch path in shared memory — concurrent
    # callers will clobber each other; consider tempfile if that matters.
    path = '/dev/shm/t7'

    # ``with`` closes the handle; the original also called ff.close()
    # redundantly inside the with-block.
    with open(path, 'wb') as ff:
        ff.write(t)

    # Read back under a context manager — the original opened the read
    # handle without closing it (leaked file descriptor).
    with open(path, 'rb') as sf:
        return torchfile.T7Reader(sf).read_obj()
Exemplo n.º 3
0
    def loadOneJson(self, mode, pInd):
        """Load the data for ONE image from the lua-written pipe file.

        Waits for the lua producer to finish writing ``data_<pInd>_*`` in
        ``self.dataPipePath``, deserializes it with torchfile, and (in
        'train' mode) clears the reading-block marker so the producer can
        continue.

        The loaded ``data`` dict contains fields such as (per the original
        author's notes — shapes not verified here):
            data['info']            = info[1]
            data['box_scores']      = 256x1
            data['boxes_pred']      = [~128]x4
            data['boxes_gt']        = [~128]x4
            data['box_captions_gt'] = [~128]x15
            data['box_feats']       = [~128]x512x7x7
            data['glob_feat']       = 512x30x45
            data['glob_caption_gt'] = 100

        Args:
            mode: 'train' places/removes reading-block markers; other
                modes (e.g. 'test') only read.
            pInd: pipe index selecting which data_<pInd>_* file to load.

        Returns:
            Tuple ``(data, itr, numiters)`` where ``itr`` and ``numiters``
            are parsed (as strings) from the data file's name
            ``data_<pInd>_<itr>_<numiters>``.
        """
        import time  # local import: only needed for the polling loop

        # list data_$pipIndex_*
        # pInd = self.pipeIndex
        data_file = self.dataPipePath + 'data_' + str(pInd) + '_*'
        pick_confirm_file = self.dataPipePath + 'pick_confirm_' + str(pInd)
        writing_block_file = self.dataPipePath + 'writing_block_' + str(pInd)
        reading_block_file = self.dataPipePath + 'reading_block_' + str(pInd)

        while True:
            filename = glob.glob(data_file)
            if len(filename) >= 1:
                #if (not os.path.isfile(pick_confirm_file)) or mode=='test':
                # pick_confirm file has been removed by lua loader. so this file is new.
                if (not os.path.isfile(writing_block_file)) and (
                        not os.path.isfile(reading_block_file)):
                    # lua writing file finished.
                    if mode == 'train':
                        # Claim the file: block the producer from touching
                        # it while we read.
                        os.mknod(reading_block_file)
                    break
            # Brief sleep so the poll does not spin at 100% CPU
            # (the original loop busy-waited).
            time.sleep(0.01)
        assert (len(filename) == 1)
        filename = filename[0]
        # Parse iter and numiters from the name data_<pInd>_<itr>_<numiters>.
        tmp = filename.split('/')[-1].split('_')
        numiters = tmp[-1]
        itr = tmp[-2]

        # json load data_$pipIndex_*
        with open(filename, 'rb') as f:
            reader = torchfile.T7Reader(f, utf8_decode_strings=True)
            data = reader.read_obj()

        if mode == 'train':
            #os.remove(filename)
            # Release the claim so the lua producer may proceed.
            os.remove(reading_block_file)
            # place pick_confirm_$pipIndex to notify lua program
            #os.mknod(pick_confirm_file)

        # update pInd
        # self.updatePipeIndex()
        # return data, iter, numiters
        return data, itr, numiters