Example #1
    def prob_to_word(self, prob):
        """
        Convert 1 hot probabilities to sentences.

        Args:
            prob (Tensor): Word probabilities of each sentence of batch.
                           Of size (vocab_size, batch_size * (max_sentence_length+1))

        Returns:
            list containing sentences
        """

        sents = []

        if not isinstance(prob, np.ndarray):
            prob = prob.get()
        words = [
            self.index_to_vocab[x] for x in np.argmax(prob, axis=0).tolist()
        ]

        for sent_index in xrange(self.be.bsz):
            sent = []
            for i in xrange(self.max_sentence_length):
                word = words[self.be.bsz * i + sent_index]
                sent.append(word)
                if (i > 0 and word == self.end_token) or i >= 20:
                    break
            sents.append(" ".join(sent))

        return sents
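
For readers who want to see the decoding logic in isolation, here is a minimal, self-contained sketch with a made-up five-word vocabulary; `index_to_vocab`, the batch size, and the step count are hypothetical stand-ins for the class attributes used above:

import numpy as np

# Hypothetical toy setup standing in for self.index_to_vocab, self.be.bsz
# and self.max_sentence_length in the example above.
index_to_vocab = {0: "<pad>", 1: "a", 2: "cat", 3: "sat", 4: "<eos>"}
bsz, steps, vocab_size = 2, 4, 5

# Fake probabilities of shape (vocab_size, bsz * steps); the flat axis is
# time-major, so sentence s at time step i sits at position bsz * i + s.
rng = np.random.RandomState(0)
prob = rng.rand(vocab_size, bsz * steps)

words = [index_to_vocab[x] for x in np.argmax(prob, axis=0).tolist()]

for s in range(bsz):
    sent = []
    for i in range(steps):
        word = words[bsz * i + s]
        sent.append(word)
        if i > 0 and word == "<eos>":
            break
    print(" ".join(sent))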
Example #2
    def __iter__(self):
        """
        Generator that can be used to iterate over this dataset.

        Yields:
            tuple, tuple: first tuple contains image features and empty input Tensor
                          second tuple contains list of reference sentences and
                          placeholder for mask.
        """
        for batch_idx in xrange(self.nbatches):

            start = batch_idx * self.be.bsz
            end = (batch_idx + 1) * self.be.bsz

            image_batch = self.images[start:end, :].T.astype(np.float32, order='C')
            self.dev_image.set(image_batch)

            yield (self.dev_image, self.dev_X), (self.ref_sents[start:end], None)
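
A consumption loop for this generator might look like the sketch below; `valid_set`, `model`, and `score_hypothesis` are assumed names for illustration, not taken from the snippet:

# Illustrative only: iterate the evaluation set, run inference, and decode
# with prob_to_word as shown in Example #1.
for (image, X), (ref_sents, _) in valid_set:
    prob = model.fprop((image, X), inference=True)  # assumed model API
    hyps = valid_set.prob_to_word(prob)
    for hyp, refs in zip(hyps, ref_sents):
        score_hypothesis(hyp, refs)  # hypothetical scoring hook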
Example #3
    def __iter__(self):
        """
        Generator that can be used to iterate over this dataset.

        Yields:
            tuples, tuples, first tuple contains image features and one hot input sentence
                            second tuple contains one hot target sentence and mask
                            corresponding to 1's up to where each sentence ends and
                            zeros elsewhere after.
        """

        shuf_idx = self.be.rng.permutation(len(self.X))
        self.X, self.y, self.images = (self.X[shuf_idx], self.y[shuf_idx],
                                       self.images[shuf_idx])
        self.sent_length = self.sent_length[shuf_idx]

        for batch_idx in xrange(self.nbatches):

            start = batch_idx * self.be.bsz
            end = (batch_idx + 1) * self.be.bsz

            # image_batch = self.images[start:end].T.astype(np.float32, order='C')
            self.dev_imageT.set(self.images[start:end])
            self.dev_image[:] = self.dev_imageT.T

            # X_batch = self.X[start:end].T.astype(np.float32, order='C')
            self.dev_lblT.set(self.X[start:end])
            self.dev_lbl[:] = self.dev_lblT.T
            self.dev_X[:] = self.be.onehot(self.dev_lblflat, axis=0)

            self.y_mask[:] = 1
            sent_lens = self.sent_length[start:end]
            self.y_mask[:, self.sent_ends > sent_lens[np.newaxis, :]] = 0
            self.dev_y_mask[:] = self.y_mask_reshape

            # y_batch = self.y[start:end].T.astype(np.float32, order='C')
            self.dev_y_lblT.set(self.y[start:end])
            self.dev_y_lbl[:] = self.dev_y_lblT.T
            self.dev_y[:] = self.be.onehot(self.dev_y_lblflat, axis=0)
            self.dev_y[:] = self.dev_y * self.dev_y_mask

            yield (self.dev_image, self.dev_X), (self.dev_y, self.dev_y_mask)
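
The mask construction is the subtle part: `sent_ends > sent_lens[np.newaxis, :]` broadcasts a grid of per-position step indices against per-sentence lengths. Below is a simplified two-dimensional numpy recreation; the shapes and the `sent_ends` layout are assumptions inferred from that comparison, not copied from the source:

import numpy as np

bsz, steps = 3, 5
sent_lens = np.array([2, 5, 3])  # hypothetical per-sentence lengths

# step index (1-based) of every (step, sentence) position
sent_ends = np.tile(np.arange(1, steps + 1)[:, np.newaxis], (1, bsz))

y_mask = np.ones((steps, bsz))
y_mask[sent_ends > sent_lens[np.newaxis, :]] = 0

print(y_mask)
# each column holds 1's for the first sent_lens[s] steps, 0's after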
Example #4
# set up the detection params
num_images = valid_set.num_images if n_mb is None else n_mb
num_classes = valid_set.num_classes
image_index = valid_set.image_index
# heuristic: keep an average of 40 detections per class per image prior
# to NMS
max_per_set = 40 * num_images
# heuristic: keep at most 100 detections per class per image prior to NMS
max_per_image = 100
# detection threshold for each class (this is adaptively set based on the
# max_per_set constraint)
thresh = -np.inf * np.ones(num_classes)
# top_scores will hold one minheap of scores per class (used to enforce
# the max_per_set constraint)
top_scores = [[] for _ in xrange(num_classes)]
# all detections are collected into:
#    all_boxes[cls][image] = N x 5 array of detections in
#    (x1, y1, x2, y2, score)
all_boxes = [[[] for _ in xrange(num_images)] for _ in xrange(num_classes)]

NMS_THRESH = 0.3

neon_logger.display('total batches {}'.format(valid_set.nbatches))

last_strlen = 0
# iterate through minibatches of the dataset
for mb_idx, (x, db) in enumerate(valid_set):

    # print testing progress
    prt_str = "Finished: {} / {}".format(mb_idx, valid_set.nbatches)
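
The loop body is truncated here, but the comments above describe the standard Fast R-CNN test-time heuristic: a per-class min-heap of scores bounds the total number of detections, and the smallest retained score becomes that class's adaptive threshold. A hypothetical sketch of that update, extrapolated from the comments rather than copied from the source:

import heapq

def update_class_threshold(top_scores, thresh, cls, scores, max_per_set):
    # push new detection scores for this class onto its min-heap
    for s in scores:
        heapq.heappush(top_scores[cls], s)
    # evict the weakest scores once the heap exceeds the global budget
    while len(top_scores[cls]) > max_per_set:
        heapq.heappop(top_scores[cls])
    # once the budget is full, the smallest kept score is the new threshold
    if len(top_scores[cls]) == max_per_set:
        thresh[cls] = top_scores[cls][0]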