Пример #1
0
    def make_input(self, batch_dict, train=False):
        """Build the model's input mapping from a `batch_dict`.

        In eager mode the result is a plain dict keyed by embedding name;
        in graph mode it is a feed_dict keyed by placeholder tensor names.

        :param batch_dict: (``dict``) A dictionary containing all inputs to the embeddings for this model
        :param train: (``bool``) Are we training.  Defaults to False
        :return: A dict suitable for feeding the model
        """
        gold = batch_dict.get('y')
        if tf.executing_eagerly():
            SET_TRAIN_FLAG(train)
            inputs = {name: batch_dict[name] for name in self.embeddings}

            # Track a length when configured; needed for BLSTMs
            if self.lengths_key is not None:
                inputs["lengths"] = batch_dict[self.lengths_key]
        else:
            inputs = new_placeholder_dict(train)
            for name in self.embeddings:
                inputs["{}:0".format(name)] = batch_dict[name]

            # Track a length when configured; needed for BLSTMs
            if self.lengths_key is not None:
                inputs[self.lengths] = batch_dict[self.lengths_key]

            # Gold labels are only fed in graph mode, matching the original
            if gold is not None:
                inputs[self.y] = fill_y(len(self.labels), gold)

        return inputs
Пример #2
0
    def ex2dict(self, x, y, do_dropout=False):
        """Convert input tensors `x` and `y` into a `feed_dict`.

        :param x: Input tensor `x`
        :param y: Input tensor `y` of labels
        :param do_dropout: (``bool``) If on, use the dropout value provided during model construction
        :return: A `feed_dict`
        """
        # Keep probability is the complement of the dropout rate; use a float
        # literal in both branches so the placeholder always receives a float
        pkeep = 1.0 - self.pdrop_value if do_dropout else 1.0
        return {
            self.x: x,
            self.y: fill_y(len(self.labels), y),
            self.pkeep: pkeep
        }
Пример #3
0
 def make_input(self, batch_dict, do_dropout=False):
     """Transform a `batch_dict` into a `feed_dict`.

     :param batch_dict: (``dict``) Must contain `x`, `y` and `lengths`
     :param do_dropout: (``bool``) If on, use the dropout value provided during model construction
     :return: A `feed_dict`
     """
     x = batch_dict['x']
     y = batch_dict['y']
     lengths = batch_dict['lengths']
     # Float literal in the no-dropout branch keeps the keep-probability's
     # type consistent (the dropout branch already yields a float)
     pkeep = 1.0 - self.pdrop_value if do_dropout else 1.0
     return {
         self.x: x,
         self.lengths: lengths,
         self.y: fill_y(len(self.labels), y),
         self.pkeep: pkeep
     }
Пример #4
0
 def ex2dict(self, x, y, do_dropout=False):
     """Convert from an input of x and y tensors to a `feed_dict`

     :param x: Input tensor `x` (`BxT`)
     :param y: Input tensor `y` (`B`)
     :param do_dropout: Defaults to off.  If its on, use the dropout value provided during model construction
     :return: A `feed_dict`
     """
     # Float literal in both branches keeps pkeep's type consistent
     pkeep = 1.0 - self.pdrop_value if do_dropout else 1.0
     return {
         self.x: x,
         self.y: fill_y(len(self.labels), y),
         self.pkeep: pkeep
     }
Пример #5
0
    def make_input(self, batch_dict, do_dropout=False):
        """Transform a `batch_dict` into a TensorFlow `feed_dict`.

        :param batch_dict: (``dict``) Must contain `x`; may contain `y`,
            `xch` and `lengths`, each fed only when both the value and its
            placeholder are present
        :param do_dropout: (``bool``) If on, use the dropout value provided during model construction
        :return: A `feed_dict`
        """
        x = batch_dict['x']
        y = batch_dict.get('y', None)
        xch = batch_dict.get('xch')
        lengths = batch_dict.get('lengths')
        pkeep = 1.0 - self.pdrop_value if do_dropout else 1.0
        feed_dict = {self.x: x, self.pkeep: pkeep}

        # Guard both the placeholder AND the value, mirroring the `xch`
        # handling below: previously a batch without 'lengths' would feed
        # None for a real placeholder and fail at sess.run time
        if hasattr(self, 'lengths') and self.lengths is not None and lengths is not None:
            feed_dict[self.lengths] = lengths
        if hasattr(self, 'xch') and xch is not None and self.xch is not None:
            feed_dict[self.xch] = xch

        if y is not None:
            feed_dict[self.y] = fill_y(len(self.labels), y)
        return feed_dict
Пример #6
0
    def make_input(self, batch_dict, train=False):
        """Create a training `feed_dict` for the parallel (multi-tower) graph.

        Non-training calls are delegated to the wrapped inference model.

        :param batch_dict: (``dict``) A dictionary containing all inputs to the embeddings for this model
        :param train: (``bool``) Are we training.  Defaults to False
        :return: A `feed_dict`
        """
        if train is False:
            return self.inference.make_input(batch_dict)

        feed_dict = new_placeholder_dict(True)
        for name in self.parallel_params:
            feed_dict["{}_parallel:0".format(name)] = batch_dict[name]

        # Track a length when configured; needed for BLSTMs
        if self.lengths_key is not None:
            feed_dict[self.lengths] = batch_dict[self.lengths_key]

        gold = batch_dict.get('y')
        if gold is not None:
            feed_dict[self.y] = fill_y(len(self.labels), gold)
        return feed_dict
Пример #7
0
    def make_input(self, batch_dict, train=False):
        """Build the `feed_dict` used when training the parallel towers.

        When not training, defer to the inference model's own `make_input`.

        :param batch_dict: (``dict``) A dictionary containing all inputs to the embeddings for this model
        :param train: (``bool``) Are we training.  Defaults to False
        :return: A `feed_dict`
        """
        if train is False:
            return self.inference.make_input(batch_dict)

        labels = batch_dict.get('y', None)
        feed_dict = new_placeholder_dict(True)
        feed_dict.update(
            ("{}_parallel:0".format(key), batch_dict[key])
            for key in self.parallel_params.keys()
        )

        # A length is tracked when configured; needed for BLSTMs
        if self.lengths_key is not None:
            feed_dict[self.lengths] = batch_dict[self.lengths_key]

        if labels is not None:
            feed_dict[self.y] = fill_y(len(self.labels), labels)
        return feed_dict
Пример #8
0
    def _test(self, ts):
        """Evaluate the model over the dataset `ts`.

        :param ts: An iterable (with `len`) of `batch_dict`s
        :return: (``dict``) Confusion-matrix metrics plus `avg_loss`
        """
        total_loss = 0
        steps = len(ts)
        cm = ConfusionMatrix(self.model.labels)

        pg = create_progress_bar(steps)
        for batch_dict in ts:
            y = fill_y(len(self.model.labels), batch_dict['y'])
            # Evaluation must NOT apply dropout (previously do_dropout=True):
            # all units stay active so metrics reflect the deployed model
            feed_dict = self.model.make_input(batch_dict, do_dropout=False)
            preds, lossv = self.model.sess.run(
                [self.model.best, self.model.loss], feed_dict=feed_dict)
            cm.add_batch(y, preds)
            total_loss += lossv
            pg.update()
        pg.done()
        metrics = cm.get_all_metrics()
        metrics['avg_loss'] = float(total_loss) / steps
        return metrics
Пример #9
0
    def make_input(self, batch_dict, train=False):
        """Map a `batch_dict` onto the graph's input placeholders.

        :param batch_dict: (``dict``) A dictionary containing all inputs to the embeddings for this model
        :param train: (``bool``) Are we training.  Defaults to False
        :return: A TensorFlow `feed_dict`
        """
        feed_dict = new_placeholder_dict(train)

        for name in self.embeddings:
            feed_dict["{}:0".format(name)] = batch_dict[name]

        # A length is tracked when configured; needed for BLSTMs
        if self.lengths_key is not None:
            feed_dict[self.lengths] = batch_dict[self.lengths_key]

        gold = batch_dict.get('y')
        if gold is not None:
            feed_dict[self.y] = fill_y(len(self.labels), gold)

        return feed_dict