Example #1
0
    def execute(self, execution_component, input_dictionary, value,
                output_value_models, mode):
        """Compute the mean of the "input" tensor along ``value.axis``.

        When a soft length is defined for that axis, out-of-range elements
        are zeroed before summing and the sum is divided by the true
        per-example lengths; otherwise a plain ``tf.reduce_mean`` is used.
        The reduced axis is dropped from the output's length list.
        """
        all_lengths = input_dictionary["input"].get_lengths()
        val = input_dictionary["input"].get_value()

        if all_lengths[value.axis] is not None:
            axis_lengths = tf.cast(all_lengths[value.axis], tf.float32)

            # Align the length tensor's rank with the summed tensor so the
            # division below broadcasts correctly.
            for _ in range(value.axis + 1, len(all_lengths)):
                axis_lengths = tf.expand_dims(axis_lengths, -1)

            replacement = tf.zeros_like(val)

            sth = SoftTensorHelper()
            zeroed_out = sth.replace_elements_outside_lengths(
                val, all_lengths, replacement)

            summed_dim = tf.reduce_sum(zeroed_out, axis=value.axis)
            # Epsilon guards against division by zero for empty rows.
            mean_dim = summed_dim / (axis_lengths + 1e-8)
        else:
            mean_dim = tf.reduce_mean(val, axis=value.axis)

        # The reduced axis disappears from the output's length list.
        # (Reuses all_lengths rather than fetching the lengths a second
        # time, as the original did.)
        previous_dim_idxs = list(range(len(all_lengths)))
        del previous_dim_idxs[value.axis]

        new_lengths = [all_lengths[x] for x in previous_dim_idxs]

        output_value_models["output"].assign(mean_dim, length_list=new_lengths)

        return output_value_models
    def as_soft_tensor(self):
        """Lazily build and cache the string soft-tensor representation.

        The conversion runs once; subsequent calls return the cached
        tensor and its soft length list.
        """
        if self.tensor is not None:
            return self.tensor, self.length_list

        helper = SoftTensorHelper()
        self.tensor, self.length_list = helper.to_soft_tensor(
            self.full_list, self.infer_dims(), [False, True], "string")

        return self.tensor, self.length_list
    def execute(self, execution_component, input_dictionary, value,
                output_value_models, mode):
        """Per-example hinge loss, averaged over every non-batch dimension.

        Loss entries outside the label lengths are zeroed so padding does
        not contribute; soft dimensions are averaged by their true lengths.
        """
        logits = input_dictionary["logits"].get_value()
        labels = tf.cast(input_dictionary["labels"].get_value(), tf.float32)
        loss = tf.losses.hinge_loss(logits=logits,
                                    labels=labels,
                                    reduction=tf.losses.Reduction.NONE)

        lengths = input_dictionary["labels"].get_lengths()

        sth = SoftTensorHelper()
        replacement_tensor = tf.zeros_like(loss)
        loss = sth.replace_elements_outside_lengths(loss, lengths,
                                                    replacement_tensor)

        # NOTE(review): axes are indexed against the original length list
        # even though each reduction removes a dimension -- this matches
        # the sibling loss components; confirm the intended axis order.
        for i in range(1, len(lengths)):
            if lengths[i] is not None:
                # Renamed from `sum`, which shadowed the builtin.
                summed = tf.reduce_sum(loss, axis=i)

                axis_lengths = tf.cast(lengths[i], tf.float32)
                for _ in range(i + 1, len(lengths)):
                    axis_lengths = tf.expand_dims(axis_lengths, -1)
                # axis_lengths is already float32; no second cast needed.
                loss = summed / (axis_lengths + 1e-8)
            else:
                loss = tf.reduce_mean(loss, axis=i)

        output_value_models["output"].assign(loss, length_list=[lengths[0]])

        return output_value_models
    def execute(self, execution_component, input_dictionary, value,
                output_models, mode):
        """Softplus activation with a fixed vocabulary mask (tensorflow only).

        Dimensions listed in ``value.mask_dimensions`` are zeroed on the
        final (vocabulary) axis, and anything outside the soft lengths is
        zeroed as well.
        """
        if value.language == "python":
            print("didnt bother defining ReLu for python")
            exit()
        elif value.language == "tensorflow":
            activated = tf.nn.softplus(input_dictionary["input"].get_value())

            all_lengths = input_dictionary["input"].get_lengths()

            # Broadcastable {0,1} mask over the vocabulary axis.
            mask = np.ones([1] * (len(all_lengths) - 1) + [value.vocab_size],
                           dtype=np.float32)
            leading = tuple(slice(None, None, 1)
                            for _ in range(len(all_lengths) - 1))
            for masked_index in value.mask_dimensions:
                mask[leading + (masked_index, )] = 0

            masked = tf.multiply(activated, mask)

            # Zero out anything that lies outside the soft lengths.
            helper = SoftTensorHelper()
            cleaned = helper.replace_elements_outside_lengths(
                masked, all_lengths, tf.zeros_like(masked))

            output_models["output"].assign(cleaned, length_list=all_lengths)

        return output_models
    def initial_assign(self, python_representation):
        """Convert a raw python structure into a soft tensor and assign it.

        Dimensions beyond the configured maximum lengths are chopped off
        by the assign call.
        """
        helper = SoftTensorHelper()
        as_tensor, length_tensors = helper.to_soft_tensor(
            python_representation, self.max_lengths, self.soft_by_dimension,
            self.get_data_type())

        self.assign(as_tensor, length_tensors, chop_dimensions=True)
    def execute(self, execution_component, input_dictionary, value,
                output_models, mode):
        """Convert per-cell indexes into global coordinate rows (tensorflow only).

        Each in-bounds element's position is paired with the index value it
        stores, producing a 2-D tensor of coordinates suitable for gather/
        scatter operations.
        """
        if value.language == "python":
            print("didnt implement index_converter for python")
            exit()
        elif value.language == "tensorflow":
            values = input_dictionary["input"].get_value()
            lengths = input_dictionary["input"].get_lengths()[:]

            # Boolean mask marking positions inside the soft lengths.
            helper = SoftTensorHelper()
            if value.keep_last_dimension:
                # Mask everything except the trailing index dimension.
                length_mask = helper.retrieve_boolean_length_mask(
                    tf.zeros(tf.shape(values)[:-1], dtype=tf.int32),
                    lengths[:-1])
            else:
                length_mask = helper.retrieve_boolean_length_mask(
                    values, lengths)

            # Coordinates of every in-bounds element, one row per element.
            prefixes = tf.cast(tf.where(length_mask), dtype=tf.int32)

            # Look up the stored index at each coordinate.
            final_indexes = tf.gather_nd(values, prefixes)
            if not value.keep_last_dimension:
                final_indexes = tf.expand_dims(final_indexes, -1)

            # Drop the innermost coordinate column before concatenation.
            columns = tf.unstack(prefixes, axis=-1)
            prefixes = tf.stack(columns[:-1], -1)

            output = tf.concat([prefixes, final_indexes], axis=-1)

            output_models["output"].assign(output, length_list=[None, None])

        return output_models
Example #7
0
    def execute(self, execution_component, input_dictionary, value,
                output_value_models, mode):
        """Sparse softmax cross-entropy reduced over non-batch dimensions.

        Loss entries outside the label lengths are zeroed first. Each
        subsequent dimension is summed if it appears in
        ``value.sum_dimensions`` and averaged (by true length where soft)
        otherwise.
        """
        cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(
            labels=tf.cast(input_dictionary["labels"].get_value(), tf.int32),
            logits=input_dictionary["logits"].get_value())

        lengths = input_dictionary["labels"].get_lengths()

        sth = SoftTensorHelper()
        replacement_tensor = tf.zeros_like(cross_entropy)
        cross_entropy = sth.replace_elements_outside_lengths(
            cross_entropy, lengths, replacement_tensor)

        # NOTE(review): axes are indexed against the original length list
        # even though each reduction removes a dimension -- this matches
        # the sibling loss components; confirm the intended axis order.
        for i in range(1, len(lengths)):
            if lengths[i] is not None:
                # Renamed from `sum`, which shadowed the builtin.
                summed = tf.reduce_sum(cross_entropy, axis=i)

                axis_lengths = tf.cast(lengths[i], tf.float32)
                for _ in range(i + 1, len(lengths)):
                    axis_lengths = tf.expand_dims(axis_lengths, -1)

                cross_entropy = summed
                if i not in value.sum_dimensions:
                    # Mean rather than sum: divide by the true lengths.
                    # axis_lengths is already float32; no second cast.
                    cross_entropy = cross_entropy / (axis_lengths + 1e-8)
            else:
                if i in value.sum_dimensions:
                    cross_entropy = tf.reduce_sum(cross_entropy, axis=i)
                else:
                    cross_entropy = tf.reduce_mean(cross_entropy, axis=i)

        output_value_models["output"].assign(cross_entropy,
                                             length_list=[lengths[0]])

        return output_value_models
Example #8
0
    def execute(self, execution_component, input_dictionary, value,
                output_models, mode):
        """Fill a string template cell-by-cell from the input tensors.

        Every cell starts as a copy of ``value.action``; for each input
        ``k``, occurrences of the placeholder ``[k]`` are replaced with the
        corresponding cell of that input's tensor.
        """
        sth = SoftTensorHelper()
        first_model = list(input_dictionary.values())[0]
        first_val = first_model.get_value()
        first_lengths = first_model.get_lengths()

        initialize_fn = lambda x: value.action[:]
        # `object` replaces the deprecated np.object alias, which was
        # removed in NumPy 1.24.
        result = sth.transform(first_val,
                               first_lengths,
                               initialize_fn,
                               new_type=object,
                               transform_dim=-1)

        for k, v in input_dictionary.items():
            second_tensor = v.get_value()

            # Bind k as a default argument so the closure captures the
            # current key rather than the loop variable.
            transform_fn = lambda x, y, k=k: x.replace("[" + k + "]", y)
            result = sth.transform_combine(result,
                                           second_tensor,
                                           first_lengths,
                                           transform_fn,
                                           new_type=object,
                                           transform_dim=-1)

        output_models["output"].assign(result, length_list=first_lengths)
        return output_models
Example #9
0
    def execute(self, execution_component, input_dictionary, value,
                output_models, mode):
        """Scatter per-word logits into a dense vocabulary-sized tensor.

        Each logit is placed at (batch_row, word_index) in a tensor whose
        last dimension is the vocabulary size; positions outside the soft
        lengths contribute zeros.
        """
        # Make sure logits outside lengths are 0:
        v = input_dictionary["word_logits"].get_value()
        l = input_dictionary["word_logits"].get_lengths()
        replacement = tf.zeros_like(v)
        sth = SoftTensorHelper()
        v = sth.replace_elements_outside_lengths(v, l, replacement)

        # Target shape: same leading dimensions, vocabulary size last.
        output_shape = tf.concat(
            [tf.shape(v)[:-1], [value.get_vocabulary_size()]], axis=0)

        # Pair each word index with its batch row: (batch, word_index).
        indexes = input_dictionary["word_indexes"].get_value()
        batch_range = tf.range(tf.shape(indexes)[0], dtype=tf.int32)
        batch_range = tf.tile(tf.expand_dims(batch_range, -1),
                              [1, tf.shape(indexes)[1]])
        full_indexes = tf.stack([batch_range, indexes], axis=-1)

        output_tensor = tf.scatter_nd(full_indexes, v, output_shape)

        # The vocabulary axis is dense, so it carries no soft length.
        output_lengths = l[:]
        output_lengths[-1] = None

        # Pass length_list by keyword for consistency with every other
        # assign() call site in this file.
        output_models["output"].assign(output_tensor,
                                       length_list=output_lengths)

        return output_models
    def as_soft_tensor(self):
        """Lazily build and cache the string soft tensor for this reader.

        A third (hard) dimension is included only when the reader handles
        more than a single column.
        """
        if self.tensor is None:
            softness = [False, True]
            if not self.reads_single_column():
                softness.append(False)

            helper = SoftTensorHelper()
            self.tensor, self.length_list = helper.to_soft_tensor(
                self.full_list, self.infer_dims(), softness, "string")

        return self.tensor, self.length_list
    def replace_elements_outside_lengths(self, outside_replacement):
        """Return this model's value with out-of-length elements replaced.

        Delegates to SoftTensorHelper using this model's own value and
        soft length list; the model itself is not mutated.
        """
        helper = SoftTensorHelper()
        return helper.replace_elements_outside_lengths(
            self.get_value(), self.get_lengths(), outside_replacement)
Example #12
0
    def apply_index(self, input_value, input_lengths, index):
        """Apply `index` to every cell of the soft tensor.

        Operates on rows (dim -2) when an input column is configured,
        otherwise on individual cells (dim -1). Returns an int32 tensor.
        """
        target_dim = -2 if self.input_column is not None else -1

        helper = SoftTensorHelper()
        return helper.transform(
            input_value,
            input_lengths,
            lambda cell: self.apply_index_to_single_cell(cell, index),
            new_type=np.int32,
            transform_dim=target_dim)
Example #13
0
    def execute(self, execution_component, input_dictionary, value,
                output_value_models, mode):
        """Softmax over the input, zeroing entries outside soft lengths."""
        input_model = input_dictionary["input"]
        all_lengths = input_model.get_lengths()

        softmaxed = tf.nn.softmax(input_model.get_value())

        # Padding positions must not carry probability mass.
        helper = SoftTensorHelper()
        cleaned = helper.replace_elements_outside_lengths(
            softmaxed, all_lengths, tf.zeros_like(softmaxed))

        output_value_models["output"].assign(cleaned,
                                             length_list=all_lengths)

        return output_value_models
Example #14
0
    def execute(self, execution_component, input_dictionary, value, output_models, mode):
        """ReLU activation (tensorflow only), zeroed outside soft lengths."""
        if value.language == "python":
            print("didnt bother defining ReLu for python")
            exit()
        elif value.language == "tensorflow":
            input_model = input_dictionary["input"]

            activated = tf.nn.relu(input_model.get_value())
            all_lengths = input_model.get_lengths()

            # Padding positions are forced back to zero after activation.
            helper = SoftTensorHelper()
            cleaned = helper.replace_elements_outside_lengths(
                activated, all_lengths, tf.zeros_like(activated))

            output_models["output"].assign(cleaned, length_list=all_lengths)

        return output_models
Example #15
0
    def execute(self, execution_component, input_dictionary, value,
                output_models, mode):
        """Draw positive and negative samples from the "tensor" input.

        Positive samples are drawn from the input values at
        ``value.pos_sample_rate``; negatives are drawn from the logical
        negation of the input (restricted to in-length positions) at
        ``value.neg_sample_rate``. The two sample sets are combined and
        written to the "output" model.
        """
        input_list = input_dictionary["tensor"]
        input_values = input_list.get_value()
        lengths = input_list.get_lengths()

        # Positive candidates come directly from the input values.
        pos_sample = self.recursive_sample(input_values, lengths, (),
                                           value.pos_sample_rate)

        # For negatives: set everything outside the soft lengths to 1,
        # then negate, so out-of-range cells become False and can never
        # be sampled as negatives.
        # NOTE(review): this assumes input_values is boolean-like — confirm
        # against the callers of this component.
        sth = SoftTensorHelper()
        replacement_tensor = np.ones_like(input_values)
        replaced = sth.python_replace_elements_outside_lengths(
            input_values, lengths, replacement_tensor)
        neg_sample = self.recursive_sample(np.logical_not(replaced), lengths,
                                           (), value.neg_sample_rate)

        # Merge positives and negatives along the innermost dimension.
        combined_samples = self.recursive_combine(pos_sample, neg_sample,
                                                  lengths, (),
                                                  len(input_values.shape) - 1)
        output_models["output"].initial_assign(combined_samples)

        return output_models
Example #16
0
    def execute(self, execution_component, input_dictionary, value,
                output_models, mode):
        """Soft switch: blend "left" and "right" by a sigmoid gate.

        The gate is computed from "switch_input" logits; gate values
        outside the soft lengths are zeroed (fully selecting "right"
        there). Output keeps the "left" input's length list.
        """
        #TODO: Proper handling of various cases for this:
        if value.switch_input_type == "logits":
            switch_logits = input_dictionary["switch_input"].get_value()

            #TODO: Hardcoded expand dims
            switch_values = tf.expand_dims(tf.nn.sigmoid(switch_logits), -1)

            all_lengths = input_dictionary["switch_input"].get_lengths()
            replacement = tf.zeros_like(switch_values)
            sth = SoftTensorHelper()
            switch_values = sth.replace_elements_outside_lengths(
                switch_values, all_lengths, replacement)
        else:
            # Previously fell through to a NameError on switch_values;
            # fail fast with a clear message instead.
            raise NotImplementedError(
                "Unsupported switch_input_type: "
                + str(value.switch_input_type))

        left = input_dictionary["left"].get_value()
        right = input_dictionary["right"].get_value()
        lengths = input_dictionary["left"].get_lengths()

        # Convex combination: gate of 1 selects left, 0 selects right.
        output = switch_values * left + (1 - switch_values) * right
        output_models["output"].assign(output, length_list=lengths)

        return output_models
Example #17
0
    def execute(self, execution_component, input_dictionary, value, output_value_models, mode):
        """Negative log-probability of the positive label set under the logits.

        Logits and labels are zeroed outside the label lengths, then (for
        "flatten" aggregation) flattened per example and scored as
        -(logsumexp over positive-label logits - logsumexp over all logits).
        """
        logits = input_dictionary["logits"].get_value()
        labels = tf.cast(input_dictionary["labels"].get_value(), tf.float32)

        # First, ensure labels and logits are zero outside label lengths:
        lengths = input_dictionary["labels"].get_lengths()
        sth = SoftTensorHelper()
        replacement_tensor = tf.zeros_like(labels)
        logits = sth.replace_elements_outside_lengths(logits, lengths, replacement_tensor)
        labels = sth.replace_elements_outside_lengths(labels, lengths, replacement_tensor)

        if value.aggregation == "flatten":
            logits = tf.reshape(logits, tf.stack([tf.shape(logits)[0], -1]))
            labels = tf.reshape(labels, tf.stack([tf.shape(labels)[0], -1]))

            # Non-positive labels get a very negative logit so they vanish
            # from the logsumexp over positives.
            VERY_NEGATIVE_NUMBER = -1e20
            log_scores_for_positive_labels = tf.reduce_logsumexp(logits + VERY_NEGATIVE_NUMBER * (1 - labels), axis=-1)
            log_norm = tf.reduce_logsumexp(logits, axis=-1)

            # output is negative log probability:
            scores = - ( log_scores_for_positive_labels - log_norm )
        else:
            # Previously fell through to a NameError on `scores`;
            # fail fast with a clear message instead.
            raise NotImplementedError(
                "Unsupported aggregation: " + str(value.aggregation))

        output_value_models["output"].assign(scores, length_list=[lengths[0]])
        return output_value_models
    def cast(self, new_type):
        """Cast every cell of this soft tensor to `new_type`.

        Supported targets: "float", "int", "string", "bool" (the latter
        compares each cell against the literal string "True"). Returns a
        new SoftTensorValueModel sharing this model's soft length tensors.

        Raises ValueError for an unsupported target type (previously this
        fell through to a NameError on `result`).
        """
        # Map target type name -> (per-cell converter, numpy dtype).
        # `object` and `bool` replace the np.object/np.bool aliases that
        # were removed in NumPy 1.24.
        converters = {
            "float": (float, np.float32),
            "int": (int, np.int32),
            "string": (str, object),
            "bool": (lambda x: x == "True", bool),
        }
        if new_type not in converters:
            raise ValueError("Unsupported cast target: " + str(new_type))

        apply_fn, numpy_type = converters[new_type]

        sth = SoftTensorHelper()
        result = sth.transform(self.tensor,
                               self.soft_length_tensors,
                               apply_fn,
                               new_type=numpy_type,
                               transform_dim=-1)

        new_value_model = SoftTensorValueModel(self.dimensions,
                                               new_type,
                                               self.max_lengths,
                                               self.soft_by_dimension,
                                               language=self.language)
        new_value_model.assign(result, length_list=self.soft_length_tensors)

        return new_value_model
    def process(self,
                left_value_model,
                right_value_model,
                op,
                output_value_model,
                language="python"):
        """Apply binary `op` to two soft-tensor value models with broadcasting.

        Soft lengths are merged (left operand wins when both define one),
        lower-rank operands are padded with trailing singleton dimensions,
        and length tensors originating from the non-expanded side are tiled
        to match. The result, with elements outside the merged lengths
        zeroed, is assigned into `output_value_model`.
        """
        left_lengths = left_value_model.get_lengths()
        right_lengths = right_value_model.get_lengths()

        # Merge soft lengths and remember which side each came from.
        new_lengths = [None] * max(len(left_lengths), len(right_lengths))
        length_origins = [None for _ in new_lengths]

        for i in range(len(left_lengths)):
            if new_lengths[i] is None and left_lengths[i] is not None:
                new_lengths[i] = left_lengths[i]
                length_origins[i] = "left"

        for i in range(len(right_lengths)):
            if new_lengths[i] is None and right_lengths[i] is not None:
                new_lengths[i] = right_lengths[i]
                length_origins[i] = "right"

        left_value = left_value_model.get_value()
        right_value = right_value_model.get_value()

        if language == "tensorflow":
            left_dims = len(left_value.shape)
            right_dims = len(right_value.shape)

            all_left_dims = left_value_model.get_dimensions()[:]
            all_right_dims = right_value_model.get_dimensions()[:]

            # Pad the lower-rank operand with trailing singleton dims.
            # Bug fix: these loops previously expanded the undefined names
            # `left`/`right` instead of left_value/right_value, raising a
            # NameError whenever the operand ranks differed.
            for dim in range(left_dims, right_dims):
                left_value = tf.expand_dims(left_value, -1)
                all_left_dims.append(1)

            for dim in range(right_dims, left_dims):
                right_value = tf.expand_dims(right_value, -1)
                all_right_dims.append(1)

            # Where one side broadcasts (size 1 vs size n), length tensors
            # that came from the other side must be tiled to match.
            for i in range(max(left_dims, right_dims)):
                if all_left_dims[
                        i] == 1 and all_right_dims[i] != all_left_dims[i]:
                    dims_to_add = tf.shape(right_value)[i]
                    expand_origin = "right"
                elif all_right_dims[
                        i] == 1 and all_right_dims[i] != all_left_dims[i]:
                    dims_to_add = tf.shape(left_value)[i]
                    expand_origin = "left"
                else:
                    continue

                # NOTE(review): the tile multiples below index position i of
                # a length-l list — preserved as-is from the original;
                # confirm against SoftTensorHelper's length-tensor layout.
                for l in range(i, max(left_dims, right_dims)):
                    if new_lengths[l] is not None and length_origins[
                            l] != expand_origin:
                        length_expansion = [1] * l
                        length_expansion[i] *= dims_to_add
                        new_lengths[l] = tf.tile(new_lengths[l],
                                                 length_expansion)

        new_value = op(left_value, right_value)

        if language == "tensorflow":
            # Broadcasting can produce nonzero garbage outside the merged
            # lengths; zero it out.
            replacement = tf.zeros_like(new_value)
            sth = SoftTensorHelper()
            new_value = sth.replace_elements_outside_lengths(
                new_value, new_lengths, replacement)

        output_value_model.assign(new_value, length_list=new_lengths)

        return output_value_model
    def format_for_program_output(self):
        """Render the soft tensor as a nested python list for output."""
        helper = SoftTensorHelper()
        return helper.format_to_python_list(self.tensor,
                                            self.soft_length_tensors)
    def as_soft_tensor(self):
        """Build (or rebuild, when reading in batches) the string soft tensor.

        The cached tensor is reused only when batching is disabled.
        """
        if self.tensor is None or self.use_read_batches():
            helper = SoftTensorHelper()
            self.tensor, self.length_list = helper.to_soft_tensor(
                self.full_list,
                self.infer_dims(),
                self.get_soft_by_dimensions(),
                "string")

        return self.tensor, self.length_list