Example No. 1
def split_func(line):
    # Strip surrounding whitespace, then split the line into individual
    # characters (an empty delimiter splits one character per element).
    stripped = tf.string_strip(line)
    chars = tf.string_split([stripped], '').values
    return chars
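A minimal check of the behaviour (TensorFlow 1.x graph mode; the input string is hypothetical):

import tensorflow as tf

line = tf.constant("  ab c ")
chars = split_func(line)
with tf.Session() as sess:
    print(sess.run(chars))   # [b'a' b'b' b' ' b'c']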
Example No. 2
def decode_csv(line):
    # Strip the line and split it on commas into string fields.
    items = tf.string_split(tf.string_strip([line]), delimiter=",").values
    # Fields at indices 2, 3, 5, 6, 7 are features; indices 1 and 4 are labels.
    features = [
        tf.string_to_number(items[i], tf.float32) for i in [2, 3, 5, 6, 7]
    ]
    labels = [tf.string_to_number(items[i], tf.float32) for i in [1, 4]]
    return features, labels
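A hedged pipeline sketch (TensorFlow 1.x; the file name and the presence of a header row are assumptions):

dataset = tf.data.TextLineDataset("train.csv")   # hypothetical path
dataset = dataset.skip(1)                        # skip a header row, if there is one
dataset = dataset.map(decode_csv).batch(32)
features, labels = dataset.make_one_shot_iterator().get_next()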
Example No. 3
def to_instance(line_tensor):
    # Split the tab-separated line into its raw string fields.
    split_line_tensor = tf.string_split(
        [tf.string_strip(line_tensor)], "\t", False).values
    instance = []
    to_instance_input_feature_map = dataset_config.feature_config[config.INPUT_FEATURE_SPACE]
    for to_instance_feature in to_instance_input_feature_map:
        to_instance_feature_attribute_map = to_instance_input_feature_map[to_instance_feature]
        to_instance_feature_index = to_instance_feature_attribute_map[config.INPUT_FEATURE_INDEX]
        to_instance_feature_form = to_instance_feature_attribute_map[config.INPUT_FEATURE_FORM]
        to_instance_feature_type = to_instance_feature_attribute_map[config.INPUT_FEATURE_TYPE]
        if to_instance_feature_form in ("single", "label", "cross"):
            # Scalar field: parse one number, as int32 for discrete features.
            value = tf.string_to_number(
                split_line_tensor[to_instance_feature_index],
                tf.int32 if to_instance_feature_type == "discrete" else tf.float32)
        else:
            # Multi-valued field: split the comma-separated list and parse as int32.
            value = tf.string_to_number(
                tf.string_split(
                    [tf.string_strip(split_line_tensor[to_instance_feature_index])],
                    ",").values,
                tf.int32)
        instance.append(value)
    return instance
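The dataset_config and config objects are not shown here; purely for illustration, an entry of the feature map compatible with the lookups above might look like this (feature names and values are invented):

feature_config_sketch = {
    "user_age": {config.INPUT_FEATURE_INDEX: 1,
                 config.INPUT_FEATURE_FORM: "single",       # scalar field, parsed as one number
                 config.INPUT_FEATURE_TYPE: "continuous"},   # -> tf.float32
    "item_tags": {config.INPUT_FEATURE_INDEX: 3,
                  config.INPUT_FEATURE_FORM: "sequence",     # any form other than single/label/cross
                  config.INPUT_FEATURE_TYPE: "discrete"},    # field is comma-split and parsed as tf.int32
}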
Example No. 4
def sparse_string_join(input_sp):
    """Joins each row of SparseTensor `input_sp` and outputs it as a 1-D string tensor."""
    # convert the `SparseTensor` to a dense `Tensor`, padding missing entries with ''
    dense_input = tf.sparse_to_dense(input_sp.indices,
                                     input_sp.dense_shape,
                                     input_sp.values,
                                     default_value='')
    # join the tokens of each row, then remove the extra spaces left by padding.
    joined = tf.reduce_join(dense_input, axis=1, separator=' ')
    return tf.string_strip(joined)
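A small usage sketch (TensorFlow 1.x): the SparseTensor would typically come from tf.string_split, and each row is joined back into one string.

lines = tf.constant(["  hello   world ", "foo bar"])
tokens_sp = tf.string_split(lines)       # SparseTensor with one row of tokens per line
joined = sparse_string_join(tokens_sp)   # ["hello world", "foo bar"]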
Example No. 5
    def _mask_groundtruth(self, groundtruth_strings, question_strings):
        """Gets groundtruth mask from groundtruth_strings and question_strings.

        Args:
          groundtruth_strings: A [batch_groundtruth, max_groundtruth_text_len] string tensor.
          question_strings: A [batch_question, max_question_text_len] string tensor.

        Returns:
          groundtruth_mask: A [batch_question] boolean tensor, in which `True`
            denotes the option is correct.
        """
        with tf.name_scope('mask_groundtruth_op'):
            groundtruth_strings = tf.string_strip(
                tf.reduce_join(groundtruth_strings, axis=-1, separator=' '))
            question_strings = tf.string_strip(
                tf.reduce_join(question_strings, axis=-1, separator=' '))
            equal_mat = tf.equal(tf.expand_dims(question_strings, axis=1),
                                 tf.expand_dims(groundtruth_strings, axis=0))
            return tf.reduce_any(equal_mat, axis=-1)
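A toy illustration of the matching (the instance name `model` is hypothetical; empty strings act as padding):

groundtruth = tf.constant([["a", "cat", ""],
                           ["two", "dogs", ""]])
questions = tf.constant([["two", "dogs", ""],
                         ["a", "dog", ""],
                         ["a", "cat", ""]])
mask = model._mask_groundtruth(groundtruth, questions)   # [True, False, True]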
Example No. 6
    def parse_function_train(self, line):
        split_line = tf.string_split([line]).values
        image_path = tf.string_join([self.data_path, split_line[0]])
        depth_gt_path = tf.string_join(
            [self.gt_path, tf.string_strip(split_line[1])])

        if self.params.dataset == 'nyu':
            image = tf.image.decode_jpeg(tf.read_file(image_path))
        else:
            image = tf.image.decode_png(tf.read_file(image_path))

        depth_gt = tf.image.decode_png(tf.read_file(depth_gt_path),
                                       channels=0,
                                       dtype=tf.uint16)

        if self.params.dataset == 'nyu':
            depth_gt = tf.cast(depth_gt, tf.float32) / 1000.0
        else:
            depth_gt = tf.cast(depth_gt, tf.float32) / 256.0

        image = tf.image.convert_image_dtype(image, tf.float32)
        focal = tf.string_to_number(split_line[2])

        # To avoid blank boundaries due to pixel registration
        if self.params.dataset == 'nyu':
            depth_gt = depth_gt[45:472, 43:608, :]
            image = image[45:472, 43:608, :]

        if self.do_kb_crop is True:
            print('Cropping training images as kitti benchmark images')
            height = tf.shape(image)[0]
            width = tf.shape(image)[1]
            top_margin = tf.to_int32(height - 352)
            left_margin = tf.to_int32((width - 1216) / 2)
            depth_gt = depth_gt[top_margin:top_margin + 352,
                                left_margin:left_margin + 1216, :]
            image = image[top_margin:top_margin + 352,
                          left_margin:left_margin + 1216, :]

        if self.do_rotate is True:
            random_angle = tf.random_uniform([], -self.degree * 3.141592 / 180,
                                             self.degree * 3.141592 / 180)
            image = tf.contrib.image.rotate(image,
                                            random_angle,
                                            interpolation='BILINEAR')
            depth_gt = tf.contrib.image.rotate(depth_gt,
                                               random_angle,
                                               interpolation='NEAREST')

        print('Do random cropping from fixed size input')
        image, depth_gt = self.random_crop_fixed_size(image, depth_gt)

        return image, depth_gt, focal
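A hedged sketch of how this parser could be attached to a filenames file (TensorFlow 1.x; the path, batch size, and the `loader` instance are assumptions). Each line is expected to hold the image path, the ground-truth depth path, and the focal length, separated by whitespace:

dataset = tf.data.TextLineDataset("train_filenames.txt")   # hypothetical path
dataset = dataset.map(loader.parse_function_train)         # loader: instance of this data-loader class
dataset = dataset.batch(4)
image, depth_gt, focal = dataset.make_one_shot_iterator().get_next()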
Example No. 7
    def adress_data(self, x):
        # Strip the line and split it into five fields on the 'aaa' delimiter.
        aa = tf.string_strip(x)
        aa = tf.string_split([aa], 'aaa')
        label, q, d, q_v, d_v = (aa.values[0], aa.values[1], aa.values[2],
                                 aa.values[3], aa.values[4])
        # Each field is itself a comma-separated list.
        q = tf.string_split([q], ',').values
        d = tf.string_split([d], ',').values
        q_v = tf.string_split([q_v], ',').values
        d_v = tf.string_split([d_v], ',').values
        # Ids are parsed as int32, values and the label as float32.
        q = tf.string_to_number(q, out_type=tf.int32)
        d = tf.string_to_number(d, out_type=tf.int32)
        q_v = tf.string_to_number(q_v, out_type=tf.float32)
        d_v = tf.string_to_number(d_v, out_type=tf.float32)
        label = tf.string_to_number(label, out_type=tf.float32)
        return q, d, q_v, d_v, label
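For reference, a line in the expected format (fields separated by the literal 'aaa', ids and values comma-separated inside each field; the `reader` instance is hypothetical):

line = tf.constant("1.0aaa3,7,9aaa2,4aaa0.5,0.1,0.9aaa0.3,0.7")
q, d, q_v, d_v, label = reader.adress_data(line)
# q -> [3 7 9], d -> [2 4], q_v -> [0.5 0.1 0.9], d_v -> [0.3 0.7], label -> 1.0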
Example No. 8
    def sparse_string_join(self, sparse_tensor_input, name):
        """
        Join a SparseTensor of strings into a 1-D dense string Tensor.
        :param sparse_tensor_input: SparseTensor of strings, one row per example.
        :param name: name for the output op.
        :return: 1-D string Tensor with one space-joined, stripped string per row.
        """
        dense_tensor_input = tf.sparse_to_dense(
            sparse_indices=sparse_tensor_input.indices,
            output_shape=sparse_tensor_input.dense_shape,
            sparse_values=sparse_tensor_input.values,
            default_value='')
        dense_tensor_input_join = tf.reduce_join(dense_tensor_input,
                                                 axis=1,
                                                 separator=' ')
        format_predict_labels = tf.string_strip(dense_tensor_input_join,
                                                name=name)
        return format_predict_labels
Example No. 9
def clean_english_str_tf(input_str):
    """Clean English string with tensorflow oprations."""
    # pylint: disable=anomalous-backslash-in-string
    string = tf.regex_replace(input_str, r"[^A-Za-z0-9(),!?\'\`<>/]", " ")
    string = tf.regex_replace(string, "\'s", " \'s")
    string = tf.regex_replace(string, "\'ve", " \'ve")
    string = tf.regex_replace(string, "n\'t", " n\'t")
    string = tf.regex_replace(string, "\'re", " \'re")
    string = tf.regex_replace(string, "\'d", " \'d")
    string = tf.regex_replace(string, "\'ll", " \'ll")
    string = tf.regex_replace(string, ",", " , ")
    string = tf.regex_replace(string, "!", " ! ")
    string = tf.regex_replace(string, "\(", " ( ")
    string = tf.regex_replace(string, "\)", " ) ")
    string = tf.regex_replace(string, "\?", " ? ")
    string = tf.regex_replace(string, "\s{2,}", " ")
    string = tf.string_strip(string)
    string = py_x_ops.str_lower(string)
    return string
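An illustrative call (py_x_ops.str_lower is an op from the surrounding project and is assumed to be importable):

raw = tf.constant("I haven't seen it, have you?")
cleaned = clean_english_str_tf(raw)
# After the replacements, strip and lower-casing: "i have n't seen it , have you ?"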
Example No. 10
    def parse_example(self, line, prepend, append):
        """
        Input:
            line: line of text string
            prepend: whether to add sequence start
            append: whether to add sequence end
        Return:
            feature: {tokens:, seq_len:}
        """
        features = {}
        tokens = tf.string_split([tf.string_strip(line)]).values

        if prepend:
            tokens = tf.concat([[self.special_token.SEQ_START], tokens], 0)
        if append:
            tokens = tf.concat([tokens, [self.special_token.SEQ_END]], 0)

        features['tokens'] = tokens
        features['seq_len'] = tf.size(tokens)
        return features
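Because the token sequences vary in length, a padded batch is the natural next step; a hedged sketch (TensorFlow 1.x; the path, batch size, and `parser` instance are assumptions):

dataset = tf.data.TextLineDataset("sentences.txt")   # hypothetical path
dataset = dataset.map(lambda line: parser.parse_example(line, prepend=True, append=True))
dataset = dataset.padded_batch(32, padded_shapes={'tokens': [None], 'seq_len': []})
batch = dataset.make_one_shot_iterator().get_next()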
Example No. 11
def _NormalizeWhitespace(s):
    # Trim the ends, then collapse internal runs of whitespace to single spaces.
    return tf.regex_replace(tf.string_strip(s), r'\s+', ' ')
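For example (TensorFlow 1.x):

s = tf.constant("  multiple   internal \t spaces  ")
normalized = _NormalizeWhitespace(s)   # -> "multiple internal spaces"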