import tensorflow as tf


def variable_summaries(var, scope=""):
  """Attach a lot of summaries to a Tensor (for TensorBoard visualization)."""
  with tf.name_scope(scope):
    with tf.name_scope("summaries"):
      mean = tf.reduce_mean(var)
      tf.summary.scalar("mean", mean)
      with tf.name_scope("stddev"):
        stddev = tf.sqrt(tf.reduce_mean(tf.square(var - mean)))
      tf.summary.scalar("stddev", stddev)
      tf.summary.scalar("max", tf.reduce_max(var))
      tf.summary.scalar("min", tf.reduce_min(var))
      tf.summary.histogram("histogram", var)
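
# Usage sketch: attach summaries to a weight variable in a TF 1.x graph, then
# merge and write them for TensorBoard. The variable shape and the log
# directory are assumptions, not part of the original snippet.
weights = tf.Variable(tf.truncated_normal([784, 10], stddev=0.1))
variable_summaries(weights, scope="layer1_weights")
merged = tf.summary.merge_all()
with tf.Session() as sess:
  sess.run(tf.global_variables_initializer())
  writer = tf.summary.FileWriter("/tmp/logs", sess.graph)
  writer.add_summary(sess.run(merged), global_step=0)
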
def metric_fn(loss):
  """Evaluation metric Fn which runs on CPU."""
  perplexity = tf.exp(tf.reduce_mean(loss))
  return {
      "eval/loss": tf.metrics.mean(loss),
      "eval/perplexity": tf.metrics.mean(perplexity),
  }
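
# Usage sketch (assumption: a TF 1.x TPUEstimator setup, which is why the
# docstring says the metrics run on CPU). On TPU, metric ops execute on the
# host, so model_fn hands over (metric_fn, tensors) via `eval_metrics` instead
# of calling the metric ops directly. `compute_per_example_loss` is a
# hypothetical helper.
def model_fn(features, labels, mode, params):
  per_example_loss = compute_per_example_loss(features, labels)  # hypothetical
  return tf.contrib.tpu.TPUEstimatorSpec(
      mode=mode,
      loss=tf.reduce_mean(per_example_loss),
      eval_metrics=(metric_fn, [per_example_loss]))
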
def _discriminator(self, encoder_output, features, labels, num_classes,
                   num_units=256):
  # A class discriminator
  # Create attention over the encoder states
  encoder_output_output = encoder_output["outputs"]
  encoder_output_output_shape = encoder_output_output.get_shape()
  encoder_output_att_values = encoder_output["attention_values"]
  encoder_output_att_values_shape = encoder_output_att_values.get_shape()
  if self.params["discriminator_reverse_grad"]:
    # reverse_grad is the identity on the forward pass but negates gradients
    # (domain-adversarial training). It drops static shape information, so the
    # shapes saved above are restored afterwards.
    encoder_output_output = reverse_grad(encoder_output_output)
    encoder_output_output.set_shape(encoder_output_output_shape)
    encoder_output_att_values = reverse_grad(encoder_output_att_values)
    encoder_output_att_values.set_shape(encoder_output_att_values_shape)
  attention_fn = AttentionLayerBahdanau(params={}, mode=self.mode)
  _, attention_context = attention_fn(
      query=tf.zeros_like(encoder_output["outputs"][:, 0, :]),
      keys=encoder_output_output,
      values=encoder_output_att_values,
      values_length=encoder_output["attention_values_length"])
  # Fully connected layer
  fc1 = tf.contrib.layers.fully_connected(
      inputs=attention_context,
      num_outputs=num_units,
      activation_fn=tf.nn.tanh,
      scope="discriminator_fc")
  # Create logits
  logits = tf.contrib.layers.fully_connected(
      inputs=fc1,
      num_outputs=num_classes,
      activation_fn=None,
      scope="discriminator_softmax")
  losses = tf.nn.sparse_softmax_cross_entropy_with_logits(
      logits=logits, labels=labels["domain"])
  mean_loss = tf.reduce_mean(losses, name="mean_loss")
  return (mean_loss, fc1)
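
# `reverse_grad` is used above but not defined in this snippet. A minimal
# sketch of a gradient-reversal layer (identity on the forward pass, negated
# gradient on the backward pass, as in domain-adversarial training), assuming
# TF >= 1.7; it may differ from the project's actual implementation:
@tf.custom_gradient
def reverse_grad(x):
  """Identity in the forward pass; flips the sign of the incoming gradient."""

  def grad(dy):
    return -dy

  return tf.identity(x), grad
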
def _process_example(images, cls_targets, box_targets, num_positives,
                     source_ids, image_scales, boxes, is_crowds, areas,
                     classes):
  """Processes one batch of data."""
  # Note: `batch_size` and `params` are captured from the enclosing scope;
  # this is an inner function of an input pipeline.
  labels = {}
  # Count num_positives in a batch.
  num_positives_batch = tf.reduce_mean(num_positives)
  labels['mean_num_positives'] = tf.reshape(
      tf.tile(tf.expand_dims(num_positives_batch, 0), [batch_size]),
      [batch_size, 1])
  for level in range(params['min_level'], params['max_level'] + 1):
    labels['cls_targets_%d' % level] = cls_targets[level]
    labels['box_targets_%d' % level] = box_targets[level]
  # Concatenate groundtruth annotations to a tensor.
  groundtruth_data = tf.concat([boxes, is_crowds, areas, classes], axis=2)
  labels['source_ids'] = source_ids
  labels['groundtruth_data'] = groundtruth_data
  labels['image_scales'] = image_scales
  return images, labels
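
# Shape sketch (assumed shapes, following the usual padded-COCO convention):
# concatenating the per-box annotations along axis=2 packs everything into a
# single [batch, max_boxes, 7] tensor: 4 box coordinates, is_crowd, area, and
# class.
boxes = tf.zeros([8, 100, 4])  # [batch, max_boxes, (ymin, xmin, ymax, xmax)]
is_crowds = tf.zeros([8, 100, 1])
areas = tf.zeros([8, 100, 1])
classes = tf.zeros([8, 100, 1])
groundtruth_data = tf.concat([boxes, is_crowds, areas, classes], axis=2)
print(groundtruth_data.shape)  # (8, 100, 7)
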
def __call__(self, params):
  input_anchors = anchors.Anchors(params['min_level'], params['max_level'],
                                  params['num_scales'],
                                  params['aspect_ratios'],
                                  params['anchor_scale'],
                                  params['image_size'])
  anchor_labeler = anchors.AnchorLabeler(input_anchors, params['num_classes'])
  example_decoder = tf_example_decoder.TfExampleDecoder()

  def get_dataset_for_mode(data_dir, is_training):
    """Return the location of input samples for a given mode."""
    if is_training:
      return '%s/coco_train2017_nocrowd-*' % data_dir
    return '%s/coco_val2017-*' % data_dir

  def _dataset_parser(value):
    """Parse data to a fixed dimension input image and learning targets."""
    with tf.name_scope('parser'):
      data = example_decoder.decode(value)
      source_id = data['source_id']
      image = data['image']
      boxes = data['groundtruth_boxes']
      classes = data['groundtruth_classes']
      classes = tf.reshape(tf.cast(classes, dtype=tf.float32), [-1, 1])

      # The image normalization is identical to Cloud TPU ResNet-50.
      image = tf.image.convert_image_dtype(image, dtype=tf.float32)
      image = _normalize_image(image)

      if params['input_rand_hflip']:
        image, boxes = preprocessor.random_horizontal_flip(image, boxes=boxes)
      image_original_shape = tf.shape(image)
      image, _ = preprocessor.resize_to_range(
          image,
          min_dimension=params['image_size'],
          max_dimension=params['image_size'])
      image_scale = tf.to_float(image_original_shape[0]) / tf.to_float(
          tf.shape(image)[0])
      image, boxes = preprocessor.scale_boxes_to_pixel_coordinates(
          image, boxes, keypoints=None)
      image = tf.image.pad_to_bounding_box(image, 0, 0, params['image_size'],
                                           params['image_size'])
      (cls_targets, box_targets,
       num_positives) = anchor_labeler.label_anchors(boxes, classes)

      source_id = tf.string_to_number(source_id, out_type=tf.float32)
      row = (image, cls_targets, box_targets, num_positives, source_id,
             image_scale)
      return row

  batch_size = params['batch_size']
  data_file_pattern = get_dataset_for_mode(self._data_dir, self._is_training)
  dataset = tf.data.Dataset.list_files(data_file_pattern)

  dataset = dataset.shuffle(buffer_size=1024)
  if self._is_training:
    dataset = dataset.repeat()

  def prefetch_dataset(filename):
    dataset = tf.data.TFRecordDataset(filename).prefetch(1)
    return dataset

  dataset = dataset.apply(
      tf.contrib.data.parallel_interleave(
          prefetch_dataset, cycle_length=32, sloppy=True))
  dataset = dataset.shuffle(20)

  dataset = dataset.map(_dataset_parser, num_parallel_calls=64)
  dataset = dataset.prefetch(batch_size)
  dataset = dataset.apply(
      tf.contrib.data.batch_and_drop_remainder(batch_size))
  dataset = dataset.prefetch(1)

  (images, cls_targets, box_targets, num_positives, source_ids,
   image_scales) = dataset.make_one_shot_iterator().get_next()
  labels = {}
  # Count num_positives in a batch.
  num_positives_batch = tf.reduce_mean(num_positives)
  labels['mean_num_positives'] = tf.reshape(
      tf.tile(tf.expand_dims(num_positives_batch, 0), [batch_size]),
      [batch_size, 1])

  for level in range(params['min_level'], params['max_level'] + 1):
    labels['cls_targets_%d' % level] = cls_targets[level]
    labels['box_targets_%d' % level] = box_targets[level]
  labels['source_ids'] = source_ids
  labels['image_scales'] = image_scales
  return images, labels
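
# Context sketch (assumption): the __call__ above reads like the input reader
# of a Cloud TPU object-detection pipeline, so the enclosing class presumably
# looks something like the following. The name `InputReader` and the data path
# are hypothetical, not taken from the snippet.
class InputReader(object):
  """Input function that returns (images, labels) for one step."""

  def __init__(self, data_dir, is_training):
    self._data_dir = data_dir
    self._is_training = is_training

  # ... __call__(self, params) as defined above ...


# A TPUEstimator invokes the input_fn with `params` containing the per-shard
# 'batch_size' plus the hyperparameters passed at construction time:
train_input_fn = InputReader('/tmp/coco_tfrecords', is_training=True)
# estimator.train(input_fn=train_input_fn, max_steps=1000)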