Example #1
def reduce_mask(mask,
                bcount,
                *,
                bsize,
                boffset,
                bstride,
                tol=0.5,
                avgpool=False):
    """Reduce the mask to namedtuple `(bin_counts, active_block_indices)`, indices.

  :param mask:
  :param block_count:
  :param bsize:
  :param boffset:
  :param bstride:
  :param tol:
  :param avgpool:
  :returns:
  :rtype:

  """
    logger.debug(f"mask: {mask.shape}")
    mask = _pad_inputs(mask, bcount, bsize, boffset, bstride)
    logger.debug(f"padded mask: {mask.shape}")
    mask = tf.nn.pool(mask,
                      window_shape=bsize,
                      pooling_type='AVG' if avgpool else 'MAX',
                      padding='SAME',
                      strides=bstride)
    mask = tf.squeeze(mask, axis=3)
    active_block_indices = tf.where(mask > tf.constant(tol, mask.dtype))
    active_block_indices = tf.cast(active_block_indices, tf.int32)
    bin_counts = tf.shape(active_block_indices)[0]
    Indices = namedtuple('Indices', ['active_block_indices', 'bin_counts'])
    return Indices(active_block_indices, bin_counts)
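A minimal usage sketch, not part of the original module: the mask contents, block parameters, and the `bcount` format below are assumptions for illustration, and TensorFlow 2.x eager execution is assumed so the result can be printed directly.

import numpy as np
import tensorflow as tf

# Hypothetical example: a 1x8x8x1 mask whose top-left quadrant is active.
mask = np.zeros((1, 8, 8, 1), np.float32)
mask[0, 0:4, 0:4, 0] = 1.0

indices = reduce_mask(tf.constant(mask),
                      [2, 2],              # bcount (assumed: blocks per spatial dim)
                      bsize=[4, 4],
                      boffset=[0, 0],
                      bstride=[4, 4])
print(int(indices.bin_counts))             # number of active blocks (expected: 1 here)
print(indices.active_block_indices)        # [M, 3] (batch, block_y, block_x) indices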
Example #2
    def predict(self):
        """Run prediction on the unlabeled set."""
        unlabeled_set = self._load_unlabeled()
        model = self._load_model()
        start_time = time()
        predictions = list(
            model.predict(unlabeled_set, multiscale=self.multiscale))
        logger.info(f"ran prediction in {time() - start_time}s.")
        logger.debug(f"prediction:\n{predictions}")

        fname = join(self.model_root, 'predictions.npy')
        np.save(fname, predictions)
        logger.info(f"saved {len(predictions)} predictions to {fname}.")
Example #3
def _upsample_block_indices(active_block_indices, bsize, boffset, bstride):
    """Upsamples the indices to have all indices in a rectangle.

  :param active_block_indices: [M,3] Tensor. Corresponds to top left coordinate
  after offset and scaling.
  :param bsize: block size
  :param boffset:
  :param bstride:
  :returns: [M, bsize[0], bsize[1], 3] locations of all pixels in the blocks.
  :rtype:

  """
    ops = []
    logger.debug(f"bsize: {bsize}")
    logger.debug(f"bstride: {bstride}")
    # ops.append(tf.print(active_block_indices, summarize=-1))
    offset = tf.constant([0, boffset[0], boffset[1]], dtype=tf.int32)
    scale = tf.constant([1, bstride[0], bstride[1]], dtype=tf.int32)
    indices = tf.cast(active_block_indices, tf.int32) + offset
    indices *= scale  # [M, 3]
    indices = tf.expand_dims(indices, 1)
    indices = tf.expand_dims(indices, 2)  # [M, 1, 1, 3]
    upsample_offsets = _compute_upsample_offsets(
        bsize)  # [1, bsize[0], bsize[1], 3]
    logger.debug(f"indices: {indices.shape}")
    logger.debug(f"upsample_offsets: {upsample_offsets.shape}")
    # ops.append(tf.print(indices, summarize=-1))
    # ops.append(tf.print(upsample_offsets, summarize=-1))
    with tf.control_dependencies(ops):
        indices += upsample_offsets  # [M, bsize[0], bsize[1], 3]

    return indices
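`_compute_upsample_offsets` is not shown in this excerpt; a hypothetical implementation consistent with the `[1, bsize[0], bsize[1], 3]` shape used above might look like the sketch below. Under the assumed parameters `bsize=[2, 2]`, `boffset=[0, 0]`, `bstride=[2, 2]`, the block index `(0, 1, 2)` is first mapped to the top-left pixel `(0, 2, 4)` and then expanded to the four coordinates `(0, 2, 4)`, `(0, 2, 5)`, `(0, 3, 4)`, `(0, 3, 5)`.

import tensorflow as tf

def _compute_upsample_offsets(bsize):
    """Hypothetical sketch: per-pixel (0, dy, dx) offsets within one block,
    shaped [1, bsize[0], bsize[1], 3] to broadcast against [M, 1, 1, 3]."""
    dy, dx = tf.meshgrid(tf.range(bsize[0]), tf.range(bsize[1]), indexing='ij')
    offsets = tf.stack([tf.zeros_like(dy), dy, dx], axis=-1)  # [bsize[0], bsize[1], 3]
    return tf.expand_dims(offsets, 0)                         # [1, bsize[0], bsize[1], 3]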
Example #4
def plot_hists_from_dir(model_root, columns=10, scale=20):
    """Plot all the histories in `model_dir`.

  For each named property, creates a plot with all the model histories that had
  that named property (loss or metric)

  :returns: fig, axes

  """

    history_fnames = glob(join(model_root, '*history.json'))
    logger.debug(f"history_fnames: {history_fnames}")
    if not history_fnames:
        logger.warning(f"no history saved at {model_root}")
        return None, None

    hist_data = {}  # {property_name -> {model_name -> [values]}}
    for fname in history_fnames:
        hist = utils.json_load(fname)
        model_name = pascalcase(basename(fname).replace('_history.json', ''))
        for prop_name, values in hist.items():
            if not isinstance(values, list):
                continue
            if hist_data.get(prop_name) is None:
                hist_data[prop_name] = {}
            hist_data[prop_name][model_name] = values

    columns = min(columns, len(hist_data))
    # ceiling division so every property gets a subplot even when the count
    # is not a multiple of `columns`
    rows = max(1, -(-len(hist_data) // columns))
    fig, axes = plt.subplots(rows,
                             columns,
                             squeeze=False,
                             figsize=(scale, scale * rows / columns))

    for i, (prop_name, prop_data) in enumerate(hist_data.items()):
        ax = axes[i // columns, i % columns]
        for model_name, values in prop_data.items():
            ax.plot(values, '-', label=model_name)
        ax.set_title(titlecase(prop_name))
        ax.set_xlabel('Epoch')
        ax.set_ylabel('Value')
    fig.suptitle("Training")
    plt.legend()
    return fig, axes
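A minimal usage sketch; the directory path and output filename are made up, and the function is assumed to be importable from this module:

import matplotlib.pyplot as plt

# Hypothetical call: plot every *history.json under a model directory.
fig, axes = plot_hists_from_dir('models/experiment1', columns=4, scale=16)
if fig is not None:
    fig.savefig('models/experiment1/histories.png')
    plt.close(fig)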
Example #5
def gather(inputs, bin_counts, active_block_indices, *, bsize, boffset,
           bstride):
    """FIXME! briefly describe function

  :param inputs:
  :param bin_counts: number of blocks?
  :param active_block_indices:
  :param bsize:
  :param boffset:
  :param bstride:
  :returns:
  :rtype:

  """
    logger.debug(f"inputs: {inputs.shape}")
    size = inputs.shape[1:3]
    bcount = _compute_bcount(size, bstride)
    inputs = _pad_inputs(inputs, bcount, bsize, boffset, bstride)

    logger.debug(f"padded inputs: {inputs.shape}")
    indices = _upsample_block_indices(active_block_indices, bsize, boffset,
                                      bstride)
    ops = []
    # ops.append(tf.print(indices, summarize=-1))
    logger.debug(f"gather indices: {indices.shape}")
    with tf.control_dependencies(ops):
        blocks = tf.gather_nd(inputs, indices)  # todo: fix index error
    blocks = tf.reshape(blocks,
                        [bin_counts, bsize[0], bsize[1],
                         tf.shape(inputs)[3]])
    return blocks
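A hypothetical end-to-end sketch combining `reduce_mask` and `gather`; the input shapes, block parameters, and `bcount` format are assumptions, and TensorFlow 2.x eager execution is assumed:

import numpy as np
import tensorflow as tf

x = tf.random.normal([1, 8, 8, 16])
mask = np.zeros((1, 8, 8, 1), np.float32)
mask[0, 4:, 4:, 0] = 1.0                   # only the bottom-right quadrant is active

indices = reduce_mask(tf.constant(mask),
                      [2, 2],              # bcount (assumed: blocks per spatial dim)
                      bsize=[4, 4],
                      boffset=[0, 0],
                      bstride=[4, 4])
blocks = gather(x,
                indices.bin_counts,
                indices.active_block_indices,
                bsize=[4, 4],
                boffset=[0, 0],
                bstride=[4, 4])
print(blocks.shape)                        # expected: (1, 4, 4, 16), one active block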