Example #1
def _get_layer_string(self, f, scale):
    assert isinstance(f, Layer)
    result = f.abbreviation
    if scale and f.neuron_scale is not None:
        # Record this layer's output scale (an explicit output_scale
        # takes precedence) and append the neuron scale to the name
        self._output_scale = shape_string(
            f.neuron_scale if f.output_scale is None else f.output_scale)
        result += '_{}'.format(f.neuron_scale)
    return result
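For orientation, here is a minimal, self-contained sketch of the naming rule this method applies. The LayerStub class and the standalone function below are assumptions made for illustration, not tframe's real definitions; the sketch also drops the `self._output_scale` bookkeeping:

class LayerStub:
    """Illustrative stub with the attributes _get_layer_string reads."""
    def __init__(self, abbreviation, neuron_scale=None, output_scale=None):
        self.abbreviation = abbreviation
        self.neuron_scale = neuron_scale
        self.output_scale = output_scale

def get_layer_string(f, scale):
    # Same naming rule as above, minus the _output_scale side effect
    result = f.abbreviation
    if scale and f.neuron_scale is not None:
        result += '_{}'.format(f.neuron_scale)
    return result

print(get_layer_string(LayerStub('dense', neuron_scale=128), scale=True))   # dense_128
print(get_layer_string(LayerStub('relu'), scale=True))                      # relu
print(get_layer_string(LayerStub('dense', neuron_scale=128), scale=False))  # dense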
Example #2
File: net.py Project: rscv5/tframe
    def structure_string(self, detail=True, scale=True):
        # Get functions to be added to structure string
        assert isinstance(self.children, list)
        fs = [
            f for f in self.children
            if isinstance(f, Net) or detail or f.is_nucleus
        ]

        # Add input layer
        result = ('' if self.input_ is None else 'input[{}] => '.format(
            shape_string(self.input_.sample_shape)))

        # Check interconnection type
        next_net, next_layer = ' => ', ' -> '
        if self._inter_type not in (pedia.cascade,
                                    self.RECURRENT) or self.is_branch:
            if self._inter_type in [pedia.sum, pedia.prod, pedia.concat]:
                result += self._inter_type
            if self.is_branch: result += 'branch'
            else: next_layer, next_net = ', ', ', '
            result += '('

        # Add children
        str_list, next_token = [], None
        for f in fs:
            if isinstance(f, Net):
                if next_token is None: next_token = next_net
                assert next_token == next_net
                str_list.append(f.structure_string(detail, scale))
            else:
                assert isinstance(f, Layer)
                if next_token is None: next_token = next_layer
                assert next_token == next_layer
                str_list.append(self._get_layer_string(f, scale))

        str_list = merger(str_list)
        result += next_token.join(str_list)

        # Check is_branch flag
        if self.is_branch:
            result += ' -> output'

        # Check interconnection type
        if self._inter_type not in (pedia.cascade,
                                    self.RECURRENT) or self.is_branch:
            result += ')'
        # Add output scale
        if self.is_root and self._inter_type != pedia.fork:
            result += ' => output[{}]'.format(self.output_shape_str)

        # Return
        return result
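To make the joining rule concrete: sub-nets are chained with ' => ', layers inside a cascade with ' -> ', and children of a sum/prod/concat net are listed with ', ' inside parentheses (see how next_net and next_layer are reassigned above). A rough standalone sketch of that rule follows; the layer names and interconnection strings are illustrative assumptions, not captured tframe output:

def join_children(inter_type, parts):
    # Cascade children read left to right; other interconnection
    # types wrap their children, e.g. 'sum(a, b)'
    if inter_type == 'cascade':
        return ' -> '.join(parts)
    return '{}({})'.format(inter_type, ', '.join(parts))

print(join_children('cascade', ['conv_32', 'relu', 'maxpool']))
# conv_32 -> relu -> maxpool
print(join_children('sum', ['branch_a', 'branch_b']))
# sum(branch_a, branch_b)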
Example #3
    def structure_string(self, detail=True, scale=True):
        # Get functions to be added to structure string
        assert isinstance(self.children, list)
        fs = [
            f for f in self.children
            if isinstance(f, Net) or detail or f.is_nucleus
        ]

        # Add input layer
        result = ('' if self.input_ is None else 'input_{} => '.format(
            shape_string(self.input_.sample_shape)))

        # Check interconnection type
        next_net, next_layer = ' => ', ' -> '
        if self._inter_type != pedia.cascade or self.is_branch:
            if self._inter_type in [pedia.sum, pedia.prod, pedia.concat]:
                result += self._inter_type
            if self.is_branch: result += 'branch'
            else: next_layer, next_net = ', ', ', '
            result += '('

        # Add children
        for i, f in enumerate(fs):
            if isinstance(f, Net):
                result += next_net if i != 0 else ''
                result += f.structure_string(detail, scale)
            else:
                assert isinstance(f, Layer)
                result += next_layer if i != 0 else ''
                result += self._get_layer_string(f, scale)

        # Check is_branch flag
        if self.is_branch:
            result += ' -> output'

        # Check interconnection type
        if self._inter_type != pedia.cascade or self.is_branch: result += ')'

        # Add output scale
        if self.is_root and self._inter_type != pedia.fork:
            result += ' => output_{}'.format(self.children[-1]._output_scale)

        # Return
        return result
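This variant differs from Example #2 by concatenating separators in place rather than collecting pieces and joining them at the end. Prefixing every element but the first with a separator is equivalent to str.join, as this small check shows (the part names are placeholders):

parts = ['dense_64', 'relu', 'dense_10']
result = ''
for i, p in enumerate(parts):
    result += (' -> ' if i != 0 else '') + p
assert result == ' -> '.join(parts)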
Example #4
File: net.py Project: ssh352/tframe
  @property
  def structure_detail(self):
    """A list of structure strings with format
       Layer (type)           Output Shape           Params #
    Currently only works for sequential models
    TODO: refactoring is badly needed
    """
    from tframe.nets.rnet import RNet
    from tframe.nets.customized_net import CustomizedNet
    import numpy as np  # needed for the total-params check below
    widths = hub.structure_detail_widths
    indent = 3

    # rows is a list of lists of 3 cols
    rows = []
    # TODO: the line below should be removed once things are settled down
    # add_to_rows = lambda cols: rows.append(fs.table_row(cols, widths))

    # Dense total will be used when model weights are pruned
    total_params, dense_total = 0, 0
    if self.is_root:
      rows.append(['input', shape_string(self.input_.sample_shape), ''])

    for child in self.children:
      if isinstance(child, Layer):
        _row, num, dense_num = self._get_layer_detail(child)
        rows.append(_row)
      elif isinstance(child, (RNet, CustomizedNet)):
        num, dense_num = child.params_num
        cols = [child.structure_string(), child.output_shape_str,
                stark.get_num_string(num, dense_num)]
        rows.append(cols)
      elif isinstance(child, Net):
        _rows, num, dense_num = child.structure_detail
        # TODO
        rows += _rows
      else:
        raise TypeError('!! unknown child type {}'.format(type(child)))

      # Accumulate total_params and dense_total_params
      total_params += num
      dense_total += dense_num

    # Check total params
    if not (hub.prune_on or hub.etch_on):
      assert total_params == sum([np.prod(v.shape) for v in self.var_list])

    if self.is_root:
      headers = ['Layers', 'Output Shape', 'Params #']
      # Decide cell widths
      widths = [max(len(h), max([len(r[i]) for r in rows]))
                for i, h in enumerate(headers)]
      # Put all these stuff into a table
      t = Table(*widths, margin=0, tab=9, buffered=True, indent=indent)
      t.specify_format(align='llr')
      t.print_header(*headers)
      for i, row in enumerate(rows):
        t.print_row(*row)
        # Draw line
        if i != len(rows) - 1: t.hline()
        else: t.dhline()
      t.print_with_margin('Total params: {}'.format(
        stark.get_num_string(total_params, dense_total)))
      t.hline()
      return t.content, total_params, dense_total
    else: return rows, total_params, dense_total
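For the root net, the returned tuple could be consumed as in the sketch below. This is a hedged usage example: `model` is a placeholder for a constructed root Net, and structure_detail is read as a property, consistent with how `child.structure_detail` is accessed without parentheses above:

def print_structure(model):
    # `model` is assumed to be a constructed root tframe Net;
    # the root branch above returns (table content, total, dense total)
    content, total_params, dense_total = model.structure_detail
    print(content)
    print('Total params: {} (dense: {})'.format(total_params, dense_total))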
Example #5
  @property
  def structure_detail(self):
    """A list of structure strings with format
       Layer (type)           Output Shape           Params #
    Currently only works for sequential models
    """
    from tframe.nets.rnet import RNet
    widths = [33, 24, 20]
    indent = 3

    rows = []
    add_to_rows = lambda cols: rows.append(fs.table_row(cols, widths))
    # Dense total will be used when model weights are pruned
    total_params, dense_total = 0, 0
    if self.is_root:
      add_to_rows(['input', shape_string(self.input_.sample_shape), ''])

    def get_num_string(num, dense_num):
      if num == 0: num_str = ''
      elif hub.prune_on or hub.etch_on:
        num_str = '{} ({:.1f}%)'.format(num, 100.0 * num / dense_num)
      else: num_str = str(num)
      return num_str

    for child in self.children:
      if isinstance(child, Layer):
        # Try to find variable in child
        variables = [v for v in self.var_list if child.group_name in v.name]
        num, dense_num = stark.get_params_num(variables, consider_prune=True)
        # Generate a row
        cols = [self._get_layer_string(child, True, True),
                child.output_shape_str, get_num_string(num, dense_num)]
        add_to_rows(cols)
      elif isinstance(child, RNet):
        num, dense_num = child.params_num
        cols = [child.structure_string(), child.output_shape_str,
                get_num_string(num, dense_num)]
        add_to_rows(cols)
      elif isinstance(child, Net):
        _rows, num, dense_num = child.structure_detail
        rows += _rows
      else:
        raise TypeError('!! unknown child type {}'.format(type(child)))

      # Accumulate total_params and dense_total_params
      total_params += num
      dense_total += dense_num

    if self.is_root:
      # Head
      detail = ''
      add_with_indent = lambda d, c: d + ' ' * indent + c + '\n'
      width = sum(widths)
      detail = add_with_indent(detail, '-' * width)
      detail = add_with_indent(
        detail, fs.table_row(['Layers', 'Output Shape', 'Params #'], widths))
      detail = add_with_indent(detail, '=' * width)
      # Content
      for i, row in enumerate(rows):
        if i > 0:
          detail = add_with_indent(detail, '-' * width)
        detail = add_with_indent(detail, row)
      # Summary
      detail = add_with_indent(detail, '=' * width)
      detail = add_with_indent(
        detail, 'Total params: {}'.format(
          get_num_string(total_params, dense_total)))
      detail += ' ' * indent + '-' * width
      return detail, total_params, dense_total
    else: return rows, total_params, dense_total
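The manual rendering above leans on fs.table_row to pad each row's cells to fixed column widths. A minimal sketch of the padding it is assumed to perform; this is an assumption for illustration, not tframe's fs module:

def table_row(cols, widths):
    # Left-justify each cell to its fixed column width,
    # matching the widths [33, 24, 20] used above
    return ''.join(str(c).ljust(w) for c, w in zip(cols, widths))

print(table_row(['dense_128', '[128]', '100480'], [33, 24, 20]))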