Example 1
0
    def train(self):
        MiniBatchTrainer.train(self)

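        # For each subsequent group count: reset the batch/epoch counters,
        # rebuild the last fully-connected layer from the latest checkpoint
        # with the new output size and cleared weights, then keep training
        # at the next learning rate.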
        for i, group in enumerate(self.num_group_list):
            self.set_num_group(group)
            self.curr_batch = self.curr_epoch = 0
            self.num_batch = self.num_batch_list[i]

            model = self.checkpoint_dumper.get_checkpoint()
            layers = model['layers']

            fc = layers[-2]
            fc['outputSize'] = group
            fc['weight'] = None
            fc['bias'] = None
            fc['weightIncr'] = None
            fc['biasIncr'] = None

            self.learning_rate = self.learning_rate_list[i]
            self.net = FastNet(self.learning_rate,
                               self.image_shape,
                               init_model=model)

            self.net.clear_weight_incr()
            MiniBatchTrainer.train(self)
Example 2
0
  def train(self):
    MiniBatchTrainer.train(self)

    for i, cate in enumerate(self.num_caterange_list):
      self.set_category_range(cate)
      self.curr_batch = self.curr_epoch = 0
      self.num_batch = self.num_batch_list[i]

      model = self.checkpoint_dumper.get_checkpoint()
      layers = model['layers']

      fc = layers[-2]
      fc['weight'] = None
      fc['bias'] = None
      fc['weightIncr'] = None
      fc['biasIncr'] = None
      # for l in layers:
      #  if l['type'] == 'fc':
      #    l['weight'] = None
      #    l['bias'] = None
      #    l['weightIncr'] = None
      #    l['biasIncr'] = None

      # fc = layers[-2]
      fc['outputSize'] = cate

      self.learning_rate = self.learning_rate_list[i]
      self.net = FastNet(self.learning_rate, self.image_shape, init_model=model)

      self.net.clear_weight_incr()
      MiniBatchTrainer.train(self)
Example 3
0
    def train_replaynet(self, stack):
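        # Stash the current trainer state (frequencies, data providers,
        # dumper, and net) so it can be restored after the replay net
        # finishes training.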
        self.container.append(self.save_freq)
        self.container.append(self.test_freq)
        self.container.append(self.train_dp)
        self.container.append(self.test_dp)
        self.container.append(self.layer_output_dumper)
        self.container.append(self.net)

        self.save_freq = self.curr_batch + 100
        self.test_freq = self.curr_batch + 100
        self.curr_batch = self.curr_epoch = 0
        self.init_replaynet_data_provider()

        model = []
        model.extend(stack)
        model.extend(self.fc_tmp)

        self.layer_output_dumper = None
        size = self.net['fc8'].get_input_size()
        image_shape = (size, 1, 1, self.batch_size)
        self.net = FastNet(self.learning_rate, image_shape, model)
        self.replaynet = self.net
        self.num_epoch = self.replaynet_epoch
        Trainer.train(self)

        self.net = self.container.pop()
        self.layer_output_dumper = self.container.pop()
        self.layer_output_dumper.reset()
        self.test_dp = self.container.pop()
        self.train_dp = self.container.pop()
        self.test_freq = self.container.pop()
        self.save_freq = self.container.pop()
Example 4
0
class ImageNetCatewisedTrainer(MiniBatchTrainer):
  def _finish_init(self):
    assert len(self.num_caterange_list) == len(self.num_batch) and self.num_caterange_list[-1] == 1000
    self.num_batch_list = self.num_batch[1:]
    self.num_batch = self.num_batch[0]

    init_output = self.num_caterange_list[0]
    self.num_caterange_list = self.num_caterange_list[1:]

    fc = self.init_model[-2]
    fc['outputSize'] = init_output

    self.learning_rate_list = self.learning_rate[1:]
    self.learning_rate = self.learning_rate[0]

    self.set_category_range(init_output)
    self.net = FastNet(self.learning_rate, self.image_shape, init_model=self.init_model)
    MiniBatchTrainer._finish_init(self)


  def set_category_range(self, r):
    dp = data.get_by_name(self.data_provider)
    self.train_dp = dp(self.data_dir, self.train_range, category_range=range(r))
    self.test_dp = dp(self.data_dir, self.test_range, category_range=range(r))


  def train(self):
    MiniBatchTrainer.train(self)

    for i, cate in enumerate(self.num_caterange_list):
      self.set_category_range(cate)
      self.curr_batch = self.curr_epoch = 0
      self.num_batch = self.num_batch_list[i]

      model = self.checkpoint_dumper.get_checkpoint()
      layers = model['layers']

      fc = layers[-2]
      fc['weight'] = None
      fc['bias'] = None
      fc['weightIncr'] = None
      fc['biasIncr'] = None
      # for l in layers:
      #  if l['type'] == 'fc':
      #    l['weight'] = None
      #    l['bias'] = None
      #    l['weightIncr'] = None
      #    l['biasIncr'] = None

      # fc = layers[-2]
      fc['outputSize'] = cate

      self.learning_rate = self.learning_rate_list[i]
      self.net = FastNet(self.learning_rate, self.image_shape, init_model=model)

      self.net.clear_weight_incr()
      MiniBatchTrainer.train(self)
Example 5
0
class ImageNetCatewisedTrainer(MiniBatchTrainer):
  def _finish_init(self):
    assert len(self.num_caterange_list) == len(self.num_batch) and self.num_caterange_list[-1] == 1000
    self.num_batch_list = self.num_batch[1:]
    self.num_batch = self.num_batch[0]

    init_output = self.num_caterange_list[0]
    self.num_caterange_list = self.num_caterange_list[1:]

    fc = self.init_model[-2]
    fc['outputSize'] = init_output

    self.learning_rate_list = self.learning_rate[1:]
    self.learning_rate = self.learning_rate[0]

    self.set_category_range(init_output)
    self.net = FastNet(self.learning_rate, self.image_shape, init_model=self.init_model)
    MiniBatchTrainer._finish_init(self)


  def set_category_range(self, r):
    dp = data.get_by_name(self.data_provider)
    self.train_dp = dp(self.data_dir, self.train_range, category_range=range(r))
    self.test_dp = dp(self.data_dir, self.test_range, category_range=range(r))


  def train(self):
    MiniBatchTrainer.train(self)

    for i, cate in enumerate(self.num_caterange_list):
      self.set_category_range(cate)
      self.curr_batch = self.curr_epoch = 0
      self.num_batch = self.num_batch_list[i]

      model = self.checkpoint_dumper.get_checkpoint()
      layers = model['layers']

      fc = layers[-2]
      fc['weight'] = None
      fc['bias'] = None
      fc['weightIncr'] = None
      fc['biasIncr'] = None
      # for l in layers:
      #  if l['type'] == 'fc':
      #    l['weight'] = None
      #    l['bias'] = None
      #    l['weightIncr'] = None
      #    l['biasIncr'] = None

      # fc = layers[-2]
      fc['outputSize'] = cate

      self.learning_rate = self.learning_rate_list[i]
      self.net = FastNet(self.learning_rate, self.image_shape, init_model=model)

      self.net.clear_weight_incr()
      MiniBatchTrainer.train(self)
Example 6
0
  def _finish_init(self):
    self.num_batch_list = self.num_batch[1:]
    self.num_batch = self.num_batch[0]
    self.learning_rate_list = self.learning_rate[1:]
    self.learning_rate = self.learning_rate[0]

    layers = self.init_model
    fc = layers[-2]
    fc['outputSize'] = self.num_group_list[0]
    self.num_group_list = self.num_group_list[1:]

    self.set_num_group(fc['outputSize'])
    self.net = FastNet(self.learning_rate, self.image_shape, init_model=self.init_model)
    MiniBatchTrainer._finish_init(self)
Example 7
0
  def get_trainer_by_name(name, param_dict):
    net = FastNet(param_dict['image_shape'])
    if name == 'layerwise':
      param_dict['net'] = net
      return ImageNetLayerwisedTrainer(**param_dict)

    net = FastNet(param_dict['image_shape'])
    load_model(net, param_dict['init_model'])
    param_dict['net'] = net
    if name == 'normal':
      return Trainer(**param_dict)
    elif name == 'minibatch':
      return MiniBatchTrainer(**param_dict)
    else:
      raise Exception('No trainer found for name: %s' % name)
Example 8
0
class ImageNetCateGroupTrainer(MiniBatchTrainer):
    def _finish_init(self):
        self.num_batch_list = self.num_batch[1:]
        self.num_batch = self.num_batch[0]
        self.learning_rate_list = self.learning_rate[1:]
        self.learning_rate = self.learning_rate[0]

        layers = self.init_model
        fc = layers[-2]
        fc['outputSize'] = self.num_group_list[0]
        self.num_group_list = self.num_group_list[1:]

        self.set_num_group(fc['outputSize'])
        self.net = FastNet(self.learning_rate,
                           self.image_shape,
                           init_model=self.init_model)
        MiniBatchTrainer._finish_init(self)

    def set_num_group(self, n):
        dp = data.get_by_name(self.data_provider)
        self.train_dp = dp(self.data_dir, self.train_range, n)
        self.test_dp = dp(self.data_dir, self.test_range, n)

    def train(self):
        MiniBatchTrainer.train(self)

        for i, group in enumerate(self.num_group_list):
            self.set_num_group(group)
            self.curr_batch = self.curr_epoch = 0
            self.num_batch = self.num_batch_list[i]

            model = self.checkpoint_dumper.get_checkpoint()
            layers = model['layers']

            fc = layers[-2]
            fc['outputSize'] = group
            fc['weight'] = None
            fc['bias'] = None
            fc['weightIncr'] = None
            fc['biasIncr'] = None

            self.learning_rate = self.learning_rate_list[i]
            self.net = FastNet(self.learning_rate,
                               self.image_shape,
                               init_model=model)

            self.net.clear_weight_incr()
            MiniBatchTrainer.train(self)
Example 9
0
  def train(self):
    MiniBatchTrainer.train(self)

    for i, cate in enumerate(self.num_caterange_list):
      self.set_category_range(cate)
      self.curr_batch = self.curr_epoch = 0
      self.num_batch = self.num_batch_list[i]

      model = self.checkpoint_dumper.get_checkpoint()
      layers = model['layers']

      fc = layers[-2]
      fc['weight'] = None
      fc['bias'] = None
      fc['weightIncr'] = None
      fc['biasIncr'] = None
      # for l in layers:
      #  if l['type'] == 'fc':
      #    l['weight'] = None
      #    l['bias'] = None
      #    l['weightIncr'] = None
      #    l['biasIncr'] = None

      # fc = layers[-2]
      fc['outputSize'] = cate

      self.learning_rate = self.learning_rate_list[i]
      self.net = FastNet(self.learning_rate, self.image_shape, init_model=model)

      self.net.clear_weight_incr()
      MiniBatchTrainer.train(self)
Example 10
0
  def train_replaynet(self, stack):
    self.container.append(self.save_freq)
    self.container.append(self.test_freq)
    self.container.append(self.train_dp)
    self.container.append(self.test_dp)
    self.container.append(self.layer_output_dumper)
    self.container.append(self.net)

    self.save_freq = self.curr_batch + 100
    self.test_freq = self.curr_batch + 100
    self.curr_batch = self.curr_epoch = 0
    self.init_replaynet_data_provider()

    model = []
    model.extend(stack)
    model.extend(self.fc_tmp)

    self.layer_output_dumper = None
    size = self.net['fc8'].get_input_size()
    image_shape = (size, 1, 1, self.batch_size)
    self.net = FastNet(self.learning_rate, image_shape, model)
    self.replaynet = self.net
    self.num_epoch = self.replaynet_epoch
    Trainer.train(self)

    self.net = self.container.pop()
    self.layer_output_dumper = self.container.pop()
    self.layer_output_dumper.reset()
    self.test_dp = self.container.pop()
    self.train_dp = self.container.pop()
    self.test_freq = self.container.pop()
    self.save_freq = self.container.pop()
Example 11
0
  def _finish_init(self):
    assert len(self.num_caterange_list) == len(self.num_batch) and self.num_caterange_list[-1] == 1000
    self.num_batch_list = self.num_batch[1:]
    self.num_batch = self.num_batch[0]

    init_output = self.num_caterange_list[0]
    self.num_caterange_list = self.num_caterange_list[1:]

    fc = self.init_model[-2]
    fc['outputSize'] = init_output

    self.learning_rate_list = self.learning_rate[1:]
    self.learning_rate = self.learning_rate[0]

    self.set_category_range(init_output)
    self.net = FastNet(self.learning_rate, self.image_shape, init_model=self.init_model)
    MiniBatchTrainer._finish_init(self)
Example 12
0
class ImageNetCateGroupTrainer(MiniBatchTrainer):
  def _finish_init(self):
    self.num_batch_list = self.num_batch[1:]
    self.num_batch = self.num_batch[0]
    self.learning_rate_list = self.learning_rate[1:]
    self.learning_rate = self.learning_rate[0]

    layers = self.init_model
    fc = layers[-2]
    fc['outputSize'] = self.num_group_list[0]
    self.num_group_list = self.num_group_list[1:]

    self.set_num_group(fc['outputSize'])
    self.net = FastNet(self.learning_rate, self.image_shape, init_model=self.init_model)
    MiniBatchTrainer._finish_init(self)

  def set_num_group(self, n):
    dp = data.get_by_name(self.data_provider)
    self.train_dp = dp(self.data_dir, self.train_range, n)
    self.test_dp = dp(self.data_dir, self.test_range, n)


  def train(self):
    MiniBatchTrainer.train(self)

    for i, group in enumerate(self.num_group_list):
      self.set_num_group(group)
      self.curr_batch = self.curr_epoch = 0
      self.num_batch = self.num_batch_list[i]

      model = self.checkpoint_dumper.get_checkpoint()
      layers = model['layers']

      fc = layers[-2]
      fc['outputSize'] = group
      fc['weight'] = None
      fc['bias'] = None
      fc['weightIncr'] = None
      fc['biasIncr'] = None

      self.learning_rate = self.learning_rate_list[i]
      self.net = FastNet(self.learning_rate, self.image_shape, init_model=model)

      self.net.clear_weight_incr()
      MiniBatchTrainer.train(self)
Example 13
0
  def __init__(self, learning_rate, image_shape, num_output, init_model):
    if 'model_state' in init_model:
      model = init_model['model_state']['layers']
    else:
      model = init_model

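    # Shard every fully-connected layer across ranks: shrink its output size
    # by the number of ranks and keep only this rank's row block of the
    # parameters (comm, rank, and np are assumed to come from mpi4py/numpy
    # set up elsewhere in the module).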
    for layer in model:
      if layer['type'] == 'fc':
        size = comm.Get_size()
        layer['outputSize'] = layer['outputSize'] / size
        if 'weight' in layer:
          layer['weight'] = np.vsplit(layer['weight'], size)[rank]
          layer['bias'] = np.vsplit(layer['bias'], size)[rank]
        if 'weightIncr' in layer:
          layer['weightIncr'] = np.vsplit(layer['weightIncr'], size)[rank]
          layer['biasIncr'] = np.vsplit(layer['biasIncr'], size)[rank]

    FastNet.__init__(self, learning_rate, image_shape, num_output, init_model)
Example 14
0
  def _finish_init(self):
    self.final_num_epoch = self.num_epoch
    self.curr_model = []
    self.divide_layers_to_stack()
    self.conv_stack = FastNet.split_conv_to_stack(self.conv_params)
    self.fc_stack = FastNet.split_fc_to_stack(self.fc_params)


    self.fc_tmp = [self.fc_stack['fc8'][0], self.softmax_param]
    del self.fc_stack['fc8']
    self.stack = self.fc_stack

    self.initialize_model()
    pprint.pprint(self.stack)

    self.num_epoch = self.frag_epoch
    self.net = FastNet(self.learning_rate, self.image_shape, self.curr_model)

    self.container = deque()
Example 15
0
  def _finish_init(self):
    self.final_num_epoch = self.num_epoch
    self.curr_model = []
    self.divide_layers_to_stack()
    self.conv_stack = FastNet.split_conv_to_stack(self.conv_params)
    self.fc_stack = FastNet.split_fc_to_stack(self.fc_params)


    self.fc_tmp = [self.fc_stack['fc8'][0], self.softmax_param]
    del self.fc_stack['fc8']
    self.stack = self.fc_stack

    self.initialize_model()
    pprint.pprint(self.stack)

    self.num_epoch = self.frag_epoch
    self.net = FastNet(self.learning_rate, self.image_shape, self.curr_model)

    self.container = deque()
Example 16
0
    def __init__(self, learning_rate, image_shape, num_output, init_model):
        if 'model_state' in init_model:
            model = init_model['model_state']['layers']
        else:
            model = init_model

        for layer in model:
            if layer['type'] == 'fc':
                size = comm.Get_size()
                layer['outputSize'] = layer['outputSize'] / size
                if 'weight' in layer:
                    layer['weight'] = np.vsplit(layer['weight'], size)[rank]
                    layer['bias'] = np.vsplit(layer['bias'], size)[rank]
                if 'weightIncr' in layer:
                    layer['weightIncr'] = np.vsplit(layer['weightIncr'],
                                                    size)[rank]
                    layer['biasIncr'] = np.vsplit(layer['biasIncr'],
                                                  size)[rank]

        FastNet.__init__(self, learning_rate, image_shape, num_output,
                         init_model)
Example 17
0
  def _finish_init(self):
    self.num_batch_list = self.num_batch[1:]
    self.num_batch = self.num_batch[0]
    self.learning_rate_list = self.learning_rate[1:]
    self.learning_rate = self.learning_rate[0]

    layers = self.init_model
    fc = layers[-2]
    fc['outputSize'] = self.num_group_list[0]
    self.num_group_list = self.num_group_list[1:]

    self.set_num_group(fc['outputSize'])
    self.net = FastNet(self.learning_rate, self.image_shape, init_model=self.init_model)
    MiniBatchTrainer._finish_init(self)
Example 18
0
    def get_trainer_by_name(name, param_dict):
        net = FastNet(param_dict['learning_rate'],
                      param_dict['image_shape'],
                      init_model=None)
        param_dict['net'] = net
        if name == 'layerwise':
            return ImageNetLayerwisedTrainer(**param_dict)

        if name == 'catewise':
            return ImageNetCatewisedTrainer(**param_dict)

        if name == 'categroup':
            return ImageNetCateGroupTrainer(**param_dict)

        net = FastNet(param_dict['learning_rate'], param_dict['image_shape'],
                      param_dict['init_model'])
        param_dict['net'] = net
        if name == 'normal':
            return Trainer(**param_dict)

        if name == 'minibatch':
            return MiniBatchTrainer(**param_dict)

        raise Exception('No trainer found for name: %s' % name)
Example 19
0
  def _finish_init(self):
    assert len(self.num_caterange_list) == len(self.num_batch) and self.num_caterange_list[-1] == 1000
    self.num_batch_list = self.num_batch[1:]
    self.num_batch = self.num_batch[0]

    init_output = self.num_caterange_list[0]
    self.num_caterange_list = self.num_caterange_list[1:]

    fc = self.init_model[-2]
    fc['outputSize'] = init_output

    self.learning_rate_list = self.learning_rate[1:]
    self.learning_rate = self.learning_rate[0]

    self.set_category_range(init_output)
    self.net = FastNet(self.learning_rate, self.image_shape, init_model=self.init_model)
    MiniBatchTrainer._finish_init(self)
Example 20
0
  def train(self):
    MiniBatchTrainer.train(self)

    for i, group in enumerate(self.num_group_list):
      self.set_num_group(group)
      self.curr_batch = self.curr_epoch = 0
      self.num_batch = self.num_batch_list[i]

      model = self.checkpoint_dumper.get_checkpoint()
      layers = model['layers']

      fc = layers[-2]
      fc['outputSize'] = group
      fc['weight'] = None
      fc['bias'] = None
      fc['weightIncr'] = None
      fc['biasIncr'] = None

      self.learning_rate = self.learning_rate_list[i]
      self.net = FastNet(self.learning_rate, self.image_shape, init_model=model)

      self.net.clear_weight_incr()
      MiniBatchTrainer.train(self)
Example 21
0
class ImageNetLayerwisedTrainer(Trainer):
  def divide_layers_to_stack(self):
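    # Split the layer descriptions in init_model into the convolutional
    # prefix, the fully-connected tail, and the trailing softmax parameters.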
    self.fc_params = []
    self.conv_params = []
    conv = True
    for ld in self.init_model:
      if ld['type'] in ['conv', 'rnorm', 'pool', 'neuron'] and conv:
        # self.conv_params.append(ld)
        self.conv_params.append(ld)
      elif ld['type'] == 'fc' or (not conv and ld['type'] == 'neuron'):
        self.fc_params.append(ld)
        conv = False
      else:
        self.softmax_param = ld

  def initialize_model(self):
    self.curr_model.extend(self.conv_stack['conv1'])
    self.curr_model.extend(self.conv_stack['conv2'])
    self.curr_model.extend(self.conv_stack['conv3'])
    self.curr_model.extend(self.conv_stack['conv4'])
    self.curr_model.extend(self.conv_stack['conv5'])
    self.curr_model.extend(self.fc_tmp)

  def _finish_init(self):
    self.final_num_epoch = self.num_epoch
    self.curr_model = []
    self.divide_layers_to_stack()
    self.conv_stack = FastNet.split_conv_to_stack(self.conv_params)
    self.fc_stack = FastNet.split_fc_to_stack(self.fc_params)


    self.fc_tmp = [self.fc_stack['fc8'][0], self.softmax_param]
    del self.fc_stack['fc8']
    self.stack = self.fc_stack

    self.initialize_model()
    pprint.pprint(self.stack)

    self.num_epoch = self.frag_epoch
    self.net = FastNet(self.learning_rate, self.image_shape, self.curr_model)

    self.container = deque()

  def report(self):
    pass

  def should_continue_training(self):
    return self.curr_epoch <= self.num_epoch

  def init_replaynet_data_provider(self):
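    # The replay net trains on previously dumped layer outputs, read either
    # from disk ('disk') or directly from the in-memory dumpers ('memory').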
    if self.output_method == 'disk':
      dp = data.get_by_name('intermediate')
      count = self.train_layer_output_dumper.get_count()
      self.train_dp = dp(self.train_layer_output_path, range(0, count), 'fc')
      count = self.test_layer_output_dumper.get_count()
      self.test_dp = dp(self.test_layer_output_path, range(count), 'fc')
    elif self.output_method == 'memory':
      dp = data.get_by_name('memory')
      self.train_dp = dp(self.train_layer_output_dumper)
      self.test_dp = dp(self.test_layer_output_dumper)

  def train_replaynet(self, stack):
    self.container.append(self.save_freq)
    self.container.append(self.test_freq)
    self.container.append(self.train_dp)
    self.container.append(self.test_dp)
    self.container.append(self.train_layer_output_dumper)
    self.container.append(self.test_layer_output_dumper)
    self.container.append(self.net)

    self.save_freq = self.curr_batch + 100
    self.test_freq = self.curr_batch + 100
    self.curr_batch = self.curr_epoch = 0
    self.init_replaynet_data_provider()

    model = []
    model.extend(stack)
    model.extend(self.fc_tmp)

    self.train_layer_output_dumper = None
    self.test_layer_output_dumper = None
    size = self.net['fc8'].get_input_size()
    image_shape = (size, 1, 1, self.batch_size)
    self.net = FastNet(self.learning_rate, image_shape, model)
    self.replaynet = self.net
    self.num_epoch = self.replaynet_epoch
    Trainer.train(self)

    self.net = self.container.pop()
    self.test_layer_output_dumper = self.container.pop()
    self.test_layer_output_dumper.reset()
    self.train_layer_output_dumper = self.container.pop()
    self.train_layer_output_dumper.reset()
    self.test_dp = self.container.pop()
    self.train_dp = self.container.pop()
    self.test_freq = self.container.pop()
    self.save_freq = self.container.pop()

  def reset_trainer(self, i):
    if i == len(self.stack) - 1:
      self.num_epoch = self.final_num_epoch
    else:
      self.num_epoch = self.frag_epoch

    self.curr_batch = self.curr_epoch = 0
    self.init_data_provider()

  def train(self):
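    # Layerwise schedule: after training the current net, train a small
    # replay net (the next fc stack plus fc8/softmax) on cached intermediate
    # outputs, then graft its non-data layers onto the main net in place of
    # everything from fc8 onward and train again.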
    Trainer.train(self)
    for i, stack in enumerate(self.stack.values()):
      self.train_replaynet(stack)
      self.reset_trainer(i)

      self.net.drop_layer_from('fc8')

      for layer in self.replaynet:
        if layer.type != 'data':
          self.net.append_layer(layer)
      Trainer.train(self)
Example 22
0
class ImageNetLayerwisedTrainer(Trainer):
  def divide_layers_to_stack(self):
    self.fc_params = []
    self.conv_params = []
    conv = True
    for ld in self.init_model:
      if ld['type'] in ['conv', 'rnorm', 'pool', 'neuron'] and conv:
        # self.conv_params.append(ld)
        self.conv_params.append(ld)
      elif ld['type'] == 'fc' or (not conv and ld['type'] == 'neuron'):
        self.fc_params.append(ld)
        conv = False
      else:
        self.softmax_param = ld

  def initialize_model(self):
    self.curr_model.extend(self.conv_stack['conv1'])
    self.curr_model.extend(self.conv_stack['conv2'])
    self.curr_model.extend(self.conv_stack['conv3'])
    self.curr_model.extend(self.conv_stack['conv4'])
    self.curr_model.extend(self.conv_stack['conv5'])
    self.curr_model.extend(self.fc_tmp)

  def _finish_init(self):
    self.final_num_epoch = self.num_epoch
    self.curr_model = []
    self.divide_layers_to_stack()
    self.conv_stack = FastNet.split_conv_to_stack(self.conv_params)
    self.fc_stack = FastNet.split_fc_to_stack(self.fc_params)


    self.fc_tmp = [self.fc_stack['fc8'][0], self.softmax_param]
    del self.fc_stack['fc8']
    self.stack = self.fc_stack

    self.initialize_model()
    pprint.pprint(self.stack)

    self.num_epoch = self.frag_epoch
    self.net = FastNet(self.learning_rate, self.image_shape, self.curr_model)

    self.container = deque()

  def report(self):
    pass

  def should_continue_training(self):
    return self.curr_epoch <= self.num_epoch

  def init_replaynet_data_provider(self):
    if self.output_method == 'disk':
      dp = data.get_by_name('intermediate')
      count = self.layer_output_dumper.get_count()
      self.train_dp = dp(self.layer_output_path, range(0, count), 'fc')
    elif self.output_method == 'memory':
      dp = data.get_by_name('memory')
      self.train_dp = dp(self.layer_output_dumper)

  def train_replaynet(self, stack):
    self.container.append(self.save_freq)
    self.container.append(self.test_freq)
    self.container.append(self.train_dp)
    self.container.append(self.test_dp)
    self.container.append(self.layer_output_dumper)
    self.container.append(self.net)

    self.save_freq = self.curr_batch + 100
    self.test_freq = self.curr_batch + 100
    self.curr_batch = self.curr_epoch = 0
    self.init_replaynet_data_provider()

    model = []
    model.extend(stack)
    model.extend(self.fc_tmp)

    self.layer_output_dumper = None
    size = self.net['fc8'].get_input_size()
    image_shape = (size, 1, 1, self.batch_size)
    self.net = FastNet(self.learning_rate, image_shape, model)
    self.replaynet = self.net
    self.num_epoch = self.replaynet_epoch
    Trainer.train(self)

    self.net = self.container.pop()
    self.layer_output_dumper = self.container.pop()
    self.layer_output_dumper.reset()
    self.test_dp = self.container.pop()
    self.train_dp = self.container.pop()
    self.test_freq = self.container.pop()
    self.save_freq = self.container.pop()

  def reset_trainer(self, i):
    if i == len(self.stack) - 1:
      self.num_epoch = self.final_num_epoch
    else:
      self.num_epoch = self.frag_epoch

    self.curr_batch = self.curr_epoch = 0
    self.init_data_provider()

  def train(self):
    Trainer.train(self)
    for i, stack in enumerate(self.stack.values()):
      self.train_replaynet(stack)
      self.reset_trainer(i)

      self.net.drop_layer_from('fc8')

      for layer in self.replaynet:
        if layer.type != 'data':
          self.net.append_layer(layer)
      Trainer.train(self)