Example #1
 def __init__(self, factor, **kwargs):
   # Concatenate the activations of all source layers along the feature axis.
   h = T.concatenate([s.act[0] for s in kwargs['sources']], axis=2)
   time = h.shape[0]
   batch = h.shape[1]
   # Wrap the concatenated activations in a dummy source layer.
   src = Layer([], sum([s.attrs['n_out'] for s in kwargs['sources']]), kwargs['index'])
   # Fold time and batch into one axis, then repeat `factor` times along a new leading axis.
   src.output = h.reshape((1, h.shape[0] * h.shape[1], h.shape[2])).repeat(factor, axis=0)
   src.index = kwargs['sources'][0].index
   src.layer_class = ''
   kwargs['sources'] = [src]
   kwargs['index'] = kwargs['index'].flatten().dimshuffle('x', 0).repeat(factor, axis=0)
   kwargs['n_dec'] = factor  # decode `factor` frames per input frame
   super(RecurrentUpsampleLayer, self).__init__(**kwargs)
   # Unfold the result back to shape (factor * time, batch, n_out).
   self.index = self.index.reshape((self.index.shape[0] * time, batch))
   self.output = self.output.reshape((self.output.shape[0] * time, batch, self.output.shape[2]))
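
This layer effectively repeats each input frame `factor` times in the output. A minimal usage sketch follows; the encoder layer, factor value and output size are illustrative assumptions, not from the source:

 # Hypothetical: upsample a time-downsampled encoder back by a factor of 2.
 upsample = RecurrentUpsampleLayer(factor=2,
                                   sources=[encoder_layer],
                                   index=encoder_layer.index,
                                   n_out=512,
                                   name='upsample')
 # upsample.output then has shape (2 * time, batch, 512).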
Example #2
    def traverse(model, layer_name, output_index):
      """Recursively build layer `layer_name` and all layers it depends on from the stored model."""
      index = output_index
      mask = network.default_mask
      if not mask and 'mask' in model[layer_name].attrs:
        mask = model[layer_name].attrs['mask']
      if 'from' in model[layer_name].attrs:
        x_in = []
        for s in model[layer_name].attrs['from'].split(','):
          if s == 'data':
            x_in.append(SourceLayer(network.n_in, network.x, sparse=sparse_input, name='data', index=network.i))
            index = network.i
          elif s != "null" and s != "": # this is allowed, recurrent states can be passed as input
            if s not in network.hidden:
              index = traverse(model, s, index)
            else:
              index = network.hidden[s].index
            x_in.append(network.hidden[s])
          elif s == "":
            assert not s
            # Fix for old models via NetworkDescription.
            s = Layer.guess_source_layer_name(layer_name)
            if not s:
              # Fix for data input. Just like in NetworkDescription, so that param names are correct.
              x_in.append(SourceLayer(n_out=network.n_in, x_out=network.x, name="", index=network.i))
            else:
              if s not in network.hidden:
                index = traverse(model, s, index)
              else:
                index = network.hidden[s].index
              # Add just like in NetworkDescription, so that param names are correct.
              x_in.append(SourceLayer(n_out=network.hidden[s].attrs['n_out'], x_out=network.hidden[s].output, name="", index=network.i))
      else:
        x_in = [ SourceLayer(network.n_in, network.x, sparse=sparse_input, name='data', index=network.i) ]
      if 'encoder' in model[layer_name].attrs:
        encoder = []
        for s in model[layer_name].attrs['encoder'].split(','):
          if s != "":
            if s not in network.hidden:
              traverse(model, s, index)
            encoder.append(network.hidden[s])
      if 'base' in model[layer_name].attrs: # TODO see json
        base = []
        for s in model[layer_name].attrs['base'].split(','):
          if s != "":
            if s not in network.hidden:
              traverse(model, s, index)
            base.append(network.hidden[s])
      for key in ['copy_input', 'copy_output']:
        if key in model[layer_name].attrs:
          index = traverse(model, model[layer_name].attrs[key], index)
          if key == 'copy_input':
            copy_input = network.hidden[model[layer_name].attrs[key]]
          if key == 'copy_output':
            copy_output = network.hidden[model[layer_name].attrs[key]]
      if 'encoder' in model[layer_name].attrs and not x_in:
        index = output_index
      if 'target' in model[layer_name].attrs:
        target = model[layer_name].attrs['target']
        if target != "null" and target not in network.y:
          network.use_target(target, dtype=dtype)
          index = network.j[target]
      cl = model[layer_name].attrs['class']
      if cl == 'softmax':
        params = { 'dropout' : 0.0,
                   'name' : 'output',
                   'mask' : mask,
                   'train_flag' : train_flag }
        params.update(model[layer_name].attrs)
        if 'encoder' in model[layer_name].attrs:
          params['encoder'] = encoder
        if 'base' in model[layer_name].attrs:
          params['base'] = base
        if 'copy_input' in model[layer_name].attrs:
          params['copy_input'] = copy_input
        if 'copy_output' in model[layer_name].attrs:
          params['copy_output'] = copy_output
        params['index'] = index
        params['sources'] = x_in
        params['y_in'] = network.y
        params.pop('from', None)
        params.pop('class', None)
        network.make_classifier(**params)
      else:
        params = { 'sources': x_in,
                   'n_out': model[layer_name].attrs['n_out'],
                   'dropout': model[layer_name].attrs['dropout'] if train_flag else 0.0,
                   'name': layer_name,
                   'mask': mask,
                   'train_flag' : train_flag,
                   "eval_flag": eval_flag,
                   'network': network,
                   'index' : index }
        if 'activation' in model[layer_name].attrs:
          params['activation'] = model[layer_name].attrs['activation']
        params['y_in'] = network.y
        layer_class = get_layer_class(cl)
        for p in collect_class_init_kwargs(layer_class):
          if p in params: continue  # don't overwrite existing
          if p in model[layer_name].attrs.keys():
            params[p] = model[layer_name].attrs[p]
        if 'encoder' in model[layer_name].attrs:
          params['encoder'] = encoder
        if 'base' in model[layer_name].attrs:
          params['base'] = base

        if 'target' in model[layer_name].attrs:
          params['target'] = model[layer_name].attrs['target']
        if layer_class.recurrent:
          network.recurrent = True
        return network.add_layer(layer_class(**params)).index
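
A hypothetical driver for traverse might walk the stored model starting from its output layers; the shape of the `model` dict of stored layers and the softmax-based output detection below are assumptions, not from the source:

 # Hypothetical: rebuild every layer reachable from the stored output layers.
 output_names = [name for name in model if model[name].attrs['class'] == 'softmax']
 for name in output_names:
   traverse(model, name, network.i)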
Example #3
 def __init__(self, n_in=None, n_out=None,
              base_network=None, data_map=None, data_map_i=None,
              shared_params_network=None,
              mask=None, sparse_input=False, target='classes', train_flag=False, eval_flag=False):
   """
   :param int n_in: input dim of the network
   :param dict[str,(int,int)] n_out: output dim of the network.
     first int is num classes, second int is 1 if it is sparse, i.e. we will get the indices.
   :param dict[str,theano.Variable] data_map: if specified, this will be used for x/y (and it expects data_map_i)
   :param dict[str,theano.Variable] data_map_i: if specified, this will be used for i/j
   :param LayerNetwork|None base_network: optional base network where we will derive x/y/i/j/n_in/n_out from.
     data_map will have precedence over base_network.
   :param LayerNetwork|()->LayerNetwork|None shared_params_network: optional network where we will share params with.
     we will error if there is a param which cannot be shared.
   :param str mask: e.g. "unity" or None ("dropout")
   :param bool sparse_input: for SourceLayer
   :param str target: default target
   :param bool train_flag: marks that we are used for training
   :param bool eval_flag: marks that we are used for evaluation
   """
   if n_out is None:
     assert base_network is not None
     n_out = base_network.n_out
   else:
     assert n_out is not None
     n_out = n_out.copy()
   if n_in is None:
     assert "data" in n_out
     n_in = n_out["data"][0]
   if "data" not in n_out:
     data_dim = 3
     n_out["data"] = (n_in, data_dim - 1)  # small hack: support input-data as target
   else:
     assert 1 <= n_out["data"][1] <= 2  # maybe obsolete check...
     data_dim = n_out["data"][1] + 1  # one more because of batch-dim
   if data_map is not None:
     assert data_map_i is not None
     self.y = data_map
     self.x = data_map["data"]
     self.j = data_map_i
     self.i = data_map_i["data"]
   elif base_network is not None:
     self.x = base_network.x
     self.y = base_network.y
     self.i = base_network.i
     self.j = base_network.j
   else:
     dtype = "float32" if data_dim >= 3 else "int32"
     self.x = T.TensorType(dtype, ((False,) * data_dim))('x')
     self.y = {"data": self.x}
     self.i = T.bmatrix('i'); """ :type: theano.Variable """
     self.j = {"data": self.i}
   if base_network is not None:
     self.epoch = base_network.epoch
     self.tags  = base_network.tags
   else:
     self.epoch = T.constant(0, name="epoch", dtype="int32")
     self.tags  = T.bmatrix('tags')
   self.constraints = {}
   self.total_constraints = T.constant(0)
   Layer.initialize_rng()
   self.n_in = n_in
   self.n_out = n_out
   self.hidden = {}; """ :type: dict[str,ForwardLayer|RecurrentLayer] """
   self.train_params_vars = []; """ :type: list[theano.compile.sharedvalue.SharedVariable] """
   self.description = None; """ :type: LayerNetworkDescription | None """
   self.train_param_args = None; """ :type: dict[str] """
   self.recurrent = False  # any of the from_...() functions will set this
   self.default_mask = mask
   self.sparse_input = sparse_input
   self.default_target = target
   self.train_flag = train_flag
   self.eval_flag = eval_flag
   self.output = {}; " :type: dict[str,FramewiseOutputLayer] "
   self.known_grads = {}; " :type: dict[theano.Variable,theano.Variable]"
   self.json_content = "{}"
   self.costs = {}
   self.total_cost = T.constant(0)
   self.update_step = 0
   self.errors = {}
   self.loss = None
   self.ctc_priors = None
   self.calc_step_base = None
   self.calc_steps = []
   self.base_network = base_network
   self.shared_params_network = shared_params_network
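
A minimal construction sketch (the input dim, class count and flags are illustrative assumptions, not from the source):

 # Hypothetical: 40-dim dense input, 100 sparse target classes.
 net = LayerNetwork(n_in=40,
                    n_out={'classes': (100, 1)},
                    target='classes',
                    train_flag=True)
 # net.x is a symbolic float32 (time, batch, feature) tensor; net.i is the sequence mask.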
Example #4
 def __init__(self,
              n_in=None,
              n_out=None,
              base_network=None,
              data_map=None,
              data_map_i=None,
              shared_params_network=None,
              mask=None,
              sparse_input=False,
              target='classes',
              train_flag=False,
              eval_flag=False):
     """
 :param int n_in: input dim of the network
 :param dict[str,(int,int)] n_out: output dim of the network.
   first int is num classes, second int is 1 if it is sparse, i.e. we will get the indices.
 :param dict[str,theano.Variable] data_map: if specified, this will be used for x/y (and it expects data_map_i)
 :param dict[str,theano.Variable] data_map_i: if specified, this will be used for i/j
 :param LayerNetwork|None base_network: optional base network where we will derive x/y/i/j/n_in/n_out from.
   data_map will have precedence over base_network.
 :param LayerNetwork|()->LayerNetwork|None shared_params_network: optional network where we will share params with.
   we will error if there is a param which cannot be shared.
 :param str mask: e.g. "unity" or None ("dropout")
 :param bool sparse_input: for SourceLayer
 :param str target: default target
 :param bool train_flag: marks that we are used for training
 :param bool eval_flag: marks that we are used for evaluation
 """
     if n_out is None:
         assert base_network is not None
         n_out = base_network.n_out
     else:
         assert n_out is not None
         n_out = n_out.copy()
     if n_in is None:
         assert "data" in n_out
         n_in = n_out["data"][0]
     if "data" not in n_out:
         data_dim = 3
         n_out["data"] = (n_in, data_dim - 1
                          )  # small hack: support input-data as target
     else:
         assert 1 <= n_out["data"][1] <= 2  # maybe obsolete check...
         data_dim = n_out["data"][1] + 1  # one more because of batch-dim
     if data_map is not None:
         assert data_map_i is not None
         self.y = data_map
         self.x = data_map["data"]
         self.j = data_map_i
         self.i = data_map_i["data"]
     elif base_network is not None:
         self.x = base_network.x
         self.y = base_network.y
         self.i = base_network.i
         self.j = base_network.j
     else:
         dtype = "float32" if data_dim >= 3 else "int32"
         self.x = T.TensorType(dtype, ((False, ) * data_dim))('x')
         self.y = {"data": self.x}
         self.i = T.bmatrix('i')
         """ :type: theano.Variable """
         self.j = {"data": self.i}
     if base_network is not None:
         self.epoch = base_network.epoch
         self.tags = base_network.tags
     else:
         self.epoch = T.constant(0, name="epoch", dtype="int32")
         self.tags = T.bmatrix('tags')
     self.constraints = {}
     self.total_constraints = T.constant(0)
     Layer.initialize_rng()
     self.n_in = n_in
     self.n_out = n_out
     self.hidden = {}
     """ :type: dict[str,ForwardLayer|RecurrentLayer] """
     self.train_params_vars = []
     """ :type: list[theano.compile.sharedvalue.SharedVariable] """
     self.description = None
     """ :type: LayerNetworkDescription | None """
     self.train_param_args = None
     """ :type: dict[str] """
     self.recurrent = False  # any of the from_...() functions will set this
     self.default_mask = mask
     self.sparse_input = sparse_input
     self.default_target = target
     self.train_flag = train_flag
     self.eval_flag = eval_flag
     self.output = {}
     " :type: dict[str,FramewiseOutputLayer] "
     self.known_grads = {}
     " :type: dict[theano.Variable,theano.Variable]"
     self.json_content = "{}"
     self.costs = {}
     self.total_cost = T.constant(0)
     self.objective = None
     self.update_step = 0
     self.errors = {}
     self.loss = None
     self.ctc_priors = None
     self.calc_step_base = None
     self.calc_steps = []
     self.base_network = base_network
     self.shared_params_network = shared_params_network
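
A minimal sketch of the data_map path, where the caller supplies its own symbolic variables instead of letting the constructor create placeholders (all names and dims are illustrative assumptions):

 import theano.tensor as T

 # Hypothetical externally-created symbolic inputs.
 x = T.ftensor3('my_x')   # (time, batch, feature)
 i = T.bmatrix('my_i')    # sequence index/mask
 net = LayerNetwork(n_in=40,
                    n_out={'classes': (100, 1)},
                    data_map={'data': x, 'classes': T.imatrix('my_y')},
                    data_map_i={'data': i, 'classes': T.bmatrix('my_j')})
 # net.x is x and net.i is i; no new placeholders are created.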