Example #1
File: model.py  Project: wangmengzhi/CAT
 def __init__(self, input_dim, output_dim, half_context=1):
     super(TDNN, self).__init__()
     self.input_dim = input_dim
     self.output_dim = output_dim
     self.half_context = half_context
     # 1-D convolution over the time axis covering 2 * half_context + 1 frames; padding preserves the sequence length
     self.conv = torch.nn.Conv1d(self.input_dim, self.output_dim, 2 * half_context + 1, padding=half_context)
     self.bn = bns.BatchnormSync(self.output_dim, eps=1e-5, affine=True)  # synchronized batch norm from the project's bns module
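A minimal, self-contained sketch of how a layer built this way can be applied, assuming speech features shaped (batch, time, feature). It is illustrative only: bns.BatchnormSync is project-specific, so torch.nn.BatchNorm1d stands in for it, the class name TDNNSketch is made up, and the ReLU is an assumption since the project's forward method is not shown here.

import torch

class TDNNSketch(torch.nn.Module):
    """Illustrative stand-in for the TDNN layer above (not the project's class)."""
    def __init__(self, input_dim, output_dim, half_context=1):
        super().__init__()
        # Conv1d over time with a symmetric window of 2*half_context+1 frames;
        # padding=half_context keeps the number of frames unchanged.
        self.conv = torch.nn.Conv1d(input_dim, output_dim, 2 * half_context + 1, padding=half_context)
        # torch.nn.BatchNorm1d replaces bns.BatchnormSync for this single-GPU sketch.
        self.bn = torch.nn.BatchNorm1d(output_dim, eps=1e-5, affine=True)

    def forward(self, x):
        # x: (batch, time, input_dim); Conv1d expects (batch, channels, time).
        x = self.conv(x.transpose(1, 2))
        x = torch.relu(self.bn(x))
        return x.transpose(1, 2)  # back to (batch, time, output_dim)

feats = torch.randn(4, 100, 40)           # 4 utterances, 100 frames, 40-dim features
print(TDNNSketch(40, 256)(feats).shape)   # torch.Size([4, 100, 256])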
Example #2
File: model.py  Project: wangmengzhi/CAT
 def __init__(self, idim, hdim, n_layers, dropout):
     super(TDNN_LSTM, self).__init__()
     setattr(self, "tdnn0" , TDNN(idim, hdim))
     for i in six.moves.range(n_layers):
         setattr(self, "tdnn%d-1" % i, TDNN(hdim, hdim))
         setattr(self, "tdnn%d-2" % i, TDNN(hdim, hdim))
         setattr(self, "lstm%d" % i, torch.nn.LSTM(hdim,hdim, num_layers=1, bidirectional=False, batch_first=True))
         setattr(self, "bn%d" % i, bns.BatchnormSync(hdim, eps=1e-5, affine=True))
         setattr(self, "dropout%d" % i, torch.nn.Dropout(dropout))
     self.n_layers = n_layers
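The constructor above only registers the per-layer sub-modules; TDNN_LSTM's forward pass is not shown. As a rough sketch of the registration pattern, the snippet below (standard PyTorch modules, made-up class and attribute names) shows how modules stored with setattr under formatted names are retrieved with getattr in a loop.

import torch

class StackSketch(torch.nn.Module):
    """Illustrates the setattr/getattr layer-registration pattern, not the real TDNN_LSTM."""
    def __init__(self, hdim, n_layers, dropout):
        super().__init__()
        for i in range(n_layers):
            # Assigning an nn.Module via setattr registers it as a sub-module,
            # exactly as a direct attribute assignment would.
            setattr(self, "lstm%d" % i,
                    torch.nn.LSTM(hdim, hdim, num_layers=1, bidirectional=False, batch_first=True))
            setattr(self, "dropout%d" % i, torch.nn.Dropout(dropout))
        self.n_layers = n_layers

    def forward(self, x):
        for i in range(self.n_layers):
            x, _ = getattr(self, "lstm%d" % i)(x)   # fetch the i-th LSTM by its formatted name
            x = getattr(self, "dropout%d" % i)(x)
        return x

x = torch.randn(2, 50, 64)                # (batch, time, hdim)
print(StackSketch(64, 3, 0.5)(x).shape)   # torch.Size([2, 50, 64])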
Example #3
File: model.py  Project: alex-ht/CAT
 def __init__(self, idim, hdim, n_layers, dropout):
     super(BLSTMN, self).__init__()
     for i in six.moves.range(n_layers):
         if i == 0:
             inputdim = idim
         else:
             # layers after the first consume the concatenated forward/backward outputs
             inputdim = hdim * 2
         setattr(self, "lstm%d" % i, torch.nn.LSTM(inputdim, hdim,num_layers=1, bidirectional=True, batch_first=True))
         setattr(self, "bn%d" % i, bns.BatchnormSync(hdim*2, eps=1e-5, affine=True))
         setattr(self, "dropout%d" % i, torch.nn.Dropout(dropout))
     self.n_layers = n_layers
     self.hdim = hdim
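Because each layer is bidirectional, its per-frame output is the concatenation of the forward and backward hidden states, i.e. hdim * 2 features, which is why every layer after the first takes hdim * 2 inputs and the batch norm covers hdim * 2 channels. A quick stand-alone check of that dimension bookkeeping (variable names here are illustrative, not from the project):

import torch

idim, hdim = 40, 128
lstm0 = torch.nn.LSTM(idim, hdim, num_layers=1, bidirectional=True, batch_first=True)
lstm1 = torch.nn.LSTM(hdim * 2, hdim, num_layers=1, bidirectional=True, batch_first=True)

x = torch.randn(3, 75, idim)   # (batch, time, idim)
y0, _ = lstm0(x)               # first layer: (3, 75, hdim * 2) = (3, 75, 256)
y1, _ = lstm1(y0)              # later layers keep the hdim * 2 width
print(y0.shape, y1.shape)      # torch.Size([3, 75, 256]) torch.Size([3, 75, 256])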