def __init__(self, input_size, embed_dim, hidden_size, n_layers=1, dropout_r=0.1, cell_type='TLSTM', bii=False, time=True, preTrainEmb=''):
    """Initialize a Time-aware LSTM (TLSTM) model on top of EHREmbeddings.

    Bug fix: the original invoked ``EHREmbeddings.__init__`` twice — first
    with positional arguments that misaligned ``time`` into the ``bii`` slot
    (and ``preTrainEmb`` into ``time``), then again with correct keywords.
    Only the correct keyword call is kept.

    Parameters mirror the other EHR models; ``bii`` and ``time`` are accepted
    for interface parity but TLSTM is always 1-directional and time-aware.
    """
    EHREmbeddings.__init__(self, input_size, embed_dim, hidden_size,
                           n_layers=n_layers, dropout_r=dropout_r,
                           cell_type=cell_type, bii=False, time=True,
                           preTrainEmb=preTrainEmb, packPadMode=False)
    # TLSTM is only defined for the time-aware cell in a single direction;
    # correct any conflicting user choice and report the override.
    if self.cell_type != 'TLSTM' or self.bi != 1:
        print("TLSTM only supports Time aware LSTM cell type and 1 direction. Implementing corrected parameters instead")
        self.cell_type = 'TLSTM'
        self.bi = 1  # enforcing 1 directional
        self.packPadMode = False
def __init__(self, input_size, embed_dim, hidden_size, n_layers=1, dropout_r=0.1, cell_type='QRNN', bii=False, time=False, preTrainEmb='', packPadMode=False):
    """Initialize a QRNN model on top of EHREmbeddings.

    ``cell_type`` and ``bii`` are accepted for interface parity with the
    other models, but a QRNN here is always 1-directional with the QRNN
    cell; conflicting choices are overridden below.
    """
    EHREmbeddings.__init__(self, input_size, embed_dim, hidden_size,
                           n_layers=n_layers, dropout_r=dropout_r,
                           cell_type=cell_type, bii=bii, time=time,
                           preTrainEmb=preTrainEmb, packPadMode=packPadMode)
    # Enforce the only supported configuration, telling the user when
    # their requested parameters had to be corrected.
    misconfigured = (self.cell_type != 'QRNN') or (self.bi != 1)
    if misconfigured:
        print('QRNN only supports 1-direction & QRNN cell_type implementation. Implementing corrected parameters instead')
        self.cell_type = 'QRNN'
        self.bi = 1  # enforcing 1 directional
        self.packPadMode = False  # enforcing correct packpaddedmode
def __init__(self, input_size, embed_dim, time=False, cell_type='LR', preTrainEmb=''):
    """Initialize a logistic-regression-style model on top of EHREmbeddings.

    The hidden size is tied to the embedding dimension. ``time``,
    ``cell_type`` and ``preTrainEmb`` are accepted only for interface
    parity with the other EHR models and are not used here.
    """
    EHREmbeddings.__init__(self, input_size=input_size,
                           embed_dim=embed_dim,
                           hidden_size=embed_dim)
def __init__(self, input_size, embed_dim, hidden_size, n_layers, dropout_r=0.1, cell_type='GRU', bii=False, time=False, preTrainEmb='', packPadMode=False):
    """Initialize a dilated RNN (DRNN) model on top of EHREmbeddings.

    Builds one recurrent cell per layer with exponentially growing
    dilations. DRNN is always 1-directional and never uses pack-padded
    sequences, so ``bii``/``packPadMode`` are overridden.
    """
    EHREmbeddings.__init__(self, input_size, embed_dim, hidden_size,
                           n_layers=n_layers, dropout_r=dropout_r,
                           cell_type=cell_type, bii=False, time=time,
                           preTrainEmb=preTrainEmb, packPadMode=False)
    # Dilation doubles at each layer: 1, 2, 4, ...
    self.dilations = [2 ** layer for layer in range(n_layers)]
    self.layers = nn.ModuleList([])
    if self.bi == 2:
        print('DRNN only supports 1-direction, implementing 1-direction instead')
        self.bi = 1  # enforcing 1 directional
        self.packPadMode = False  # enforcing no packpadded indicator
    # First layer consumes the embedded input; subsequent layers are
    # hidden-to-hidden.
    for layer in range(n_layers):
        in_dim = self.in_size if layer == 0 else self.hidden_size
        self.layers.append(self.cell(in_dim, self.hidden_size, dropout=self.dropout_r))
    self.cells = nn.Sequential(*self.layers)
def __init__(self, input_size, embed_dim, hidden_size, n_layers):
    """Initialize a two-RNN attention model on top of EHREmbeddings.

    Two bidirectional single-layer RNNs feed two attention projections
    (``wa`` scores, ``Wb`` contexts) ahead of the output layer —
    presumably RETAIN-style visit/variable attention; confirm against
    the forward pass.
    """
    EHREmbeddings.__init__(self, input_size=input_size,
                           embed_dim=embed_dim,
                           hidden_size=hidden_size)
    self.embed_dim = embed_dim
    # Both RNNs are bidirectional, so downstream linear layers see
    # 2 * hidden_size features.
    bidir_width = hidden_size * 2
    self.RNN1 = nn.RNN(embed_dim, hidden_size, 1, batch_first=True, bidirectional=True)
    self.RNN2 = nn.RNN(embed_dim, hidden_size, 1, batch_first=True, bidirectional=True)
    self.wa = nn.Linear(bidir_width, 1, bias=False)
    self.Wb = nn.Linear(bidir_width, hidden_size, bias=False)
    self.W_out = nn.Linear(hidden_size, n_layers, bias=False)
    self.sigmoid = nn.Sigmoid()
def __init__(self, input_size, embed_dim, hidden_size, n_layers=1, dropout_r=0.1, cell_type='GRU', bii=False, time=False, preTrainEmb='', packPadMode=True):
    """Initialize a plain RNN model; all configuration is delegated
    unchanged to EHREmbeddings."""
    EHREmbeddings.__init__(self, input_size, embed_dim, hidden_size,
                           n_layers=n_layers,
                           dropout_r=dropout_r,
                           cell_type=cell_type,
                           bii=bii,
                           time=time,
                           preTrainEmb=preTrainEmb,
                           packPadMode=packPadMode)
def __init__(self, input_size, embed_dim, hidden_size, n_layers=1, dropout_r=0.1, cell_type='TLSTM', bii=False, time=True, preTrainEmb=''):
    """Initialize a Time-aware LSTM (TLSTM) model on top of EHREmbeddings.

    Bug fix: the original invoked ``EHREmbeddings.__init__`` twice — first
    with positional arguments that misaligned ``time`` into the ``bii`` slot
    (and ``preTrainEmb`` into ``time``), then again with correct keywords.
    Only the correct keyword call is kept; leftover development notes
    (a no-op string statement and TODO comments) are removed.

    ``bii`` and ``time`` are accepted for interface parity with the other
    models, but TLSTM is always 1-directional and time-aware.
    """
    EHREmbeddings.__init__(self, input_size, embed_dim, hidden_size,
                           n_layers=n_layers, dropout_r=dropout_r,
                           cell_type=cell_type, bii=False, time=True,
                           preTrainEmb=preTrainEmb, packPadMode=False)
    # TLSTM is only defined for the time-aware cell in a single direction;
    # correct any conflicting user choice and report the override.
    if self.cell_type != 'TLSTM' or self.bi != 1:
        print("TLSTM only supports Time aware LSTM cell type and 1 direction. Implementing corrected parameters instead")
        self.cell_type = 'TLSTM'
        self.bi = 1  # enforcing 1 directional
        self.packPadMode = False