Example #1
 def __init__(self,
              input_size=11,
              model_type='LSTM',
              activation='ReLU',
              bidirectional=False,
              hidden_size=64,
              num_layers=1,
              linear_layers=None,
              dropout_rate=0.5,
              use_cuda=True,
              cuda_num='cuda:0'):
     super(Custom_RNN, self).__init__()
     self.use_cuda = use_cuda
     self.cuda_num = cuda_num
     self.num_layers = num_layers
     self.hidden_size = hidden_size
     # guard against the default linear_layers=None and avoid mutating the caller's list
     self.linear_layers = list(linear_layers) if linear_layers is not None else []
     self.linear_layers.insert(0, hidden_size)
     self.rnn_layer01 = model_config.set_recurrent_layer(
         name=model_type,
         input_size=input_size,
         bidirectional=bidirectional,
         hidden_size=hidden_size,
         num_layers=num_layers)
     self.linear_stack = nn.Sequential()
     self.linear_stack = model_config.build_linear_layer(
         layer=self.linear_stack,
         linear_layers=self.linear_layers,
         activation=activation,
         dropout_rate=dropout_rate)
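The example above delegates layer construction to a model_config module that is not shown here. Below is a minimal sketch of what set_recurrent_layer and build_linear_layer could look like, assuming only the names and keyword arguments used in these examples; the real helpers may differ.

import torch.nn as nn

def set_recurrent_layer(name, input_size, hidden_size, num_layers,
                        bidirectional=False, batch_first=True):
    # Map a model-type string ('RNN', 'LSTM', 'GRU') to the matching torch.nn layer.
    rnn_cls = {'RNN': nn.RNN, 'LSTM': nn.LSTM, 'GRU': nn.GRU}[name]
    return rnn_cls(input_size=input_size,
                   hidden_size=hidden_size,
                   num_layers=num_layers,
                   batch_first=batch_first,
                   bidirectional=bidirectional)

def build_linear_layer(layer, linear_layers, activation='ReLU',
                       dropout_rate=0.5, use_batch_norm=False):
    # Append Linear (+ optional BatchNorm1d, activation, Dropout) blocks to an
    # existing nn.Sequential, following the sizes in linear_layers, and finish
    # with a single regression output.
    for i in range(len(linear_layers) - 1):
        layer.append(nn.Linear(linear_layers[i], linear_layers[i + 1]))
        if use_batch_norm:
            layer.append(nn.BatchNorm1d(linear_layers[i + 1]))
        layer.append(getattr(nn, activation)())
        layer.append(nn.Dropout(dropout_rate))
    layer.append(nn.Linear(linear_layers[-1], 1))
    return layer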
Example #2
 def __init__(self,
              input_size=11,
              recurrent_model='LSTM',
              activation='PReLU',
              bidirectional=False,
              recurrent_hidden_size=64,
              recurrent_num_layers=1,
              linear=3,
              cuda=True):
     super(VanillaRecurrentNetwork, self).__init__()
     self.activation = activation
     self.hidden_size = recurrent_hidden_size
     self.num_layers = recurrent_num_layers
     # note: this assignment shadows the nn.Module.cuda() method; a name like use_cuda would be safer
     self.cuda = cuda
     self.recurrent01 = model_config.set_recurrent_layer(
         name=recurrent_model,
         input_size=input_size,
         hidden_size=self.hidden_size,
         num_layers=self.num_layers,
         batch_first=True,
         bidirectional=bidirectional)
     if linear == 3:
         self.linear_relu_stack = nn.Sequential(
             nn.Linear(self.hidden_size, 64), nn.Dropout(0.3),
             model_config.set_activation(self.activation),
             nn.Linear(64, 64), nn.Dropout(0.3),
             model_config.set_activation(self.activation), nn.Linear(64, 1))
     elif linear == 4:
         self.linear_relu_stack = nn.Sequential(
             nn.Linear(self.hidden_size, 32), nn.Dropout(0.3),
             model_config.set_activation(self.activation),
             nn.Linear(32, 64), nn.Dropout(0.3),
             model_config.set_activation(self.activation),
             nn.Linear(64, 32), nn.Dropout(0.3),
             model_config.set_activation(self.activation), nn.Linear(32, 1))
     elif linear == 5:
         self.linear_relu_stack = nn.Sequential(
             nn.Linear(self.hidden_size, 32), nn.Dropout(0.3),
             model_config.set_activation(self.activation),
             nn.Linear(32, 64), nn.Dropout(0.3),
             model_config.set_activation(self.activation),
             nn.Linear(64, 64), nn.Dropout(0.3),
             model_config.set_activation(self.activation),
             nn.Linear(64, 32), nn.Dropout(0.3),
             model_config.set_activation(self.activation), nn.Linear(32, 1))
     else:
         raise ValueError(f'linear must be 3, 4, or 5, got {linear}')
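Examples #2 and #4 also rely on model_config.set_activation to turn an activation name into a module. A minimal sketch under that assumption:

import torch.nn as nn

def set_activation(name):
    # Resolve an activation name such as 'ReLU', 'PReLU' or 'LeakyReLU'
    # to a freshly constructed torch.nn module.
    activations = {
        'ReLU': nn.ReLU,
        'PReLU': nn.PReLU,
        'LeakyReLU': nn.LeakyReLU,
        'Tanh': nn.Tanh,
    }
    return activations[name]()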
Example #3
 def __init__(self,
              input_size=11,
              model_type='LSTM',
              activation='ReLU',
              bidirectional=False,
              hidden_size=64,
              num_layers=1,
              linear_layers=None,
              dropout_rate=0.5,
              use_cuda=True,
              convolution_layer=3,
              use_batch_norm=True,
              cuda_num='cuda:0'):
     super(Custom_CRNN, self).__init__()
     # need to consider whether stacking more convolution layers would be better
     # nn.Conv1d(in_channels, out_channels, kernel_size)
     self.num_layers = num_layers
     self.hidden_size = hidden_size
     # guard against the default linear_layers=None and avoid mutating the caller's list
     self.linear_layers = list(linear_layers) if linear_layers is not None else []
     self.linear_layers.insert(0, hidden_size)
     self.use_cuda = use_cuda
     self.cuda_num = cuda_num
     self.conv1d_stack = nn.Sequential()
     self.conv1d_stack = model_config.build_conv1d_layer(
         self.conv1d_stack,
         convolution_layers=convolution_layer,
         input_size=input_size)
     self.rnn_layer01 = model_config.set_recurrent_layer(
         name=model_type,
         input_size=input_size,
         hidden_size=hidden_size,
         num_layers=num_layers,
         batch_first=True,
         bidirectional=bidirectional)
     self.linear_stack = nn.Sequential()
     self.linear_stack = model_config.build_linear_layer(
         layer=self.linear_stack,
         linear_layers=self.linear_layers,
         activation=activation,
         dropout_rate=dropout_rate,
         use_batch_norm=use_batch_norm)
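model_config.build_conv1d_layer is likewise assumed. One possible sketch, under the assumption that it stacks convolution_layers Conv1d blocks while keeping the channel count equal to input_size so the recurrent layer can still take input_size features per step (the real helper may use different channel widths):

import torch.nn as nn

def build_conv1d_layer(layer, convolution_layers, input_size, kernel_size=3):
    # Stack Conv1d + ReLU blocks on an existing nn.Sequential; 'same'-style
    # padding keeps the sequence length unchanged.
    for _ in range(convolution_layers):
        layer.append(nn.Conv1d(input_size, input_size, kernel_size,
                               padding=kernel_size // 2))
        layer.append(nn.ReLU())
    return layer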
Example #4
 def __init__(self,
              input_size=11,
              recurrent_model='LSTM',
              activation='PReLU',
              bidirectional=False,
              recurrent_num_layers=1,
              recurrent_hidden_size=256):
     super(VanillaCRNNNetwork, self).__init__()
     # single Conv1d front-end before the recurrent layer
     self.num_layers = recurrent_num_layers
     self.hidden_size = recurrent_hidden_size
     self.activation = activation
     self.conv1d_layer = nn.Conv1d(input_size, input_size, 3)
     self.lstm_layer = model_config.set_recurrent_layer(
         name=recurrent_model,
         input_size=input_size,
         hidden_size=self.hidden_size,
         num_layers=self.num_layers,
         batch_first=True,
         bidirectional=bidirectional)
     self.linear_layer1 = nn.Linear(self.hidden_size, 64)  # map the recurrent hidden state to the linear head
     self.dropout = nn.Dropout(0.3)
     self.activation = model_config.set_activation(self.activation)
     self.linear_layer2 = nn.Linear(64, 1)
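The forward methods are omitted in all four examples. For the CRNN above, the main wiring detail is that nn.Conv1d expects (batch, channels, length) while a batch_first LSTM expects (batch, length, features), so the tensor has to be permuted around the convolution. A hedged sketch of a possible forward for Example #4, written as a method to add to the class; this illustrates the data flow only and is not the original implementation:

def forward(self, x):
    # x: (batch, seq_len, input_size)
    x = x.permute(0, 2, 1)          # (batch, input_size, seq_len) for Conv1d
    x = self.conv1d_layer(x)        # kernel_size=3, no padding -> seq_len shrinks by 2
    x = x.permute(0, 2, 1)          # back to (batch, seq_len', input_size) for the LSTM
    output, _ = self.lstm_layer(x)  # output: (batch, seq_len', hidden_size)
    x = output[:, -1, :]            # keep the last time step
    x = self.activation(self.dropout(self.linear_layer1(x)))
    return self.linear_layer2(x)    # one regression value per sample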