Example #1
def configure_net(net_type, vocab_size, emb_dim, conv_hidden,
                  lstm_hidden, lstm_layer, bidirectional, use_bert,
                  bert_type, bert_cache, tokenizer_cache, cuda, aux_device, fix_bert):
    assert net_type in ['ff', 'rnn']
    net_args = {}
    net_args['conv_hidden']   = conv_hidden
    net_args['lstm_hidden']   = lstm_hidden
    net_args['lstm_layer']    = lstm_layer
    net_args['bidirectional'] = bidirectional

    if not use_bert:
        net_args['vocab_size']    = vocab_size
        net_args['emb_dim']       = emb_dim

        net = (ExtractSumm(**net_args) if net_type == 'ff'
               else PtrExtractSumm(**net_args))
        
        if cuda:
            net = net.cuda()
    else:
        # bert config
        net_args['bert_type'] = bert_type
        net_args['bert_cache'] = bert_cache
        net_args['tokenizer_cache'] = tokenizer_cache
        net_args['fix_bert'] = fix_bert

        # pass the auxiliary device separately so it is not recorded in net_args
        added_net_args = dict(net_args)
        added_net_args['aux_device'] = aux_device
        net = BertPtrExtractSumm(**added_net_args)

    return net, net_args
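A hedged call sketch for this BERT-aware variant, assuming the ExtractSumm, PtrExtractSumm and BertPtrExtractSumm classes defined elsewhere in the same project; every value below is illustrative, not taken from the original configuration:

# Illustrative usage only; hyperparameter values are assumptions.
net, net_args = configure_net(
    net_type='rnn', vocab_size=30000, emb_dim=128, conv_hidden=100,
    lstm_hidden=256, lstm_layer=1, bidirectional=True,
    use_bert=True, bert_type='bert-base-uncased',
    bert_cache='./bert_cache', tokenizer_cache='./tokenizer_cache',
    cuda=True, aux_device='cuda:1', fix_bert=True,
)
# Note that aux_device is deliberately kept out of the returned net_args,
# so net_args stays a device-independent description of the architecture.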
Example #2
def configure_net(net_type,
                  vocab_size,
                  emb_dim,
                  conv_hidden,
                  lstm_hidden,
                  lstm_layer,
                  bidirectional,
                  prev_ckpt=None):
    assert net_type in ['ff', 'rnn', 'trans_rnn']
    net_args = {}
    net_args['vocab_size'] = vocab_size
    net_args['emb_dim'] = emb_dim
    net_args['conv_hidden'] = conv_hidden
    net_args['lstm_hidden'] = lstm_hidden
    net_args['lstm_layer'] = lstm_layer
    net_args['bidirectional'] = bidirectional

    if net_type == 'ff':
        net = ExtractSumm(**net_args)
    elif net_type == 'trans_rnn':
        net = TransExtractSumm(**net_args)
    else:
        net = PtrExtractSumm(**net_args)
    if prev_ckpt is not None:
        ext_ckpt = load_best_ckpt(prev_ckpt)
        net.load_state_dict(ext_ckpt)
    return net, net_args
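A hedged sketch of resuming from an earlier run with this variant; load_best_ckpt and the model classes are assumed to come from the surrounding project, and the checkpoint path is only a placeholder:

# Illustrative usage only; the path and values are placeholders.
net, net_args = configure_net(
    net_type='trans_rnn', vocab_size=30000, emb_dim=128,
    conv_hidden=100, lstm_hidden=256, lstm_layer=1,
    bidirectional=True, prev_ckpt='./previous_extractor_run',
)
# With prev_ckpt set, the freshly built network is initialised from the
# best checkpoint of the previous run instead of random weights.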
Example #3
def configure_net(net_type, vocab_size, emb_dim, conv_hidden,
                  lstm_hidden, lstm_layer, bidirectional):
    assert net_type in ['ff', 'rnn']
    net_args = {}
    net_args['vocab_size'] = vocab_size
    net_args['emb_dim'] = emb_dim
    net_args['conv_hidden'] = conv_hidden
    net_args['lstm_hidden'] = lstm_hidden
    net_args['lstm_layer'] = lstm_layer
    net_args['bidirectional'] = bidirectional

    net = (ExtractSumm(**net_args) if net_type == 'ff'
           else PtrExtractSumm(**net_args))
    return net, net_args
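A short sketch of how the returned (net, net_args) pair might be used, for example persisting net_args so the same architecture can be rebuilt at decode time; the file name and values are assumptions:

import json

# Illustrative usage only; values are assumptions.
net, net_args = configure_net('ff', vocab_size=30000, emb_dim=128,
                              conv_hidden=100, lstm_hidden=256,
                              lstm_layer=1, bidirectional=True)
with open('net_args.json', 'w') as f:
    json.dump(net_args, f, indent=4)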
Example #4
def configure_net(net_type, vocab_size, emb_dim, conv_hidden,
                  lstm_hidden, lstm_layer, bidirectional):
    assert net_type in ['ff', 'rnn']
    net_args = {}
    net_args['vocab_size'] = vocab_size
    net_args['emb_dim'] = emb_dim
    net_args['conv_hidden'] = conv_hidden
    net_args['lstm_hidden'] = lstm_hidden
    net_args['lstm_layer'] = lstm_layer
    net_args['bidirectional'] = bidirectional

    net_args['dropoute'] = 0.0  # dropout to remove words from embedding layer (0 = no dropout)
    net_args['dropout'] = 0.2   # dropout applied to other layers (0 = no dropout)
    net_args['wdrop'] = 0.2     # amount of weight dropout to apply to the RNN hidden to hidden matrix
    net_args['dropouth'] = 0.2  # dropout for rnn layers (0 = no dropout)

    net = (ExtractSumm(**net_args) if net_type == 'ff'
           else PtrExtractSumm(**net_args))
    return net, net_args
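In this variant the hard-coded dropout settings are passed straight into the model constructors, so ExtractSumm and PtrExtractSumm are assumed to accept those extra keyword arguments; a minimal, illustrative call:

# Illustrative usage only; values are assumptions.
net, net_args = configure_net('rnn', vocab_size=30000, emb_dim=128,
                              conv_hidden=100, lstm_hidden=256,
                              lstm_layer=1, bidirectional=True)
# net_args now also records the fixed dropout settings
# (dropoute=0.0, dropout=0.2, wdrop=0.2, dropouth=0.2).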
Example #5
def configure_net(net_type, vocab_size, emb_dim, conv_hidden,
                  lstm_hidden, lstm_layer, bidirectional, pe, petrainable, stop):
    assert net_type in ['ff', 'rnn', 'nnse']
    net_args = {}
    net_args['vocab_size']    = vocab_size
    net_args['emb_dim']       = emb_dim
    net_args['conv_hidden']   = conv_hidden
    net_args['lstm_hidden']   = lstm_hidden
    net_args['lstm_layer']    = lstm_layer
    net_args['bidirectional'] = bidirectional
    net_args['pe'] = pe # positional encoding
    net_args['petrainable'] = petrainable
    net_args['stop'] = stop

    if net_type in ['ff', 'rnn']:
        net = (ExtractSumm(**net_args) if net_type == 'ff'
               else PtrExtractSumm(**net_args))
    elif net_type == 'nnse':
        net = NNSESumm(**net_args)
    return net, net_args
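A hedged call sketch for this variant; NNSESumm and the positional-encoding flags (pe, petrainable, stop) are specific to this fork, and the values shown are illustrative:

# Illustrative usage only; values are assumptions.
net, net_args = configure_net('nnse', vocab_size=30000, emb_dim=128,
                              conv_hidden=100, lstm_hidden=256,
                              lstm_layer=1, bidirectional=True,
                              pe=True, petrainable=False, stop=True)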