import os

from utils import load_vocabulary  # assumed: the project's own helper module

pwd = os.path.dirname(os.path.abspath(__file__))  # project root (assumed)


class Hyperparamters:
    # Train parameters
    num_train_epochs = 20
    print_step = 100
    batch_size = 8  # was 64
    summary_step = 10
    num_saved_per_epoch = 3
    max_to_keep = 100
    logdir = 'logdir/CML_Denses'
    file_save_model = 'model/CML_Denses'
    inference_model = 'CML_Denses'
    
    # Train/Test data
    data_dir = os.path.join(pwd, 'data')
    train_data = 'train_onehot.csv'
    test_data = 'test_onehot.csv'    
    
    
    # Load vocabulary dict
    dict_id2label, dict_label2id = load_vocabulary(
        os.path.join(pwd, 'data', 'vocabulary_label.txt'))
    label_vocabulary = list(dict_id2label.values())

    # Optimization parameters
    warmup_proportion = 0.1    
    use_tpu = None
    do_lower_case = True    
    learning_rate = 5e-5     

    
    # Sequence and Label
    sequence_length = 60
    num_labels = len(dict_id2label)
        
    # ALBERT
    model = 'albert_small_zh_google'
    bert_path = os.path.join(pwd, model)
    vocab_file = os.path.join(pwd, model, 'vocab_chinese.txt')
    init_checkpoint = os.path.join(pwd, model, 'albert_model.ckpt')
    saved_model_path = os.path.join(pwd, 'model')

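All three examples call load_vocabulary without showing it. A plausible minimal
sketch, assuming vocabulary_label.txt holds one label per line (the exact file
format and return types are assumptions, not confirmed by the source):

def load_vocabulary(vocab_path):
    # Assumed format: one label per line; line order defines the label id.
    with open(vocab_path, encoding='utf-8') as f:
        labels = [line.strip() for line in f if line.strip()]
    dict_id2label = dict(enumerate(labels))
    dict_label2id = {label: i for i, label in dict_id2label.items()}
    return dict_id2label, dict_label2id

Under that shape, num_labels = len(dict_id2label) holds, and label_vocabulary
recovers the labels in file order.
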
Example #2
class Hyperparamters:
    # Train parameters
    num_train_epochs = 400
    print_step = 100
    batch_size = 64
    summary_step = 10
    num_saved_per_epoch = 1
    max_to_keep = 100
    logdir = 'logdir/model_01'
    file_save_model = 'model/model_01'

    # Train/Test data
    data_dir = os.path.join(pwd, 'data')
    train_data = 'train_onehot.csv'
    test_data = 'test_onehot.csv'

    # Predict model file
    file_model = 'model/saved_01'

    # Load vocabulary dict
    dict_id2label, dict_label2id = load_vocabulary(
        os.path.join(pwd, 'data', 'vocabulary_label.txt'))
    label_vocabulary = list(dict_id2label.values())

    # Optimization parameters
    warmup_proportion = 0.1
    use_tpu = None
    do_lower_case = True
    learning_rate = 5e-5

    # TextCNN parameters
    num_filters = 128
    filter_sizes = [2, 3, 4, 5, 6, 7]
    embedding_size = 384
    keep_prob = 0.5

    # Sequence and Label
    sequence_length = 60
    num_labels = len(dict_id2label)

    # ALBERT
    model = 'albert_small_zh_google'
    bert_path = os.path.join(pwd, model)
    vocab_file = os.path.join(pwd, model, 'vocab_chinese.txt')
    init_checkpoint = os.path.join(pwd, model, 'albert_model.ckpt')
    saved_model_path = os.path.join(pwd, 'model')
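
Example #2 (like Example #3 below) adds TextCNN parameters on top of the
ALBERT settings. A minimal sketch of how num_filters, filter_sizes,
embedding_size, and keep_prob typically wire together, written with tf.keras
for illustration (the repository's actual graph-building code is not shown
here, and num_labels=10 is a placeholder):

import tensorflow as tf

def build_textcnn_head(sequence_length=60, embedding_size=384,
                       num_filters=128, filter_sizes=(2, 3, 4, 5, 6, 7),
                       keep_prob=0.5, num_labels=10):
    # One encoder vector per token, e.g. ALBERT hidden states.
    inputs = tf.keras.Input(shape=(sequence_length, embedding_size))
    # One Conv1D + global-max-pool branch per filter size.
    pooled = []
    for k in filter_sizes:
        conv = tf.keras.layers.Conv1D(num_filters, k, activation='relu')(inputs)
        pooled.append(tf.keras.layers.GlobalMaxPooling1D()(conv))
    features = tf.keras.layers.Concatenate()(pooled)
    # keep_prob is a TF1 convention; Keras Dropout takes the drop rate.
    features = tf.keras.layers.Dropout(rate=1.0 - keep_prob)(features)
    # Sigmoid suits the one-hot/multi-label CSVs referenced above.
    outputs = tf.keras.layers.Dense(num_labels, activation='sigmoid')(features)
    return tf.keras.Model(inputs, outputs)

With these defaults each branch contributes a 128-dim feature, so the
concatenated vector is 6 * 128 = 768 dims before dropout.
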
Example #3
class Hyperparamters:
    # Train Parameters
    print_step = 100
    summary_step = 10
    batch_size = 64
    num_saved_per_epoch = 3
    logdir = 'logdir/model_02'

    # Load dict
    dict_id2label, dict_label2id = load_vocabulary(
        os.path.join(pwd, 'data', 'vocabulary_label.txt'))
    label_vocabulary = list(dict_id2label.values())

    # Optimization parameters
    use_tpu = None
    num_train_epochs = 20
    warmup_proportion = 0.1
    do_lower_case = True
    learning_rate = 5e-5

    # TextCNN parameters
    num_filters = 128
    filter_sizes = [2, 3, 4, 5, 6, 7]
    embedding_size = 384
    keep_prob = 0.5

    # Sequence and Label
    sequence_length = 60
    num_labels = len(dict_id2label)

    # BERT model
    model = 'albert_small_zh_google'
    bert_path = os.path.join(pwd, model)
    data_dir = os.path.join(pwd, 'data')
    vocab_file = os.path.join(pwd, model, 'vocab_chinese.txt')
    init_checkpoint = os.path.join(pwd, model, 'albert_model.ckpt')
    saved_model_path = os.path.join(pwd, 'model')
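
A common way training code consumes these values is the BERT-style warmup
schedule: total optimization steps come from the dataset size, batch_size, and
num_train_epochs, and warmup_proportion carves out the initial linear-warmup
phase. A minimal sketch (the 10,000-example dataset size is invented purely
for illustration):

hp = Hyperparamters  # attributes are class-level, so no instance is needed
num_train_examples = 10000  # hypothetical dataset size
num_train_steps = int(num_train_examples / hp.batch_size * hp.num_train_epochs)
num_warmup_steps = int(num_train_steps * hp.warmup_proportion)
print(num_train_steps, num_warmup_steps)  # 3125 312 with batch 64, 20 epochs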