# NOTE(review): fragment — the string literal below is the tail of a print(...)
# call whose opening lies before this chunk (a warning about slow, non-dumped
# data loading). The stray ")" closes that call.
"If u want to accelerate this process, please see read_me -> transform_data_to_feature_and_dump" )
# Load accusation label mappings and the word-embedding lookup tables.
# (presumably `generator` and `constant` are project modules — confirm upstream imports)
accu_dict, reverse_accu_dict = generator.read_accu()
word_dict, embedding, reverse_dictionary = generator.get_dictionary_and_embedding()
# Read train/valid/test splits as accusation-classification features;
# labels are one-hot encoded (one_hot=True) for the NN's softmax output.
train_data_x, train_data_y = generator.read_data_in_accu_format(
    constant.DATA_TRAIN, embedding, word_dict, accu_dict, one_hot=True)
valid_data_x, valid_data_y = generator.read_data_in_accu_format(
    constant.DATA_VALID, embedding, word_dict, accu_dict, one_hot=True)
test_data_x, test_data_y = generator.read_data_in_accu_format(
    constant.DATA_TEST, embedding, word_dict, accu_dict, one_hot=True)
print("reading complete!")
# just test generate_accu_batch
x, y = generator.generate_batch(training_batch_size, train_data_x, train_data_y)
print(x.shape)
print("data load complete")
print("The model begin here")
# number of accusation classes = width of a one-hot label row
print(len(train_data_y[0]))
model = AccusationNN()
# run part
with model.graph.as_default():
    with tf.Session() as sess:
        # initialize all TF variables (TF 1.x API)
        sess.run(tf.global_variables_initializer())
        # saver used to checkpoint parameters; keeps only the newest checkpoint
        saver = tf.train.Saver(max_to_keep=1)
        # NOTE(review): chunk ends here — the training loop presumably continues below
# NOTE(review): fragment — the two adjacent string literals below are the tail
# of a print(...) call whose opening lies before this chunk; implicit string
# concatenation is valid because the call's parentheses are still open.
"No dump file read original file! Please wait... " "If u want to accelerate this process, please see read_me -> transform_data_to_feature_and_dump" )
# Load accusation label mappings and the word-embedding lookup tables.
# (presumably `generator` and `constant` are project modules — confirm upstream imports)
accu_dict, reverse_accu_dict = generator.read_accu()
word_dict, embedding, reverse_dictionary = generator.get_dictionary_and_embedding()
# Read train/valid splits only (no test split in this variant);
# labels one-hot encoded (one_hot=True) for the NN's softmax output.
train_data_x, train_data_y = generator.read_data_in_accu_format(
    constant.DATA_TRAIN, embedding, word_dict, accu_dict, one_hot=True)
valid_data_x, valid_data_y = generator.read_data_in_accu_format(
    constant.DATA_VALID, embedding, word_dict, accu_dict, one_hot=True)
print("reading complete!")
# just test generate_accu_batch
x, y = generator.generate_batch(training_batch_size, train_data_x, train_data_y)
print(x.shape)
print("data load complete")
print("The model begin here")
# number of accusation classes = width of a one-hot label row
print(len(train_data_y[0]))
model = AccusationNN()
# run part
with model.graph.as_default():
    with tf.Session() as sess:
        # initialize all TF variables (TF 1.x API)
        sess.run(tf.global_variables_initializer())
        # saver used to checkpoint parameters; keeps only the newest checkpoint
        saver = tf.train.Saver(max_to_keep=1)
        # NOTE(review): chunk ends here — the training loop presumably continues below
# NOTE(review): fragment — the two adjacent string literals below are the tail
# of a print(...) call whose opening lies before this chunk; implicit string
# concatenation is valid because the call's parentheses are still open.
"No dump file read original file! Please wait... " "If u want to accelerate this process, please see read_me -> transform_data_to_feature_and_dump" )
# Load accusation label mappings and the word-embedding lookup tables.
# (presumably `generator` and `constant` are project modules — confirm upstream imports)
accu_dict, reverse_accu_dict = generator.read_accu()
word_dict, embedding, reverse_dictionary = generator.get_dictionary_and_embedding()
# Integer class labels here (one_hot=False) — consistent with the
# 'multi:softmax' objective of the XGBoost classifier built below.
train_data_x, train_data_y = generator.read_data_in_accu_format(
    constant.DATA_TRAIN, embedding, word_dict, accu_dict, one_hot=False)
valid_data_x, valid_data_y = generator.read_data_in_accu_format(
    constant.DATA_VALID, embedding, word_dict, accu_dict, one_hot=False)
print("reading complete!")
# just test generate_accu_batch
# Draw a validation-sized batch from the TRAINING data, used to estimate
# training-set accuracy later (`valid_batch_size` is defined outside this chunk).
train_data_x_for_validate, train_data_y_for_validate = generator.generate_batch(
    valid_batch_size, train_data_x, train_data_y)
print("data load complete")
print("The model begin here")
# Gradient-boosted multi-class classifier; hyperparameters are hard-coded.
clf = xgb.XGBClassifier(learning_rate=0.05,
                        objective='multi:softmax',
                        n_estimators=100,
                        max_depth=4,
                        reg_alpha=0.2,
                        min_child_weight=3)
# NOTE(review): .shape on the labels implies they are a numpy array — TODO confirm
print(valid_data_y.shape)
# try to load model
# try:
#     boost = xgb.Booster()
# NOTE(review): chunk ends inside this commented-out model-loading block