Example #1
0
# Load the transaction TEST split; read_csv returns six values and the
# four unused splits are discarded via `_`.
# NOTE(review): trans_test_path and split_ratio are defined outside this
# chunk — confirm upstream.
trans_test_data, trans_test_label, _, _, _, _ = read_csv(
    trans_test_path,
    split_ratio=split_ratio,
    header=True,
    ignore_cols=["POL_ID", "DATA_MONTH", "TB_POL_BILL_MODE_CD", "MI"],
    output_label="Lapse_Flag")

# Peek at one training record; trans_train_data is presumably loaded
# earlier in the file — verify against the preceding section.
print(trans_train_data[0])

print("Train Data Size - ", len(trans_train_data))
print("Test Data Size - ", len(trans_test_data))

print("Splitting the data...")

# Only the labels are pre-batched here; the feature batching calls are
# commented out, so features appear to be batched lazily elsewhere.
# train_x = divide_batches_gen(trans_train_data, batch_size)
train_y = divide_batches(trans_train_label, batch_size)

# test_x = divide_batches_gen(trans_test_data, batch_size)
test_y = divide_batches(trans_test_label, batch_size)

# Despite the names, these hold the NUMBER of batches, not samples per batch.
train_batch_size = len(train_y)
test_batch_size = len(test_y)

# Time-stamped TensorBoard log directory so each run logs separately.
logdir = "../tensorboard/transaction_model/" + datetime.datetime.now(
).strftime("%Y%m%d-%H%M%S")

# Ensure the model output directory exists before any save below.
saved_model_dir = "../maxlife_models/"
if not os.path.isdir(saved_model_dir):
    os.mkdir(saved_model_dir)

saved_model = saved_model_dir + model_name
Example #2
0
print("lstm data")
print(lstm_train_data[0])
print(len(lstm_train_data[0]))

# Class-imbalance weight for the positive class. The commented-out line
# used (total / positives); the active form is (negatives / positives),
# the conventional pos_weight for weighted cross-entropy.
# NOTE(review): raises ZeroDivisionError if there are no positive labels.
# pos_weight = len(ffn_train_label) / sum(ffn_train_label)

pos_weight = np.count_nonzero(ffn_train_label == 0) / np.count_nonzero(
    ffn_train_label == 1)

print("Train Data Size - ", len(ffn_train_data))

print("Creating batches...")

# Only labels are pre-batched; feature batching is commented out and
# presumably handled lazily elsewhere.
# train_x = divide_batches_gen(ffn_train_data, batch_size)
train_y = divide_batches(ffn_train_label, batch_size)

# Number of label batches (not samples per batch).
train_batch_size = len(train_y)

# Ensure the model directory exists before resolving the checkpoint.
saved_model_dir = "../maxlife_models/"
if not os.path.isdir(saved_model_dir):
    os.mkdir(saved_model_dir)

saved_model = saved_model_dir + model_name
# NOTE(review): latest_checkpoint returns None when no checkpoint exists,
# which would make the join below raise TypeError — confirm a trained
# model is always present before this runs.
ckpt = tf.train.latest_checkpoint(saved_model)
filename = ".".join([ckpt, 'meta'])
model_saver = tf.train.import_meta_graph(filename, clear_devices=True)

# TF1-style session pinned to the first GPU. The session body is
# truncated at this chunk boundary; the remainder lives outside this view.
with tf.device("/GPU:0"):
    with tf.Session() as sess:
        # sess.run(init)
Example #3
0
# Inference uses the full dataset: 100% into the first split, none held out.
split_ratio = [100, 0, 0]

print("Reading the data...")
# Load inference features/labels; the four unused splits returned by
# read_csv are discarded via `_`.
inference_data, inference_label, _, _, _, _ = read_csv(
    infer_path,
    split_ratio=split_ratio,
    header=True,
    ignore_cols=["POL_ID", "DATA_MONTH"],
    output_label="Lapse_Flag")

print(inference_data[0])

print("Infer Data Size - ", len(inference_data))

print("Splitting the data...")
# Only labels are pre-batched; features are handled elsewhere.
infer_y = divide_batches(inference_label, batch_size)

# Number of label batches (not samples per batch).
infer_batch_size = len(infer_y)

# Ensure the model directory exists. makedirs with exist_ok=True is a
# no-op when the directory is already there and, unlike os.mkdir, also
# creates missing parent directories instead of raising FileNotFoundError.
saved_model_dir = "../maxlife_models/"
os.makedirs(saved_model_dir, exist_ok=True)

saved_model = saved_model_dir + model_name

# latest_checkpoint returns None when no checkpoint exists under
# `saved_model`; the original code then crashed inside str.join with an
# opaque TypeError. Fail fast with an actionable message instead.
ckpt = tf.train.latest_checkpoint(saved_model)
if ckpt is None:
    raise FileNotFoundError(
        "No checkpoint found in {!r}; train the model first.".format(saved_model))
filename = ".".join([ckpt, 'meta'])
model_saver = tf.train.import_meta_graph(filename, clear_devices=True)

# TF1-style graph restore pinned to the first GPU.
with tf.device("/GPU:0"):
    with tf.Session() as sess:
        model_saver.restore(sess, ckpt)