Example #1
import tensorflow as tf

# config, DeepFM, parsing_record, train_file, and val_file are defined
# elsewhere in the source module.
def main():
    # Read the serialized training and validation sets from TFRecord files.
    dataset_train = tf.data.TFRecordDataset(train_file)
    dataset_val = tf.data.TFRecordDataset(val_file)

    # Decode each serialized example into model inputs and labels.
    dataset_train = dataset_train.map(parsing_record)
    dataset_val = dataset_val.map(parsing_record)

    # Build DeepFM from the feature/field sizes and the model configuration,
    # then train with periodic evaluation on the validation set.
    deepFM = DeepFM(config.data_size["feature_size"],
                    config.data_size["field_size"], config.model_conf)
    deepFM.train(dataset_train,
                 dataset_val=dataset_val,
                 epochs=500,
                 batch_size=128)
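
Example #1 relies on a parsing_record helper that is not shown. A minimal sketch of such a parser, assuming each TFRecord example stores fixed-length feat_ids, feat_vals, and label fields (the feature names and the field size are assumptions, not taken from the original project):

import tensorflow as tf

def parsing_record(serialized_example, field_size=39):
    # Hypothetical feature spec; the real keys and shapes depend on how the
    # TFRecord files were written.
    feature_spec = {
        "feat_ids": tf.io.FixedLenFeature([field_size], tf.int64),
        "feat_vals": tf.io.FixedLenFeature([field_size], tf.float32),
        "label": tf.io.FixedLenFeature([1], tf.float32),
    }
    parsed = tf.io.parse_single_example(serialized_example, feature_spec)
    return (parsed["feat_ids"], parsed["feat_vals"]), parsed["label"]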
Example #2
import torch
import torch.nn as nn
from tqdm import tqdm

# NOTE: the original snippet begins mid-call; the DeepFM constructor name is
# assumed from context, and the hyperparameters below (num_contns, k,
# hidden_dims, p, n_class, sparse, device, lr, pos_weight, n_epoch,
# threshold) and train_loader are defined earlier in that script.
model = DeepFM(
	num_contns=num_contns,
	k=k,
	hidden_dims=hidden_dims,
	dropout=p,
	n_class=n_class,
	sparse=sparse).to(device)

# Adam optimizer and class-weighted binary cross-entropy on the raw logits.
optimizer = torch.optim.Adam(model.parameters(), lr=lr)
criterion = nn.BCEWithLogitsLoss(pos_weight=torch.tensor(pos_weight, device=device))
print('model created.')
# lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=step_size, gamma=0.3, verbose=True)

'''training phase'''
for epoch in range(n_epoch):

	model.train()
	train_loss = 0
	train_score = 0
	val_score = 0
	train_preds, train_gts = [], []
	val_preds, val_gts = [], []

	'''train'''
	for i, (X_cat, X_dense, y) in enumerate(tqdm(train_loader)):
		optimizer.zero_grad()
		# Forward pass on the categorical and continuous features.
		output = model(X_cat.to(device), X_dense.to(device))
		loss = criterion(output, y.unsqueeze(dim=1).to(device))
		loss.backward()
		optimizer.step()
		train_loss += loss.item()
		# Threshold the sigmoid of the logits to get hard class predictions.
		output_boolean = nn.Sigmoid()(output.squeeze(dim=1)) >= threshold
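
The snippet above is cut off inside the training loop. For context, a hypothetical sketch of how a per-epoch score could be computed from collected predictions; the accuracy metric and the evaluate helper are assumptions, not part of the original code:

import torch

@torch.no_grad()
def evaluate(model, loader, device, threshold=0.5):
	# Collect hard predictions and ground truths over one pass of the loader.
	model.eval()
	preds, gts = [], []
	for X_cat, X_dense, y in loader:
		logits = model(X_cat.to(device), X_dense.to(device)).squeeze(dim=1)
		preds.extend((torch.sigmoid(logits) >= threshold).long().cpu().tolist())
		gts.extend(y.long().tolist())
	# Simple accuracy; the original may use a different metric.
	correct = sum(int(p == g) for p, g in zip(preds, gts))
	return correct / max(len(gts), 1)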