def train(args):
    """Train the model on the preprocessed data and persist its weights.

    Reads training pairs, builds the model, fits it with per-epoch
    checkpointing, then saves the final weights.

    NOTE(review): ``data_img``, ``data_prepro``, ``ckpt_model_weights_filename``
    and ``save_dest`` are module-level names defined elsewhere in this file —
    confirm they are initialised before this is called.

    Args:
        args: namespace providing ``data_limit``, ``weight_path``,
            ``weights_load``, ``epochs``, ``batch_size`` and ``base_path``.
    """
    dropout_rate = 0.5
    train_X, train_y = read_data(data_img, data_prepro, args.data_limit)
    model = get_model(dropout_rate, args.weight_path, args.weights_load)
    # Checkpoint after every epoch so a crash does not lose all progress.
    checkpointer = ModelCheckpoint(filepath=ckpt_model_weights_filename, verbose=1)
    # shuffle="batch" shuffles in batch-sized chunks (the mode HDF5 inputs need).
    model.fit(train_X, train_y,
              epochs=args.epochs,
              batch_size=args.batch_size,
              callbacks=[checkpointer],
              shuffle="batch")
    # exist_ok=True replaces the racy os.path.exists()-then-makedirs() check:
    # another process creating the directory between the two calls would have
    # raised FileExistsError.
    os.makedirs(args.base_path + "/data/model", exist_ok=True)
    model.save_weights(save_dest, overwrite=True)
# Task1
from dataloader import gen_list, shuffle_split, read_data
from Unet import get_UNet
from plot import plot_learning_curve
from metrics import dice_coef_loss, dice_coef

# ---- Load the X-ray segmentation data --------------------------------------
path = '/Lab1/Lab3/X_ray/'
img_h, img_w = 256, 256

Mask = gen_list(path, 'Mask')
Img = gen_list(path, 'Image')

# 80/20 split; masks and images are shuffled jointly so the pairs stay aligned.
Mask_train, Mask_val, Img_train, Img_val = shuffle_split(Mask, Img, 0.8)

Mask_train = read_data(path + 'Mask/', Mask_train, img_h, img_w)
Mask_val = read_data(path + 'Mask/', Mask_val, img_h, img_w)
Img_train = read_data(path + 'Image/', Img_train, img_h, img_w)
Img_val = read_data(path + 'Image/', Img_val, img_h, img_w)

# ---- Build and train the U-Net ---------------------------------------------
# NOTE(review): ``Adam`` is not imported in this chunk — confirm it is brought
# into scope elsewhere (e.g. ``from keras.optimizers import Adam``).
model = get_UNet(img_shape=(256, 256, 1), Base=16, depth=4, inc_rate=2,
                 activation='relu', drop=0.5, batchnorm=True)
model.compile(optimizer=Adam(lr=0.0001),
              loss='binary_crossentropy',
              metrics=[dice_coef])
History = model.fit(Img_train, Mask_train,
                    batch_size=8, epochs=150, verbose=2,
                    validation_data=(Img_val, Mask_val))

# ---- Learning curve --------------------------------------------------------
plot_learning_curve(History, 'Task1a')
# Train the model
for x in range(values.shape[0]): child = create_node(dict[values[x]], metadata) node.children.append((values[x], child)) return node def empty(size): s = "" for x in range(size): s += " " return s def print_tree(node, level): if node.answer != "": print empty(level), node.answer return print empty(level), node.attribute for value, n in node.children: print empty(level + 1), value print_tree(n, level + 2) metadata, traindata = read_data("input1.csv") data = np.array(traindata) node = create_node(data, metadata) print_tree(node, 0)
use_cuda = True
if use_cuda:
    cuda.empty_cache()

""" training mode"""
results = []
f = 3  # fold index baked into the training-file name below

model = CDKT()
if use_cuda:
    model = model.cuda()
optimizer = optim.Adam(model.parameters(), 5 * 1e-4)

DL = DataLoader(read_data(f'/data/train.{f}.dat'), load_init())

for r in range(10):  # 10 epochs (the original comment said 20, but range(10) runs 10)
    i = 0
    for x, y in DL.samples(72):  # mini-batches of 72
        X = tensor(x)
        Y = tensor(y)
        if use_cuda:
            X = X.cuda()
            Y = Y.cuda()
        loss = model.forward(X, Y, True)
        optimizer.zero_grad()
        # BUG FIX: gradients only exist after backward(), so clipping must
        # happen between backward() and step(). The original called
        # clip_grad_value_ before backward(), making the clip a no-op.
        loss.backward()
        clip_grad_value_(model.parameters(), 10)
        optimizer.step()
# -*- coding: utf-8 -*-
from torch import tensor
from torch import cuda
from torch import optim
from torch.nn.utils import clip_grad_value_
from model import Model
from numpy import argmax, concatenate
from dataloader import read_data, DataLoader
from sklearn.model_selection import train_test_split
from sklearn.metrics import roc_curve, auc, accuracy_score

use_cuda = True
# FIX: respect the use_cuda flag — the unconditional cuda.empty_cache() call
# ignored it and touched the CUDA runtime even when CUDA was switched off.
# This also matches the guarded pattern used by the other training script in
# this file.
if use_cuda:
    cuda.empty_cache()

# 80/20 train/test split of the knowledge-tracing data.
data = read_data('../classic_kt.dat')
train_data, test_data = train_test_split(data, test_size=.2)

model = Model(13, 64)
if use_cuda:
    model = model.cuda()
optimizer = optim.Adam(model.parameters(), 5e-4)

dl_train = DataLoader(train_data)
dl_test = DataLoader(test_data)

for r in range(10):  # 10-epochs
    i = -1
    print('training:')
    for x, y in dl_train.sampling(72):  # mini-batches of 72
        i += 1
        if use_cuda:
            loss = model.forward(tensor(x).cuda(), tensor(y).cuda(), True)
        # NOTE(review): this chunk is truncated here — the CPU branch,
        # backward() and optimizer.step() presumably follow in a later chunk.
metadata = np.delete(metadata, split, 0) items, dict = subtables(data, split, delete=True) for x in range(items.shape[0]): child = create_node(dict[items[x]], metadata) node.children.append((items[x], child)) return node def empty(size): s = "" for x in range(size): s += " " return s def print_tree(node, level): if node.answer != "": print(empty(level), node.answer) return print(empty(level), node.attribute) for value, n in node.children: print(empty(level + 1), value) print_tree(n, level + 2) metadata, traindata = read_data("tennis.csv") data = np.array(traindata) node = create_node(data, metadata) print_tree(node, 0)