def do_classification(feature_data, predict, params):
    # Batch the input features into fixed-length segments, classify each
    # segment, and return the label with the highest summed score.
    length = params[0]['max_length']
    x, m = batch.make_batch(feature_data, length, length / 2)
    decision = predict(x, m)
    pred_label = np.argmax(np.sum(decision, axis=0), axis=-1)
    return batch.labels[pred_label]
def train(args, data, model, criterion, optimizer):
    # One training epoch: batch the protein and drug inputs, compute the loss,
    # and update the model; returns the mean loss over all steps.
    [prot_fea_list, drug_node_list, drug_edge_list,
     drug_n2n_list, drug_e2n_list, label_list] = data
    prot_data = [prot_fea_list]
    drug_data = [drug_node_list, drug_edge_list, drug_n2n_list, drug_e2n_list]
    label_data = [label_list]

    n_data = len(label_list)
    n_step = int(n_data / args.n_batch) + 1
    batch_idx_list = split(list(range(n_data)), n_step, shuffle=True)

    total_loss = 0
    model.train()
    for i, batch_idx in enumerate(batch_idx_list):
        batch_data = batch.make_batch(args,
                                      data=[prot_data, drug_data, label_data],
                                      idx=batch_idx)
        batch_prot_data, batch_drug_data, batch_label_data = batch_data

        optimizer.zero_grad()
        pred = model(batch_prot_data, batch_drug_data)
        loss = criterion(pred.squeeze(), batch_label_data)
        loss.backward()
        optimizer.step()
        total_loss += loss.data

    return total_loss / n_step
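# The `split` helper used by train() and test() is not shown in this section;
# the following is a minimal sketch (an assumption, not the project's actual
# implementation) that partitions an index list into n_step roughly equal
# batches, optionally shuffling the indices first.
import random

def split(idx_list, n_step, shuffle=False):
    # Shuffle in place so batch composition differs between epochs.
    if shuffle:
        random.shuffle(idx_list)
    # Ceiling division keeps every index in exactly one chunk.
    chunk = max(1, (len(idx_list) + n_step - 1) // n_step)
    return [idx_list[i:i + chunk] for i in range(0, len(idx_list), chunk)]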
def do_classification(feature_data, predict, params):
    # Batch the input features, expand each frame with context of size 15 via
    # batch.make_context, and return the raw classifier output.
    length = params[0]['max_length']
    x, m = batch.make_batch(feature_data, length, length / 2)
    x = batch.make_context(x, 15)
    decision = predict(x)
    return decision
def do_classification(feature_data, predict, params): length = params[0]['max_length'] x, m = batch.make_batch(feature_data,length,length/2) x=batch.make_context(x,15) #decision = predict(np.expand_dims(feature_data,axis=0).astype('float32'), np.ones(shape=(1,feature_data.shape[0]))) decision = predict(x) return decision
def do_classification(feature_data, predict, params):
    '''Classify feature_data and return the classification results.'''
    x, _ = batch.make_batch(feature_data, 15, 5)
    decision = predict(x.reshape((x.shape[0], -1)))
    return decision
def test(args, data, model, criterion, val):
    # Evaluation with Monte Carlo dropout: validation uses a single forward
    # pass, while test time repeats the loop n_mc_step times with dropout left
    # active and records the ROC-AUC of each pass.
    if val:
        shuffle = True
        n_mc_step = 1
    else:
        shuffle = False
        n_mc_step = args.n_mc_step

    [prot_fea_list, drug_node_list, drug_edge_list,
     drug_n2n_list, drug_e2n_list, label_list] = data
    prot_data = [prot_fea_list]
    drug_data = [drug_node_list, drug_edge_list, drug_n2n_list, drug_e2n_list]
    label_data = [label_list]

    n_data = len(label_list)
    n_step = int(n_data / args.n_batch) + 1
    batch_idx_list = split(list(range(n_data)), n_step, shuffle=shuffle)

    y_pred_list = []
    label_list = []
    score_list = []
    total_loss = 0
    model.train()  # keep dropout active for Monte Carlo sampling
    for mc_idx in range(n_mc_step):
        buf_y_pred_list = []
        buf_label_list = []
        for i, batch_idx in enumerate(batch_idx_list):
            batch_data = batch.make_batch(
                args, data=[prot_data, drug_data, label_data], idx=batch_idx)
            batch_prot_data, batch_drug_data, batch_label_data = batch_data

            y_pred = model(batch_prot_data, batch_drug_data)
            loss = criterion(y_pred.squeeze(), batch_label_data)
            total_loss += loss.data

            for yp in y_pred.squeeze().cpu().detach().numpy():
                buf_y_pred_list.append(yp)
            for l in batch_label_data.squeeze().cpu().detach().numpy():
                buf_label_list.append(l)

        y_pred_list.append(buf_y_pred_list)
        label_list.append(buf_label_list)
        score_list.append(cal_roc_auc(buf_label_list, buf_y_pred_list))

    y_pred_list = np.array(y_pred_list)
    label_list = np.array(label_list)
    return (total_loss / n_step / n_mc_step, np.mean(score_list),
            label_list, y_pred_list)
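# `cal_roc_auc` is defined elsewhere in the project; a minimal sketch, assuming
# it simply wraps sklearn.metrics.roc_auc_score over the collected binary
# labels and predicted scores.
from sklearn.metrics import roc_auc_score

def cal_roc_auc(label_list, y_pred_list):
    # ROC-AUC of the predicted interaction scores against the true labels.
    return roc_auc_score(label_list, y_pred_list)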
def do_classification(feature_data, predict, params):
    '''Classify feature_data and return the classification results.'''
    x, _ = batch.make_batch(feature_data, params[0]['max_length'],
                            params[0]['max_length'])
    # `reshape` is assumed to be a module-level helper that flattens each
    # example to the classifier's expected input shape.
    x = reshape(x)
    decision = predict(x)
    return decision
def test(args, data, model, criterion, val):
    # Single-pass evaluation: compute the average loss and the ROC-AUC of the
    # predictions over the whole split.
    if val:
        shuffle = True
    else:
        shuffle = False

    [prot_fea_list, drug_node_list, drug_edge_list,
     drug_n2n_list, drug_e2n_list, label_list] = data
    prot_data = [prot_fea_list]
    drug_data = [drug_node_list, drug_edge_list, drug_n2n_list, drug_e2n_list]
    label_data = [label_list]

    n_data = len(label_list)
    n_step = int(n_data / args.n_batch) + 1
    batch_idx_list = split(list(range(n_data)), n_step, shuffle=shuffle)

    y_pred_list = []
    label_list = []
    total_loss = 0
    model.eval()
    for i, batch_idx in enumerate(batch_idx_list):
        batch_data = batch.make_batch(args,
                                      data=[prot_data, drug_data, label_data],
                                      idx=batch_idx)
        batch_prot_data, batch_drug_data, batch_label_data = batch_data

        y_pred = model(batch_prot_data, batch_drug_data)
        loss = criterion(y_pred.squeeze(), batch_label_data)
        total_loss += loss.data

        for yp in y_pred.squeeze().cpu().detach().numpy():
            y_pred_list.append(yp)
        for l in batch_label_data.squeeze().cpu().detach().numpy():
            label_list.append(l)

    score = cal_roc_auc(label_list, y_pred_list)
    y_pred_list = np.array(y_pred_list)
    label_list = np.array(label_list)
    return (total_loss / n_step, score, label_list, y_pred_list)
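# Hypothetical driver loop tying train() and test() together; train_data,
# val_data, and args.n_epoch are illustrative names, not part of this section.
for epoch in range(args.n_epoch):
    train_loss = train(args, train_data, model, criterion, optimizer)
    val_loss, val_auc, _, _ = test(args, val_data, model, criterion, val=True)
    print("epoch {}: train loss {:.4f}, val loss {:.4f}, val ROC-AUC {:.4f}"
          .format(epoch, float(train_loss), float(val_loss), val_auc))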
        output, main = solve_system(system)
        # postprocess
        if output is not None:
            main = postprocess_data(system, output)
            if main == 1:
                break
        else:
            break

    if option == 2:
        # run a batch file
        systems, type, main = make_batch(system)
        if main != 1:
            outputs, main = solve_batch(systems, type)
            if outputs is not None:
                main = postprocess_batch(type, systems, outputs)
                if main == 1:
                    break

    if option == 3:
        # Load an existing output file
        while (1):