def experiment_anomaly_segmentation(train, test, comb, num_train, anom_prob, labels):
    # transductive train/predict for structured anomaly detection
    sad = StructuredOCSVM(comb, C=1.0 / (num_train * 0.5))
    (lsol, lats, thres) = sad.train_dc(max_iter=40)
    (cont, cont_exm) = test.evaluate(lats[num_train:])

    # train structured SVM as the supervised baseline
    ssvm = SSVM(train)
    (sol, slacks) = ssvm.train()
    (vals, preds) = ssvm.apply(test)
    (base_cont, base_cont_exm) = test.evaluate(preds)
    return (cont, base_cont)
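As a minimal usage sketch (the data-preparation helper build_segmentation_data is an assumption and not part of this code, and cont/base_cont are taken to be error dictionaries with an 'fscore' entry, as in perf_ssvm below), the transductive detector could be compared against the supervised SSVM baseline over a few random splits:

import numpy as np

# hypothetical sketch: average segmentation F-scores of the transductive
# StructuredOCSVM against the supervised SSVM baseline over random splits;
# build_segmentation_data is assumed to prepare the structured data objects
REPS = 5
ANOM_PROB = 0.15
scores, base_scores = [], []
for rep in range(REPS):
    train, test, comb, num_train, labels = build_segmentation_data(seed=rep, anom_prob=ANOM_PROB)
    cont, base_cont = experiment_anomaly_segmentation(train, test, comb,
                                                      num_train, ANOM_PROB, labels)
    scores.append(cont['fscore'])
    base_scores.append(base_cont['fscore'])

print('StructuredOCSVM mean F-score: {0:.3f}'.format(np.mean(scores)))
print('SSVM baseline mean F-score:   {0:.3f}'.format(np.mean(base_scores)))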
def perf_ssvm(test_inds, marker, train, test):
    # SAD annotation
    print('(a) Setup SSVM...')
    ssvm = SSVM(train, C=10.0)

    print('(b) Train SSVM...')
    (lsol, slacks) = ssvm.train()

    print('(c) Evaluate SSVM...')
    (scores, lats) = ssvm.apply(test)
    (err, err_exm) = test.evaluate(lats)
    res = (err['fscore'], err['precision'], err['sensitivity'], err['specificity'])

    # ROC/AUC of the negated SSVM scores against the anomaly marker
    (fpr, tpr, thres) = metric.roc_curve(co.matrix(marker)[test_inds], -scores)
    auc = metric.auc(fpr, tpr)

    print('(d) Return AUC={0}...'.format(auc))
    print(res)
    return auc, res
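The AUC step above follows the standard scikit-learn pattern (the snippet presumably imports sklearn.metrics as metric and cvxopt as co). A self-contained sketch of the same computation on toy labels and negated scores:

import numpy as np
from sklearn import metrics

# toy anomaly marker (1 = anomalous) and raw scores; as in perf_ssvm the
# scores are negated before computing the ROC curve
marker = np.array([0, 0, 1, 0, 1, 1, 0, 0])
scores = np.array([2.1, 1.8, -0.5, 1.2, -1.0, 0.1, 2.4, 1.5])

fpr, tpr, thres = metrics.roc_curve(marker, -scores)
auc = metrics.auc(fpr, tpr)
print('AUC = {0:.3f}'.format(auc))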
        # (fragment: leave-one-out partition search over keys_cv)
        if keys_cv[j][0] not in poi_set:
            good_partition = False
            break
    if good_partition is True:
        poi_list = sorted(poi_set)
        break

# train SSVM on the selected POIs
ssvm = SSVM(inference_train=inference_method, inference_pred=inference_method,
            dat_obj=dat_obj, share_params=SSVM_SHARE_PARAMS, multi_label=SSVM_MULTI_LABEL,
            C=ssvm_C, poi_info=poi_info_i.loc[poi_list].copy())

if ssvm.train(sorted(trajid_set_train), n_jobs=N_JOBS) is True:
    for j in test_ix:
        # test
        ps_cv, L_cv = keys_cv[j]
        y_hat_list = ssvm.predict(ps_cv, L_cv)
        if y_hat_list is not None:
            F1, pF1, tau = evaluate(dat_obj, keys_cv[j], y_hat_list)
            F1_ssvm.append(F1)
            pF1_ssvm.append(pF1)
            Tau_ssvm.append(tau)
else:
    # training failed: score zero for every test query
    for j in test_ix:
        F1_ssvm.append(0)
        pF1_ssvm.append(0)
        Tau_ssvm.append(0)

mean_F1 = np.mean(F1_ssvm)
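The per-query scores collected above (F1_ssvm, pF1_ssvm, Tau_ssvm) lend themselves to model selection over the regularisation constant; a hypothetical sketch (the candidate grid and the run_cv_fold helper are assumptions, not part of the code above):

import numpy as np

# hypothetical model selection over the SSVM regularisation constant;
# run_cv_fold is assumed to return the list of per-query F1 scores
# (the F1_ssvm list above) for one cross-validation fold and one C value
C_grid = [0.01, 0.1, 1.0, 10.0, 100.0]
mean_F1_per_C = []
for ssvm_C in C_grid:
    fold_means = [np.mean(run_cv_fold(fold, ssvm_C)) for fold in range(5)]
    mean_F1_per_C.append(np.mean(fold_means))

best_C = C_grid[int(np.argmax(mean_F1_per_C))]
print('best ssvm_C = {0}'.format(best_C))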
Dtrain4 = ToyData.get_gaussian(50, dims=2, means=[6.0, -3.0], vars=[0.2, 0.1])

Dtrain = co.matrix([[Dtrain1], [Dtrain2], [Dtrain3], [Dtrain4]])
Dtrain = co.matrix([[Dtrain.trans()], [co.matrix(1.0, (1250, 1))]]).trans()
Dy = co.matrix([[co.matrix(0, (1, 1000))],
                [co.matrix(1, (1, 100))],
                [co.matrix(2, (1, 100))],
                [co.matrix(3, (1, 50))]])

# generate structured object
sobj = SOMultiClass(Dtrain, NUM_CLASSES, Dy)

# train the multi-class structured SVM
ssvm = SSVM(sobj, 1.0)
(ws, slacks) = ssvm.train()
print(ws)
# print(slacks)

# generate test data grid
delta = 0.1
x = np.arange(-4.0, 8.0, delta)
y = np.arange(-4.0, 8.0, delta)
X, Y = np.meshgrid(x, y)
(sx, sy) = X.shape
Xf = np.reshape(X, (1, sx * sy))
Yf = np.reshape(Y, (1, sx * sy))
Dtest = np.append(Xf, Yf, axis=0)
Dtest = np.append(Dtest, np.reshape([1.0] * (sx * sy), (1, sx * sy)), axis=0)
    Dy[i * NUM_DATA:(i + 1) * NUM_DATA] = i

# generate structured object
sobj = SOMultiClass(Dtrain.T, y=Dy, classes=NUM_CLASSES)

# unsupervised methods
lsvdd = LatentSVDD(sobj, 0.9)
lsvdd.fit()
spca = LatentPCA(sobj)
spca.fit()
socsvm = LatentOCSVM(sobj, 0.2)
socsvm.fit()

# supervised methods
ssvm = SSVM(sobj)
ssvm.train()

# generate test data grid
delta = 0.2
x = np.arange(-8.0, 8.0, delta)
y = np.arange(-8.0, 8.0, delta)
X, Y = np.meshgrid(x, y)
(sx, sy) = X.shape
Xf = np.reshape(X, (1, sx * sy))
Yf = np.reshape(Y, (1, sx * sy))
Dtest = np.append(Xf, Yf, axis=0)
Dtest = np.append(Dtest, np.ones((1, sx * sy)), axis=0)
print(Dtest.shape)

# generate structured object
predsobj = SOMultiClass(Dtest, NUM_CLASSES)
if i==0: plt.title("LatentSVDD") lsvdd.train_dc() (scores,lats) = lsvdd.apply(predsobj) if i==1: plt.title("StructPCA") spca.train_dc() (scores,lats) = spca.apply(predsobj) if i==2: plt.title("StructOCSVM") socsvm.train_dc() (scores,lats) = socsvm.apply(predsobj) if i==3: plt.title("SSVM") ssvm.train() (scores,lats) = ssvm.apply(predsobj) # plot scores Z = np.reshape(scores,(sx,sy)) plt.contourf(X, Y, Z) plt.scatter(Dtrain[0,:],Dtrain[1,:],10) # plot latent variable Z = np.reshape(lats,(sx,sy)) plt.subplot(2,4,i+4+1) plt.contourf(X, Y, Z) plt.scatter(Dtrain[0,:],Dtrain[1,:],10) plt.show()
# generate raw training data
Dtrain1 = ToyData.get_gaussian(1000, dims=2, means=[4.0, 2.0], vars=[1.0, 0.3])
Dtrain2 = ToyData.get_gaussian(100, dims=2, means=[-2.0, 1.0], vars=[0.3, 1.3])
Dtrain3 = ToyData.get_gaussian(100, dims=2, means=[3.0, -1.0], vars=[0.3, 0.3])
Dtrain4 = ToyData.get_gaussian(50, dims=2, means=[6.0, -3.0], vars=[0.2, 0.1])

Dtrain = co.matrix([[Dtrain1], [Dtrain2], [Dtrain3], [Dtrain4]])
Dtrain = co.matrix([[Dtrain.trans()], [co.matrix(1.0, (1250, 1))]]).trans()
Dy = co.matrix([[co.matrix(0, (1, 1000))],
                [co.matrix(1, (1, 100))],
                [co.matrix(2, (1, 100))],
                [co.matrix(3, (1, 50))]])

# generate structured object
sobj = SOMultiClass(Dtrain, NUM_CLASSES, Dy)

# train the multi-class structured SVM
ssvm = SSVM(sobj, 1.0)
(ws, slacks) = ssvm.train()
print(ws)
# print(slacks)

# generate test data grid
delta = 0.1
x = np.arange(-4.0, 8.0, delta)
y = np.arange(-4.0, 8.0, delta)
X, Y = np.meshgrid(x, y)
(sx, sy) = X.shape
Xf = np.reshape(X, (1, sx * sy))
Yf = np.reshape(Y, (1, sx * sy))
Dtest = np.append(Xf, Yf, axis=0)
Dtest = np.append(Dtest, np.reshape([1.0] * (sx * sy), (1, sx * sy)), axis=0)
print(Dtest.shape)
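To visualise what the trained multi-class SSVM has learned on the grid built above, the prediction pattern from the comparison script further up can be reused. A sketch, assuming (as in that script) that SSVM.apply returns a tuple of scores and predicted labels per example:

import matplotlib.pyplot as plt

# wrap the grid points in an unlabeled structured object and apply the
# trained SSVM; apply() is assumed to return (scores, predicted_labels)
predsobj = SOMultiClass(co.matrix(Dtest), NUM_CLASSES)
(scores, lats) = ssvm.apply(predsobj)

# predicted class label per grid point, reshaped back onto the mesh
Z = np.reshape(lats, (sx, sy))
plt.contourf(X, Y, Z)
plt.scatter(Dtrain[0, :], Dtrain[1, :], 10)
plt.title('SSVM class predictions')
plt.show()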