Example #1
import os
from os.path import join
import json
import pickle

from sklearn.neighbors import KNeighborsClassifier
from dbhelper import DB

# Same database location setup as in Example #2.
rootpath = os.getenv('WLAN_ROOT')
dbpath = join(rootpath, 'raw_data', 'DB')

def train(dataset):
    db = DB(join(dbpath, 'train.db'))
    tbl_manifest = '%s_man' % dataset
    tbl_rss = '%s_rss' % dataset

    # Number of RSS features (access points) recorded in the manifest table.
    n_feature = int(db.queryone('value', tbl_manifest, 'key="n_feature"')[0])

    data = []
    cls = []
    for y, entry in db.query(['p_id', 'entry'], tbl_rss):
        dic = json.loads(entry)
        # Access points missing from this reading default to -100 dBm.
        x = [-100] * n_feature
        for m_id, rss in dic.items():
            x[int(m_id)] = float(rss)
        data.append(x)
        cls.append(y)

    # Fit a 5-nearest-neighbors classifier on the fingerprint vectors.
    neigh = KNeighborsClassifier(n_neighbors=5)
    neigh.fit(data, cls)

    # Persist the trained model; pickle needs a binary-mode file.
    with open('model.dat', 'wb') as fout:
        pickle.dump(neigh, fout)
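
A minimal prediction sketch to go with the training step above, assuming the pickled model.dat and an RSS reading shaped like a decoded entry from the rss table; the feature count and sample values here are illustrative only.

import pickle

with open('model.dat', 'rb') as fin:
    model = pickle.load(fin)

n_feature = 4                          # illustrative; the real value comes from the manifest table
sample_rss = {'0': -63.0, '2': -71.5}  # m_id -> RSS, mirroring a decoded rss entry
x = [-100] * n_feature
for m_id, rss in sample_rss.items():
    x[int(m_id)] = float(rss)

print(model.predict([x]))              # predicted p_id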
Example #2
import os
from os.path import join

from dbhelper import DB

rootpath = os.getenv('WLAN_ROOT')
datapath = join(rootpath, 'raw_data')
dbpath = join(datapath, 'DB')

db = DB(join(dbpath, 'train.db'))
dataset = db.queryone('value', 'meta', 'key="current"')[0]
tbl_mac = '%s_mac' % dataset
tbl_pt = '%s_pnt' % dataset
tbl_relation = '%s_rel' % dataset
tbl_manifest = '%s_man' % dataset
tbl_rss = '%s_rss' % dataset

map_mac = {}
map_pt = {}
feas_map = {}

# Lookup tables: MAC address -> feature index, point id -> point name / coordinates.
mac_dic = {mac: m_id for mac, m_id in db.query(['mac', 'm_id'], tbl_mac)}
pid_dic = {p_id: pt for p_id, pt in db.query(['p_id', 'pt'], tbl_pt)}
coord_dic = {p_id: (int(x), int(y)) for p_id, x, y in db.query(['p_id', 'x', 'y'], tbl_pt)}

# Point-to-point distance matrix: infinity means "no direct relation",
# the diagonal is zero, and the relation table is applied symmetrically.
n_point = int(db.queryone('value', tbl_manifest, 'key="n_point"')[0])
p2p_matrix = [[float('inf')] * n_point for i in range(n_point)]
for i in range(n_point):
    p2p_matrix[i][i] = 0.0
for i, j, dis in db.query(['src_pt', 'dest_pt', 'distance'], tbl_relation):
    p2p_matrix[int(i)][int(j)] = p2p_matrix[int(j)][int(i)] = float(dis)

def get_macid(mac):
    return mac_dic[mac]

def get_pointname(pid):
    return pid_dic[pid]
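
As written, p2p_matrix only holds the direct distances from the relation table. One plausible follow-up, not part of the original script, is a Floyd-Warshall pass that turns it into all-pairs shortest distances, which is what the zero diagonal and infinity initialization set up for.

# Assumed extension (not in the original): after this pass,
# p2p_matrix[a][b] is the shortest known distance between points a and b.
for k in range(n_point):
    for a in range(n_point):
        for b in range(n_point):
            if p2p_matrix[a][k] + p2p_matrix[k][b] < p2p_matrix[a][b]:
                p2p_matrix[a][b] = p2p_matrix[a][k] + p2p_matrix[k][b]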
Example #3
#!/usr/bin/env python
import json
import pickle
import sys

from sklearn import svm

from dbhelper import DB

# The device name given on the command line selects the per-device RSS table.
device = sys.argv[1]

db = DB('train.db')
n_feature = int(db.queryone('value', 'manifest', 'key="n_feature"')[0])

data = []
cls = []
for y, entry in db.query(['p_id', 'entry'], 'rss_' + device):
    dic = json.loads(entry)
    # Access points missing from this reading default to -100 dBm.
    x = [-100] * n_feature
    for m_id, rss in dic.items():
        x[int(m_id)] = float(rss)
    data.append(x)
    cls.append(y)

# Train an SVM classifier on the fingerprint vectors.
clf = svm.SVC()
clf.fit(data, cls)

# Persist the trained model; pickle needs a binary-mode file.
with open('model.dat', 'wb') as fout:
    pickle.dump(clf, fout)
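
A quick sanity check that could run before pickling, assuming scikit-learn's cross_val_score is available; this evaluation step is not part of the original script.

# Assumed addition: 5-fold cross-validation accuracy of the SVM
# on the fingerprint data, printed before the model is saved.
from sklearn.model_selection import cross_val_score

scores = cross_val_score(svm.SVC(), data, cls, cv=5)
print('mean CV accuracy: %.3f' % scores.mean())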