def main(): predictor = MFUPredictor() db_helper = ApeicDBHelper() users = db_helper.get_users() for user in users: logs = db_helper.get_logs(user) sessions = db_helper.get_sessions(user) training_logs, testing_logs = split(sessions, aggregated=True) predictor.train(training_logs) launches = map(lambda x: x['application'], testing_logs) predictions = map(lambda x: predictor.predict(x), testing_logs) hr, mrr = predictor.test(launches, predictions) print hr, mrr
def main(): predictor = LUPredictor() db_helper = ApeicDBHelper() users = db_helper.get_users() for user in users: sessions = db_helper.get_sessions(user) training_logs, testing_logs = split(sessions, aggregated=True) predictor.train(training_logs) launches = map(lambda x: x['application'], testing_logs[2:]) predictions = map(lambda i: predictor.predict(\ {'lu1': testing_logs[i-1]['application'], 'lu2': testing_logs[i-2]['application']}), \ xrange(2, len(testing_logs))) hr, mrr = predictor.test(launches, predictions) print hr, mrr
# NOTE(review): flattened script chunk, truncated mid-inner-loop — the
# haversine-style distance computation continues past this chunk, and the
# chunk below overlaps/extends it. Left byte-identical: rewriting a
# partial loop body risks changing behavior. Presumably collects pairwise
# distances between the (latitude, longitude) points of a user's training
# logs after dropping (0, 0) placeholders — TODO confirm against the full file.
############################################################################## # Generate sample data centers = [[1, 1], [-1, -1], [1, -1]] X, labels_true = make_blobs(n_samples=100, centers=centers, cluster_std=0.4, random_state=0) X = StandardScaler().fit_transform(X) from numpy import array from predictor.predictor import Predictor, split db_helper = ApeicDBHelper() for user in db_helper.get_users()[7:]: sessions = db_helper.get_sessions(user) training_logs, testing_logs = split(sessions, aggregated=True) training_logs = filter( lambda x: x['latitude'] != 0 and x['longitude'] != 0, training_logs) latlng_pairs = list( set(map(lambda x: (x['latitude'], x['longitude']), training_logs))) print latlng_pairs print len(latlng_pairs) # X = array(latlng_pairs) # print X.size # print X result = [] for la1, ln1 in latlng_pairs: dists = [] for la2, ln2 in latlng_pairs: lat1, lng1 = map(radians, [la1, ln1])
# NOTE(review): near-duplicate of the chunk above at a slightly later file
# offset; also a flattened chunk truncated mid-inner-loop (ends right after
# converting the second point to radians, before any distance is computed).
# Left byte-identical for the same reason. The leading sklearn import and
# make_blobs sample data suggest a clustering experiment over per-user
# (latitude, longitude) pairs — TODO confirm against the full file.
from sklearn.preprocessing import StandardScaler ############################################################################## # Generate sample data centers = [[1, 1], [-1, -1], [1, -1]] X, labels_true = make_blobs(n_samples=100, centers=centers, cluster_std=0.4, random_state=0) X = StandardScaler().fit_transform(X) from numpy import array from predictor.predictor import Predictor, split db_helper = ApeicDBHelper() for user in db_helper.get_users()[7:]: sessions = db_helper.get_sessions(user) training_logs, testing_logs = split(sessions, aggregated=True) training_logs = filter(lambda x: x['latitude'] != 0 and x['longitude']!= 0, training_logs) latlng_pairs = list(set(map(lambda x: (x['latitude'], x['longitude']), training_logs))) print latlng_pairs print len(latlng_pairs) # X = array(latlng_pairs) # print X.size # print X result = [] for la1, ln1 in latlng_pairs: dists = [] for la2, ln2 in latlng_pairs: lat1, lng1 = map(radians, [la1, ln1]) lat2, lng2 = map(radians, [la2, ln2])