def rate(request):
    """Record a like/dislike rating for a menu item, then rebuild the ML model.

    GET params: item (menu item locu_id), username, rating ("like"/"dislike",
    case-insensitive). Returns 400 with a JSON error body on any missing or
    invalid parameter, empty 200 on success.
    """
    try:
        menu_item_id = request.GET["item"]
    except KeyError:
        # fixed typo in user-facing message ("prodided" -> "provided")
        return HttpResponseBadRequest(error_json('item required but not provided'))
    try:
        username = request.GET["username"]
    except KeyError:
        return HttpResponseBadRequest(error_json('username required but not provided'))
    try:
        rating = request.GET["rating"]
    except KeyError:
        return HttpResponseBadRequest(error_json('rating required but not provided'))
    try:
        item = MenuItem.objects.get(locu_id=menu_item_id)
        user = User.objects.get(username=username)
    except MenuItem.DoesNotExist:
        return HttpResponseBadRequest(error_json('Menu item "%s" does not exist' % menu_item_id))
    except User.DoesNotExist:
        return HttpResponseBadRequest(error_json('User "%s" does not exist' % username))
    # map the textual rating onto the model's value constants
    rating_key = rating.lower()
    if rating_key == "like":
        value = Rating.LIKE
    elif rating_key == "dislike":
        value = Rating.DISLIKE
    else:
        return HttpResponseBadRequest(error_json('Rating "%s" is not valid' % rating))
    Rating(user=user, menu_item=item, value=value).save()
    # rebuild the recommendation model so the new rating takes effect immediately
    from ml import ML
    ML.build()
    return HttpResponse("")
def testMultilayerPerceptronClassifier(self):
    """Train an MLP classifier on the multiclass sample data and print the result."""
    # TODO: the `layers` param is required by MultilayerPerceptronClassifier
    hyperparams = {"layers": [4, 5, 4, 3]}
    training = Training("classification", "MultilayerPerceptronClassifier",
                        hyperparams, 0.7,
                        sample_multiclass_classification_data, "./model/mlp")
    print(ML().train_model(training))
def main():
    """Main entry point: load the dataset, run the chosen prediction model, output results."""
    warnings.filterwarnings("ignore")
    # Load the dataset; abort on a read failure instead of continuing with an
    # unbound `df` (the original only printed and then hit a NameError below).
    try:
        df = pd.read_csv('data/dataset_mood_smartphone.csv', sep=",")
    except OSError:
        sys.exit("ERROR: cannot open or read input file")
    # get command line options
    args = get_args()
    df = util.init_data(df)
    # select the prediction model from the CLI choice
    if args.prediction_models == "ml":
        model = ML()
    elif args.prediction_models == "temporal_algorithm":
        model = Temporal()
    elif args.prediction_models == "benchmark":
        model = Benchmark()
    else:
        sys.exit("BUG! this should not happen.")
    # run the pipeline and report
    predictions, evaluation_scores = model.pipeline(df)
    util.output_to_file(predictions, args.pred_file)
    # util.output_to_file(evaluation_scores, args.eval_file)
    util.output_to_screen(evaluation_scores)
def testLinearSVC(self):
    """Train a LinearSVC model on the libsvm sample data."""
    hyperparams = {"maxIter": 10, "regParam": 0.3}
    training = Training("classification", "LinearSVC", hyperparams,
                        0.7, sample_libsvm_data, "./model/svc")
    ML().train_model(training)
def testLibsvm(self):
    """Read a CSV source (with header) and print its inferred schema as JSON."""
    options = json.dumps({"header": True, "delimiter": ","})
    source = Source("../data/test.data", "csv", options, {})
    frame = ML().read_source(source)
    print(frame.schema.json())
def run():
    """Import menu items and ratings from the turk data dumps, then rebuild the ML model."""
    # imports stay interleaved with the work so any import-time side effects
    # happen in the same order as before
    from api.importer import import_items
    print("importing menu items")
    import_items("../turk/data/menu_item.json")

    print("importing ratings")
    from api.rating_importer import import_ratings
    import_ratings("../turk/data/training.csv")

    from ml import ML
    print("rebuilding model")
    ML.build()
def testLogisticRegression(self):
    """Train a multinomial logistic regression on the iris data."""
    hyperparams = {
        "maxIter": 10,
        "regParam": 0.3,
        "elasticNetParam": 0.8,
        "family": "multinomial",
    }
    training = Training("classification", "LogisticRegression", hyperparams,
                        0.7, iris, "./model/lr")
    ML().train_model(training)
def __init__(self, C, toter, maxIter, kernel='linear'):
    """SVM learner state: regularization C, tolerance, iteration cap, kernel choice."""
    ML.__init__(self, 'svm')
    # hyperparameters
    self.C = C
    self.toter = toter  # presumably a tolerance threshold — name kept from original
    self.maxIter = maxIter
    self.kernel = kernel
    # optimizer working state
    self.En = []
    self.b = 0
    self.alphas = None
    self.kij = {}  # cache of computed kernel values
def __init__(self, port, row_list, voltage, current, power, cumpower):
    """Hold one sensor row plus power telemetry; rows accumulate into self.df."""
    self.port = port
    self.row_list = row_list
    # two IMUs: accelerometer (ax/ay/az) and gyroscope (gx/gy/gz) per unit
    self.columns = ['ax1', 'ay1', 'az1', 'gx1', 'gy1', 'gz1',
                    'ax2', 'ay2', 'az2', 'gx2', 'gy2', 'gz2']
    self.df = pd.DataFrame(columns=self.columns)
    self.counter = 0
    # power telemetry forwarded to the server alongside predictions
    self.voltage = voltage
    self.current = current
    self.power = power
    self.cumpower = cumpower
    self.model = ML()
def __init__(self, values, all_data, **kwargs):
    """Snapshot the initial state and load the trained ML policy from winner.pkl."""
    # sampling period and total run duration (defaults: 5 ms, unbounded)
    self.period = kwargs.get('period', 0.005)
    self.duration = kwargs.get('duration', float('inf'))
    # initial readings used as the reference point for later updates
    self.start_time = values['time']
    self.previous_time = values['time']
    self.previous_be = values['be']
    self.previous_vel = values['av']
    # the policy lives outside this package; make it importable first
    sys.path.insert(0, "../../Machine_Learning")
    from ml import ML
    self.ml = ML("../../Machine_Learning/winner.pkl")
def _process(programs):
    """Run the ML model over each data row of features.csv and write "<id>,<prediction>" lines.

    Reads features.csv (skipping the header), predicts per row, and writes the
    results to featuresml.txt.
    """
    ml = ML(programs)
    # Context managers guarantee both files are closed even if prediction
    # raises (the original leaked both handles on error).
    with open("features.csv", "r") as features, open("featuresml.txt", "w") as out:
        next(features)  # skip header row
        for line in features:
            fields = line.strip("\n").split(",")
            row_id = fields[0]  # first column is the record id (avoids shadowing builtin `id`)
            res = ml.prediction(fields)
            out.write(str(row_id) + "," + str(res) + "\n")
def testCSV(self):
    """Run a single-row prediction against the iris CSV data with a saved LR model."""
    ml = ML()
    # ml.compute_statistics(iris, "sepallength")
    hyperparams = {
        "maxIter": 10,
        "regParam": 0.3,
        "elasticNetParam": 0.8,
        "family": "multinomial",
    }
    training = Training("classification", "LogisticRegression", hyperparams,
                        0.7, iris, "./model/lr")
    # ml.train_model(training)
    ml.model_predict_single(training, iris)
def testJDBC(self):
    """Read a MySQL table via JDBC, compute a column statistic, print the schema."""
    connection_opts = json.dumps({
        "url": "jdbc:mysql://10.110.17.222/insight_ml",
        "driver": "com.mysql.jdbc.Driver",
        "dbtable": "user_info",
        "user": "******",
        "password": "",
    })
    source = Source("", "jdbc", connection_opts, {})
    ml = ML()
    frame = ml.read_source(source)
    ml.compute_statistics(source, "user_name")
    print(frame.schema.json())
class Machine_Learning(): def __init__(self,values,all_data,**kwargs): self.period = kwargs.get('period', 0.005) self.start_time = values['time'] self.previous_time = values['time'] self.previous_be = values['be'] self.duration = kwargs.get('duration', float('inf')) self.previous_vel = values['av'] sys.path.insert(0, "../../Machine_Learning") from ml import ML self.ml = ML("../../Machine_Learning/winner.pkl") def algo(self,values,all_data): time.sleep(0.01) if values['time'] - self.start_time < self.duration: action = self.ml.get_action([ values['be'], values['av'] ]) print values['time'], values['be'], values['av'], action if action == 1: return ["legs_retracted", 1.0] elif action == 0: return ["legs_extended", 1.0] elif action == 3: return "torso_extended" elif action == 2: return "torso_retracted" elif action == 4: pass else: return 'switch'
def register_ml(self, ml):
    """Register a new ML object in flow, keyed by its meta name; returns self for chaining."""
    name = ml.meta['name']
    self.clfs[name] = ML(classifier=ml.clf, name=name)
    return self
def test_init(self):
    """A freshly constructed ML has a clf attribute set to None and a blank name."""
    instance = ML()
    assert hasattr(instance, 'clf')
    assert instance.clf is None
    assert instance.meta == {'name': ''}
class Machine_Learning_bool: def __init__(self,values,all_data,**kwargs): self.period = kwargs.get('period', 0.005) self.start_time = values['time'] self.previous_time = values['time'] self.previous_be = values['be'] self.duration = kwargs.get('duration', float('inf')) self.previous_vel = values['av'] sys.path.insert(0, "../../Machine_Learning") from ml import ML self.ml = ML("../../Machine_Learning/ MACHINE_LEARNING_FILE ") def algo(sefl,values,all_data): time.sleep(0.01) if values['time'] - self.start_time < self.duration: torso_bool, legs_bool = values['t_move'], values['l_move'] action = self.ml.get_action([ values['be'], values['av'], torso_bool, legs_bool ]) print values['time'], values['be'], values['av'], action if (action == 0 and not legs_bool): return ["legs_extended", 0.8] elif (action == 1 and not legs_bool): return ["legs_retracted", 0.8] elif (action = 2 and not torso_bool): return ["torso_retracted"] elif (action == 3 and not torso_bool): return ["torso_extended"] elif action == 4: pass else:
class Machine_Learning(): def __init__(self, values, all_data, **kwargs): self.period = kwargs.get('period', 0.005) self.start_time = values['time'] self.previous_time = values['time'] self.previous_be = values['be'] self.duration = kwargs.get('duration', float('inf')) self.previous_vel = values['av'] sys.path.insert(0, "../../Machine_Learning") from ml import ML self.ml = ML() def algo(self, values, all_data): if values['time'] - self.start_time < self.duration: action = self.ml.get_action([values['be'], values['av']]) if action == 0: return "legs_in" elif action == 1: return "legs_out" elif action == 2: return "torso_out" elif action == 3: return "torso_in" elif action == 4: pass print values['time'], values['be'], values['av'], action time.sleep(0.01)
def __init__(self, alg, features, max_depth=sys.maxint, min_sample=1, min_e=0.01, rf=False):
    """Decision-tree learner setup; `alg` also selects the split criterion (Python 2 code)."""
    ML.__init__(self, alg)
    self.alg = alg
    self.features = features
    # stopping criteria for tree growth
    self.max_depth = max_depth
    self.min_sample = min_sample
    self.min_e = min_e
    self.rf = rf  # random-forest mode flag
    # filled in during training
    self.model = None
    self.error = 0
    # self.split_A = []
    # pick the split routine registered for this algorithm
    self.best_split_func = self.split_func[self.alg]
def __init__(self, max_features=5000, test=False, feature_stratergy="Tfidf"):
    """Set up preprocessing, feature building, and the text vectorizer.

    In test (inference) mode a previously fitted vectorizer is unpickled;
    otherwise a fresh Tfidf/Count vectorizer is created for training.
    (Parameter name "feature_stratergy" and attribute "resue" are kept
    as-is for caller compatibility.)
    """
    self.info = Information()
    self.preprocess = Preprocess()
    self.make_features = make_features(test)
    self.ml = ML()
    self.resue = test
    if self.resue:
        self.vectorizer = pickle.load(
            open("vectorizers/vectorizer.pkl", "rb"))
        print("INFERENCE PHASE")
    else:
        print("TRAINING PHASE")
        if feature_stratergy == "Tfidf":
            self.vectorizer = TfidfVectorizer(max_features=max_features)
        elif feature_stratergy == "Count":
            self.vectorizer = CountVectorizer(max_features=max_features)
class StoreData():
    """Buffers incoming sensor rows into a DataFrame; every 60 rows, runs the ML
    model and ships the predicted action (plus power telemetry) to the server."""

    def __init__(self, port, row_list, voltage, current, power, cumpower):
        self.port = port
        self.row_list = row_list
        # two IMUs: accelerometer (ax/ay/az) and gyroscope (gx/gy/gz) per unit
        self.columns = ['ax1', 'ay1', 'az1', 'gx1', 'gy1', 'gz1',
                        'ax2', 'ay2', 'az2', 'gx2', 'gy2', 'gz2']
        self.df = pd.DataFrame(columns=self.columns)
        self.counter = 0
        self.voltage = voltage
        self.current = current
        self.power = power
        self.cumpower = cumpower
        self.model = ML()

    def run(self):
        self.storeData()

    def storeData(self):
        """Append the current row; on the 60th row, predict and send to the server."""
        self.df.loc[self.counter] = self.row_list
        self.counter += 1
        shouldClose = False
        if self.counter == 60:
            action = self.model.predict(self.df)
            print(action)
            # message format: "#<action>|<voltage>|<current>|<power>|<cumpower>|"
            payload = ("#" + action + "|" + str(self.voltage) + "|"
                       + str(self.current) + "|" + str(self.power) + "|"
                       + str(self.cumpower) + "|")
            if action == 'logout':
                # tell the server to close, then terminate the client
                shouldClose = True
                client.sendToServer(payload, shouldClose)
                quit()
            else:
                client.sendToServer(payload, shouldClose)
                # reset the buffer for the next 60-row window
                self.df = pd.DataFrame(columns=self.columns)
                time.sleep(2)
                print('Woke up.')
                self.counter = 0
def server():
    """Blocking TCP prediction server on localhost:10002.

    Accepts one connection at a time, reads a comma-separated feature line,
    replies with the model's prediction. The literal "EXIT" shuts it down.
    """
    ml = ML()
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    server_address = ('localhost', 10002)
    log.debug('Inicializando %s na porta %s', server_address[0], server_address[1])
    sock.bind(server_address)
    sock.listen(1)
    while True:
        log.debug('Esperando por uma conexao...')
        connection, client_address = sock.accept()
        try:
            log.debug('Conexao de %s', client_address)
            data = connection.recv(2048).strip("\n")
            log.debug('\tRecebido %s', data)
            if data == "EXIT":
                break
            res = ml.prediction(data.split(","))
            connection.sendall(str(res) + "\n")
            log.debug("\tEnviado: %s", res)
        finally:
            # always release the per-client connection, even on break/raise
            connection.close()
    sock.close()
def build(self):
    """Assemble the root layout: drawing pane + clear button on the left,
    one result label per digit on the right; load the trained network weights.

    Returns the root widget for the kivy App.
    """
    root = BoxLayout()
    pane_size = '500dp'
    draw_parent = BoxLayout(orientation='vertical',
                            size_hint_x=None,
                            width=pane_size)
    labels_box = BoxLayout(orientation='vertical')
    root.add_widget(draw_parent)
    root.add_widget(labels_box)
    self.painter = MyPaintWidget(self.update_res,
                                 size_hint_y=None,
                                 height=pane_size)
    clearbtn = Button(text='Clear')
    clearbtn.bind(on_release=self.clear_canvas)
    draw_parent.add_widget(self.painter)
    draw_parent.add_widget(clearbtn)
    # addbtn = Button(text='add')
    # addbtn.bind(on_release=self.add_ev)
    # labels_box.add_widget(addbtn)
    # One result row per digit 0-9. Fixed: the original appended a *second*
    # NumRes(i) instance to w_nums, so self.w_nums did not reference the
    # widgets actually displayed in labels_box.
    w_nums = []
    for digit in range(10):
        row = NumRes(digit)
        w_nums.append(row)
        labels_box.add_widget(row)
    self.w_nums = w_nums
    self.labels_box = labels_box
    # network topology: 28x28 input image -> 16 -> 16 -> 10 outputs
    self.machine = ML((28 * 28, 16, 16, 10,))
    self.machine.load_weight('weights_m.out')
    return root
def get_ranked_items(request):
    """Return a JSON page of menu items ranked for `username`, optionally
    filtered by distance from (lat, lon).

    GET params: username (required); lat/lon (must be provided together);
    page (default 1), size (default 50), radius in km (default 10).
    Returns 400 with a JSON error body on invalid input.
    """
    try:
        username = request.GET["username"]
    except KeyError:
        return HttpResponseBadRequest(error_json("username required but not provided"))
    try:
        user = User.objects.get(username=username)
    except User.DoesNotExist:
        return HttpResponseBadRequest(error_json('user "%s" not found in database' % username))
    lat = request.GET.get("lat", None)
    lon = request.GET.get("lon", None)
    # lat and lon are only meaningful as a pair
    if lat is None and lon is not None:
        return HttpResponseBadRequest(error_json('lon parameter provided without lat parameter'))
    if lat is not None and lon is None:
        return HttpResponseBadRequest(error_json('lat parameter provided without lon parameter'))
    if lat is not None:
        try:
            lat = float(lat)
        except ValueError:
            return HttpResponseBadRequest(error_json('lat parameter invalid'))
        try:
            lon = float(lon)
        except ValueError:
            return HttpResponseBadRequest(error_json('lon parameter invalid'))
    page = int(request.GET.get("page", 1))
    count = int(request.GET.get("size", 50))
    max_distance = float(request.GET.get("radius", 10))  # km
    from haversine import distance
    from ml import ML
    try:
        rankings = ML.get(username)
    except KeyError:
        # no trained model for this user yet: rank every item equally
        rankings = fakedict(0)
    if lat is not None:
        items = [(rankings[x.locu_id], x)
                 for x in MenuItem.objects.all().select_related('venue')
                 if distance((lat, lon), (x.venue.lat, x.venue.lon)) <= max_distance
                 and x.locu_id in rankings]
    else:
        items = [(rankings[x.locu_id], x)
                 for x in MenuItem.objects.all() if x.locu_id in rankings]
    # Sort by ranking only. The original tuple sort fell back to comparing
    # MenuItem objects on tied rankings, which raises TypeError on Python 3.
    items.sort(key=lambda pair: pair[0], reverse=True)
    start, end = count * (page - 1), count * page
    if lat is not None:
        output = [item_to_json_dict(i[1],
                                    distance((lat, lon), (i[1].venue.lat, i[1].venue.lon)),
                                    ranking=i[0])
                  for i in items[start:end]]
    else:
        output = [item_to_json_dict(i[1], ranking=i[0]) for i in items[start:end]]
    return HttpResponse(json.dumps(output, indent=4))
def __init__(self):
    """Initialize the base ML machinery under the 'Random Forest' name."""
    ML.__init__(self, 'Random Forest')
def testOneVsRest(self):
    """Train a OneVsRest classifier on the multiclass sample data and print the result."""
    # TODO: the `classifier` param is required by OneVsRest
    training = Training("classification", "OneVsRest", {"maxIter": 10},
                        0.7, sample_multiclass_classification_data, "./model/ovr")
    print(ML().train_model(training))
def testNaiveBayes(self):
    """Train a Naive Bayes classifier (smoothing=2.0) on the libsvm sample data."""
    training = Training("classification", "NaiveBayes", {"smoothing": 2.0},
                        0.7, sample_libsvm_data, "./model/bayes")
    print(ML().train_model(training))
def testGBTClassifier(self):
    """Train a gradient-boosted-trees classifier (maxDepth=10) on the libsvm sample data."""
    training = Training("classification", "GBTClassifier", {"maxDepth": 10},
                        0.7, sample_libsvm_data, "./model/gbt")
    print(ML().train_model(training))
def testRandomForestClassifier(self):
    """Train a random-forest classifier (maxDepth=10) on the libsvm sample data."""
    training = Training("classification", "RandomForestClassifier", {"maxDepth": 10},
                        0.7, sample_libsvm_data, "./model/rf")
    print(ML().train_model(training))
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import mplleaflet
import IPython
from db_psql import DBPsql
from ml import ML

# Flask app wiring: the ML helper shares the module-level psql connection.
app = Flask(__name__)
bootstrap = Bootstrap(app)
db_psql = DBPsql()
db_psql.connect()
ml = ML(db_psql)
# ml.close_db_connection()


@app.route('/')
def index():
    """Render the landing page."""
    return render_template('index.html')


def load_model(db):
    """Load the pickled classifier stored in row id=1 of the Model table.

    NOTE(review): the `db` parameter is unused — the query runs on the
    module-level `db_psql` connection; confirm whether `db` should be used.
    On any failure the exception is printed and None is returned implicitly.
    SECURITY: pickle.loads on a DB blob executes arbitrary code if the
    Model table contents are ever attacker-controlled — confirm trust.
    """
    try:
        query = """
                SELECT clf
                FROM Model
                WHERE id = 1
                """
        db_psql.cursor.execute(query)
        clf_obj = db_psql.cursor.fetchone()[0]
        return pickle.loads(clf_obj)
    except Exception as e:
        print(e)
def testGeneralizedLinearRegression(self):
    """Train a generalized linear regression (regParam=0.3) on the linear sample data."""
    training = Training("regression", "GeneralizedLinearRegression", {"regParam": 0.3},
                        0.7, sample_linear_regression_data, "./model/glr")
    print(ML().train_model(training))
def testAFTSurvivalRegression(self):
    """Train an AFT survival regression with default params.

    TODO: no dedicated test data exists for this model yet.
    """
    training = Training("regression", "AFTSurvivalRegression", {},
                        0.7, sample_libsvm_data, "./model/aftsr")
    print(ML().train_model(training))
def testGBTRegressor(self):
    """Train a gradient-boosted-trees regressor (maxDepth=6) on the libsvm sample data."""
    training = Training("regression", "GBTRegressor", {"maxDepth": 6},
                        0.7, sample_libsvm_data, "./model/gbtr")
    print(ML().train_model(training))