def main():
    """Train perceptron, SVM and passive-aggressive models and print their predictions.

    Command-line arguments:
        sys.argv[1]: path to the training-samples CSV.
        sys.argv[2]: path to the training-labels CSV.
        sys.argv[3]: path to the test-samples CSV.
    """
    # Load + encode + normalize both feature sets through one shared helper
    # (the original duplicated this pipeline inline for train and test).
    data_set, samples_size = _load_features(sys.argv[1])
    test_x, test_size = _load_features(sys.argv[3])

    # Training labels are plain floats; no encoding or normalization needed.
    label_set = np.genfromtxt(sys.argv[2], delimiter=',', dtype=float)

    regulation_constant = 0.25  # SVM regularization strength

    # Fit each model on the full training set.
    m_perc = perceptron.getBestModel(data_set, label_set, samples_size)
    m_svm = svm.getBestModel(data_set, label_set, samples_size, regulation_constant)
    m_pa = pa.getBestModel(data_set, label_set, samples_size)

    # Print each model's prediction for every sample in the test set.
    tester.print_results(m_perc, m_svm, m_pa, test_x, test_size)


def _load_features(path):
    """Read a samples CSV, one-hot encode it, cast to float and z-score normalize.

    Returns a tuple (features, size) where `size` is the number of samples read.
    """
    # Raw cells are short strings (categorical values), hence the |U5 dtype.
    raw = np.genfromtxt(path, delimiter=',', dtype="|U5")
    size = len(raw)  # number of samples
    encoded = one_hot_encoding(raw, size)
    # z_score normalizes the float-valued feature matrix column-wise
    # (presumably — defined elsewhere in this project; confirm).
    features = z_score(np.array(encoded, dtype=float))
    return features, size
    # Tail of the `tests` list (the list opens earlier in the file): each entry
    # pairs a test callable with its expected boolean result.
    [test_create_order, True],
    [test_get_all_orders, True],
    [test_get_requested_orders, True],
    [test_get_requested_and_cooking_orders, True],
]

# Cached order id shared across tests; populated lazily by get_valid_order_id().
valid_order_id = None


def get_valid_order_id():
    """Return the id of a known-valid order, creating one via the API on first call.

    The result is memoized in the module-level ``valid_order_id`` so the
    order-creation endpoint is hit at most once per test run.
    """
    global valid_order_id
    if valid_order_id:
        return valid_order_id
    # NOTE(review): assumes the POST succeeds and the JSON body contains
    # data["order_id"]; a failed request or different schema would raise
    # here rather than fail gracefully — confirm that is acceptable.
    valid_order_req = session.post(api_url + "create_order", json={
        "table_num": 1,
        "items": [1]
    })
    valid_order_id = valid_order_req.json().get("data")["order_id"]
    return valid_order_id


if __name__ == "__main__":
    # Passing "v" as the first CLI argument enables verbose test output
    # (``verbose`` is presumably read by the test helpers — defined elsewhere).
    if len(sys.argv) > 1 and sys.argv[1] == "v":
        verbose = True
    # Aggregate counters threaded through the runner and then reported.
    total = 0
    passed = 0
    failed = []
    total, passed, failed = tester.run_tests(tests, total, passed, failed)
    tester.print_results(total, passed, failed)