def update(client_data):
    """Handle one incoming reading: plot it, and record statistics while a
    capture session is active.

    ``client_data`` is a comma-separated record; fields used here are
    index 1 (sequence number), 2 (antenna id), 3 (beacon id) and the last
    field (RSSI).  # NOTE(review): field layout inferred from the int() casts
    below — confirm against the sender's format.

    Side effects: appends to the raw-data log, streams a point to the
    per-antenna/per-beacon plot source, updates ``label1``/``label2`` widget
    text, and mutates the module globals ``saving``, ``saving_num`` and
    ``statistics``.
    """
    global sources, x, saving, saving_num, statistics
    # Always log the raw record, even when not in a saving session.
    write_raw_data(client_data)
    client_data_list = client_data.split(',')
    data_num = int(client_data_list[1])
    antenna_id = int(client_data_list[2])
    beacon_id = int(client_data_list[3])
    rssi = int(client_data_list[-1])
    # Push the new sample onto the live plot for this antenna/beacon pair.
    # ``x`` is a global sample index; it is not advanced here — presumably
    # another handler increments it. TODO confirm.
    sources[antenna_id][beacon_id].stream(dict(x=[x], y=[rssi]))
    # Auto-start a capture session when the configured sequence number is seen.
    if data_num == conf.AUTO_START_NUMBER:
        save_data()
    if saving:
        # Persist the record and update running min/max/sum statistics.
        write_data(client_data)
        saving_num += 1
        label1.text = "saved: %s/%s" % (saving_num, conf.SAVED_DATA_NUMBER)
        statistics['data'].append(rssi)
        if rssi < statistics['min']:
            statistics['min'] = rssi
        elif rssi > statistics['max']:
            statistics['max'] = rssi
        statistics['sum'] = statistics['sum'] + rssi
        label2.text = "min=%s, max=%s, avg=%.3f" % (
            statistics['min'], statistics['max'],
            statistics['sum'] / saving_num)
        # Session complete: finalize labels, compute the median, and notify.
        if saving_num == conf.SAVED_DATA_NUMBER:
            saving = False
            label1.text = label1.text + " finished! " + str(
                len(statistics['data']))
            label2.text = label2.text + ", median=" + str(
                np.median(statistics['data']))
            write_raw_data("end saving")
            show_statistics()
            # Audible completion cue (macOS ``say``).
            os.system('say "completed"')
def sqs_notify(event,context):
    """Lambda entry point for SQS-triggered events.

    Validates the queue payload, then for every record publishes a
    notification to SNS and writes the same request to DynamoDB.
    Returns the validation response unchanged on failure, otherwise a
    formatted success response.
    """
    verdict = helper.validate_sqs_queue(event)
    if verdict['statusCode'] != 200:
        # Bail out early with the validator's own error response.
        return verdict
    for record in event['Records']:
        request = helper.create_request_sqs(record['attributes'], record)
        # Fan out: notify subscribers first, then persist.
        sns_publisher.publish_sns_message(SNS_ARN, request)
        data_writer.write_data(request)
    return helper.formatResponse('Messages sent to DynamoDB and SNS successfully', helper.ok)
def api_notify(event, context):
    """Lambda entry point for API-triggered events.

    Validates the request, publishes it to SNS, persists it to DynamoDB,
    and returns a response derived from both operation statuses.
    """
    verdict = helper.validate_response(event)
    if verdict['statusCode'] != 200:
        # Validation failed — surface the validator's response directly.
        return verdict
    request = helper.create_request(event)
    published_ok = sns_publisher.publish_sns_message(SNS_ARN, request)
    stored_ok = data_writer.write_data(request)
    # Combine both outcomes into the caller-facing response.
    return helper.boolean_based_response(published_ok, stored_ok)
def validate_dark_signal(folderpath):
    """Compute per-channel dark-signal averages and persist them.

    Runs the dark-signal calculation over *folderpath*, keeps only the
    'average' entry for each channel, writes the mapping out, and returns
    the path of the written JSON file.
    """
    grouped = calculate_DS(folderpath)
    # Collapse each channel's stats down to just its average value.
    averages = {channel: stats.get('average') for channel, stats in grouped.items()}
    return data_writer.write_data(averages)
def validate_prnu(folder_path, dark_signal_json_path, on_ground_cpf_path):
    """Compute per-channel PRNU averages and persist them.

    Runs the PRNU calculation (using the previously computed dark-signal
    JSON and the on-ground CPF), keeps only each channel's 'average', writes
    the mapping out, and returns the path of the written file.
    """
    prnu_stats = calculate_PRNU(folder_path, dark_signal_json_path, on_ground_cpf_path)
    # Keep only the headline average per channel.
    averages = {channel: stats.get('average') for channel, stats in prnu_stats.items()}
    return data_writer.write_data(averages)
from bernoulli_nb_tester import BernoulliNBTester
from data_writer import write_data
# Initializing variables
# CLI layout: argv[1]=train file, argv[2]=test file, argv[3]=class-prior
# smoothing delta, argv[4]=conditional-probability delta, argv[5]=model
# output path, argv[6]=sys-output path.
train_data = TrainData(sys.argv[1])
test_data = TestData(sys.argv[2])
config.CLASS_PRIOR_DELTA = float(sys.argv[3])
config.COND_PROB_DELTA = float(sys.argv[4])
config.MODEL_FILE = sys.argv[5]
config.SYS_OUTPUT = sys.argv[6]
# Label -> frequency over the training set; also serves as the label set.
class_labels = Counter(train_data.data_labels)
# Model training and evaluation
trainer = BernoulliNBTrainer(class_labels)
train_eval = BernoulliNBTester(train_data, trainer, class_labels)
test_eval = BernoulliNBTester(test_data, trainer, class_labels)
write_data(train_eval, test_eval, trainer)
# Printing training results
# Accumulate a space-separated label header in insertion order (before
# sorting, which only affects the confusion-matrix axis order below).
for label in class_labels:
    config.LABEL_STRING += label + ' '
class_labels = sorted(class_labels)
print('Confusion matrix for the training data:')
train_eval.print_confusion_matrix(class_labels)
print('Training accuracy=', train_eval.get_accuracy_score())
print('\n\n')
print('Confusion matrix for the test data:')
test_eval.print_confusion_matrix(class_labels)
print('Test accuracy=', test_eval.get_accuracy_score())
def interact():
    """Interactive driver (Python 2): run k-means on 'in.data', then classify
    a user-entered point with k-NN, writing results to 'out.data'.

    Prompts on stdin for k (k-means), k (k-NN) and the coordinates of a new
    element. When k-means is run with k == 3, a label-mapping test against
    the original classes is performed and reused for the k-NN result.
    """
    skip_class = False
    data = data_loader.read_data('in.data', skip_class=skip_class, skip_header=False)
    data = np.array(data)
    if not skip_class:
        # Last column holds the original class labels; split it off.
        split = np.split(data, [-1], axis=1)
        data = split[0]
        org_classes = split[1]
    data = data.astype(np.float)
    # Sentinels chosen to force at least one pass through each prompt loop.
    k_m = 0
    k_n = 10000
    # Re-prompt until 1 <= k_m <= number of samples.
    while k_m < 1 or k_m > data.shape[0]:
        k_m = raw_input("Input number of k-means: ")
        k_m = int(k_m)
        if k_m < 1:
            print "K too small. Try k > 0"
        elif k_m > data.shape[0]:
            print "K too large. Try k <= " + str(data.shape[0])
    means, new_classes = k_means(data, k_m)
    if k_m == 3:
        # With exactly 3 clusters, compare against the original labels.
        # ``mappings`` is kept for relabeling the k-NN prediction below.
        mappings, coded_classes, matches = k_means_test(data, means, new_classes, org_classes)
        data_writer.write_tests('out.data', "K-means", k_m, data, coded_classes, org_classes, matches)
        print("Output of k-Means with test written to out.data file")
    else:
        data_writer.write_data('out.data', "K-means", k_m, data, new_classes)
        print("Output of k-Means written to out.data file")
    # Re-prompt until k_n < number of samples.
    while k_n >= data.shape[0]:
        k_n = raw_input("Input number of k-NN: ")
        k_n = int(k_n)
        if k_n >= data.shape[0]:
            print "K too large. Try k < " + str(data.shape[0])
    new_element = []
    coo = 0
    print("Add coordinates of the new element")
    for x in range(data.shape[1]):
        coo = raw_input("Enter float for " + str(x + 1) + ". coordinate: ")
        if coo == "":
            # Empty input defaults the coordinate to 0.
            coo = 0
        coo = float(coo)
        new_element.append(coo)
    new_element = np.array([new_element])
    new_class = k_nn(data, new_classes, k_n, new_element)
    if k_m == 3:
        # Translate the cluster id back to an original-label code.
        new_class = mappings[new_class]
    new_class = np.array([[new_class]])
    data_writer.write_data('out.data', "K-NN", k_n, new_element, new_class)
    print("Output of k-NN written to out.data file")
import data_reader as dr
import data_writer as dw
import car
import solver

# Problem instances to solve, in ascending difficulty order.
filenames = [
    "a_example",
    "b_should_be_easy",
    "c_no_hurry",
    "d_metropolis",
    "e_high_bonus"
]

if __name__ == "__main__":
    # Solve every instance: read input, build the fleet, run the solver,
    # and write the ride assignments to the matching output file.
    for filename in filenames:
        print("operating on {}".format(filename))
        parsed = dr.read_data("input/" + filename + ".in")
        # One car object per vehicle declared in the input header.
        fleet = car.get_car_list(parsed["num_vehs"])
        assignments = solver.solve(
            list_of_cars=fleet,
            list_of_rides=parsed["rides"],
            sim_steps=parsed["sim_steps"]
        )
        dw.write_data(assignments, "output/" + filename + ".out")