def mode_test():
    """The main testing mode entrypoint"""
    start_time = time.time()
    io = get_io()

    logline("test")
    enter_group()

    logline("reconstructing model")
    model = create_model(1)  # batch size of 1 for testing

    logline("applying learned weights")
    model = apply_weights(model, io)

    logline("reading testing files")
    test_files = read_test_files(io)

    logline("running testing data")
    enter_group()
    run_tests(io, model, test_files)
    exit_group()

    exit_group()
    logline("done testing, runtime is {}".format(
        Timer.stringify_time(Timer.format_time(time.time() - start_time))))
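
# apply_weights is defined elsewhere in this project. A minimal sketch of what
# it could look like, assuming the model is a Keras model and the weights path
# lives under an io key such as "weights_file" (both the key name and the
# load_weights usage are assumptions, not confirmed by this file):
#
#     def apply_weights(model, io):
#         # Load previously exported weights into the reconstructed model
#         model.load_weights(io.get("weights_file"))
#         return model
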
def mode_train():
    """The main training mode entrypoint"""
    start_time = time.time()
    io = get_io()
    logline("using GPU?", tf.test.is_gpu_available())

    logline("train")
    enter_group()

    logline("loading preprocessed data")
    preprocessed = load_preprocessed(io)

    logline("creating model")
    train_model = create_model(batch_size=io.get("batch_size"))

    logline("fitting model")
    enter_group()
    fit_model(io, train_model, preprocessed)
    exit_group()

    logline("exporting model")
    export_model(train_model, io)

    exit_group()
    logline("done training, runtime is {}".format(
        Timer.stringify_time(Timer.format_time(time.time() - start_time))))
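
# export_model is the counterpart of apply_weights above. A minimal sketch,
# again assuming a Keras model and an io key such as "weights_file" for the
# destination path (key name and save_weights usage are assumptions):
#
#     def export_model(model, io):
#         # Persist the trained weights so mode_test can reload them
#         model.save_weights(io.get("weights_file"))
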
def start_server(io: IO):
    """Start the web server on the configured port"""
    global interval
    interval = io.get("interval")

    port = io.get("port")
    httpd = HTTPServer(("", port),
                       partial(WebServer, directory=os.path.join(CUR_DIR, "public")))

    logline("listening at port", port)
    enter_group()
    try:
        httpd.serve_forever()
    except KeyboardInterrupt:
        pass
    httpd.server_close()
    exit_group()
    logline("stopped listening")
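# start_server passes directory= through functools.partial, which matches how
# http.server.SimpleHTTPRequestHandler (Python 3.7+) accepts a serving
# directory. The WebServer class is defined elsewhere; a minimal sketch of a
# compatible handler, with its exact shape being an assumption:
#
#     from http.server import SimpleHTTPRequestHandler
#
#     class WebServer(SimpleHTTPRequestHandler):
#         def log_message(self, format, *args):
#             # Route request logs through the project's logline helper
#             logline(format % args)
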
def mode_preprocess():
    """The main preprocessing entrypoint"""
    start_time = time.time()
    preprocessed = []
    io = get_io()

    logline("preprocessing")
    enter_group()

    logline("reading input paths")
    enter_group()
    input_paths = collect_input_paths(io)
    for input_path in input_paths:
        logline('found path: "{}"'.format(input_path))
    exit_group()

    logline("iterating files")
    enter_group()
    for file in get_files(input_paths):
        if not file:
            # get_files presumably yields a falsy sentinel when nothing was
            # found; abort preprocessing in that case
            error("no files")
            return None

        features = gen_features(file)
        outputs = gen_outputs(file, io)

        feature_arr = list(map(lambda x: x.to_arr(), features))
        output_arr = list(map(lambda x: x.to_arr(), outputs))
        assert np.array(feature_arr).shape[1] == Features.length()
        assert np.array(output_arr).shape[1] == OUT_VEC_SIZE

        preprocessed.append({
            "file_name": file.name,
            "features": feature_arr,
            "outputs": output_arr
        })
        logline('done with file: "{}"'.format(file.name))
        file.close()
    exit_group()
    logline("done iterating files")

    with open(io.get("output_file"), "wb+") as file:
        pickle.dump(preprocessed, file)
    logline("wrote output to file: {}".format(io.get("output_file")))

    exit_group()
    logline("done preprocessing, runtime is {}".format(
        Timer.stringify_time(Timer.format_time(time.time() - start_time))))
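
# mode_train reads this pickle back through load_preprocessed. A minimal
# sketch of that reader, assuming the io key is "input_file" (the key name is
# an assumption; the pickle layout matches what mode_preprocess writes above,
# a list of dicts with "file_name", "features", and "outputs" keys):
#
#     def load_preprocessed(io):
#         with open(io.get("input_file"), "rb") as in_file:
#             return pickle.load(in_file)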