def test_model(train_set, test_set, model, filename=None, prefix="test", epochs=300):
    """Train *model* on train_set, then evaluate slice reconstruction on test_set.

    For every level in test_set, the level is cut into chunks, each chunk is
    run through the model, and the reconstructed map is compared with the
    true map.  One CSV line per level (prefix, train time ns, level file,
    epochs, total tile errors, avg Hamming error, avg distance error) is
    printed and, when *filename* is given, appended to that file.

    Args:
        train_set: list of level filenames used for training.
        test_set: list of level filenames used for evaluation.
        model: slice model exposing build_slice_set/train/predict/decode_slice.
        filename: optional CSV file to append result lines to.
        prefix: label written as the first CSV column.
        epochs: number of training epochs.
    """
    training = model.build_slice_set(train_set)
    print("debug - start training " + prefix)
    tmr = time.process_time_ns()
    # autoencoder-style training: input and target are the same slice set
    model.train(training, training, epochs)
    train_time = time.process_time_ns() - tmr
    print("debug - start testing")
    true_map = LTMap.LiteTuxMap(1, 1)
    for lvlfile in test_set:
        s = prefix + "," + str(train_time) + "," + lvlfile + "," + str(
            epochs) + ","
        lvl_chunks = model.build_slice_set([lvlfile], True)
        true_map.load(lvlfile)
        predicted_map = LTMap.LiteTuxMap(true_map.width, true_map.height)
        for i in range(lvl_chunks.shape[0]):
            slc = model.predict(lvl_chunks[i])
            model.decode_slice(slc[0], predicted_map, i * model.cols)
        print(predicted_map.to_vertical_string())
        test = MapTest(true_map, predicted_map)
        s += str(test.get_total_tile_errors()) + ","
        s += str(test.get_average_tile_hamming_error()) + ","
        s += str(test.get_average_distance_error()) + ","
        print(s)
        if filename is not None:
            with open(filename, "a") as f:
                f.write(s)
                f.write("\n")
def train_and_test(model):
    """Train *model* on the standard training levels and measure its
    tile-reconstruction errors over the standard test levels.

    Returns:
        dict with keys "train_time"/"test_time" (process seconds),
        "errors" (per-level total tile errors) and "tiles"
        (per-level tile counts).
    """
    results = {"train_time": 0, "test_time": 0, "errors": [], "tiles": []}

    # -- training phase (autoencoder style: input == target) --
    slices = model.build_slice_set(FullTileTesting.TRAIN_LEVELS)
    start = time.process_time()
    model.train(slices, slices, 300)
    results["train_time"] = time.process_time() - start

    # -- testing phase --
    reference = LTMap.LiteTuxMap(1, 1)
    start = time.process_time()
    for level_file in FullTileTesting.TEST_LEVELS:
        chunks = model.build_slice_set([level_file], True)
        reference.load(level_file)
        results["tiles"].append(reference.width * reference.height)
        rebuilt = LTMap.LiteTuxMap(reference.width, reference.height)
        for idx in range(chunks.shape[0]):
            predicted = model.predict(chunks[idx])
            model.decode_slice(predicted[0], rebuilt, idx * model.cols)
        comparison = FullTileTesting.MapTest(reference, rebuilt)
        results["errors"].append(comparison.get_total_tile_errors())
    results["test_time"] = time.process_time() - start
    return results
def test_map_frequency_count(self):
    """Tile counts in the fixture map: tile id t occurs exactly t times."""
    level = LTMap.LiteTuxMap(1, 1)
    level.from_json_string(self.TEST_MAP)
    metric = LTMap.TileFrequencyMetric(level, 16)
    for tile_id in range(16):
        self.assertEqual(metric.get_tile_count(tile_id), tile_id)
    self.assertEqual(metric.get_tile_percent(12), .1)
def test_one_hot_decode():
    """Round-trip a small map through encode_slice/decode_slice and print
    the restored map plus its comparison against the original."""
    encoder = OneHotEncoder(4, 4)
    source = LTMap.LiteTuxMap(4, 4)
    source.from_json_string(SMALL_TEST_MAP)
    restored = LTMap.LiteTuxMap(4, 4)
    encoded = encoder.encode_slice(source, 0)
    encoder.decode_slice(encoded, restored, 0)
    print(restored.to_vertical_string())
    comparison = MapTest(source, restored)
    print(comparison)
def batchConvert(list):
    """Convert each VGLC Mario level file in *list* to LiteTux JSON format,
    saving only levels that a speedrun agent can traverse to the last column.

    The output name is derived from the input by dropping the first three
    characters and swapping the .txt suffix for .json.
    """
    state_manager = LTMap.LTSpeedrunStateManager(4, True)
    for filename in list:
        target = filename[3:].replace(".txt", ".json")
        print("Processing ", filename)
        converted = convertVGLCMario(filename)
        print(converted.to_vertical_string())
        path_board = LTMap.LTPathBoard(converted, state_manager)
        path_board.process_all_paths(0, 8)
        # keep the level only if some path reaches the final column
        if len(path_board.get_nodes_in_column(converted.width - 1)) > 0:
            print("Saving map ", target)
            converted.save(target)
def test_inverted_map_comparison(self):
    """A map versus its bitwise inverse: every tile differs in all 4 bits."""
    original = LTMap.LiteTuxMap(4, 4)
    original.from_json_string(SMALL_TEST_MAP)
    inverted = LTMap.LiteTuxMap(4, 4)
    inverted.from_json_string(SMALL_TEST_MAP_INVERSED)
    result = FullTileTesting.MapTest(original, inverted)
    self.assertEqual(result.get_total_tile_errors(), 16)
    self.assertEqual(result.get_total_hamming_error(), 64)
    self.assertEqual(result.get_average_tile_hamming_error(), 4.0)
    self.assertEqual(result.hamming_errors_to_string(),
                     "4444\n4444\n4444\n4444\n")
    self.assertEqual(result.get_total_distance_error(), 128)
    self.assertEqual(result.get_average_distance_error(), 8.0)
    self.assertEqual(result.distance_errors_to_string(),
                     "15 7 1 9 \n13 5 3 11 \n11 3 5 13 \n9 1 7 15 \n")
def test_bad_middle_map_comparison(self):
    """Only the four centre tiles differ between the fixtures; verify each
    aggregate metric and both per-tile error renderings."""
    base = LTMap.LiteTuxMap(4, 4)
    base.from_json_string(SMALL_TEST_MAP)
    altered = LTMap.LiteTuxMap(4, 4)
    altered.from_json_string(SMALL_TEST_MAP_MID_MIXED)
    result = FullTileTesting.MapTest(base, altered)
    self.assertEqual(result.get_total_tile_errors(), 4)
    self.assertEqual(result.get_total_hamming_error(), 10)
    self.assertEqual(result.get_average_tile_hamming_error(), 0.625)
    self.assertEqual(result.hamming_errors_to_string(),
                     "0000\n0130\n0240\n0000\n")
    self.assertEqual(result.get_total_distance_error(), 12)
    self.assertEqual(result.get_average_distance_error(), 0.75)
    self.assertEqual(result.distance_errors_to_string(),
                     "0 0 0 0 \n0 1 5 0 \n0 1 5 0 \n0 0 0 0 \n")
def get_input_output_sets_for_model(self, level_list):
    """Build (input, output) training pairs for a next-slice predictor.

    Each level in *level_list* is cut into consecutive slices stepped by
    model.cols; within a level, slice k is an input whose target is slice
    k+1.  Pairs never cross level boundaries.

    Args:
        level_list: filenames loadable by LiteTuxMap.load.

    Returns:
        (in_set, out_set): aligned 2-D numpy arrays of shape
        (num_pairs, in_nodes), or (None, None) if no level produced slices.
    """
    ltm = LTMap.LiteTuxMap(1, 1)
    step_size = self.model.cols
    in_parts = []
    out_parts = []
    for lvl in level_list:
        ltm.load(lvl)
        # Collect this level's slices as (1, in_nodes) rows and stack them
        # once -- the previous per-row np.vstack accumulation was O(n^2).
        rows = [
            np.reshape(self.model.encode_slice(ltm, c),
                       (1, self.model.in_nodes))
            for c in range(0, ltm.width - self.model.cols, step_size)
        ]
        if not rows:
            # level too narrow to yield a slice; previously this crashed
            continue
        slice_set = np.vstack(rows)
        # inputs are all but the last slice, outputs all but the first
        in_parts.append(slice_set[0:-1, :])
        out_parts.append(slice_set[1:, :])
    if not in_parts:
        return (None, None)
    return (np.vstack(in_parts), np.vstack(out_parts))
def test_model(train_set, test_set, model, filename=None, prefix="test", epochs=300):
    """Train *model* to predict the NEXT slice from the current one, then
    evaluate it by regenerating every test level slice by slice.

    The first slice of each test level is copied verbatim into the
    reconstruction; every following slice is predicted from the previous
    TRUE slice.  One CSV line per level (prefix, train time ns, level file,
    epochs, tile errors, avg Hamming, avg distance, tile count) is printed
    and optionally appended to *filename*.

    Returns:
        dict with "train_time" (ns) plus per-level "errors" and "tiles".
    """
    results = {"train_time": 0, "errors": [], "tiles": []}
    train_input, train_output = get_input_output_sets_for_model(
        model, train_set)
    print("debug - start training " + prefix)
    tmr = time.process_time_ns()
    model.train(train_input, train_output, epochs)
    train_time = time.process_time_ns() - tmr
    results["train_time"] = train_time
    print("debug - start testing")
    true_map = LTMap.LiteTuxMap(1, 1)
    for lvlfile in test_set:
        s = prefix + "," + str(train_time) + "," + lvlfile + "," + str(
            epochs) + ","
        lvl_chunks = model.build_slice_set([lvlfile], True)
        true_map.load(lvlfile)
        predicted_map = LTMap.LiteTuxMap(true_map.width, true_map.height)
        # seed the reconstruction with the true first slice
        model.decode_slice(lvl_chunks[0], predicted_map, 0)
        for i in range(lvl_chunks.shape[0] - 1):
            slc = model.predict(lvl_chunks[i])
            model.decode_slice(slc[0], predicted_map, (i + 1) * model.cols)
        print(predicted_map.to_vertical_string())
        test = FullTileTesting.MapTest(true_map, predicted_map)
        s += str(test.get_total_tile_errors()) + ","
        s += str(test.get_average_tile_hamming_error()) + ","
        s += str(test.get_average_distance_error()) + ","
        s += str(true_map.width * true_map.height)
        results["errors"].append(test.get_total_tile_errors())
        results["tiles"].append(true_map.width * true_map.height)
        print(s)
        if filename is not None:
            with open(filename, "a") as f:
                f.write(s)
                f.write("\n")
    return results
def predict_map(self, lvl):
    """Rebuild *lvl* tile by tile using this instance's trained model.

    The first memory_size tiles (column-major order) are copied from the
    source level as the seed; every later tile is predicted from the
    sliding-window samples produced by map_to_training_set.

    Returns:
        A LiteTuxMap the same size as *lvl* containing the predictions.
    """
    x, y = self.map_to_training_set(lvl)
    predicted_map = LTMap.LiteTuxMap(lvl.width, lvl.height)
    # copy the seed window verbatim; tiles are addressed column-major
    for i in range(self.memory_size):
        predicted_map.set_tile(i // lvl.height, i % lvl.height,
                               lvl.get_tile(i // lvl.height, i % lvl.height))
    offset = self.memory_size
    for i in range(x.shape[0]):
        # BUG FIX: use the model owned by this instance rather than the
        # module-level global `model` the original depended on.
        pred = self.model.predict(x[i:i + 1, :, :])
        predicted_map.set_tile(offset // lvl.height, offset % lvl.height,
                               self.prediction_to_tile(pred))
        offset += 1
    return predicted_map
def test_same_map_comparison(self):
    """Comparing a map with itself yields zero error everywhere."""
    level = LTMap.LiteTuxMap(4, 4)
    level.from_json_string(SMALL_TEST_MAP)
    result = FullTileTesting.MapTest(level, level)
    self.assertEqual(result.get_total_tile_errors(), 0)
    self.assertEqual(result.get_total_hamming_error(), 0)
    self.assertEqual(result.get_average_tile_hamming_error(), 0)
    self.assertEqual(result.hamming_errors_to_string(),
                     "0000\n0000\n0000\n0000\n")
    self.assertEqual(result.get_total_distance_error(), 0)
    self.assertEqual(result.get_average_distance_error(), 0)
    self.assertEqual(result.distance_errors_to_string(),
                     "0 0 0 0 \n0 0 0 0 \n0 0 0 0 \n0 0 0 0 \n")
def convertVGLCMario(level_name):
    """Read a VGLC Mario text level and convert it into a LiteTuxMap.

    Each character maps to a tile id through MARIO_LT_MAPPING; characters
    with no mapping become tile 16.

    Args:
        level_name: path to the VGLC .txt level.

    Returns:
        The populated LiteTuxMap.
    """
    # `with` guarantees the handle is closed even if parsing below raises
    with open(level_name, "r") as f:
        raw_level = f.readlines()
    # Width drops one char per row -- assumes every row (including the
    # last) is newline-terminated.  TODO confirm for all VGLC files.
    ltm = LTMap.LiteTuxMap(len(raw_level[0]) - 1, len(raw_level))
    for r, row in enumerate(raw_level):
        for c in range(len(row) - 1):
            ltm.set_tile(c, r, MARIO_LT_MAPPING.get(row[c], 16))
    return ltm
def test_one_hot_encode():
    """Dump the one-hot encoding of the small test map, 16 values per tile."""
    encoder = OneHotEncoder(4, 4)
    level = LTMap.LiteTuxMap(4, 4)
    level.from_json_string(SMALL_TEST_MAP)
    encoded = encoder.encode_slice(level, 0)
    idx = 0  # flat cursor into the encoded vector
    for row in range(4):
        for col in range(4):
            print(col, ",", row, end=': ')
            for _bit in range(16):
                print(encoded[idx], end=" ")
                idx += 1
            print()
def __init__(self, map_1, map_2, tile_bits=4):
    """Compare two equally-sized maps tile by tile.

    Builds two per-tile error maps -- bitwise Hamming distance (over the
    low tile_bits bits) and absolute tile-id distance -- plus running
    totals of mismatched tiles, Hamming error, and distance error.
    """
    self.map_1 = map_1
    self.map_2 = map_2
    self.hamming = LTMap.LiteTuxMap(map_1.width, map_1.height)
    self.distances = LTMap.LiteTuxMap(map_1.width, map_1.height)
    self.total_tile_errors = 0
    self.total_hamming_error = 0
    self.total_distance_error = 0
    for col in range(self.map_1.width):
        for row in range(self.map_1.height):
            t1 = map_1.get_tile(col, row)
            t2 = map_2.get_tile(col, row)
            gap = abs(t1 - t2)
            self.total_distance_error += gap
            self.distances.set_tile(col, row, gap)
            # count differing bits within the low tile_bits bits
            diff = t1 ^ t2
            ham = sum(1 for b in range(tile_bits) if diff & (1 << b))
            self.hamming.set_tile(col, row, ham)
            if ham > 0:
                self.total_tile_errors += 1
            self.total_hamming_error += ham
def build_map(self, cols, noise=.1, clean=True):
    """Generate a *cols*-wide map by feeding the model its own predictions.

    Args:
        cols: width of the map to generate.
        noise: per-bit probability of flipping a predicted bit to the
            opposite side of 0.5 before the next prediction step.
        clean: when True, pass each prediction through the cleaner model
            before decoding it into the map.

    Returns:
        The generated LiteTuxMap (also printed for inspection).
    """
    # TODO handle None start slice
    predicted_map = LTMap.LiteTuxMap(cols, self.model.rows)
    prediction = self.model.predict(self.start_slice)
    cur_col = 0
    while cur_col < cols:
        for i in range(prediction.shape[1]):
            if random.random() < noise:
                # BUG FIX: flip the bit being visited -- the original read
                # prediction[0, 1] (a fixed index) instead of [0, i].
                prediction[0, i] = 1 if prediction[0, i] < .5 else 0
        prediction = self.model.predict(prediction)
        if clean:
            prediction = self.cleaner.predict(prediction)
        self.model.decode_slice(prediction[0], predicted_map, cur_col)
        cur_col += self.cols_in_slice
    print(predicted_map.to_vertical_string())
    # return the map so callers can do more than read stdout
    return predicted_map
def generate_map(self, cols, seedmap):
    """Generate a *cols*-wide map, seeded with the first memory_size tiles
    of *seedmap* and extended one predicted tile at a time.

    Args:
        cols: width of the generated map.
        seedmap: map supplying the seed tiles and the initial input window.

    Returns:
        The generated LiteTuxMap.
    """
    tiles_in_map = seedmap.height * cols
    x, y = self.map_to_training_set(seedmap)
    predicted_map = LTMap.LiteTuxMap(cols, seedmap.height)
    # copy the seed tiles verbatim (column-major indexing)
    for i in range(self.memory_size):
        predicted_map.set_tile(i // seedmap.height, i % seedmap.height,
                               seedmap.get_tile(i // seedmap.height,
                                                i % seedmap.height))
    p = x[0:1, :, :]
    for i in range(self.memory_size, tiles_in_map):
        # BUG FIX: use the model owned by this instance rather than the
        # module-level global `model` the original depended on.
        pred = self.model.predict(p)
        tid = self.generate_noisy_tile(pred)
        predicted_map.set_tile(i // seedmap.height, i % seedmap.height, tid)
        # Slide the window: rolling the flattened array by -16 drops the
        # oldest tile's 16 one-hot values; then write the new tile's
        # one-hot bits into the freed last slot.
        p = np.roll(p, -16)
        for b in range(16):
            p[0, self.memory_size - 1, b] = 1 if b == tid else 0
    return predicted_map
def build_slice_set(self, file_list, chunks=False):
    """Encode every level in *file_list* as a stacked array of slice vectors.

    Args:
        file_list: level filenames loadable by LiteTuxMap.load.
        chunks: when True, step by self.cols (non-overlapping chunks);
            otherwise step one column at a time (overlapping slices).

    Returns:
        2-D numpy array of shape (num_slices, in_nodes), or None when no
        level produced any slices.
    """
    ltm = LTMap.LiteTuxMap(1, 1)
    step_size = self.cols if chunks else 1
    rows = []
    for filename in file_list:
        print("processing ", filename)
        ltm.load(filename)
        end_col = ltm.width - self.cols
        # Accumulate rows in a list and stack once at the end -- the
        # previous per-row np.vstack grew the array quadratically.
        for c in range(0, end_col, step_size):
            rows.append(np.reshape(self.encode_slice(ltm, c),
                                   (1, self.in_nodes)))
    if not rows:
        return None
    return np.vstack(rows)
def test_frequency_report(self):
    """Smoke test: generating a frequency report must not raise."""
    level = LTMap.LiteTuxMap(1, 1)
    level.from_json_string(self.TEST_MAP)
    LTMap.LiteTuxFrequencyMetrics(level).generate_report()
# NOTE(review): this chunk originally began with an orphaned duplicate of
# the tail of generate_map (np.roll / one-hot rewrite / `return`), which is
# syntactically invalid at module level and already present in the full
# generate_map definition above, so it has been removed here.

test = LiteTuxOHE(verbose=True)

# let TensorFlow grow GPU memory on demand instead of reserving it all
gpu_devices = tf.config.experimental.list_physical_devices('GPU')
for device in gpu_devices:
    tf.config.experimental.set_memory_growth(device, True)

# build model if we don't have one saved
#model = test.build_simple_model()
model = test.build_deep_model()
if os.path.isfile("ohe_lstm.h5"):
    # reuse the previously trained network
    model = keras.models.load_model("ohe_lstm.h5")
    test.model = model
else:
    x, y = test.load_map_file_as_training_set("../litetux/levels/mario1-1.json")
    model.fit(x, y, epochs=50, batch_size=1)
    model.save("ohe_lstm.h5")
    test.log("saved model")
    # keep the instance pointed at the freshly trained model, matching the
    # load branch above
    test.model = model

test_map = LTMap.LiteTuxMap()
test_map.load("../litetux/levels/mario1-1.json")
#pred_map = test.predict_map(test_map)
pred_map = test.generate_map(150, test_map)
print(pred_map.to_vertical_string())
'''
for slc in range(x.shape[0]):
    pred = model.predict(x[slc:slc+1, :, :])
    print(slc, test.prediction_to_tile(y[slc:slc+1,:]), test.prediction_to_tile(pred))
'''
def test_group_frequency(self):
    """Group count/percentage aggregate the member tiles' statistics."""
    level = LTMap.LiteTuxMap(1, 1)
    level.from_json_string(self.TEST_MAP)
    metric = LTMap.TileFrequencyMetric(level, 16)
    group = [1, 7, 10]
    self.assertEqual(metric.get_tile_group_count(group), 18)
    self.assertEqual(metric.get_tile_group_percentage(group), .15)
def load_map_file_as_training_set(self, filename):
    """Load the LiteTux map at *filename*, log its layout, and return the
    training set produced by map_to_training_set."""
    level = LTMap.LiteTuxMap()
    level.load(filename)
    self.log(level.to_vertical_string())
    return self.map_to_training_set(level)