def get_steering_inputs(path: str):
    """
    Returns a tuple of steering inputs + timestamp from a given GBX file.

    Returns None when the file cannot be parsed or contains no usable ghost.
    """
    try:
        gbx_obj = Gbx(path)
    except Exception as excp:
        print(f'Error parsing: {excp}')
        return None

    ghosts = gbx_obj.get_classes_by_ids(
        [GbxType.CTN_GHOST, GbxType.CTN_GHOST_OLD])
    if ghosts:
        ghost = ghosts[0]
    else:
        # Fall back to the legacy raw-chunk parser for old replay formats.
        ghost = try_parse_old_ghost(gbx_obj)
        if not ghost:
            print('Error: no ghosts')
            return None
        if not ghost.control_entries:
            print('Error: no control entries')
            return None

    steering_inputs, timeline = get_steer_events(ghost.control_entries)
    return steering_inputs, timeline
def process(replay_gbx: Gbx, trace_offset: int):
    '''
    Processes a single replay using a given trace offset.

    Args:
        replay_gbx (pygbx.Gbx): the Gbx object to process
        trace_offset (int): the trace offset to use for tracing out blocks

    Returns:
        list: the block tuples, or None when the replay has no ghost,
        no replay record, or no challenge.
    '''
    replays = replay_gbx.get_classes_by_ids(
        [GbxType.REPLAY_RECORD, GbxType.REPLAY_RECORD_OLD])
    ghosts = replay_gbx.get_classes_by_ids(
        [GbxType.GAME_GHOST, GbxType.CTN_GHOST])
    if not replays or not ghosts:
        return None

    challenge = replays[0].track.get_class_by_id(GbxType.CHALLENGE)
    if not challenge:
        return None

    # Keep only blocks that convert to a valid tuple (block_to_tup may
    # return a falsy value for blocks it cannot represent).
    filtered = [t for t in (block_to_tup(b) for b in challenge.blocks) if t]
    return process_blocks(filtered, ghosts[0], trace_offset)
def analyze_replay(path: str):
    """
    Analyze the steering inputs of a replay, counting steering "spikes"
    (direction reversals above the noise threshold) per time partition.

    Args:
        path (str): path to the .Gbx replay file

    Returns:
        dict: {'version', 'login', 'max_spikes', 'spikes'} on success,
        None on parse failure or when no usable ghost is found.
    """
    try:
        g = Gbx(path)
    except Exception as e:
        print(f'Error parsing: {e}')
        return None

    ghosts = g.get_classes_by_ids([GbxType.CTN_GHOST, GbxType.CTN_GHOST_OLD])
    if not ghosts:
        ghost = try_parse_old_ghost(g)
        if not ghost:
            print('Error: no ghosts')
            return None
        if not ghost.control_entries:
            print('Error: no control entries')
            return None
    else:
        ghost = ghosts[0]
    # BUG FIX: the original unconditionally re-assigned `ghost = ghosts[0]`
    # here, raising IndexError whenever the ghost was recovered through
    # try_parse_old_ghost (i.e. when `ghosts` was empty).

    results = {
        'version': ghost.game_version,
        'login': ghost.login,
        'max_spikes': 0,
        'spikes': 0
    }

    partitions = partition_steer_events(ghost.control_entries, TIME_PERIOD)

    spikes = []
    for partition in partitions:
        # Start at -1 so the first over-threshold movement establishes a
        # direction without counting as a spike; clamp to 0 afterwards.
        spikes_num = -1
        spike_dir = 0
        for i in range(1, len(partition)):
            diff = partition[i] - partition[i - 1]
            if abs(diff) <= NOISE_THRESHOLD:
                continue
            if diff > 0 and spike_dir != 1:
                spike_dir = 1
                spikes_num += 1
            elif diff < 0 and spike_dir != -1:
                spike_dir = -1
                spikes_num += 1
        spikes.append(max(0, spikes_num))

    results['max_spikes'] = max(spikes) if spikes else 0
    results['spikes'] = spikes
    return results
def try_parse_old_ghost(g: Gbx):
    """Recover a ghost from an old-format GBX via its raw chunks.

    Returns a CGameCtnGhost on success, None when the ghost-events chunk
    (0x2401B011) is absent.
    """
    ghost = CGameCtnGhost(0)

    login_chunk = g.find_raw_chunk_id(0x2401B00F)
    if login_chunk:
        ghost.login = login_chunk.read_string()

    events_chunk = g.find_raw_chunk_id(0x2401B011)
    if not events_chunk:
        return None

    events_chunk.seen_loopback = True
    g.read_ghost_events(ghost, events_chunk, 0x2401B011)
    return ghost
def try_extract_2020(g: Gbx):
    # Exploratory/debug routine: dumps what looks like input data from
    # chunk 0x0309201D. The byte offsets and field meanings below are
    # reverse-engineered guesses — TODO confirm against the file format.
    cbp = g.find_raw_chunk_id(0x0309201D)
    if not cbp:
        return
    print('Pos:', cbp.pos)
    cbp.skip(8) # PIKS + skip size
    cbp.skip(5 * 4) # unknown data
    # Presumed tick count for the recording — verify.
    ticks = cbp.read_uint32()
    print(ticks)
    # Presumed payload size in bytes — verify.
    data_size = cbp.read_uint32()
    print(data_size)
    step = cbp.read_uint32() # something?
    i = 0
    # NOTE(review): deliberately overwrites the uint32 read above with a
    # derived time-per-record estimate (2 bytes per record, 4-byte tail).
    step = ticks / ((data_size - 4) / 2)
    # print(step)
    # cbp.skip(9)
    while i < data_size - 4:
        b = cbp.read_byte()
        print(hex(b), end=' ')
        # 0xFF appears to be a 1-byte filler; anything else starts a
        # 2-byte record whose position maps to a timestamp via `step`.
        if b != 0xFF:
            print(hex(b))
            print(i * step)
            i += 2
        else:
            i += 1
def process_path(path, write_func):
    """Load a replay and emit its ghost's inputs through write_func."""
    g = Gbx(path)
    ghosts = g.get_classes_by_ids([GbxType.CTN_GHOST, GbxType.CTN_GHOST_OLD])
    # Prefer a directly-parsed ghost; fall back to the legacy raw parser.
    ghost = ghosts[0] if ghosts else try_parse_old_ghost(g)
    if not ghost:
        return
    # if not ghost.control_entries:
    #     try_extract_2020(g)
    print_inputs(ghost, write_func)
def try_parse_old_ghost(gbx: Gbx):
    """
    Carry over from original script 'average_steering_partitions.py.
    Kept to sustain compatibility.
    """
    ghost = CGameCtnGhost(0)

    chunk = gbx.find_raw_chunk_id(0x2401B00F)
    if chunk:
        ghost.login = chunk.read_string()

    chunk = gbx.find_raw_chunk_id(0x2401B011)
    if not chunk:
        return None
    chunk.seen_loopback = True
    gbx.read_ghost_events(ghost, chunk, 0x2401B011)
    return ghost
def uses_binary_input(path: str):
    """
    Check whether a replay was driven with binary (tap) input only, and
    count taps per time partition.

    Args:
        path (str): path to the .Gbx replay file

    Returns:
        dict: {'version', 'login', 'max_taps', 'taps'} for a matching
        replay; None on parse failure, missing ghost/entries, any analog
        'Steer' event, or a non-matching login.
    """
    try:
        g = Gbx(path)
    except Exception as e:
        # BUG FIX: the original used a bare `except:` that printed an
        # undefined name `e` (NameError) and then fell through with `g`
        # unbound; now we bind the exception and bail out.
        print(f'Error parsing: {e}')
        return None

    ghosts = g.get_classes_by_ids([GbxType.CTN_GHOST, GbxType.CTN_GHOST_OLD])
    if not ghosts:
        ghost = try_parse_old_ghost(g)
        if not ghost:
            print('Error: no ghosts')
            return None
        if not ghost.control_entries:
            print('Error: no control entries')
            return None
    else:
        ghost = ghosts[0]
    # BUG FIX: removed the stray unconditional `ghost = ghosts[0]` that
    # raised IndexError whenever the ghost came from try_parse_old_ghost.

    results = {
        'version': ghost.game_version,
        'login': ghost.login,
        'max_taps': 0,
        'taps': 0
    }

    # NOTE(review): hard-coded login filter carried over from the original.
    if ghost.login != 'acceleracer_01':
        return None

    # Any analog steering event disqualifies the replay.
    for entry in ghost.control_entries:
        if entry.event_name == 'Steer':
            return None

    partitions = partition_binary_events(ghost.control_entries, TIME_PERIOD)
    # print(partitions)
    results['max_taps'] = max((len(p) for p in partitions), default=0)
    results['taps'] = partitions
    return results
def process_fname(fname: str, trace_offset: int) -> tuple:
    '''
    Processes a single filename given the trace offset

    Args:
        fname (str): the filename to the replay file
        trace_offset (int): the trace offset to use for tracing out blocks

    Returns:
        tuple: (filename, block_tuples)
    '''
    print('\tProcessing: \t{}'.format(fname))
    gbx_file = Gbx(fname)
    blocks = process(gbx_file, trace_offset)
    return fname, blocks
def main():
    """
    Walk a directory tree of .Gbx replays and print a JSON mapping of
    ghost login -> list of replay filenames.

    Usage: script.py <path>
    """
    path = sys.argv[1]
    # login = sys.argv[2]
    reps = {}
    for root, _subdirs, files in os.walk(path):
        for filename in files:
            if not filename.lower().endswith('.gbx'):
                continue
            try:
                g = Gbx(os.path.join(root, filename))
                ghost = g.get_classes_by_ids(
                    [GbxType.CTN_GHOST, GbxType.CTN_GHOST_OLD])[0]
            except Exception:
                # Best-effort scan: skip unparsable files and files with
                # no ghost (IndexError from [0]). BUG FIX: the original
                # bare `except:` also swallowed KeyboardInterrupt.
                continue
            reps.setdefault(ghost.login, []).append(filename)
    # Removed the dead counter `i` from the original — it was incremented
    # but never read.
    print(json.dumps(reps))
def save_gbx(options: dict, template: str, output: str):
    """
    Write a new .Gbx challenge file by patching a template file in place.

    Args:
        options (dict): one of 'rotation', 'input' (pickle path) or
            'track_data' supplies the block list; optional 'map_name'
            overrides the name derived from `output`.
        template (str): path to the template .Gbx file to patch.
        output (str): path of the .Gbx file to write.
    """
    context = GbxSaveContext(True)

    def data_replace(s, rep, offset, rlen=-1):
        # Splice `rep` into `s` at `offset`, replacing `rlen` bytes
        # (defaults to len(rep)).
        if rlen == -1:
            rlen = len(rep)
        return s[:offset] + rep + s[offset + rlen:]

    temp_gbx = Gbx(template)
    challenge = temp_gbx.get_class_by_id(GbxType.CHALLENGE)
    common = temp_gbx.get_class_by_id(0x03043003)

    # NOTE(review): if none of 'rotation'/'input'/'track_data' is present,
    # `track` is unbound and populate_flags raises UnboundLocalError —
    # confirm callers always pass one of the three.
    if 'rotation' in options:
        track = rotate_track_challenge(challenge, options['rotation'])
    elif 'input' in options:
        track = pickle.load(open(options['input'], 'rb'))
    elif 'track_data' in options:
        track = options['track_data']

    track = populate_flags(track)

    # Seed the lookback-string store with the template's existing strings
    # so lookback indices written later resolve correctly.
    context.append_to_string_store(challenge.map_uid)
    context.append_to_string_store(challenge.environment)
    context.append_to_string_store(challenge.map_author)
    context.append_to_string_store(challenge.map_name)
    context.append_to_string_store(challenge.mood)
    context.append_to_string_store(challenge.env_bg)
    context.append_to_string_store(challenge.env_author)

    # udata: uncompressed body to patch; data: raw template file bytes.
    udata = bytes(temp_gbx.data)
    temp = open(template, 'rb')
    data = temp.read()

    # We have to be very careful of order we save the data.
    # We begin saving the data from the very end to the beggining of the file,
    # so that all Gbx's class offsets are always valid.

    # Modifying body

    # Blocks
    context.write_uint32(len(track))
    for block in track:
        context.write_block(block)

    info = temp_gbx.positions['block_data']
    if info.valid:
        udata = data_replace(udata, context.data, info.pos, info.size)

    # The mood
    # info = temp_gbx.positions['mood']
    # if info.valid:
    #     mood = random.choices(MOODS, MOOD_WEIGHTS)[0]
    #     print(mood)
    #     udata = data_replace(udata, write_lookback_string(
    #         stored_strings, seen_lookback, mood), info.pos, info.size)

    # Map name in editor
    context.reset()
    if 'map_name' in options:
        map_name = options['map_name']
    else:
        map_name = get_map_name(output)
    context.write_string(map_name)

    # The map name
    info = temp_gbx.positions['map_name']
    if info.valid:
        udata = data_replace(udata, context.data, info.pos, info.size)

    compressed = lzo.compress(bytes(udata), 1, False)

    fs = open(output, 'wb+')

    # New data and compressed data size
    data_size_offset = temp_gbx.positions['data_size'].pos
    comp_data_size_offset = data_size_offset + 4
    comp_data_offset = comp_data_size_offset + 4
    data = data_replace(data, struct.pack('I', len(udata)), data_size_offset)
    data = data_replace(data, struct.pack(
        'I', len(compressed)), comp_data_size_offset)
    data = data_replace(data, compressed, comp_data_offset)

    # Modifying header

    # The track name in map chooser
    # NOTE(review): this re-uses context.data, which at this point holds
    # the serialized map name written above — confirm that is intended.
    info = temp_gbx.positions['track_name']
    if info.valid:
        data = data_replace(data, context.data, info.pos, info.size)

    # New chunk size since track name length could change
    user_data_diff = len(common.track_name) - len(map_name)

    info = temp_gbx.positions['50606083']
    if info.valid:
        # Read the old chunk size in place, then restore the parser pos.
        prev = temp_gbx.root_parser.pos
        temp_gbx.root_parser.pos = info.pos
        new_chunk_size = temp_gbx.root_parser.read_uint32() - user_data_diff
        temp_gbx.root_parser.pos = prev
        data = data_replace(data, struct.pack(
            'I', new_chunk_size), info.pos, info.size)

    # Finally, the user data size
    new_user_data_size = temp_gbx.user_data_size - user_data_diff
    info = temp_gbx.positions['user_data_size']
    if info.valid:
        data = data_replace(data, struct.pack(
            'I', new_user_data_size), info.pos, info.size)

    fs.write(data)
    fs.close()
def process_fname(fname, trace_offset):
    """Process one replay file; returns (fname, block tuples)."""
    print('\tProcessing: \t{}'.format(fname))
    return fname, process(Gbx(fname), trace_offset)
return 180 if rot == 3: return 90 return 0 if len(sys.argv) <= 1: track = pickle.load(open('data/train_data.pkl', 'rb+'))[0][1] print('Visualizing first track from the training data.') else: try: train_idx = int(sys.argv[1]) track = pickle.load(open('data/train_data.pkl', 'rb+'))[train_idx][1] print(f'Visualizing track {train_idx} from the training data.') except ValueError: gfile = Gbx(sys.argv[1]) challenges = gfile.get_classes_by_ids( [GbxType.CHALLENGE, GbxType.CHALLENGE_OLD]) if challenges: challenge = challenges[0] track = [block_to_tup(block) for block in challenge.blocks] print('Visualizing track from the provided challenge file.') else: print('Cannot load track: invalid file') quit() print(track) arr_img = pygame.image.load('block_images/arr.jpg') arr_img = pygame.transform.scale(arr_img, (10, 10))