def _create_subset_solver(self) -> 'ConstraintSolver.SubsetSolver':
    unmapped_sources_bitset = BitSet(len(self.sources))
    unmapped_sources_bitset.set_all()

    subset_solver = ConstraintSolver.SubsetSolver(
        parent_solver=self,
        sources=self.sources,
        wip_solution_state=self.destinations,
        unmapped_sources_bitset=unmapped_sources_bitset,
        evaluator_class=self._evaluator_class,
        indent_level=0,
        debugging=self._debugging)

    # Get the next tree node to visit from the BFS queue.
    self._current_subset_solver_tree_node_index = self._subset_tree_visit_queue.pop(0)

    # Apply moves from our parents before us.
    stack = []
    iter_node = self._subset_tree[self._current_subset_solver_tree_node_index]
    while iter_node is not None:
        stack.append(iter_node)
        iter_node = iter_node.parent

    while len(stack) > 0:
        node = stack.pop()
        for move in node.moves_list:
            subset_solver._execute_move(move)

    return subset_solver
def _get_changes_to_fit(
    self, destination_index: int, destination: BitSet
) -> Tuple[List['IntervalsToBitSetsEvaluator.ChangeList'], List[Tuple[int, int]]]:
    change_lists = []
    fragment_infos = []

    # Find the intervals where our source Interval can fit.
    # e.g., if our Interval were a length of 3, and our BitSet looked like this:
    #   00100001000 <- BitSet
    #   ABCDEFGHIJK <- Bit Pos
    #   ChangeLists = [(D->G), (I->K)]
    range_start_idx = self.source.begin
    range_end_idx = self.source.end
    source_len = self.source.length

    # We'll start at the range's beginning, and stop when we either go off the BitSet
    # or hit the end of the range.
    curr_clear_bit_idx = destination.get_next_unset_bit_index(range_start_idx)
    while (curr_clear_bit_idx is not None) and (curr_clear_bit_idx <= range_end_idx):
        # We are on a zero within the begin..end range of our source's interval.
        # Find the next one value, which will bound our search.
        next_set_bit_idx = destination.get_next_set_bit_index(curr_clear_bit_idx)
        if next_set_bit_idx is None:
            # If we ran off the edge, set the bound at the last value.
            next_set_bit_idx = destination.get_num_bits()

        if next_set_bit_idx > range_end_idx:
            # Make it bound to the top end of our range.
            next_set_bit_idx = range_end_idx + 1

        # How big is this new interval?
        possible_interval = Interval.create_from_fixed_range(
            curr_clear_bit_idx, next_set_bit_idx - 1)
        if possible_interval.length >= source_len:
            # Our interval will fit within this one. Now pick an interval *within* the
            # possible one that fits our source and introduces the least fragmentation.
            change_list, fragment_info = self._get_best_change_list_for_possible_interval(
                possible_interval, destination)
            change_lists.append(change_list)
            fragment_infos.append(fragment_info)

        # Find the next zero AFTER our one.
        curr_clear_bit_idx = destination.get_next_unset_bit_index(next_set_bit_idx)

    return (change_lists, fragment_infos)
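To make the run-finding idea concrete, here's a rough standalone restatement of that scan using a plain Python list in place of the BitSet. The helper name and the list-of-bits representation are purely illustrative, not part of the real code:

def find_runs_that_fit(bits, begin, end, length):
    # Walk the 0/1 list between begin and end, collecting runs of zeros
    # that are at least `length` bits long.
    runs = []
    i = begin
    while i <= end:
        if bits[i] == 0:
            run_start = i
            while i <= end and bits[i] == 0:
                i += 1
            if i - run_start >= length:
                runs.append((run_start, i - 1))
        else:
            i += 1
    return runs

bits = [0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0]   # 00100001000, positions A..K = 0..10
print(find_runs_that_fit(bits, 0, 10, 3))  # [(3, 6), (8, 10)], i.e. D->G and I->K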
def _get_score_for_changes(
        self, change_list: 'RasterPixelsToSpritesEvaluator.ChangeList',
        destination: BitSet) -> int:
    score = 0
    if change_list is None:
        # If there are no changes, this is "free" and we can get rid of it.
        score = -math.inf
    else:
        # If we're the next pixel in the raster scan, give our moves a high priority.
        # Otherwise, push 'em off.
        next_pixel_idx = destination.get_next_unset_bit_index(0)
        if next_pixel_idx == self.source_index:
            # We're next up. Keep 'em equal.
            score = 0
        else:
            # We're not next in the queue. Don't do anything.
            score = math.inf

    return score
def __init__(self, dest_sprite_index: int, added_pixels_bitset: BitSet,
             overlapped_pixels_bitset: BitSet):
    super().__init__()

    # Which sprite got added as a result of this move?
    self.dest_sprite_index = dest_sprite_index

    # Which pixels got added as a result of this move?
    self.added_pixels_bitset = added_pixels_bitset

    # How many pixels was that?
    # This isn't used, as it was discovered that overlapping pixels was
    # a better heuristic for minimal sprite coverage.
    # self.num_pixels_added = added_pixels_bitset.get_num_bits_set()

    # How much does this overlap with previously covered pixels?
    self.overlapped_pixels_bitset = overlapped_pixels_bitset

    # How many pixels was that?
    self.num_pixels_overlapped = overlapped_pixels_bitset.get_num_bits_set()
intervals = []

# The font must begin at a specific location.
font_VRAM_interval = Interval.create_fixed_length_at_start_point(
    20, len(unique_patterns_lists[0]))
intervals.append(font_VRAM_interval)

# Can go anywhere. We'll treat them as contiguous, but we could just as easily
# split them up into multiples.
flag_VRAM_interval = Interval(begin=0,
                              end=448,
                              length=len(unique_patterns_lists[1]))
intervals.append(flag_VRAM_interval)

# Find a home for them.
bitsets = []
VRAMPositions = BitSet(448)
bitsets.append(VRAMPositions)

interval_to_VRAM_solver = ConstraintSolver(
    sources=intervals,
    destinations=bitsets,
    evaluator_class=IntervalsToBitSetsEvaluator,
    debugging=None)

while (len(interval_to_VRAM_solver.solutions) == 0) and (
        not interval_to_VRAM_solver.is_exhausted()):
    interval_to_VRAM_solver.update()

# How'd the solution go?
solution = interval_to_VRAM_solver.solutions[0]

# Track where each pattern interval will go.
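As a sketch of what that tracking might look like: assuming each entry in solutions is the list of Move objects the winning subset solver applied (an assumption on my part; the Move and ChangeList field names below are the ones used elsewhere in this post), the chosen VRAM positions could be read back roughly like this:

# Hypothetical read-back of the winning moves; the exact solution format is assumed.
interval_index_to_VRAM_position = {}
for move in solution:
    # Each ChangeList records the interval that was actually chosen.
    chosen = move.change_list.chosen_interval
    interval_index_to_VRAM_position[move.source_index] = chosen.begin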
class SubsetSolver:
    def __init__(self, parent_solver: 'ConstraintSolver', sources: List[object],
                 wip_solution_state: List[object],
                 unmapped_sources_bitset: BitSet, evaluator_class,
                 indent_level: int, debugging):
        timer = parent_solver.timer_name_to_timer["SubsetInit"]
        timer.begin()

        # Store our parent solver, so that we can alert them when done.
        self._parent_solver = parent_solver

        # Store our indent level so that we can trace debug properly.
        self.indent_level = indent_level

        # Remember what type of debugging we're doing.
        self.debugging = debugging

        # Store our evaluator class, so that we can construct them appropriately.
        self._evaluator_class = evaluator_class

        # Create an evaluator for every unmapped source.
        self._source_index_to_evaluator = {}
        self._unmapped_sources_bitset = BitSet.copy_construct_from(
            unmapped_sources_bitset)
        unmapped_source_index = self._unmapped_sources_bitset.get_next_set_bit_index(0)
        while unmapped_source_index is not None:
            source = sources[unmapped_source_index]
            evaluator = self._evaluator_class.factory_constructor(
                unmapped_source_index, source)
            self._source_index_to_evaluator[unmapped_source_index] = evaluator

            unmapped_source_index = self._unmapped_sources_bitset.get_next_set_bit_index(
                unmapped_source_index + 1)

        # Keep a reference to the sources (we won't alter these).
        self._sources = sources

        # Create a deep copy of our WIP solution state, as we *will* be altering that.
        self._wip_solution_state = copy.deepcopy(wip_solution_state)

        # Flag all of our destination nodes as dirty.
        self._dirty_destination_indices_bitset = BitSet(len(wip_solution_state))
        self._dirty_destination_indices_bitset.set_all()

        # Track which of the destinations are flagged as "empty" (those without
        # anything assigned to them).
        # We do this so that we don't do a ton of comparisons against each and
        # every empty destination, when the results will be exactly the same.
        # We'll keep the first such one in the set, but all subsequent ones
        # won't be considered.
        self._empty_destinations_bitset = BitSet(len(wip_solution_state))
        first_empty_already_found = False
        for dest_index in range(len(wip_solution_state)):
            destination = wip_solution_state[dest_index]
            if self._evaluator_class.is_destination_empty(destination):
                self._empty_destinations_bitset.set_bit(dest_index)

                if not first_empty_already_found:
                    first_empty_already_found = True
                else:
                    # Clear out the dirty flag so that *this* empty isn't considered fair game.
                    self._dirty_destination_indices_bitset.clear_bit(dest_index)

        timer.end()

    def assess_moves(self):
        # If no unmapped sources remain, flag success.
        if len(self._source_index_to_evaluator) == 0:
            # We're done, successfully!
            raise ConstraintSolver.AllItemsMappedSuccessfully()

        timer = self._parent_solver.timer_name_to_timer["AssessMoves"]
        timer.begin()

        # If we have dirty destinations, update each node to alert them.
        next_dirty_destination_index = self._dirty_destination_indices_bitset.get_next_set_bit_index(0)
        while next_dirty_destination_index is not None:
            destination = self._wip_solution_state[next_dirty_destination_index]
            for evaluator in self._source_index_to_evaluator.values():
                evaluator.update_moves_for_destination(
                    next_dirty_destination_index, destination)

            # We're no longer dirty.
            self._dirty_destination_indices_bitset.clear_bit(
                next_dirty_destination_index)

            # On to the next...
            next_dirty_destination_index = self._dirty_destination_indices_bitset.get_next_set_bit_index(
                next_dirty_destination_index + 1)

        timer.end()

    def choose_next_moves(self):
        # Find the edge(s) with the best scores.
        best_score = math.inf
        best_moves = []
        for evaluator in self._source_index_to_evaluator.values():
            timer = self._parent_solver.timer_name_to_timer["GetBestMoves"]
            timer.begin()
            score_moves_tuple = evaluator.get_list_of_best_moves()
            timer.end()

            score = score_moves_tuple[0]
            moves = score_moves_tuple[1]
            if len(moves) == 0:
                # No moves? We've failed.
                # Emit debugging.
                if self.debugging is not None:
                    indent_str = self.indent_level * '\t'
                    print(f"{indent_str}{self.__hash__()}: FAILED. No moves available.")

                raise ConstraintSolver.SolverFailed_NoMovesAvailableError()

            if score < best_score:
                # Replace our previous best.
                best_score = score
                best_moves = moves
            elif score == best_score:
                # Append these moves.
                for move in moves:
                    best_moves.append(move)

        if best_score == -math.inf:
            # SPECIAL CASE: These moves are free. Take them all now.
            self._parent_solver._append_moves(self, [best_moves])
        else:
            # Otherwise, fork the state for other possibilities.
            child_move_lists = []
            for move in best_moves:
                child_move_list = [move]
                child_move_lists.append(child_move_list)

            self._parent_solver._append_moves(self, child_move_lists)

        # Increment our indent level.
        self.indent_level = self.indent_level + 1

    def _execute_move(self, move: Move):
        timer = self._parent_solver.timer_name_to_timer["ExecuteMove"]
        timer.begin()

        # Emit debugging.
        if self.debugging is not None:
            indent_str = self.indent_level * '\t'
            print(f"{indent_str}{self.__hash__()}: Move {move.source_index} to {move.dest_index}.")

        # Apply it.
        source_index = move.source_index
        evaluator = self._source_index_to_evaluator[source_index]
        source = evaluator.source

        dest_index = move.dest_index
        destination = self._wip_solution_state[dest_index]

        change_list = move.change_list

        # Call the static function to apply.
        self._evaluator_class.apply_changes(source, destination, change_list)

        # Remove the source evaluator, as we are now mapped.
        del self._source_index_to_evaluator[move.source_index]
        self._unmapped_sources_bitset.clear_bit(move.source_index)

        # Flag that this destination is now dirty.
        self._dirty_destination_indices_bitset.set_bit(dest_index)

        # If this index was once an empty, flag a new one as the available one.
        # Remember: we only ever want ONE empty at any given time.
        if self._empty_destinations_bitset.is_set(dest_index):
            # Clear the current one, but ONLY if we've verified that it is no longer
            # empty (we don't allow moves that leave a destination empty, as that
            # could lead to a source being improperly mapped to a dest).
            if self._evaluator_class.is_destination_empty(destination):
                raise Exception(
                    "Destination was left empty after a move, which may lead to incorrect assignment."
                )
            else:
                # Destination is actually NOT empty any longer.
                self._empty_destinations_bitset.clear_bit(dest_index)

                # Can we find another one?
                next_empty = self._empty_destinations_bitset.get_next_set_bit_index(
                    dest_index + 1)
                if next_empty is not None:
                    # Mark it as dirty so that we can evaluate it as a possible
                    # move destination.
                    self._dirty_destination_indices_bitset.set_bit(next_empty)

        timer.end()
def create_final_palette_mapping(self) -> Mapping[int, int]:
    # This returns a mapping of color entry indices to
    # final palette indices. We need to do this because
    # the color entries aren't in any particular order, but
    # the final palette requires a specific order.
    # For example, color entry #2 may specify a slot value
    # of 7. In this case, there would be a mapping of 2 : 7.

    # Start with all color entries unassigned.
    unassigned_source_bitset = BitSet(len(self.color_entries))
    unassigned_source_bitset.set_all()

    # Start with all final palette slots unassigned.
    unassigned_dest_bitset = BitSet(len(self.color_entries))
    unassigned_dest_bitset.set_all()

    color_entry_index_to_final_slot_map = {}

    # Find any that have a specific slot and map them first.
    for color_entry_index in range(len(self.color_entries)):
        color_entry = self.color_entries[color_entry_index]
        if color_entry.is_empty():
            # Don't care about colors that are empty.
            unassigned_source_bitset.clear_bit(color_entry_index)
        else:
            # See if this has a slot.
            slot = color_entry.intentions.get_intention(
                ColorEntry.INTENTION_SLOT)
            if slot is not None:
                # Map to this slot.
                color_entry_index_to_final_slot_map[color_entry_index] = slot

                # This entry has been assigned.
                unassigned_source_bitset.clear_bit(color_entry_index)

                # The destination slot has been assigned.
                unassigned_dest_bitset.clear_bit(slot)

    # Let's go back through any that are unassigned in the source list.
    unassigned_source_idx = unassigned_source_bitset.get_next_set_bit_index(0)
    unassigned_dest_idx = unassigned_dest_bitset.get_next_set_bit_index(0)
    while unassigned_source_idx is not None:
        color_entry_index_to_final_slot_map[unassigned_source_idx] = unassigned_dest_idx

        # Clear currents.
        unassigned_source_bitset.clear_bit(unassigned_source_idx)
        unassigned_dest_bitset.clear_bit(unassigned_dest_idx)

        # Advance.
        unassigned_source_idx = unassigned_source_bitset.get_next_set_bit_index(
            unassigned_source_idx + 1)
        unassigned_dest_idx = unassigned_dest_bitset.get_next_set_bit_index(
            unassigned_dest_idx + 1)

    return color_entry_index_to_final_slot_map
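Here's a toy illustration of the same two-pass idea as standalone Python (not the real ColorEntry/BitSet code; the helper name and the list-of-requests representation are made up for this example): pinned entries claim their slots first, then everything else packs into the lowest free slots.

def map_entries_to_slots(slot_requests):
    # slot_requests[i] is the pinned slot for entry i, or None if it can go anywhere.
    mapping = {}
    taken = set()

    # Pass 1: honor explicit slot requests.
    for entry_idx, slot in enumerate(slot_requests):
        if slot is not None:
            mapping[entry_idx] = slot
            taken.add(slot)

    # Pass 2: pack the remaining entries into the lowest free slots.
    free_slots = (s for s in range(len(slot_requests)) if s not in taken)
    for entry_idx, slot in enumerate(slot_requests):
        if slot is None:
            mapping[entry_idx] = next(free_slots)

    return mapping

# Entry 2 is pinned to slot 7; everything else fills in around it.
print(map_entries_to_slots([None, None, 7, None, None, None, None, None]))
# {2: 7, 0: 0, 1: 1, 3: 2, 4: 3, 5: 4, 6: 5, 7: 6}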
for y_start in range(y_min, y_max + 1):
    for x_start in range(x_min, x_max + 1):
        # Track which pixel indices are in this sprite.
        pixel_indices_in_sprite = []
        for y in range(y_start, y_start + sprite_height):
            for x in range(x_start, x_start + sprite_width):
                if (x, y) in pixel_pos_to_unique_pixel_idx:
                    pixel_index = pixel_pos_to_unique_pixel_idx[(x, y)]
                    pixel_indices_in_sprite.append(pixel_index)

        # Did we have any pixels?
        if len(pixel_indices_in_sprite) > 0:
            # Yes. Create a potential sprite, recording all of the pixels it
            # covers in a bit set.
            coverage = BitSet(len(pixel_list))
            for pixel_index in pixel_indices_in_sprite:
                coverage.set_bit(pixel_index)

            # Append the positions and the coverages in separate lists
            # (we'll just use the coverage set for the solver).
            potential_sprite_upper_left_positions.append((x_start, y_start))
            potential_sprite_pixel_coverage_bitsets.append(coverage)

# Sanity check: how many sprites hold the first pixel index?
print(f"Sprites holding pixel 0 (location {pixel_list[0]}):")
num_containing = 0
for sprite_idx in range(len(potential_sprite_upper_left_positions)):
    coverage = potential_sprite_pixel_coverage_bitsets[sprite_idx]
    if coverage.is_set(0):
        ul_pos = potential_sprite_upper_left_positions[sprite_idx]
def _get_best_change_list_for_possible_interval(
    self, possible_interval: Interval, destination: BitSet
) -> Tuple['IntervalsToBitSetsEvaluator.ChangeList', Tuple[int, int]]:
    # Figure out the best place within the possible interval
    # to assign ourselves. We want the source block to be
    # positioned as close as possible to another block to
    # minimize fragmentation.
    #
    # Example 1:
    #   Our block consists of BBB
    #   We have the following BitSet:
    #     0011000000000
    #         ^^^^^^      <- Possible interval
    #   BAD:
    #     00110BBB00000
    #     001100BBB0000
    #     0011000BBB000
    #         ^^^^^^
    #   BEST:
    #     0011BBB000000   <- No fragmentation introduced
    #
    # Example 2:
    #   Our block consists of BBB
    #   We have the following BitSet:
    #     1100000000000
    #         ^^^^^       <- Possible interval
    #   No perfect choice here. Default to minimizing known
    #   fragmentation:
    #     1100BBB000000
    #         ^^^^^       <- Only introduced a 2-spot fragment
    #
    # Counting the gaps on either side:
    #     10000001
    #     01234567
    #       ^^^           <- Possible interval (2->4)
    #     Bits to Left: 1, Bits to Right: 2

    # Look to the left of the BEGINNING of our interval.
    left_set_bit_idx = destination.get_previous_set_bit_index(possible_interval.begin)
    num_bits_to_left = 0
    if left_set_bit_idx is not None:
        num_bits_to_left = possible_interval.begin - left_set_bit_idx - 1

    # Look to the right of the END of our interval.
    right_set_bit_idx = destination.get_next_set_bit_index(possible_interval.end)
    num_bits_to_right = destination.get_num_bits() - possible_interval.end - 1
    if right_set_bit_idx is not None:
        num_bits_to_right = right_set_bit_idx - possible_interval.end - 1

    if num_bits_to_left <= num_bits_to_right:
        # We choose to the left.
        chosen_interval = Interval.create_fixed_length_at_start_point(
            possible_interval.begin, self.source.length)

        # Smallest is the distance from our left edge to the nearest 1.
        smallest_fragment = num_bits_to_left

        # Largest is the distance from the right edge of the possible interval
        # PLUS the difference between our possible and source lengths.
        largest_fragment = num_bits_to_right + (possible_interval.length -
                                                self.source.length)

        # Return a tuple of (change list, (smallest, largest)).
        # We do this because we don't want the change list to hold fragment details
        # (since those will change after subsequent moves), but we don't want to have
        # to recalculate the fragments separately.
        return (IntervalsToBitSetsEvaluator.ChangeList(possible_interval,
                                                       chosen_interval),
                (smallest_fragment, largest_fragment))
    else:
        # Go to the right edge.
        chosen_interval = Interval.create_fixed_length_from_end_point(
            possible_interval.end, self.source.length)

        # Smallest is the distance from our right edge to the nearest 1.
        smallest_fragment = num_bits_to_right

        # Largest is the distance from the left edge of the possible interval
        # PLUS the difference between our possible and source lengths.
        largest_fragment = num_bits_to_left + (possible_interval.length -
                                               self.source.length)

        # Return a tuple of (change list, (smallest, largest)).
        # We do this because we don't want the change list to hold fragment details
        # (since those will change after subsequent moves), but we don't want to have
        # to recalculate the fragments separately.
        return (IntervalsToBitSetsEvaluator.ChangeList(possible_interval,
                                                       chosen_interval),
                (smallest_fragment, largest_fragment))
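And a quick standalone check of that gap arithmetic on the 10000001 example, with a plain Python list standing in for the BitSet (illustration only, not the real API):

bits = [1, 0, 0, 0, 0, 0, 0, 1]   # 10000001, positions 0..7
interval_begin, interval_end = 2, 4

# Nearest set bit strictly before the interval's begin, and strictly after its end.
left_set = max(i for i in range(interval_begin) if bits[i] == 1)                 # 0
right_set = min(i for i in range(interval_end + 1, len(bits)) if bits[i] == 1)   # 7

num_bits_to_left = interval_begin - left_set - 1    # 2 - 0 - 1 = 1
num_bits_to_right = right_set - interval_end - 1    # 7 - 4 - 1 = 2
print(num_bits_to_left, num_bits_to_right)          # 1 2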
def is_destination_empty(destination: BitSet) -> bool:
    return destination.are_all_clear()
def apply_changes(source: Interval, destination: BitSet,
                  change_list: 'IntervalsToBitSetsEvaluator.ChangeList'):
    # Apply our changes, which is a run of bits to set.
    for bit_idx in range(change_list.chosen_interval.begin,
                         change_list.chosen_interval.end + 1):
        destination.set_bit(bit_idx)
def update_moves_for_destination(self, destination_index: int,
                                 destination: BitSet):
    # We have three cases:
    #   1. The destination already contains us, in which case we issue a free move.
    #   2. We are NOT the next pixel up, so we issue a fake move that keeps us
    #      in the running until we are called up.
    #   3. We ARE the next pixel to solve for in the destination, in which case
    #      we create all of our possible moves.
    if destination.is_set(self.source_index):
        # Case 1: A previous move already covered this pixel.
        # We'll just move into the destination.
        # Create a move that is "free" and has no change list to worry about.
        move = Move(source_index=self.source_index,
                    dest_index=destination_index,
                    change_list=None)
        potential_move = RasterPixelsToSpritesEvaluator.PotentialMove(
            move=move, base_score=-math.inf)
        self._destination_to_potential_move_list[destination_index] = [
            potential_move
        ]
    else:
        # Are we the next pixel in the queue?
        next_pixel_idx = destination.get_next_unset_bit_index(0)
        if next_pixel_idx != self.source_index:
            # Case 2: We are NOT the next pixel up. This will be the most common
            # situation. As a result, we'll create a dummy move the first time we
            # hit this case and then use that as our default move subsequently.
            if destination_index not in self._destination_to_potential_move_list:
                # We need *a* move to signal to the constraint solver that we're
                # not completely at a dead end, so give a move that will raise an
                # exception if anyone actually tries to use it.
                invalid_change = RasterPixelsToSpritesEvaluator.InvalidSourceChangeList()
                move = Move(source_index=self.source_index,
                            dest_index=destination_index,
                            change_list=invalid_change)

                # Create a move with an awful score.
                potential_move = RasterPixelsToSpritesEvaluator.PotentialMove(
                    move=move, base_score=math.inf)
                self._destination_to_potential_move_list[destination_index] = [
                    potential_move
                ]
        else:
            # Case 3: We're up!
            # We'll submit *all* of our potential sprites as candidates, with this
            # in mind: all of our potential sprites start on the same raster line
            # as we're on.
            candidate_lists = []
            sprite_idx = self.source.pixel_to_potential_sprites_bitset.get_next_set_bit_index(0)
            while sprite_idx is not None:
                # Record which bits got changed (these are the unique bits, which may be
                # different than what our sprite originally covered due to previous moves
                # overlapping).
                sprite_coverage = self.source.sprite_pixel_coverages[sprite_idx]
                overlap = sprite_coverage.get_intersection_bitset(destination)
                changed = sprite_coverage.get_difference_bitset(destination)
                changed.intersect_with(sprite_coverage)

                change_list = RasterPixelsToSpritesEvaluator.ValidChangeList(
                    dest_sprite_index=sprite_idx,
                    added_pixels_bitset=changed,
                    overlapped_pixels_bitset=overlap)
                candidate_lists.append(change_list)

                sprite_idx = self.source.pixel_to_potential_sprites_bitset.get_next_set_bit_index(
                    sprite_idx + 1)

            # This may go against the spirit of the solver, but because we know that it
            # uses a BFS approach, we want to put the change lists we think will have
            # the most success FIRST in the list so that they get explored before
            # others. We don't want to omit the other options, just prioritize them
            # with a heuristic.
            change_lists = []
            while len(candidate_lists) > 0:
                best_pixels_idx = -1
                best_pixels_value = math.inf

                # Find the one matching the best heuristic.
                for candidate_idx, candidate in enumerate(candidate_lists):
                    # HEURISTIC: MOST PIXELS BEING ADDED
                    # In testing, this produced worse results than choosing
                    # sprites with the lowest overlap of already-chosen pixels.
                    #pixels_value = candidate.num_pixels_added

                    # HEURISTIC: FEWEST OVERLAP
                    pixels_value = candidate.num_pixels_overlapped
                    if pixels_value < best_pixels_value:
                        best_pixels_value = pixels_value
                        best_pixels_idx = candidate_idx

                change_lists.append(candidate_lists[best_pixels_idx])
                candidate_lists.pop(best_pixels_idx)

            potential_moves = []
            for change_list in change_lists:
                move = Move(source_index=self.source_index,
                            dest_index=destination_index,
                            change_list=change_list)

                # Create a potential move with an equal score for all of these moves,
                # so that they all get evaluated.
                potential_move = RasterPixelsToSpritesEvaluator.PotentialMove(
                    move=move, base_score=0)
                potential_moves.append(potential_move)

            self._destination_to_potential_move_list[
                destination_index] = potential_moves
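Incidentally, that ordering loop is a stable selection sort on num_pixels_overlapped: it repeatedly extracts the first occurrence of the minimum. Since Python's built-in sort is also stable, an equivalent (and shorter) way to build the prioritized list would be:

# Equivalent ordering: sorted() is stable, so ties keep their original order,
# just like repeatedly extracting the first minimum above.
change_lists = sorted(candidate_lists, key=lambda c: c.num_pixels_overlapped)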