def flattened_options(self) -> t.Iterator[FrozenMultiset[T]]:
    # Yield every concrete multiset of leaf children this node can resolve to.
    if isinstance(self, NodeAny):
        # An "any" node contributes one option per child: either the child's
        # own option sets (when the child is itself a node) or the lone leaf
        # wrapped in a single-element multiset.
        for child in self._children:
            if isinstance(child, BaseNode):
                yield from child.flattened_options
            else:
                yield FrozenMultiset((child,))
    else:
        # Otherwise split the flattened children into fixed leaves and nested
        # nodes, then combine the fixed leaves with every element of the
        # cartesian product of the nested nodes' option sets.
        accumulated = []
        anys = []
        for child in self.flattened:
            if isinstance(child, BaseNode):
                anys.append(child)
            else:
                accumulated.append(child)
        for combination in itertools.product(
            *(
                _any.flattened_options
                for _any in anys
            )
        ):
            yield FrozenMultiset(
                itertools.chain(
                    accumulated,
                    *combination,
                )
            )
def removed_printings(self) -> FrozenMultiset[Printing]:
    """Printings present in the original but not in the current collection (lazy, cached)."""
    if self._removed_printings is not None:
        return self._removed_printings
    original = FrozenMultiset(self._original.all_printings)
    current = FrozenMultiset(self._current.all_printings)
    self._removed_printings = original - current
    return self._removed_printings
def __init__(
    self,
    children: t.Union[
        t.Iterable[PrintingNodeChild],
        t.Mapping[PrintingNodeChild, int],
    ],
):
    """Freeze the children (an iterable, or a child -> multiplicity mapping) into an immutable multiset."""
    self._children = FrozenMultiset(children)
def __init__(
    self,
    cubeables: t.Union[t.Iterable[C], t.Iterable[t.Tuple[C, int]], t.Mapping[C, int], None] = None,
):
    """Normalize cubeables into a frozen multiset and reset all lazy per-kind caches."""
    if cubeables is None:
        self._cubeables = FrozenMultiset()
    else:
        self._cubeables = FrozenMultiset(cubeables)
    # Lazily populated, cached views over self._cubeables, one per cubeable kind.
    self._models: t.Optional[FrozenMultiset[M]] = None
    self._traps: t.Optional[FrozenMultiset[T]] = None
    self._garbage_traps: t.Optional[FrozenMultiset[T]] = None
    self._tickets: t.Optional[FrozenMultiset[I]] = None
    self._purples: t.Optional[FrozenMultiset[P]] = None
    self._laps: t.Optional[FrozenMultiset[L]] = None
def deserialize(cls, value: serialization_model, inflator: Inflator) -> TrapCollection:
    """Rebuild a TrapCollection from serialized (trap, multiplicity) pairs."""
    counts = {}
    for serialized_trap, multiplicity in value['traps']:
        counts[Trap.deserialize(serialized_trap, inflator)] = multiplicity
    return cls(FrozenMultiset(counts))
def groupify(self, items: t.Iterable[T]) -> Grouping[T]:
    """Partition items into this schema's categories in order; leftovers optionally land in 'Others'."""
    remaining = Multiset(items)
    groups = []
    for category in self._categories:
        matched = FrozenMultiset(
            category.criteria.matches(remaining, self._extraction_strategy)
        )
        if matched:
            groups.append(self._group_type()(category.name, matched))
            # Earlier categories consume their matches before later ones run.
            remaining -= matched
    if self._include_others and remaining:
        groups.append(self._group_type()('Others', FrozenMultiset(remaining)))
    return Grouping(self._name, groups)
def tickets(self) -> FrozenMultiset[I]:
    """All ticket cubeables; computed on first access and cached."""
    if self._tickets is not None:
        return self._tickets
    self._tickets = FrozenMultiset(
        c for c in self._cubeables if isinstance(c, BaseTicket)
    )
    return self._tickets
def traps(self) -> FrozenMultiset[T]:
    """All trap cubeables; computed on first access and cached."""
    if self._traps is None:
        trap_like = [c for c in self._cubeables if isinstance(c, BaseTrap)]
        self._traps = FrozenMultiset(trap_like)
    return self._traps
def models(self) -> FrozenMultiset[M]:
    """All model (OrpBase) cubeables; computed on first access and cached."""
    if self._models is not None:
        return self._models
    self._models = FrozenMultiset(
        filter(lambda c: isinstance(c, OrpBase), self._cubeables)
    )
    return self._models
def laps(self) -> FrozenMultiset[L]:
    """All lap cubeables; computed on first access and cached."""
    if self._laps is None:
        lap_like = [c for c in self._cubeables if isinstance(c, BaseLap)]
        self._laps = FrozenMultiset(lap_like)
    return self._laps
def purples(self) -> FrozenMultiset[P]:
    """All purple cubeables; computed on first access and cached."""
    if self._purples is not None:
        return self._purples
    self._purples = FrozenMultiset(
        filter(lambda c: isinstance(c, BasePurple), self._cubeables)
    )
    return self._purples
def garbage_traps(self) -> FrozenMultiset[T]:
    """Traps whose intention type is GARBAGE; computed on first access and cached."""
    if self._garbage_traps is None:
        garbage = [
            c
            for c in self._cubeables
            if isinstance(c, BaseTrap) and c.intention_type == IntentionType.GARBAGE
        ]
        self._garbage_traps = FrozenMultiset(garbage)
    return self._garbage_traps
def check(cls, updater: CubeUpdater) -> t.Optional[ReportNotification]:
    """Report cuts requested by the patch that exceed what the cube actually contains."""
    # Negating the delta turns removals positive; .positive() yields
    # (cubeable, multiplicity) pairs for the requested cuts.
    requested_cuts = FrozenMultiset(
        dict(
            (~updater.patch.cube_delta_operation.cubeables).positive()
        )
    )
    missing = requested_cuts - updater.cube.cubeables
    if missing:
        return cls(missing)
    return None
def check(cls, updater: CubeUpdater) -> t.Optional[NodesWithoutGroups]:
    """Flag nodes being added by the patch that carry no groups."""
    groupless = {
        node: multiplicity
        for node, multiplicity in updater.patch.node_delta_operation.nodes.items()
        if multiplicity > 0 and not node.groups
    }
    if groupless:
        return cls(FrozenMultiset(groupless))
    return None
def get_map_slot(self, expansion_collection: t.Union[ExpansionCollection, t.Collection[Printing]]) -> MapSlot[Printing]:
    """Build a weighted MapSlot of booster-legal printings matching each option's pattern.

    Accepts either an ExpansionCollection (each option draws from its own
    collection key) or a flat collection of printings shared by all options.
    """
    weighted_options = {}
    for option, weight in self._options.items():
        if isinstance(expansion_collection, ExpansionCollection):
            pool = expansion_collection[option.collection_key].printings
        else:
            pool = expansion_collection
        candidates = FrozenMultiset(
            printing
            for printing in pool
            if printing.in_booster and option.pattern.match(printing)
        )
        weighted_options[candidates] = weight
    return MapSlot(weighted_options)
def as_trap_collection(self) -> FrozenMultiset[Trap]:
    """Convert each trap's constrained nodes into a Trap wrapping an AllNode.

    Single-child AllNodes are unwrapped so they do not nest pointlessly.
    Raises Exception if any trap is empty.
    """
    converted = []
    for trap in self.traps:
        if not trap:
            raise Exception('Empty trap')
        node_children = []
        for constrained_node in trap:
            node = constrained_node.node
            if isinstance(node, AllNode) and len(node.children) == 1:
                node_children.extend(node.children)
            else:
                node_children.append(node)
        converted.append(Trap(AllNode(node_children)))
    return FrozenMultiset(converted)
def check(cls, updater: CubeUpdater) -> t.Optional[ReportNotification]:
    # Cube as it stands, with garbage-intention traps filtered out — garbage
    # traps are regenerated wholesale, so including them would drown the diff.
    non_garbage_cube = Cube(
        (
            cubeable
            for cubeable in updater.cube.cubeables
            if not (
                isinstance(cubeable, Trap)
                and cubeable.intention_type == IntentionType.GARBAGE
            )
        )
    )
    # Same filtering applied to the cube after the patch's delta operation.
    new_no_garbage_cube = Cube(
        (
            cubeable
            for cubeable in (updater.cube + updater.patch.cube_delta_operation).cubeables
            if not (
                isinstance(cubeable, Trap)
                and cubeable.intention_type == IntentionType.GARBAGE
            )
        )
    )
    # Compare at cardboard granularity, pooling cube printings with the
    # node collection's printings on each side.
    old_cardboards = FrozenMultiset(
        printing.cardboard
        for printing in itertools.chain(
            non_garbage_cube.all_printings,
            updater.node_collection.all_printings,
        )
    )
    new_cardboards = FrozenMultiset(
        printing.cardboard
        for printing in itertools.chain(
            new_no_garbage_cube.all_printings,
            (updater.node_collection + updater.patch.node_delta_operation).all_printings,
        )
    )
    # NOTE(review): annotated t.Optional but always returns a CardboardChange,
    # even when the counter difference is empty — confirm whether an empty
    # diff should yield None like the sibling check() implementations.
    return CardboardChange(
        FrozenCounter(
            new_cardboards.elements()
        ) - FrozenCounter(
            old_cardboards.elements()
        )
    )
def __init__(
    self,
    printings: t.Iterable[Printing],
    decks: t.Optional[t.Iterable[Deck]] = None,
):
    """Hold printings as a frozen multiset and decks as a tuple (empty when None)."""
    if not isinstance(printings, FrozenMultiset):
        printings = FrozenMultiset(printings)
    self._printings: FrozenMultiset[Printing] = printings
    if decks is None:
        self._decks = ()
    elif isinstance(decks, tuple):
        self._decks = decks
    else:
        self._decks = tuple(decks)
class ConstrainedNodes(Serializeable):
    """An immutable, hashable multiset of ConstrainedNode with (de)serialization support."""

    def __init__(self, nodes: t.Iterable[ConstrainedNode]):
        # Freeze so the collection is hashable and order-insensitive.
        self._nodes = FrozenMultiset(nodes)

    def serialize(self) -> serialization_model:
        # The multiset itself is the payload; the serializer is expected to
        # handle FrozenMultiset values.
        return {
            'nodes': self._nodes
        }

    @classmethod
    def deserialize(cls, value: serialization_model, inflator: Inflator) -> 'ConstrainedNodes':
        # Lazily deserialize each node; the constructor freezes the generator.
        return cls(
            nodes = (
                ConstrainedNode.deserialize(node, inflator)
                for node in value['nodes']
            )
        )

    def __iter__(self) -> t.Iterator[ConstrainedNode]:
        return self._nodes.__iter__()

    def __hash__(self) -> int:
        return hash(self._nodes)

    def __eq__(self, other: object) -> bool:
        # Same-class comparison on the underlying multiset.
        return (
            isinstance(other, self.__class__)
            and self._nodes == other._nodes
        )

    def __repr__(self) -> str:
        return '{}({})'.format(
            self.__class__.__name__,
            self._nodes,
        )
def calculate(
    generations: int,
    trap_amount: int,
    max_delta: t.Optional[int] = None,
    create_proxy_pdfs: bool = True,
    persist_traps: bool = True,
):
    """Evolve a garbage-trap distribution and report/persist/proxy the result.

    Args:
        generations: Number of GA generations to run.
        trap_amount: Target number of traps in the distribution.
        max_delta: When a positive value, use the delta-constrained
            distributor limited to this many trap changes from the cube.
        create_proxy_pdfs: Render proxy PDFs for all/new/removed traps.
        persist_traps: Save the winning trap collection via the persistor.
    """
    random.seed()

    db = Loader.load()
    image_loader = ImageLoader()
    fetcher = ConstrainedNodeFetcher(db)
    cube_loader = CubeLoader(db)
    trap_collection_persistor = TrapCollectionPersistor(db)

    constrained_nodes = fetcher.fetch_garbage()
    print(f'loaded {len(constrained_nodes)} nodes')

    cube = cube_loader.load()

    # Only garbage-intention traps participate in the redistribution.
    cube_traps = FrozenMultiset(
        trap
        for trap in cube.traps
        if (
            trap.intention_type == IntentionType.GARBAGE
            or trap.intention_type == IntentionType.LAND_GARBAGE
        )
    )

    blue_print = ConstraintSetBluePrint(
        (
            algorithm.ValueDistributionHomogeneityConstraint,
            2,
            {},
        ),
        (
            algorithm.GroupExclusivityConstraint,
            2,
            {'group_weights': GROUP_WEIGHTS},
        ),
        (
            algorithm.SizeHomogeneityConstraint,
            1,
            {},
        ),
    )

    if max_delta is not None and max_delta > 0:
        distributor = DeltaDistributor(
            constrained_nodes = constrained_nodes,
            trap_amount = trap_amount,
            origin_trap_collection = cube_traps,
            constraint_set_blue_print = blue_print,
            max_trap_delta = max_delta,
            mate_chance = .45,
            mutate_chance = .35,
            tournament_size = 3,
            population_size = 600,
        )
    else:
        distributor = Distributor(
            constrained_nodes = constrained_nodes,
            trap_amount = trap_amount,
            constraint_set_blue_print = blue_print,
            mate_chance = .5,
            mutate_chance = .45,
            tournament_size = 4,
            population_size = 400,
        )

    # Baseline: mean fitness of an unevolved random population, for comparison.
    random_fitness = statistics.mean(
        map(distributor.constraint_set.total_score, distributor.sample_random_population)
    )

    st = time.time()
    winner = distributor.evaluate(generations).best
    print(f'Done in {time.time() - st} seconds')

    distribution_model = DistributionModel(
        tuple(
            tuple(
                NewConstrainedNode(
                    node.value,
                    node.node,
                    node.groups,
                )
                for node in trap
            )
            for trap in winner.traps
        )
    )

    print('saved nodes:', sum(map(len, distribution_model.traps)))

    with open(os.path.join(paths.OUT_DIR, 'old_distribution.json'), 'w') as f:
        f.write(JsonId(db).serialize(distribution_model))

    print('Random fitness:', random_fitness)
    try:
        print('Current cube fitness:', distributor.evaluate_cube(cube_traps))
    except ValueError:
        # The current cube's traps don't decompose into the fetched nodes;
        # show what differs relative to them instead.
        print('Nodes does not match current cube')
        _, added, removed = distributor.trap_collection_to_trap_distribution(cube_traps, constrained_nodes)
        print('added:', added)
        print('removed:', removed)
    print('Winner fitness:', winner.fitness.values[0])

    distributor.show_plot()

    winner_traps = winner.as_trap_collection

    for trap in winner_traps:
        # NOTE(review): writes a private attribute of Trap — confirm there is
        # no public way to set the intention type.
        trap._intention_type = IntentionType.GARBAGE

    new_traps = winner_traps - cube_traps
    removed_traps = cube_traps - winner_traps

    print('New traps', len(new_traps))

    trap_collection = TrapCollection(winner_traps)

    print('\n------------------------------------------------\n')
    print(trap_collection.minimal_string_list)
    print('\n------------------------------------------------\n')

    if persist_traps:
        trap_collection_persistor.persist(trap_collection)
        print('traps persisted')

    if create_proxy_pdfs:
        out, new_out, removed_out = GARBAGE_OUT_PATH, GARBAGE_NEW_OUT_PATH, GARBAGE_REMOVED_OUT_PATH

        proxy_laps(
            laps = winner_traps,
            image_loader = image_loader,
            file_name = out,
        )
        proxy_laps(
            laps = new_traps,
            image_loader = image_loader,
            file_name = new_out,
        )
        proxy_laps(
            laps = removed_traps,
            image_loader = image_loader,
            file_name = removed_out,
        )
        print('proxying done')
def __init__(self, nodes: t.Iterable[ConstrainedNode]):
    """Freeze the given nodes into an immutable multiset."""
    self._nodes = FrozenMultiset(nodes)
def get_map_slot(self, fantasy_set: FantasySet) -> MapSlot[Cubeable]:
    """Map each configured rarity key's cubeables to its weight (empty multiset when a rarity has none)."""
    weighted = {}
    for key, weight in self._key_map.items():
        # NOTE(review): rarity_map.get(key) returning None would raise here —
        # confirm all keys in self._key_map are present in the rarity map.
        cubeables = fantasy_set.rarity_map.get(key).cubeables or FrozenMultiset()
        weighted[cubeables] = weight
    return MapSlot(weighted)
def __init__(self, nodes: t.Iterable[ConstrainedNode]):
    """Keep nodes as a FrozenMultiset; the node -> constrained-node map is built lazily."""
    if isinstance(nodes, FrozenMultiset):
        self._nodes = nodes
    else:
        self._nodes = FrozenMultiset(nodes)
    # Populated on demand elsewhere.
    self._nodes_map: t.Optional[t.Mapping[PrintingNode, ConstrainedNode]] = None
def __init__(self, before: ConstrainedNode, after: t.Iterable[Printing]):
    """Record a change: the prior constrained node and the printings replacing it."""
    self._before = before
    if isinstance(after, FrozenMultiset):
        self._after = after
    else:
        self._after = FrozenMultiset(after)
def __init__(self, options: t.Iterable[Option]):
    """Normalize options into an immutable multiset."""
    if not isinstance(options, FrozenMultiset):
        options = FrozenMultiset(options)
    self._options: FrozenMultiset[Option] = options
def __init__(self, slots: t.Iterable[MapSlot[T]]):
    """Store slots as an immutable multiset (public attribute; callers read it directly)."""
    if not isinstance(slots, FrozenMultiset):
        slots = FrozenMultiset(slots)
    self.slots: FrozenMultiset[MapSlot] = slots
def __init__(self, before: t.Iterable[Printing], after: ConstrainedNode):
    """Record a change: the prior printings and the constrained node replacing them."""
    if isinstance(before, FrozenMultiset):
        self._before = before
    else:
        self._before = FrozenMultiset(before)
    self._after = after
def __init__(self, slots: t.Iterable[KeySlot]):
    """Normalize key slots into an immutable multiset."""
    if not isinstance(slots, FrozenMultiset):
        slots = FrozenMultiset(slots)
    self._slots: FrozenMultiset[KeySlot] = slots
def __init__(self, atoms: t.Optional[t.Iterable[ManaCostAtom]] = None):
    """Store the mana-cost atoms as an immutable multiset.

    Args:
        atoms: Atoms making up the cost; None (the default) yields an
            empty cost.

    Fixes: the parameter defaulted to None but was annotated as a plain
    Iterable (missing Optional), and None was passed straight into
    FrozenMultiset(...), relying on the multiset constructor's implicit
    None handling. The None case is now explicit.
    """
    if atoms is None:
        self._atoms = FrozenMultiset()
    elif isinstance(atoms, FrozenMultiset):
        # Already frozen — reuse without copying.
        self._atoms = atoms
    else:
        self._atoms = FrozenMultiset(atoms)
def __init__(self, options: t.Mapping[FrozenMultiset[T], int]):
    """Store weighted options (option-multiset -> weight) as an immutable multiset."""
    if not isinstance(options, FrozenMultiset):
        options = FrozenMultiset(options)
    self.options: FrozenMultiset[FrozenMultiset[T]] = options