def from_db_value(self, value, expression, connection) -> t.Optional[Serializeable]:
    if value is None:
        return None
    return JsonId(db).deserialize(
        self._model_type,
        value,
    )
def serialize(cls, serializeable: cubeupdate.CubeChange) -> compacted_model:
    return {
        'type': serializeable.__class__.__name__,
        'id': serializeable.persistent_hash(),
        'explanation': serializeable.explain(),
        'content': JsonId.serialize(serializeable),
        'category': serializeable.category.value,
    }
def to_python(self, value) -> t.Optional[Serializeable]:
    if isinstance(value, self._model_type):
        return value
    if value is None:
        return None
    return JsonId(db).deserialize(
        self._model_type,
        value,
    )
def upload_limited_deck(self, pool_id: t.Union[str, int], name: str, deck: Deck) -> LimitedDeck:
    return LimitedDeck.deserialize(
        self._make_request(
            f'limited/pools/{pool_id}',
            method='POST',
            data={
                'deck': JsonId.serialize(deck),
                'name': name,
            },
        ),
        self,
    )
def init(cls):
    cls.pixmap_loader = PixmapLoader(
        pixmap_executor=30,
        printing_executor=30,
        imageable_executor=30,
    )

    try:
        cls.db = Loader.load()
    except DBLoadException:
        update()
        cls.db = Loader.load()

    cls.serialization_strategy = JsonId(cls.db)
def _persist_cube(cls, cube: Cube) -> None:
    if not os.path.exists(cls.LOCAL_CUBES_PATH):
        os.makedirs(cls.LOCAL_CUBES_PATH)

    with open(
        os.path.join(
            cls.LOCAL_CUBES_PATH,
            datetime.datetime.strftime(
                datetime.datetime.today(),
                cls.TIMESTAMP_FORMAT,
            ),
        ),
        'w',
    ) as f:
        f.write(JsonId.serialize(cube))
def persist(self, trap_collection: TrapCollection):
    if not os.path.exists(self._OUT_DIR):
        os.makedirs(self._OUT_DIR)

    with open(
        os.path.join(
            self._OUT_DIR,
            datetime.datetime.strftime(
                datetime.datetime.today(),
                self.TIMESTAMP_FORMAT,
            ),
        ) + '.json',
        'w',
    ) as f:
        f.write(JsonId.serialize(trap_collection))
class TrapCollectionPersistor(object):
    _OUT_DIR = os.path.join(paths.OUT_DIR, 'trap_collections')
    TIMESTAMP_FORMAT = '%y_%m_%d_%H_%M_%S'

    def __init__(self, db: CardDatabase):
        self._db = db
        self._strategy = JsonId(self._db)

    def get_all_trap_collections(self) -> t.Iterator[TrapCollection]:
        if not os.path.exists(self._OUT_DIR):
            os.makedirs(self._OUT_DIR)

        trap_collections = os.listdir(self._OUT_DIR)

        if not trap_collections:
            return

        names_times = []  # type: t.List[t.Tuple[str, datetime.datetime]]

        for name in trap_collections:
            try:
                names_times.append(
                    (
                        name,
                        datetime.datetime.strptime(
                            os.path.splitext(name)[0],
                            self.TIMESTAMP_FORMAT,
                        ),
                    )
                )
            except ValueError:
                pass

        if not names_times:
            return

        sorted_pairs = sorted(names_times, key=lambda item: item[1], reverse=True)

        for name, time in sorted_pairs:
            with open(os.path.join(self._OUT_DIR, name), 'r') as f:
                yield self._strategy.deserialize(TrapCollection, f.read())

    def get_most_recent_trap_collection(self) -> t.Optional[TrapCollection]:
        all_collections = self.get_all_trap_collections()
        try:
            return next(all_collections)
        except StopIteration:
            return None

    def get_trap_collection(self, name: str) -> TrapCollection:
        with open(os.path.join(self._OUT_DIR, name), 'r') as f:
            return self._strategy.deserialize(TrapCollection, f.read())

    def persist(self, trap_collection: TrapCollection):
        if not os.path.exists(self._OUT_DIR):
            os.makedirs(self._OUT_DIR)

        with open(
            os.path.join(
                self._OUT_DIR,
                datetime.datetime.strftime(
                    datetime.datetime.today(),
                    self.TIMESTAMP_FORMAT,
                ),
            ) + '.json',
            'w',
        ) as f:
            f.write(JsonId.serialize(trap_collection))
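# A minimal usage sketch (an assumption, not from the source): persist a TrapCollection
# and read back the most recent one, reusing Loader and TrapCollectionPersistor from the
# snippets above. The function and variable names here are illustrative only.
def persist_and_reload(trap_collection: TrapCollection) -> t.Optional[TrapCollection]:
    db = Loader.load()
    persistor = TrapCollectionPersistor(db)
    persistor.persist(trap_collection)  # writes a '<timestamp>.json' file under _OUT_DIR
    return persistor.get_most_recent_trap_collection()  # newest collection, or None if none exist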
def serialize_cardboard_cubeable_string(cardboard_cubeable: CardboardCubeable) -> t.Any:
    if isinstance(cardboard_cubeable, Cardboard):
        return cardboard_cubeable.name
    return JsonId.serialize(cardboard_cubeable)
def deserialize(cls, s: t.AnyStr) -> Pool:
    return JsonId(Context.db).deserialize(Pool, s)
def serialize_cubeable_string(cubeable: Cubeable) -> t.Any:
    if isinstance(cubeable, Printing):
        return str(cubeable.id)
    return JsonId.serialize(cubeable)
def _receive_message(self, message_type: str, content: t.Any) -> None:
    if message_type != 'update':
        return

    if DISTRIBUTOR_SERVICE.is_patch_locked(self._patch_pk):
        self._send_message('status', status='locked')
        return

    with transaction.atomic():
        try:
            patch = (
                models.CubePatch.objects
                .select_for_update()
                .get(pk=self._patch_pk)
            )
        except models.CubePatch.DoesNotExist:
            self._send_error(f'no patch with id {self._patch_pk}')
            self.close()
            return

        update = content.get('update')
        change_undoes = content.get('change_undoes')

        if not update and not change_undoes:
            self._send_error('update must have at least one of "update" or "change_undoes" fields')
            return

        patch_update = CubePatch()

        if update:
            try:
                update = RawStrategy(db).deserialize(
                    CubePatch,
                    update,
                )
            except (KeyError, AttributeError):
                self._send_error('bad request')
                return

            patch_update += update

        if change_undoes:
            undoes: t.List[t.Tuple[cubeupdate.CubeChange, int]] = []

            try:
                for undo, multiplicity in change_undoes:
                    undoes.append(
                        (
                            JsonId(db).deserialize(
                                CUBE_CHANGE_MAP[undo['type']],
                                undo['content'],
                            ),
                            multiplicity,
                        )
                    )
            except (KeyError, TypeError, ValueError):
                traceback.print_exc()
                self._send_error('bad request')
                return

            for undo, multiplicity in undoes:
                patch_update -= (undo.as_patch() * multiplicity)

        patch.patch += patch_update
        patch.save()

        meta_cube = patch.versioned_cube.latest_release.as_meta_cube()

        msg = {
            'type': 'cube_update',
            'update': {
                'patch': orpserialize.CubePatchOrpSerializer.serialize(
                    patch.patch
                ),
                'verbose_patch': orpserialize.VerbosePatchSerializer.serialize(
                    patch.patch.as_verbose(
                        meta_cube
                    )
                ),
                'preview': orpserialize.MetaCubeSerializer.serialize(
                    meta_cube + patch.patch
                ),
                'updater': serializers.UserSerializer(self.scope['user']).data,
                'update': orpserialize.VerbosePatchSerializer.serialize(
                    patch_update.as_verbose(
                        meta_cube
                    )
                ),
            },
        }

    async_to_sync(self.channel_layer.group_send)(
        self._group_name,
        msg,
    )
def get(self, request: Request, *args, **kwargs) -> Response:
    return Response(
        status=status.HTTP_200_OK,
        content_type='application/text',
        data=JsonId.serialize(self.get_object().pool),
    )
def __init__(self, db: CardDatabase):
    self._db = db
    self._fetcher = CubeFetcher(self._db)
    self._strategy = JsonId(self._db)
def serialize(self, deck: Deck) -> t.AnyStr:
    return JsonId.serialize(deck)
def from_native(self, value: str) -> T:
    if value[0] == '{':
        return JsonId(db).deserialize(CardboardNode, value)
    return db.cardboards[value]
def to_native(self, value: T) -> str:
    if isinstance(value, Cardboard):
        return value.name
    return JsonId.serialize(value)
def to_native(self, value: T) -> str:
    if isinstance(value, Printing):
        return str(value.id)
    return JsonId.serialize(value)
def from_native(self, value: str) -> T:
    if value[0] == '{':
        return JsonId(db).deserialize(PrintingNode, value)
    return db.printings[int(value)]
def post(self, request: Request, *args, **kwargs) -> Response:
    try:
        pool = models.Pool.objects.select_related('session').get(
            id=kwargs['pk'],
            user=request.user,
        )
    except models.Pool.DoesNotExist:
        return Response(status=status.HTTP_404_NOT_FOUND)

    allow_cheating = (
        pool.session.allow_cheating
        and not SeatResult.objects.filter(
            scheduled_seat__match__round__tournament__limited_session=pool.session,
            scheduled_seat__participant__player=request.user,
        ).exists()
    )

    if not (
        (
            pool.session.state == models.LimitedSession.LimitedSessionState.DECK_BUILDING
            or (
                pool.session.state == models.LimitedSession.LimitedSessionState.PLAYING
                and allow_cheating
            )
        )
        and (
            allow_cheating
            or not pool.session.open_decks
            or not pool.pool_decks.exists()
        )
    ):
        return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED)

    try:
        deck = JsonId(db).deserialize(Deck, request.data.get('deck', '{}'))
    except (SerializationException, JSONDecodeError):
        return Response({'errors': ['invalid decks definition']}, status=status.HTTP_400_BAD_REQUEST)

    valid, errors = check_deck_subset_pool(
        pool.pool,
        deck.seventy_five,
        pool.session.infinites.cardboards,
        strict=False,
    )

    if not valid:
        return Response({'errors': errors}, status=status.HTTP_400_BAD_REQUEST)

    game_format = Format.formats_map.get(pool.session.format)

    if game_format is not None:
        valid, errors = game_format.deckcheck(deck)
        if not valid:
            return Response({'errors': errors}, status=status.HTTP_400_BAD_REQUEST)

    with transaction.atomic():
        pool.pool_decks.update(latest=False)

        cheating = (
            pool.session.state != models.LimitedSession.LimitedSessionState.DECK_BUILDING
            or pool.session.open_decks and pool.pool_decks.exists()
        )

        pool_deck = models.PoolDeck.objects.create(
            deck=deck,
            pool=pool,
            name=request.data.get('name', 'decks'),
            cheating=cheating,
        )

        if (
            pool.session.state == models.LimitedSession.LimitedSessionState.DECK_BUILDING
            and all(
                models.Pool.objects.filter(session=pool.session).values_list('pool_decks', flat=True)
            )
        ):
            pool.session.create_tournament()
            pool.session.state = models.LimitedSession.LimitedSessionState.PLAYING
            pool.session.playing_at = datetime.datetime.now()
            pool.session.save(update_fields=('state', 'playing_at'))

        elif pool.session.tournament and cheating:
            TournamentParticipant.objects.filter(
                tournament=pool.session.tournament,
                player=request.user,
            ).update(deck_id=pool_deck.id)

    return Response(
        serializers.PoolDeckSerializer(pool_deck, context={'request': request}).data,
        status=status.HTTP_201_CREATED,
    )
def __init__(self, db: CardDatabase):
    self._db = db
    self._strategy = JsonId(self._db)
def serialize(cls, pool: Pool) -> t.AnyStr:
    return JsonId.serialize(pool)
def calculate(
    generations: int,
    trap_amount: int,
    max_delta: t.Optional[int] = None,
    create_proxy_pdfs: bool = True,
    persist_traps: bool = True,
):
    random.seed()

    db = Loader.load()
    image_loader = ImageLoader()
    fetcher = ConstrainedNodeFetcher(db)
    cube_loader = CubeLoader(db)
    trap_collection_persistor = TrapCollectionPersistor(db)

    constrained_nodes = fetcher.fetch_garbage()
    print(f'loaded {len(constrained_nodes)} nodes')

    cube = cube_loader.load()

    cube_traps = FrozenMultiset(
        trap
        for trap in cube.traps
        if (
            trap.intention_type == IntentionType.GARBAGE
            or trap.intention_type == IntentionType.LAND_GARBAGE
        )
    )

    blue_print = ConstraintSetBluePrint(
        (
            algorithm.ValueDistributionHomogeneityConstraint,
            2,
            {},
        ),
        (
            algorithm.GroupExclusivityConstraint,
            2,
            {'group_weights': GROUP_WEIGHTS},
        ),
        (
            algorithm.SizeHomogeneityConstraint,
            1,
            {},
        ),
    )

    if max_delta is not None and max_delta > 0:
        distributor = DeltaDistributor(
            constrained_nodes=constrained_nodes,
            trap_amount=trap_amount,
            origin_trap_collection=cube_traps,
            constraint_set_blue_print=blue_print,
            max_trap_delta=max_delta,
            mate_chance=.45,
            mutate_chance=.35,
            tournament_size=3,
            population_size=600,
        )
    else:
        distributor = Distributor(
            constrained_nodes=constrained_nodes,
            trap_amount=trap_amount,
            constraint_set_blue_print=blue_print,
            mate_chance=.5,
            mutate_chance=.45,
            tournament_size=4,
            population_size=400,
        )

    random_fitness = statistics.mean(
        map(distributor.constraint_set.total_score, distributor.sample_random_population)
    )

    st = time.time()
    winner = distributor.evaluate(generations).best
    print(f'Done in {time.time() - st} seconds')

    distribution_model = DistributionModel(
        tuple(
            tuple(
                NewConstrainedNode(
                    node.value,
                    node.node,
                    node.groups,
                )
                for node in trap
            )
            for trap in winner.traps
        )
    )

    print('saved nodes:', sum(map(len, distribution_model.traps)))

    with open(os.path.join(paths.OUT_DIR, 'old_distribution.json'), 'w') as f:
        f.write(JsonId(db).serialize(distribution_model))

    print('Random fitness:', random_fitness)

    try:
        print('Current cube fitness:', distributor.evaluate_cube(cube_traps))
    except ValueError:
        print('Nodes does not match current cube')
        _, added, removed = distributor.trap_collection_to_trap_distribution(cube_traps, constrained_nodes)
        print('added:', added)
        print('removed:', removed)

    print('Winner fitness:', winner.fitness.values[0])

    distributor.show_plot()

    winner_traps = winner.as_trap_collection
    for trap in winner_traps:
        trap._intention_type = IntentionType.GARBAGE

    new_traps = winner_traps - cube_traps
    removed_traps = cube_traps - winner_traps

    print('New traps', len(new_traps))

    trap_collection = TrapCollection(winner_traps)

    print('\n------------------------------------------------\n')
    print(trap_collection.minimal_string_list)
    print('\n------------------------------------------------\n')

    if persist_traps:
        trap_collection_persistor.persist(trap_collection)
        print('traps persisted')

    if create_proxy_pdfs:
        out, new_out, removed_out = GARBAGE_OUT_PATH, GARBAGE_NEW_OUT_PATH, GARBAGE_REMOVED_OUT_PATH

        proxy_laps(
            laps=winner_traps,
            image_loader=image_loader,
            file_name=out,
        )
        proxy_laps(
            laps=new_traps,
            image_loader=image_loader,
            file_name=new_out,
        )
        proxy_laps(
            laps=removed_traps,
            image_loader=image_loader,
            file_name=removed_out,
        )

        print('proxying done')
def value_to_string(self, obj):
    return JsonId.serialize(self.value_from_object(obj))
class CubeLoader(object):
    LOCAL_CUBES_PATH = os.path.join(paths.APP_DATA_PATH, 'cubes')
    TIMESTAMP_FORMAT = '%d_%m_%y_%H_%M_%S'

    def __init__(self, db: CardDatabase):
        self._db = db
        self._fetcher = CubeFetcher(self._db)
        self._strategy = JsonId(self._db)

    def get_local_cube_paths(self) -> t.Iterator[t.Tuple[str, datetime.datetime]]:
        if not os.path.exists(self.LOCAL_CUBES_PATH):
            os.makedirs(self.LOCAL_CUBES_PATH)

        cubes = os.listdir(self.LOCAL_CUBES_PATH)

        if not cubes:
            return

        names_times = []  # type: t.List[t.Tuple[str, datetime.datetime]]

        for cube in cubes:
            try:
                names_times.append(
                    (
                        cube,
                        datetime.datetime.strptime(cube, self.TIMESTAMP_FORMAT),
                    )
                )
            except ValueError:
                pass

        if not names_times:
            return

        sorted_pairs = sorted(names_times, key=lambda item: item[1], reverse=True)

        for name, time in sorted_pairs:
            yield os.path.join(self.LOCAL_CUBES_PATH, name), time

    def _get_all_local_cubes(self) -> t.Iterator[t.Tuple[Cube, datetime.datetime]]:
        for path, time in self.get_local_cube_paths():
            with open(path, 'r') as f:
                yield self._strategy.deserialize(Cube, f.read()), time

    def _get_current_local_cube(self) -> t.Optional[Cube]:
        try:
            return next(self._get_all_local_cubes())[0]
        except StopIteration:
            return None

    @classmethod
    def _persist_cube(cls, cube: Cube) -> None:
        if not os.path.exists(cls.LOCAL_CUBES_PATH):
            os.makedirs(cls.LOCAL_CUBES_PATH)

        with open(
            os.path.join(
                cls.LOCAL_CUBES_PATH,
                datetime.datetime.strftime(
                    datetime.datetime.today(),
                    cls.TIMESTAMP_FORMAT,
                ),
            ),
            'w',
        ) as f:
            f.write(JsonId.serialize(cube))

    def rollback(self) -> None:
        try:
            path, time = next(self.get_local_cube_paths())
        except StopIteration:
            return
        os.remove(path)

    def check_and_update(self) -> bool:
        local_cube = self._get_current_local_cube()
        remote_cube = self._fetcher.fetch_cube()
        if local_cube is None or local_cube != remote_cube:
            self._persist_cube(remote_cube)
            return True
        return False

    def load(self) -> Cube:
        cube = self._get_current_local_cube()
        if cube is None:
            if not self.check_and_update():
                raise CubeLoadException()
            return self._get_current_local_cube()
        return cube

    def all_cubes(self) -> t.Iterator[t.Tuple[Cube, datetime.datetime]]:
        if self._get_current_local_cube() is None:
            if not self.check_and_update():
                raise CubeLoadException()
        return self._get_all_local_cubes()
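# A minimal usage sketch for CubeLoader (illustrative only, not from the source):
# refresh the local cube cache from the remote cube, then load the current local copy.
def load_current_cube() -> Cube:
    db = Loader.load()
    loader = CubeLoader(db)
    loader.check_and_update()  # persists the remote cube locally if it differs from the cached one
    return loader.load()       # raises CubeLoadException if no cube can be loaded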
def test():
    db = Loader.load()
    strategy = JsonId(db)

    cube = CubeLoader(db).load()
    constrained_nodes = NodeCollection(
        ConstrainedNodeFetcher(db).fetch_garbage()
    )
    groups = GroupMap(_GROUP_WEIGHTS)

    # s = '{"cube_delta": {}, "nodes_delta": {"nodes": []}}'
    # patch = strategy.deserialize(CubePatch, s)

    patch = CubePatch(
        CubeDeltaOperation({
            db.cardboards['Brainstorm'].from_expansion('ICE'): -1,
            db.cardboards['Brainstorm'].from_expansion('EMA'): 1,
            # Trap(
            #     AllNode(
            #         (
            #             db.cardboards['Brainstorm'].from_expansion('ICE'),
            #             db.cardboards['Web'].from_expansion('LEA'),
            #         )
            #     ),
            #     intention_type=IntentionType.SYNERGY,
            # ): 2
        }),
        NodesDeltaOperation({
            # ConstrainedNode(
            #     node=AllNode(
            #         (
            #             db.cardboards['Web'].from_expansion('LEA'),
            #         )
            #     ),
            #     groups=['ok', 'lmao'],
            #     value=2,
            # ): 1,
            ConstrainedNode(
                node=AllNode(
                    (
                        db.cardboards['Brainstorm'].from_expansion('ICE'),
                        db.cardboards['Web'].from_expansion('LEA'),
                    )
                ),
                groups=['lolHAHA'],
                value=1,
            ): 1,
        }),
    )

    print(patch)

    meta_cube = MetaCube(cube, constrained_nodes, groups)

    verbose_patch = patch.as_verbose(meta_cube)
    print(verbose_patch)

    updater = CubeUpdater(meta_cube, patch)
    print(updater)

    report = UpdateReport(updater)

    for notification in report.notifications:
        print(notification.title + '\n' + notification.content + '\n\n')
def deserialize(self, s: t.AnyStr) -> Deck:
    return JsonId(self._db).deserialize(Deck, s)
def get_prep_value(self, value):
    if value is None:
        return None
    if isinstance(value, dict):
        return json.dumps(value)
    return JsonId.serialize(value)
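# A minimal round-trip sketch for the serialization strategy used by the field methods in
# this section (an assumption, not from the source): 'db' is a loaded CardDatabase as in the
# other snippets, and Cube stands in for whatever Serializeable the field stores.
def round_trip(cube: Cube) -> Cube:
    s = JsonId.serialize(cube)              # object -> JSON string, as in get_prep_value
    return JsonId(db).deserialize(Cube, s)  # JSON string -> object, as in from_db_value / to_python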