def get(self,
        id_or_type: CompIdOrType,
        allow_disabled: bool = False) -> Nullable[Component]:
    '''
    Gets a component from this entity by ComponentId or ComponentType.
    Will return the component instance or Null().
    '''
    # First, see if we hold it directly under that key (type look-up).
    by_type = self._components.get(id_or_type, None)
    log.debug("Get component '{}' by type: {}", id_or_type, by_type)
    if by_type:
        return self.comp_or_null(by_type, allow_disabled)

    # Otherwise maybe it was a ComponentId - ask the manager.
    by_id = self._component_manager.get(id_or_type)
    if by_id:
        # Verify the manager's component actually belongs to this entity.
        # TODO [2020-06-19]: compare by entity_id instead of getting again
        # and comparing instance id?
        ours = self._components.get(type(by_id), None)
        if ours and by_id == ours:
            # comp_or_null() filters out disabled components unless
            # `allow_disabled` says otherwise.
            return self.comp_or_null(by_id, allow_disabled)

    # Fall through - ain't got that one.
    return Null()
def _update_mitosis(self) -> VerediHealth:
    '''
    Generic tick function. We do the same thing every tick state we process
    so do it all here.
    '''
    # The broadcast is a one-shot; bail if it already happened.
    if self._registration_broadcast:
        log.debug("CommandRegistrationBroadcast: Did our thing already.")
        return self._health_check(SystemTick.MITOSIS)

    # Doctor checkup.
    if not self._healthy(SystemTick.MITOSIS):
        self._health_meter_update = self._health_log(
            self._health_meter_update,
            log.Level.WARNING,
            "HEALTH({}): Skipping ticks - our system health "
            "isn't good enough to process.",
            self.health)
        return self._health_check(SystemTick.MITOSIS)

    broadcast = self._commander.registration(self.id, Null())
    log.debug("CommandRegistrationBroadcast about to broadcast: {}",
              broadcast)
    # TODO [2020-06-27]: better place to register these?
    veredi.zest.debug.registration.register(broadcast)

    # Send out the command registration broadcast, then flag ourselves so
    # we never do this tick's work again.
    self._event_notify(broadcast)
    self._registration_broadcast = True

    # Did a thing this tick so say we're PENDING...
    return VerediHealth.PENDING
def spawn(self,
          other_class: Optional[Type['VerediContext']],
          spawned_name: str,
          spawned_key: Optional[str],
          *args: Any,
          **kwargs: Any) -> 'VerediContext':
    '''
    Makes a new instance of the passed in type w/ our context pushed to its
    own.

    Not a deep copy, currently. Could be - used to be.

    Returns spawned context.
    '''
    # BUG FIX: return annotation was `-> None`, but we return the spawned
    # context (and the docstring says so).
    log.debug("Spawning: {} with name: {}, key: {}, args: {}, kwargs: {}",
              other_class, spawned_name, spawned_key, args, kwargs,
              context=self)
    other = other_class(spawned_name, spawned_key, *args, **kwargs)

    if other.key == self._key:
        # Same key: merge our sub-context too, and quietly accept
        # conflicts on the main pull.
        other.pull_to_sub(self.sub, Conflict.RECEIVER_MUNGED)
        other.pull(self, Conflict.RECEIVER_MUNGED | Conflict.QUIET)
    else:
        other.pull(self, Conflict.RECEIVER_MUNGED)

    return other
def event_skill_req(self, event: SkillRequest) -> None:
    '''
    Skill thingy requested to happen; please resolve.
    '''
    # Doctor checkup.
    if not self._health_ok_event(event):
        return

    entity, component = self._manager.get_with_log(
        f'{self.klass}.command_skill',
        event.id,
        self._component_type,
        event=event)
    if not entity or not component:
        # Entity or component disappeared, and that's ok.
        return

    amount = component.total(event.skill)
    log.debug("Event {} - {} total is: {}",
              event, event.skill, amount,
              context=event.context)

    # Can't fire a result without a valid component id.
    if component.id == ComponentId.INVALID:
        return

    # Have EventManager create and fire off event for whoever wants the
    # next step.
    result = SkillResult(event.id, event.type, event.context,
                         component_id=component.id,
                         skill=event.skill,
                         amount=amount)
    self._event_notify(result)
def _update(self) -> VerediHealth:
    '''
    SystemTick.STANDARD tick function.
    '''
    # Doctor checkup.
    if not self._health_ok_tick(SystemTick.STANDARD):
        return self.health

    for entity in self._wanted_entities(SystemTick.STANDARD):
        # Check if entity in turn order has a (skill) action queued up.
        # Also make sure to check if entity/component still exist.
        if not entity:
            continue
        # BUG FIX: ask the entity for its component (parallel to
        # _update_time's `entity.get(AttackComponent)`); previously this
        # asked the ComponentManager with a *type*, which never found the
        # entity's component.
        component = entity.get(self._component_type)
        if not component or not component.has_action:
            continue

        action = component.dequeue
        log.debug("Entity {}, Comp {} has skill action: {}",
                  entity, component, action)

        # Check turn order?
        # Would that be, like...
        #   - engine.time_flow()?
        #   - What does PF2/Starfinder call it? Like the combat vs
        #     short-term vs long-term 'things are happening' modes...

        # process action
        print('todo: a skill thingy', action)

    return self._health_check(SystemTick.STANDARD)
def _skill(self, name: str) -> Dict[str, Any]:
    '''
    Get `name`'s entry in our persistent data.

    Returns the default (None) when `name` has no entry.
    '''
    entry = self.persistent.get(name, None)
    log.debug("SKILL: {} entry for {}: {}",
              self.klass,
              name,
              entry)
    return entry
def _ability(self, name: str) -> Dict[str, Any]:
    '''
    Get `name`'s entry in our persistent data or null.
    '''
    # Default to Null() so callers can chain safely off a missing entry.
    entry = self.persistent.get(name, Null())
    log.debug("ABILITY: {} entry for {}: {}",
              self.klass,
              name,
              entry)
    return entry
def _cycle_alive(self,
                 entity_id: EntityId,
                 entity_cycle: EntityLifeCycle) -> None:
    '''
    Helper for _event_entity_life(): entity hit ALIVE; record it in our
    user-identity dictionaries (or as anonymous if it has no identity).
    '''
    id_comp = self.component(entity_id)
    if not id_comp:
        # No identity; just store as anonymous for now...
        log.debug("Entity {} has entered life-cycle '{}' without any "
                  "identity_component. We have no current solution to "
                  "this conundrum... Recording as 'anonymous'.",
                  entity_id, entity_cycle)
        self._anonymous.add(entity_id)
        return

    # Now they have an IdentityComponent - update our dicts.
    self._user_ident_update(entity_id,
                            user_id=id_comp.user_id,
                            user_key=id_comp.user_key)

def _event_entity_life(self, event: EntityLifeEvent) -> None:
    '''
    Entity Life-cycle has changed enough that EntityManager has produced an
    event for it. See if we should add/remove from our dictionaries.
    '''
    # Doctor checkup.
    if not self._health_ok_event(event):
        return

    # ---
    # Deal with life-cycle transition.
    # ---
    entity_id = event.id
    entity_cycle = event.type

    if entity_cycle == EntityLifeCycle.INVALID:
        # INVALID should never come up, so... complain.
        log.error("EntityManager pushed Entity {} into {} life cycle. "
                  "Do not know how to handle this.",
                  entity_id, entity_cycle)
        self.health = VerediHealth.UNHEALTHY
        return

    if entity_cycle == EntityLifeCycle.CREATING:
        # Don't care about CREATING - waiting for the ALIVE.
        return

    if entity_cycle == EntityLifeCycle.ALIVE:
        # They are now alive. Add to dictionaries.
        self._cycle_alive(entity_id, entity_cycle)
        return

    if entity_cycle in (EntityLifeCycle.DESTROYING, EntityLifeCycle.DEAD):
        # Remove 'em from our dicts.
        self._user_ident_update(entity_id, delete=True)
        return

    # Anything else: ignore.
    log.debug("Entity {} has entered life-cycle: {}. "
              "We have nothing to do for that cycle.",
              entity_id, entity_cycle)
    return
def _load(self) -> VerediHealth:
    '''
    Load our configuration data from its file.

    Raises LoadError
    '''
    log_groups = [log.Group.START_UP, log.Group.DATA_PROCESSING]
    log.group_multi(log_groups,
                    self.dotted,
                    "Configuration load...")

    # Spawn a context from what we know, and ask the config repo to load
    # something based on that.
    ctx = DataBareContext(self.dotted,
                          ConfigContext.KEY,
                          self._path,
                          DataAction.LOAD,
                          self._meta())
    log.group_multi(log_groups,
                    self.dotted,
                    "Configuration loading from repo...")
    with self._repo.load(ctx) as stream:
        # Decode w/ serdes.
        # Can raise an error - we'll let it.
        try:
            log.group_multi(log_groups,
                            self.dotted,
                            "Configuration deserializing with serdes...")
            # (Fixed "Confgig" typos in this log message.)
            log.debug("Config Load Context: {}, "
                      "Config Repo: {}, "
                      "Config Serdes: {}",
                      ctx, self._repo, self._serdes)
            for each in self._serdes.deserialize_all(stream,
                                                     self._codec,
                                                     ctx):
                log.debug("Config Loading Doc: {}", each)
                self._load_doc(each)

        except LoadError as error:
            log.group_multi(log_groups,
                            self.dotted,
                            "Configuration load/deserialization failed "
                            "with a LoadError. Erroring out.",
                            log_success=False)
            # Log exception and let bubble up as-is.
            raise log.exception(
                error,
                "Configuration init load/deserialization failed "
                "with a LoadError: type: {}, str: {}",
                type(error), str(error))

        except Exception as error:
            log.group_multi(log_groups,
                            self.dotted,
                            "Configuration load/deserialization failed "
                            "with an error of type {}. Erroring out.",
                            type(error),
                            log_success=False)
            # Complain that we found an exception we don't handle.
            # ...then let it bubble up as-is.
            raise log.exception(
                LoadError,
                "Unhandled exception! type: {}, str: {}",
                type(error), str(error)) from error

    return VerediHealth.HEALTHY
def __contains__(self, key: CompIdOrType) -> bool:
    '''
    This is for any "if component in entity:" sort of check systems might
    want to do.
    '''
    # Look the component up once and reuse it; previously this called
    # self.get(key) up to three times per membership check.
    component = self.get(key)
    # Don't eval all them args unless it'll be used...
    if log.will_output(log.Level.DEBUG):
        log.debug("{} contains {}? {} -> {}\n all: {}",
                  self.klass, key,
                  component,
                  bool(component),
                  self._components)
    return bool(component)
def id(klass: Type['ConfigContext'],
       context: VerediContext) -> Nullable[Any]:
    '''
    Checks for an ID link in config's spot in this context.

    If none, returns Null().
    '''
    # Renamed local from `id` - don't shadow the builtin inside a method
    # that's already (unavoidably) named `id`.
    identity = context.sub_get(ConfigLink.ID)
    if not identity:
        log.debug("No id in context! context.id: {}, ",
                  identity,
                  context=context)
    return identity
def tear_down_ecs(test_name_file: str,
                  test_case: 'unittest.TestCase',
                  test_name_func: str,
                  enable_debug_logs: bool,
                  meeting: Meeting,
                  engine: Optional[Engine] = None) -> None:
    '''
    Runs Tear-Down functions for each manager in meeting.

    Runs Tear-Down for engine if supplied.
    '''
    with log.LoggingManager.on_or_off(enable_debug_logs):
        log.debug("zload.tear_down_ecs running Meeting's "
                  "Unit-Test Tear-Down...")
        meeting._ut_tear_down()

        if not engine:
            log.debug("zload.tear_down_ecs SKIPPING Engine's "
                      "Unit-Test Tear-Down (engine does not exist)...")
        else:
            log.debug("zload.tear_down_ecs running Engine's "
                      "Unit-Test Tear-Down...")
            engine._ut_tear_down()

    log.debug("zload.tear_down_ecs is done for "
              f"{_test_name(test_name_file, test_case, test_name_func)}.")
def _update_time(self) -> VerediHealth:
    '''
    First in Game update loop. Systems should use this rarely as the game
    time clock itself updates in this part of the loop.
    '''
    # Doctor checkup.
    if not self._health_ok_tick(SystemTick.TIME):
        return self.health

    # (Removed leftover debug print of the tick value.)
    tick = SystemTick.TIME
    for entity in self._wanted_entities(tick):
        # Check if entity in turn order has a combat action queued up.
        # Also make sure to check if entity/component still exist.
        if not entity:
            continue
        component = entity.get(AttackComponent)
        if not component or not component.has_action:
            continue

        action = component.dequeue
        log.debug("Entity {}, Comp {} has skill action: {}",
                  entity, component, action)

        # Check turn order?
        # Would that be, like...
        #   - engine.time_flow()?
        #   - What does PF2/Starfinder call it? Like the combat vs
        #     short-term vs long-term 'things are happening' modes...

        # process action
        print('todo: a skill thingy', action)

    # TODO [2020-05-26]: this
    #   check turn order
    #   check if entity in turn order has a (combat) action queued up
    #   anywhere.
    #   process action
    #   check for entities to add to turn order tracker

    return self._health_check(SystemTick.TIME)
def path(klass: Type['ConfigContext'],
         context: VerediContext) -> Nullable[pathlib.Path]:
    '''
    Checks for a PATH link in config's spot in this context.

    If none, returns PATH from background.manager.data.
    '''
    path = context.sub_get(ConfigLink.PATH)
    if null_or_none(path):
        # BUG FIX: the message fragments were split by a comma, so
        # "bg.path: {}" was passed as the first format *argument* and all
        # the real args shifted over. They are one format string now.
        log.debug(
            "No path in context; using background's. "
            "context.path: {}, "
            "bg.path: {}",
            path,
            background.manager.data.path,
            context=context)
        path = background.manager.data.path

    return path
def ut_inject(self,
              value: Any,
              doc_type: Document,
              *keychain: label.LabelInput) -> None:
    '''
    Unit-testing helper: set `value` into our config data for `doc_type`
    at the location specified by the `keychain` keys.

    Does nothing (beyond a debug log) if `doc_type` is not present in our
    config data. Intermediate keychain dicts are created as needed.
    '''
    # Ensure the keychain is in good shape from whatever was passed in.
    keychain = label.regularize(*keychain)

    # Get document type data first.
    doc_data = self._config.get(doc_type, None)
    data = doc_data
    if data is None:
        log.debug("No doc_type {} in our config data {}.",
                  doc_type, self._config)
        return None

    # Now hunt for/create the keychain they wanted...
    # (all but the last key are parent dicts)
    for key in keychain[:-1]:
        data = data.setdefault(key, {})

    # And set the key.
    data[keychain[-1]] = value
def ut_inject(self,
              value: Any,
              doc_type: Document,
              *keychain: str) -> None:
    '''
    Set `value` into our config data for `doc_type` at location specified
    by the `keychain` keys iterable.
    '''
    # Find the document's data; nothing to inject into if it's absent.
    data = self._config.get(doc_type, None)
    if data is None:
        log.debug("No doc_type {} in our config data {}.",
                  doc_type, self._config)
        return None

    # Walk (creating empty dicts as needed) down to the parent of the
    # final key...
    for parent_key in keychain[:-1]:
        data = data.setdefault(parent_key, {})

    # ...and set the value there.
    data[keychain[-1]] = value
def add_text(self, entity: Entity, input_safe: str) -> InputId:
    '''
    Add an input string that is about to be processed to the history.

    Mainly so that historian will assign it an InputId for ongoing use.
    '''
    # No entity - nothing to attach history to.
    if not entity:
        log.debug("No entity for input history; dropping: {}",
                  input_safe)
        return

    iid = self.get_id(entity, input_safe)
    entry = InputHistory(iid, input_safe, entity)

    # Record the entry in the global history, the entity's own history,
    # and the by-id look-up.
    self._global.append(entry)
    self._by_entity.setdefault(entity.id, []).append(entry)
    self._by_input[iid] = entry

    return iid
def parse(self,
          string: str,
          milieu: Optional[str] = None) -> Optional['MathTree']:
    '''
    Parse input `string` and return the resultant MathTree, or None if
    parsing/transforming failed at some point.
    '''
    # Set milieu and clear any old vars, also set up our xformer.
    self._set_up(milieu)

    milieu_info = (("(w/ milieu: '" + self._milieu + "')")
                   if self._milieu else
                   '')
    log.debug("parse input{}: '{}' ", milieu_info, string)

    syntax_tree = Parser.parse(string)
    if log.will_output(log.Level.DEBUG):
        # Dont format tree into string unless we're actually logging it.
        log.debug("Parser (lark) output: \n{}",
                  Parser.format(syntax_tree))

    math_tree = self._transformer.transform(syntax_tree)
    if log.will_output(log.Level.DEBUG):
        # Dont format tree into string unless we're actually logging it.
        log.debug("Math Tree: \n{}", math_tree.pretty())

    # Return parsed, transformed, un-evaluated math tree.
    return math_tree
def _start_server(comms: multiproc.SubToProcComm,
                  context: VerediContext) -> None:
    '''
    Entry function for our mediator server.

    Basically create mediator from config and call its `start()`.
    '''
    # ------------------------------
    # Set-Up
    # ------------------------------
    log_level = ConfigContext.log_level(context)
    lumberjack = log.get_logger(comms.name,
                                min_log_level=log_level)
    lumberjack.setLevel(log_level)
    # Use deferred {}-style args (consistent with logging everywhere else
    # in this codebase) instead of eagerly-formatted f-strings.
    log.debug("_start_server: {} {}",
              comms.name, log_level,
              veredi_logger=lumberjack)

    # log.set_group_level(log.Group.DATA_PROCESSING, log.Level.DEBUG)
    # log.set_group_level(log.Group.PARALLEL, log.Level.DEBUG)

    # ---
    # Config
    # ---
    # NOTE(review): this replaces the `comms` parameter with the context's
    # SubToProcComm - presumably they're expected to be the same object;
    # confirm.
    comms = ConfigContext.subproc(context)
    if not comms:
        raise log.exception(
            TypeError,
            "MediatorServer requires a SubToProcComm; received None.")
    config = background.config.config(
        '_start_server',
        'veredi.interface.mediator._start_server',
        context)

    # ---
    # Ignore Ctrl-C. Have parent process deal with it and us.
    # ---
    multiproc._sigint_ignore()

    # ---
    # Logging
    # ---
    # Do not set up log_client here - multiproc does that.

    # ------------------------------
    # Create & Start
    # ------------------------------
    log.debug("MediatorSystem's _start_server for {} "
              "starting MediatorServer...",
              comms.name,
              veredi_logger=lumberjack)
    mediator = config.create_from_config('server',
                                         'mediator',
                                         'type',
                                         context=context)
    mediator.start()
    log.debug("MediatorSystem's _start_server for {} done.",
              comms.name,
              veredi_logger=lumberjack)
def _query(self,
           entity_id: EntityId,
           entry: str,
           context: 'VerediContext') -> Nullable[ValueMilieu]:
    '''
    Get entry from entity's `self._component_type` and return it.

    Callers should do checks/logs on entity and component if they want
    more info about missing ent/comp. This just uses Null's cascade to
    safely skip those checks.
    '''
    # Null's cascade does the heavy lifting here; callers can dig into
    # why an entity/component is missing if they care.
    ent, comp = self._manager.get_with_log(f'{self.klass}._query',
                                           entity_id,
                                           self._component_type,
                                           context=context)
    if not ent or not comp:
        return Null()

    result = self._query_value(comp, entry)
    log.debug("'{}' result is: {}",
              entry, result,
              context=context)
    return result
def config(
        test_type: zpath.TestType = zpath.TestType.UNIT,
        rules: Optional[label.LabelInput] = None,
        game_id: Optional[Any] = None,
        config_path: Union[pathlib.Path, str, None] = None
) -> Configuration:
    '''
    Creates a configuration with the requested `config_path` config file
    path. If the `config_path` is Falsy, uses with input filename.

    Passes `rules` and `test_type` to zonfig.rules() to get final rules
    DotStr.

    If no `config_path`, gets a default filename via
    `zpath.config_filename()`.

    Uses `zpath.config()` to resolve the full config path from
    input/default.
    '''
    # TODO: group logging for: "if unit_test AND <group> will output..."
    # BUG FIX: the first fragment was missing its f-prefix, so
    # "{test_type}" logged literally.
    log.debug((f"zmake.config({test_type}): INPUTS: "
               f"rules: {rules}, "
               f"game_id: {game_id}, "
               f"config_path: {config_path}"))

    rules = zonfig.rules(test_type, rules)
    config_id = zpath.config_id(test_type, game_id)

    path = config_path
    if not path:
        path = zpath.config_filename(test_type)

    path = zpath.config(path, test_type)
    # TODO: group logging for: "if unit_test AND <group> will output..."
    log.debug((f"zmake.config({test_type}): FINAL VALUES: "
               f"rules: {rules}, "
               f"game_id: {game_id}, "
               f"config_id: {config_id}, "
               f"path: {path}"))

    config = run.configuration(rules, config_id, path)
    return config
def create_from_config(
        self,
        *keychain: label.LabelInput,
        context: Optional['VerediContext'] = None,
) -> Nullable[Any]:
    '''
    Gets value from these keychain in our config data, then tries to have
    our registry create that value.

    e.g. config.create_from_config('data', 'game', 'repository')
      -> from config file: 'veredi.repository.file-tree'
        -> from create_from_label('veredi.repository.file-tree', ...)
          -> FileTreeRepository object

    Will use provided context, or create a ConfigContext to use via
    `make_config_context()` if none provided.

    Returns thing created using keychain or None.
    '''
    # Ensure the keychain is in good shape from whatever was passed in.
    keychain = label.regularize(*keychain)
    config_val = self.get(*keychain)
    if not isinstance(config_val, str):
        error_info = ("no config value"
                      if not config_val else
                      "incorrect config value of type "
                      f"'{type(config_val)}' (need str)")
        # BUG FIX: args were swapped - the keychain goes with "requested
        # for" and the error info with "we have ... for that".
        log.debug(
            "Make requested for: {}. But we have {} "
            "for that. context: {}",
            keychain, error_info, context)
        return Null()

    if not context:
        context = self.make_config_context()

    context.add(ConfigLink.KEYCHAIN, list(keychain[:-1]))
    log.debug("Make requested for: {}. context: {}",
              keychain, context)

    # Assume their relevant data is one key higher up...
    # e.g. if we're making the thing under keychain (GAME, REPO, TYPE),
    # then the repository we're making will want (GAME, REPO) as its
    # root so it can get, say, DIRECTORY.
    ret_val = self.create_from_label(config_val, context=context)
    log.debug("Made: {} from {}. context: {}",
              ret_val, keychain, context)
    return ret_val
def config(filepath: Union[pathlib.Path, str, None],
           test_type: TestType = TestType.UNIT) -> Optional[pathlib.Path]:
    '''
    Returns pathlib.Path to config test data for `test_type`.
    '''
    path = retval(rooted(test_type, 'config'))
    # TODO: group logging for: "if unit_test AND <group> will output..."
    log.debug(f"zpath.config({test_type}): INPUTS: "
              f"filepath: {filepath}, "
              f"path: {path}")

    if not filepath:
        # TODO: group logging for: "if unit_test AND <group> will output..."
        # BUG FIX: first fragment was missing its f-prefix, so
        # "{test_type}" logged literally (the INPUTS call above had it).
        log.debug(f"zpath.config({test_type}): FINAL VALUES: "
                  f"No filepath; using default path: {path}")
        return path

    path = path / filepath
    # TODO: group logging for: "if unit_test AND <group> will output..."
    log.debug(f"zpath.config({test_type}): FINAL VALUES: "
              "Adding filepath... returning: "
              f"path: {path} "
              f"retval(): {retval(path)}")
    return retval(path)
def _event_to_cmd(self,
                  string_unsafe: str,
                  entity: 'Entity',
                  event: 'Event',
                  context: 'VerediContext') -> None:
    '''
    Take args, verify, and send on to commander for further processing.

    Pipeline: identity check -> extract raw string from event -> sanitize/
    validate -> maybe_command() -> record in history -> execute via
    commander -> record execution status.
    '''
    # Need an identity to attribute the input to; drop it otherwise.
    ident = entity.get(IdentityComponent)
    if not ident:
        log.debug("No IdentityComponent for entity - cannot process "
                  "input event. Entity '{}'. input-string: '{}', "
                  "event: {}",
                  entity, string_unsafe, event)
        return

    # NOTE(review): the `string_unsafe` parameter is discarded here and
    # re-read from the event - confirm the parameter is only used for the
    # log message above.
    string_unsafe = None
    try:
        string_unsafe = event.payload
    except AttributeError:
        try:
            string_unsafe = event.string_unsafe
        except AttributeError as err:
            log.exception(err,
                          "Event {} does not have 'payload' or "
                          "'string_unsafe' property - input system "
                          "cannot process it as a command.",
                          event,
                          context=context)
            # NOTE(review): no `return` here - execution continues with
            # string_unsafe == None and relies on sanitize.validate() to
            # reject it. Confirm that is intended.

    log.debug("Input from '{}' (by '{}'). input-string: '{}', event: {}",
              ident.log_name, ident.log_extra,
              string_unsafe, event)

    string_safe, string_valid = sanitize.validate(string_unsafe,
                                                  ident.log_name,
                                                  ident.log_extra,
                                                  event.context)

    if string_valid != sanitize.InputValid.VALID:
        log.info("Input from '{}' (by '{}'): "
                 "Dropping event {} - input failed validation.",
                 ident.log_name, ident.log_extra,
                 event,
                 context=event.context)
        # TODO [2020-06-11]: Keep track of how many times user was
        # potentially naughty?
        return

    command_safe = self._commander.maybe_command(string_safe)
    if not command_safe:
        log.info("Input from '{}' (by '{}'): "
                 "Dropping event {} - input failed `maybe_command()`.",
                 ident.log_name, ident.log_extra,
                 event,
                 context=event.context)
        # TODO [2020-06-11]: Keep track of how many times user was
        # potentially naughty?
        return

    # Create history, generate ID.
    input_id = self._historian.add_text(entity, string_safe)

    # Get the command processed.
    cmd_ctx = InputContext(input_id, command_safe,
                           entity.id,
                           ident.log_name,
                           self.dotted)
    cmd_ctx.pull(event.context)
    status = self._commander.execute(entity, command_safe, cmd_ctx)

    # Update history w/ status.
    self._historian.update_executed(input_id, status)

    # TODO [2020-06-21]: Success/Failure OutputEvent?

    if not status.success:
        log.error("Failed to execute command: {}",
                  string_safe,
                  context=cmd_ctx)
        return
def register(name: str,
             klass: Type,
             deserialize_fn: Optional[YamlDeserialize],
             serialize_fn: Optional[YamlSerialize],
             implicit_rx: Optional[re.Pattern] = None) -> None:
    '''
    Basically, register with ourself and with YAML.

    In detail:
      - Create a yaml tag from the `name` string.
      - Register tag/Type in our registry.
      - Adds `klass` to YAML representer/constructor if provided functions.
      - Checks that `klass` is a YAMLObject if not provided functions.
      - Adds an implicit resolver to YAML if provided with an `implicit_rx`
        regex Pattern.

    Raises RegistryError (via log.exception) if `name` is already a YAML
    tag string, or if exactly one of serialize_fn/deserialize_fn was given,
    or if neither was given and `klass` is not a YAMLObject subclass.
    '''
    # ---
    # Tag
    # ---
    # `name` must be a plain name; if it already parses as a valid YAML
    # tag (i.e. starts with '!'), that's a caller error.
    valid, _ = tags.valid(name)
    if valid:
        msg = ("Expecting name string, not YAML Tag string. "
               "String should not start with '!'."
               f"Got: '{name}' for {klass}.")
        error = RegistryError(msg,
                              data={
                                  'name': name,
                                  'class': klass,
                                  'serialize_fn': serialize_fn,
                                  'deserialize_fn': deserialize_fn,
                                  'implicit_rx': implicit_rx,
                              })
        raise log.exception(error, msg)

    # ---
    # Register to us.
    # ---
    # _internal_register() will check/error if invalid tag.
    tag = tags.make(name)
    _internal_register(tag, klass)

    # ---
    # Register to YAML
    # ---
    # They can also optionally have an implicit resolver...
    # e.g. !duration can implicity resolve "6 seconds", "5h2s", etc... So
    # these are equivalent:
    #   - round: 6 seconds
    #   - round: !duration 6 seconds
    if implicit_rx:
        # This is for dump and load.
        yaml.add_implicit_resolver(
            tag,
            implicit_rx,
            # bug until yaml 5.2: Must specify Loader.
            # https://github.com/yaml/pyyaml/issues/294
            # https://github.com/yaml/pyyaml/pull/305
            Loader=yaml.SafeLoader)

    if deserialize_fn and serialize_fn:
        # Both custom functions provided: hook them into the safe
        # loader/dumper.
        yaml.add_constructor(tag,
                             deserialize_fn,
                             Loader=yaml.SafeLoader)
        yaml.add_representer(klass,
                             serialize_fn,
                             Dumper=yaml.SafeDumper)
    elif not deserialize_fn and not serialize_fn:
        # Neither function provided: klass must handle YAML itself by
        # being a YAMLObject subclass.
        if not issubclass(klass, yaml.YAMLObject):
            msg = (f"Class '{klass}' must either derive from YAMLObject or "
                   "provided serializer/deserializer functions for YAML "
                   f"to use. Got: {serialize_fn}, {deserialize_fn}.")
            error = RegistryError(msg,
                                  data={
                                      'name': name,
                                      'class': klass,
                                      'subclass?': issubclass(klass, yaml.YAMLObject),
                                      'serialize_fn': serialize_fn,
                                      'deserialize_fn': deserialize_fn,
                                      'implicit_rx': implicit_rx,
                                  })
            raise log.exception(error, msg)
    else:
        # Exactly one of the two functions: not allowed - need both or
        # neither.
        msg = (f"Class '{klass}' must either derive from YAMLObject or "
               "provided serializer/deserializer functions for YAML to use."
               f"Got: {serialize_fn}, {deserialize_fn}.")
        error = RegistryError(msg,
                              data={
                                  'name': name,
                                  'class': klass,
                                  'subclass?': issubclass(klass, yaml.YAMLObject),
                                  'serialize_fn': serialize_fn,
                                  'deserialize_fn': deserialize_fn,
                                  'implicit_rx': implicit_rx,
                              })
        raise log.exception(error, msg)

    log.debug(f"YAML Registry added: {name}, {klass}")
def set_up_ecs(
        test_name_file: str,
        test_case: 'unittest.TestCase',
        test_name_func: str,
        enable_debug_logs: bool,
        # Optional Debug Stuff:
        test_type: TestType = TestType.UNIT,
        debug_flags: Optional[DebugFlag] = None,
        # Optional ECS:
        require_engine: Optional[bool] = False,
        desired_systems: Iterable['SysCreateType'] = None,
        # Optional to pass in - else we'll make:
        configuration: Optional[Configuration] = None,
        time_manager: Optional[TimeManager] = None,
        event_manager: Optional[EventManager] = None,
        component_manager: Optional[ComponentManager] = None,
        entity_manager: Optional[EntityManager] = None,
        system_manager: Optional[SystemManager] = None,
        data_manager: Optional[DataManager] = None,
        identity_manager: Optional[IdentityManager] = None,
        # Optional to pass in - else we'll make if asked:
        engine: Optional[Engine] = None,
) -> Tuple[Meeting, Engine, VerediContext, List[SystemId]]:
    '''
    Creates config, managers, if not supplied (via zmake.meeting).
    Creates a managers' meeting (via zmake.meeting).
    Creates a real context (via zontext.real_contfig).
    Creates supplied Systems (using our zload.create_systems).
      - If none supplied, creates default of: (nothing)
        - These are (currently) the min required to get from disk to
          component.

    Returns:
      Tuple[Meeting, Optional[Engine], VerediContext, List[SystemId]]
      (Engine is None unless `require_engine`; docstring previously
      described the wrong tuple.)
    '''
    log.debug("zload.set_up_ecs for "
              f"{_test_name(test_name_file, test_case, test_name_func)}...")
    with log.LoggingManager.on_or_off(enable_debug_logs):
        # ---
        # Configuration
        # ---
        if not configuration:
            log.debug("zload.set_up_ecs creating Configuration...")
            configuration = zmake.config(test_type)

        # ---
        # ECS Managers
        # ---
        log.debug("zload.set_up_ecs creating Meeting...")
        meeting = run.managers(configuration,
                               time_manager=time_manager,
                               event_manager=event_manager,
                               component_manager=component_manager,
                               entity_manager=entity_manager,
                               system_manager=system_manager,
                               data_manager=data_manager,
                               identity_manager=identity_manager,
                               debug_flags=debug_flags)
        log.debug("zload.set_up_ecs running Meeting's Unit-Test Set-Up...")
        meeting._ut_set_up()

        # ---
        # Engine
        # ---
        engine = None
        if require_engine:
            log.debug("zload.set_up_ecs creating Engine...")
            engine = run.engine(configuration,
                                meeting,
                                debug_flags=debug_flags)
            log.debug("zload.set_up_ecs running Engine's Unit-Test Set-Up...")
            engine._ut_set_up()
        else:
            log.debug("zload.set_up_ecs SKIPPING Engine "
                      "creation (engine not requested).")
            log.debug("zload.set_up_ecs SKIPPING Engine's Unit-Test "
                      "Set-Up (engine not requested).")

        # ---
        # Config Context
        # ---
        log.debug("zload.set_up_ecs creating Context...")
        context = zontext.real_config(test_name_file,
                                      test_case,
                                      test_name_func,
                                      config=configuration)

        # ---
        # Additional Systems?
        # ---
        log.debug("zload.set_up_ecs creating systems...")
        system_manager = meeting.system
        sids = []
        if desired_systems:
            sids = run.system.many(system_manager, context, *desired_systems)
        elif not require_engine:
            sids = run.system.many(system_manager, context)
            # If you have required systems, see `run.system.many` docstr
            # for details but:
            #   sids = run.system.many(system_manager, context,
            #                          OneSys, TwoSys, RedSys, BlueSys)
        # Else: our engine creates the requried stuff and we don't want to
        # double-create.

    # BUG FIX: previously passed None instead of `test_name_file`, so the
    # "is done" log dropped the file name (tear_down_ecs passes it).
    log.debug("zload.set_up_ecs is done for "
              f"{_test_name(test_name_file, test_case, test_name_func)}.")
    return meeting, engine, context, sids
def add(self,
        cls_or_func: 'RegisterType',
        *dotted_label: label.LabelInput) -> None:
    '''
    This function does the actual registration.

    Walks both our registry dict and the background registry dict down the
    `dotted_label` path (creating sub-dicts as needed), warns (but still
    replaces) if something is already registered at the leaf, then hands
    off to self._register() and self._finalize_register().
    '''
    # Ignored?
    if self.ignored(cls_or_func):
        msg = (f"{cls_or_func} is in our set of ignored "
               "classes/functions that should not be registered.")
        error = RegistryError(msg,
                              data={
                                  'registree': cls_or_func,
                                  'dotted': label.normalize(dotted_label),
                                  'ignored': self._ignore,
                              })
        raise log.exception(error, msg)

    # Do any initial steps.
    dotted_list = label.regularize(*dotted_label)
    if not self._init_register(cls_or_func, dotted_list):
        # Totally ignore if not successful. _init_register() should do
        # all the erroring itself.
        return

    # Pull final key off of list so we don't make too many
    # dictionaries.
    name = str(cls_or_func)
    try:
        # Final key where the registration will actually be stored.
        leaf_key = dotted_list[-1]
    except IndexError as error:
        # Empty dotted_list - caller gave us nothing to register under.
        kwargs = log.incr_stack_level(None)
        raise log.exception(
            RegistryError,
            "Need to know what to register this ({}) as. "
            "E.g. @register('jeff', 'geoff'). Got no dotted_list: {}",
            name, dotted_list,
            **kwargs) from error

    # Our register - full info saved here.
    registry_our = self._registry
    # Background register - just names saved here.
    registry_bg = background.registry.registry(self.dotted)

    # ------------------------------
    # Get reg dicts to the leaf.
    # ------------------------------
    length = len(dotted_list)
    # -1 as we've got our config name already from that final
    # dotted_list entry.
    for i in range(length - 1):
        # Walk down into both dicts, making new empty sub-entries as
        # necessary.
        registry_our = registry_our.setdefault(dotted_list[i], {})
        registry_bg = registry_bg.setdefault(dotted_list[i], {})

    # ------------------------------
    # Register (warn if occupied).
    # ------------------------------
    # Helpful messages - but registering either way.
    try:
        if leaf_key in registry_our:
            if background.testing.get_unit_testing():
                # Unit tests: log loudly (as an exception) on replacement.
                # NOTE(review): the message below appears to be missing the
                # opening quote before {str(registry_our[leaf_key])} -
                # confirm intended text.
                msg = ("Something was already registered under this "
                       f"registry_our key... keys: {dotted_list}, "
                       f"replacing {str(registry_our[leaf_key])}' with "
                       f"this '{name}'.")
                error = KeyError(leaf_key, msg, cls_or_func)
                log.exception(error, None, msg, stacklevel=3)
            else:
                # Normal runs: just warn about the replacement.
                log.warning(
                    "Something was already registered under this "
                    "registry_our key... keys: {}, replacing "
                    "'{}' with this '{}'",
                    dotted_list,
                    str(registry_our[leaf_key]),
                    name,
                    stacklevel=3)
        else:
            log.debug("Registered: keys: {}, value '{}'",
                      dotted_list,
                      name,
                      stacklevel=3)
    except TypeError as error:
        # `in` blew up - registry walked down into a non-dict leaf.
        msg = (f"{self.klass}.add(): Our "
               "'registry_our' dict is the incorrect type? Expected "
               "something that can deal with 'in' operator. Have: "
               f"{type(registry_our)} -> {registry_our}. Trying to "
               f"register {cls_or_func} at "
               f"'{label.normalize(dotted_list)}'. "
               "Registry: \n{}")
        from veredi.base.strings import pretty
        log.exception(error, msg, pretty.indented(self._registry))
        # Reraise it. Just want more info.
        raise

    # Register cls/func to our registry, save some info to our
    # background registry.
    self._register(cls_or_func,
                   dotted_list,
                   leaf_key,
                   registry_our,
                   registry_bg)

    # ------------------------------
    # Finalize (if desired).
    # ------------------------------
    self._finalize_register(cls_or_func, dotted_list,
                            registry_our, registry_bg)