class MaxGroupRestrictionRegister(BaseRestrictionRegister, metaclass=ABCMeta):
    """Base class for all max modules per group restrictions."""

    def __init__(self, fit):
        # Container for all tracked items, keyed by their group ID
        # Format: {group ID: {items}}
        self.__group_item_map = KeyedStorage()
        # Container for items which carry a max group restriction on becoming
        # operational
        # Format: {items}
        self.__restricted_items = set()
        fit._subscribe(self, self._handler_map.keys())

    @property
    @abstractmethod
    def _max_group_attr_id(self):
        """Attribute ID whose value contains group restriction of item."""
        ...

    def _register_item(self, item):
        if not isinstance(item, TRACKED_ITEM_CLASSES):
            return
        group_id = item._type.group_id
        # Ignore items whose type isn't assigned to any group
        if group_id is None:
            return
        # Having a group ID is sufficient to enter the container of all fitted
        # items
        self.__group_item_map.add_data_entry(group_id, item)
        # To enter the restriction container, the item's type must have the
        # restriction attribute
        if self._max_group_attr_id not in item._type_attrs:
            return
        self.__restricted_items.add(item)

    def _unregister_item(self, item):
        # Just clear data containers
        group_id = item._type.group_id
        self.__group_item_map.rm_data_entry(group_id, item)
        self.__restricted_items.discard(item)

    def validate(self):
        # Container for tainted items
        tainted_items = {}
        # Go through all restricted items
        for item in self.__restricted_items:
            # Get quantity of registered items assigned to the group of the
            # current restricted item, and the item's restriction value
            group_id = item._type.group_id
            quantity = len(self.__group_item_map.get(group_id, ()))
            max_allowed_quantity = item._type_attrs[self._max_group_attr_id]
            if quantity > max_allowed_quantity:
                tainted_items[item] = MaxGroupErrorData(
                    group_id=group_id,
                    quantity=quantity,
                    max_allowed_quantity=max_allowed_quantity)
        # Raise error if we detected any tainted items
        if tainted_items:
            raise RestrictionValidationError(tainted_items)
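# A minimal sketch of how a concrete restriction could be derived from the
# base class above. The attribute ID constant (AttrId.max_group_fitted) and
# the message-to-handler wiring are assumptions made for illustration; actual
# subclasses may use different constants and messages.
class MaxGroupFittedRestrictionRegister(MaxGroupRestrictionRegister):
    """Sketch: limit how many items of a group may be fitted at once."""

    _max_group_attr_id = AttrId.max_group_fitted  # hypothetical constant

    def _handle_item_loaded(self, msg):
        self._register_item(msg.item)

    def _handle_item_unloaded(self, msg):
        self._unregister_item(msg.item)

    _handler_map = {
        ItemLoaded: _handle_item_loaded,
        ItemUnloaded: _handle_item_unloaded}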
class DmgDealerRegister(BaseStatRegister):
    """Class which tracks all effects which deal damage.

    Provides functionality to fetch various aggregated stats.
    """

    def __init__(self, fit):
        # Format: {item: {effect1, effect2}}
        self.__dmg_dealers = KeyedStorage()
        fit._subscribe(self, self._handler_map.keys())

    def get_volley(self, item_filter, tgt_resists):
        volleys = []
        for item in self.__dd_iter(item_filter):
            volley = item.get_volley(tgt_resists)
            volleys.append(volley)
        return DmgStats._combine(volleys)

    def get_dps(self, item_filter, reload, tgt_resists):
        dpss = []
        for item in self.__dd_iter(item_filter):
            dps = item.get_dps(reload, tgt_resists)
            dpss.append(dps)
        return DmgStats._combine(dpss)

    def __dd_iter(self, item_filter):
        for item in self.__dmg_dealers:
            if item_filter is None or item_filter(item):
                yield item

    # Message handling
    def _handle_effects_started(self, msg):
        item_effects = msg.item._type_effects
        for effect_id in msg.effect_ids:
            effect = item_effects[effect_id]
            if isinstance(effect, DmgDealerEffect):
                self.__dmg_dealers.add_data_entry(msg.item, effect)

    def _handle_effects_stopped(self, msg):
        item_effects = msg.item._type_effects
        for effect_id in msg.effect_ids:
            effect = item_effects[effect_id]
            if isinstance(effect, DmgDealerEffect):
                self.__dmg_dealers.rm_data_entry(msg.item, effect)

    _handler_map = {
        EffectsStarted: _handle_effects_started,
        EffectsStopped: _handle_effects_stopped}
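# The __dd_iter helper above implements a simple optional-filter pattern:
# when item_filter is None every tracked damage dealer is yielded, otherwise
# only items for which the callable returns a truthy value. A standalone
# sketch of the same idea (the names below are illustrative and not part of
# the register's API):
def filtered(items, item_filter=None):
    for item in items:
        if item_filter is None or item_filter(item):
            yield item

# e.g. list(filtered([1, 2, 3, 4], lambda i: i % 2)) == [1, 3], which mirrors
# calls like get_dps(item_filter=lambda item: isinstance(item, Drone), ...)
# where Drone stands for some hypothetical item class.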
class CalculationService(BaseSubscriber):
    """Service which supports attribute calculation.

    This class collects data about various items and relations between them,
    and, via exposed methods which provide data about these connections, helps
    the attribute map to calculate modified attribute values.
    """

    def __init__(self, solar_system):
        self.__solar_system = solar_system
        self.__affections = AffectionRegister()
        self.__projections = ProjectionRegister()
        # Format: {projector: {affector specs}}
        self.__warfare_buffs = KeyedStorage()
        # Container with affector specs which will receive messages
        # Format: {message type: set(affector specs)}
        self.__subscribed_affectors = KeyedStorage()

    def get_modifications(self, affectee_item, affectee_attr_id):
        """Get modifications of affectee attribute on affectee item.

        Args:
            affectee_item: Item, for which we're getting modifications.
            affectee_attr_id: Affectee attribute ID; only modifications which
                influence attribute with this ID will be returned.

        Returns:
            List of tuples in (modification operator, modification value,
            resistance value, aggregate mode, aggregate key, affector item)
            format.
        """
        # Use a list because we can have multiple tuples with the same values
        # as a valid configuration
        mods = []
        for affector_spec in self.__affections.get_affector_specs(
                affectee_item):
            affector_modifier = affector_spec.modifier
            affector_item = affector_spec.item
            if affector_modifier.affectee_attr_id != affectee_attr_id:
                continue
            try:
                mod_op, mod_value, mod_aggregate_mode, mod_aggregate_key = (
                    affector_modifier.get_modification(affector_item))
            # Do nothing here - errors should be logged in modification
            # getter or even earlier
            except ModificationCalculationError:
                continue
            # Get resistance value
            resist_attr_id = affector_spec.effect.resist_attr_id
            carrier_item = affectee_item._solsys_carrier
            if resist_attr_id and carrier_item is not None:
                try:
                    resist_value = carrier_item.attrs[resist_attr_id]
                except KeyError:
                    resist_value = 1
            else:
                resist_value = 1
            mods.append((
                mod_op, mod_value, resist_value, mod_aggregate_mode,
                mod_aggregate_key, affector_item))
        return mods

    # Handle fits
    def _handle_fit_added(self, fit):
        fit._subscribe(self, self._handler_map.keys())

    def _handle_fit_removed(self, fit):
        fit._unsubscribe(self, self._handler_map.keys())

    # Handle item changes which are significant for calculator
    def _handle_fleet_fit_added(self, msg):
        fits_effect_applications = {}
        for projector in self.__projections.get_projectors():
            if not isinstance(projector.effect, WarfareBuffEffect):
                continue
            projector_fit = projector.item._fit
            # Affect this fit by buffs existing in fleet
            if (msg.fit.ship is not None
                    and projector_fit.fleet is msg.fit.fleet):
                fits_effect_applications.setdefault(projector_fit, []).append(
                    (projector, [msg.fit.ship]))
            # Affect other fits by buffs from this fit
            if projector_fit is msg.fit:
                for fit in msg.fit.fleet.fits:
                    if fit is msg.fit:
                        continue
                    fits_effect_applications.setdefault(
                        projector_fit, []).append((projector, [fit.ship]))
        # Apply warfare buffs
        if fits_effect_applications:
            for fit, effect_applications in fits_effect_applications.items():
                msgs = []
                for projector, tgt_items in effect_applications:
                    msgs.append(EffectApplied(
                        projector.item, projector.effect.id, tgt_items))
                fit._publish_bulk(msgs)

    def _handle_fleet_fit_removed(self, msg):
        fits_effect_unapplications = {}
        for projector in self.__projections.get_projectors():
            if not isinstance(projector.effect, WarfareBuffEffect):
                continue
            projector_fit = projector.item._fit
            # Unaffect this fit by buffs existing in fleet
            if (msg.fit.ship is not None
                    and projector_fit.fleet is msg.fit.fleet):
                fits_effect_unapplications.setdefault(
                    projector_fit, []).append((projector, [msg.fit.ship]))
            # Unaffect other fits by buffs from this fit
            if projector_fit is msg.fit:
                for fit in msg.fit.fleet.fits:
                    if fit is msg.fit:
                        continue
                    fits_effect_unapplications.setdefault(
                        projector_fit, []).append((projector, [fit.ship]))
        # Unapply warfare buffs
        if fits_effect_unapplications:
            for fit, effect_unapplications in (
                    fits_effect_unapplications.items()):
                msgs = []
                for projector, tgt_items in effect_unapplications:
                    msgs.append(EffectUnapplied(
                        projector.item, projector.effect.id, tgt_items))
                fit._publish_bulk(msgs)

    def _handle_item_loaded(self, msg):
        item = msg.item
        self.__affections.register_affectee_item(item)
        if isinstance(item, SolarSystemItemMixin):
            self.__projections.register_solsys_item(item)

    def _handle_item_unloaded(self, msg):
        item = msg.item
        self.__affections.unregister_affectee_item(item)
        if isinstance(item, SolarSystemItemMixin):
            self.__projections.unregister_solsys_item(item)

    def _handle_effects_started(self, msg):
        item = msg.item
        effect_ids = msg.effect_ids
        attr_changes = {}
        for affector_spec in self.__generate_local_affector_specs(
                item, effect_ids):
            # Register the affector spec
            if isinstance(affector_spec.modifier, BasePythonModifier):
                self.__subscribe_python_affector_spec(msg.fit, affector_spec)
            self.__affections.register_local_affector_spec(affector_spec)
            # Clear values of attributes dependent on the affector spec
            for affectee_item in self.__affections.get_local_affectee_items(
                    affector_spec):
                attr_id = affector_spec.modifier.affectee_attr_id
                if affectee_item.attrs._force_recalc(attr_id):
                    attr_ids = attr_changes.setdefault(affectee_item, set())
                    attr_ids.add(attr_id)
        # Register projectors
        for projector in self.__generate_projectors(item, effect_ids):
            self.__projections.register_projector(projector)
        # Register warfare buffs
        effect_applications = []
        item_fleet = msg.fit.fleet
        for effect_id in effect_ids:
            effect = item._type_effects[effect_id]
            if not isinstance(effect, WarfareBuffEffect):
                continue
            projector = Projector(item, effect)
            for buff_id_attr_id in WARFARE_BUFF_ATTRS:
                try:
                    buff_id = item.attrs[buff_id_attr_id]
                except KeyError:
                    continue
                getter = (
                    self.__solar_system.source.cache_handler
                    .get_buff_templates)
                try:
                    buff_templates = getter(buff_id)
                except BuffTemplatesFetchError:
                    continue
                affector_attr_id = WARFARE_BUFF_ATTRS[buff_id_attr_id]
                if not buff_templates:
                    continue
                for buff_template in buff_templates:
                    modifier = DogmaModifier._make_from_buff_template(
                        buff_template, affector_attr_id)
                    affector_spec = AffectorSpec(item, effect, modifier)
                    self.__warfare_buffs.add_data_entry(
                        projector, affector_spec)
            tgt_ships = []
            for tgt_fit in self.__solar_system.fits:
                if (tgt_fit is msg.fit or (
                        item_fleet is not None
                        and tgt_fit.fleet is item_fleet)):
                    tgt_ship = tgt_fit.ship
                    if tgt_ship is not None:
                        tgt_ships.append(tgt_ship)
            effect_applications.append((projector, tgt_ships))
        if attr_changes:
            self.__publish_attr_changes(attr_changes)
        # Apply warfare buffs
        if effect_applications:
            msgs = []
            for projector, tgt_items in effect_applications:
                msgs.append(EffectApplied(
                    projector.item, projector.effect.id, tgt_items))
            msg.fit._publish_bulk(msgs)

    def _handle_effects_stopped(self, msg):
        # Unregister warfare buffs
        effect_unapplications = []
        for projector in self.__generate_projectors(msg.item, msg.effect_ids):
            if projector not in self.__warfare_buffs:
                continue
            tgt_ships = self.__projections.get_projector_tgts(projector)
            effect_unapplications.append((projector, tgt_ships))
            del self.__warfare_buffs[projector]
        # Unapply warfare buffs
        if effect_unapplications:
            msgs = []
            for projector, tgt_items in effect_unapplications:
                msgs.append(EffectUnapplied(
                    projector.item, projector.effect.id, tgt_items))
            msg.fit._publish_bulk(msgs)
        attr_changes = {}
        # Remove values of affectee attributes
        for affector_spec in self.__generate_local_affector_specs(
                msg.item, msg.effect_ids):
            # Clear values of attributes dependent on the affector spec
            for affectee_item in self.__affections.get_local_affectee_items(
                    affector_spec):
                attr_id = affector_spec.modifier.affectee_attr_id
                if affectee_item.attrs._force_recalc(attr_id):
                    attr_ids = attr_changes.setdefault(affectee_item, set())
                    attr_ids.add(attr_id)
            # Unregister the affector spec
            self.__affections.unregister_local_affector_spec(affector_spec)
            if isinstance(affector_spec.modifier, BasePythonModifier):
                self.__unsubscribe_python_affector_spec(msg.fit, affector_spec)
        # Unregister projectors
        for projector in self.__generate_projectors(msg.item, msg.effect_ids):
            self.__projections.unregister_projector(projector)
        if attr_changes:
            self.__publish_attr_changes(attr_changes)

    def _handle_effect_applied(self, msg):
        attr_changes = {}
        for affector_spec in self.__generate_projected_affectors(
                msg.item, (msg.effect_id, )):
            # Register the affector spec
            self.__affections.register_projected_affector_spec(
                affector_spec, msg.tgt_items)
            # Clear values of attributes dependent on the affector spec
            for affectee_item in (
                    self.__affections.get_projected_affectee_items(
                        affector_spec, msg.tgt_items)):
                attr_id = affector_spec.modifier.affectee_attr_id
                if affectee_item.attrs._force_recalc(attr_id):
                    attr_ids = attr_changes.setdefault(affectee_item, set())
                    attr_ids.add(attr_id)
        # Apply projector
        for projector in self.__generate_projectors(
                msg.item, (msg.effect_id, )):
            self.__projections.apply_projector(projector, msg.tgt_items)
        if attr_changes:
            self.__publish_attr_changes(attr_changes)

    def _handle_effect_unapplied(self, msg):
        attr_changes = {}
        for affector_spec in self.__generate_projected_affectors(
                msg.item, (msg.effect_id, )):
            # Clear values of attributes dependent on the affector spec
            for affectee_item in (
                    self.__affections.get_projected_affectee_items(
                        affector_spec, msg.tgt_items)):
                attr_id = affector_spec.modifier.affectee_attr_id
                if affectee_item.attrs._force_recalc(attr_id):
                    attr_ids = attr_changes.setdefault(affectee_item, set())
                    attr_ids.add(attr_id)
            # Unregister the affector spec
            self.__affections.unregister_projected_affector(
                affector_spec, msg.tgt_items)
        # Unapply projector
        for projector in self.__generate_projectors(
                msg.item, (msg.effect_id, )):
            self.__projections.unapply_projector(projector, msg.tgt_items)
        if attr_changes:
            self.__publish_attr_changes(attr_changes)

    # Methods to clear calculated child attributes when parent attributes
    # change
    def _revise_regular_attr_dependents(self, msg):
        """Remove calculated attribute values which rely on passed attribute.

        Removing them allows to recalculate updated values. Here we process
        all regular dependents, which include dependencies specified via the
        capped attribute map and via affector specs with dogma modifiers.
        Affector specs with python modifiers are processed separately.
        """
        affections = self.__affections
        projections = self.__projections
        effect_unapplications = []
        # Unapply warfare buffs
        for item, attr_ids in msg.attr_changes.items():
            for effect in item._type_effects.values():
                projector = Projector(item, effect)
                if projector not in self.__warfare_buffs:
                    continue
                if not attr_ids.intersection(WARFARE_BUFF_ATTRS):
                    continue
                tgt_items = self.__projections.get_projector_tgts(projector)
                effect_unapplications.append((projector, tgt_items))
        msgs = []
        for projector, tgt_items in effect_unapplications:
            msgs.append(EffectUnapplied(
                projector.item, projector.effect.id, tgt_items))
        msg.fit._publish_bulk(msgs)
        attr_changes = {}
        for item, attr_ids in msg.attr_changes.items():
            # Remove values of affectee attributes capped by the changing
            # attribute
            for attr_id in attr_ids:
                for capped_attr_id in item.attrs._cap_map.get(attr_id, ()):
                    if item.attrs._force_recalc(capped_attr_id):
                        attr_changes.setdefault(item, set()).add(
                            capped_attr_id)
            # Force attribute recalculation when local affector spec
            # modification changes
            for affector_spec in self.__generate_local_affector_specs(
                    item, item._running_effect_ids):
                affector_modifier = affector_spec.modifier
                # Only dogma modifiers have source attribute specified, python
                # modifiers are processed separately
                if (not isinstance(affector_modifier, DogmaModifier)
                        or affector_modifier.affector_attr_id not in attr_ids):
                    continue
                # Remove values
                for affectee_item in affections.get_local_affectee_items(
                        affector_spec):
                    attr_id = affector_modifier.affectee_attr_id
                    if affectee_item.attrs._force_recalc(attr_id):
                        attr_changes.setdefault(
                            affectee_item, set()).add(attr_id)
            # Force attribute recalculation when projected affector spec
            # modification changes
            for projector in self.__generate_projectors(
                    item, item._running_effect_ids):
                tgt_items = projections.get_projector_tgts(projector)
                # When projector doesn't target any items, we do not need to
                # clean anything
                if not tgt_items:
                    continue
                for affector_spec in self.__generate_projected_affectors(
                        item, (projector.effect.id, )):
                    affector_modifier = affector_spec.modifier
                    # Only dogma modifiers have source attribute specified,
                    # python modifiers are processed separately
                    if (not isinstance(affector_modifier, DogmaModifier)
                            or affector_modifier.affector_attr_id
                            not in attr_ids):
                        continue
                    for affectee_item in (
                            affections.get_projected_affectee_items(
                                affector_spec, tgt_items)):
                        attr_id = affector_modifier.affectee_attr_id
                        if affectee_item.attrs._force_recalc(attr_id):
                            attr_changes.setdefault(
                                affectee_item, set()).add(attr_id)
            # Force attribute recalculation if changed attribute defines
            # resistance to some effect
            for projector in projections.get_tgt_projectors(item):
                effect = projector.effect
                if effect.resist_attr_id not in attr_ids:
                    continue
                tgt_items = projections.get_projector_tgts(projector)
                for affector_spec in self.__generate_projected_affectors(
                        projector.item, (effect.id, )):
                    for affectee_item in (
                            affections.get_projected_affectee_items(
                                affector_spec, tgt_items)):
                        attr_id = affector_spec.modifier.affectee_attr_id
                        if affectee_item.attrs._force_recalc(attr_id):
                            attr_changes.setdefault(
                                affectee_item, set()).add(attr_id)
        # Unregister warfare buffs only after composing list of attributes we
        # should update
        for projector, tgt_items in effect_unapplications:
            del self.__warfare_buffs[projector]
        if attr_changes:
            self.__publish_attr_changes(attr_changes)
        # Register warfare buffs
        effect_applications = []
        for item, attr_ids in msg.attr_changes.items():
            if not attr_ids.intersection(WARFARE_BUFF_ATTRS):
                continue
            item_fleet = item._fit.fleet
            for effect_id in item._running_effect_ids:
                effect = item._type_effects[effect_id]
                if not isinstance(effect, WarfareBuffEffect):
                    continue
                projector = Projector(item, effect)
                for buff_id_attr_id in WARFARE_BUFF_ATTRS:
                    try:
                        buff_id = item.attrs[buff_id_attr_id]
                    except KeyError:
                        continue
                    getter = (
                        self.__solar_system.source.cache_handler
                        .get_buff_templates)
                    try:
                        buff_templates = getter(buff_id)
                    except BuffTemplatesFetchError:
                        continue
                    affector_attr_id = WARFARE_BUFF_ATTRS[buff_id_attr_id]
                    if not buff_templates:
                        continue
                    for buff_template in buff_templates:
                        modifier = DogmaModifier._make_from_buff_template(
                            buff_template, affector_attr_id)
                        affector_spec = AffectorSpec(item, effect, modifier)
                        self.__warfare_buffs.add_data_entry(
                            projector, affector_spec)
                tgt_ships = []
                for tgt_fit in self.__solar_system.fits:
                    if (tgt_fit is msg.fit or (
                            item_fleet is not None
                            and tgt_fit.fleet is item_fleet)):
                        tgt_ship = tgt_fit.ship
                        if tgt_ship is not None:
                            tgt_ships.append(tgt_ship)
                effect_applications.append((projector, tgt_ships))
        if attr_changes:
            self.__publish_attr_changes(attr_changes)
        # Apply warfare buffs
        if effect_applications:
            msgs = []
            for projector, tgt_items in effect_applications:
                msgs.append(EffectApplied(
                    projector.item, projector.effect.id, tgt_items))
            msg.fit._publish_bulk(msgs)

    def _revise_python_attr_dependents(self, msg):
        """Remove calculated attribute values when necessary.

        Here we go through python modifiers, deliver the message to them, and
        if, based on contents of the message, they decide that calculated
        values should be removed, we remove values which depend on such
        modifiers.
        """
        attr_changes = {}
        # If there are no subscribed affector specs for the received message
        # type, do nothing
        msg_type = type(msg)
        if msg_type not in self.__subscribed_affectors:
            return
        # Otherwise, ask modifier if value of attribute it calculates may
        # change, and force recalculation if answer is yes
        for affector_spec in self.__subscribed_affectors[msg_type]:
            if not affector_spec.modifier.revise_modification(
                    msg, affector_spec.item):
                continue
            for affectee_item in self.__affections.get_local_affectee_items(
                    affector_spec):
                attr_id = affector_spec.modifier.affectee_attr_id
                if affectee_item.attrs._force_recalc(attr_id):
                    attr_ids = attr_changes.setdefault(affectee_item, set())
                    attr_ids.add(attr_id)
        if attr_changes:
            self.__publish_attr_changes(attr_changes)

    # Message routing
    _handler_map = {
        FleetFitAdded: _handle_fleet_fit_added,
        FleetFitRemoved: _handle_fleet_fit_removed,
        ItemLoaded: _handle_item_loaded,
        ItemUnloaded: _handle_item_unloaded,
        EffectsStarted: _handle_effects_started,
        EffectsStopped: _handle_effects_stopped,
        EffectApplied: _handle_effect_applied,
        EffectUnapplied: _handle_effect_unapplied,
        AttrsValueChanged: _revise_regular_attr_dependents}

    def _notify(self, msg):
        BaseSubscriber._notify(self, msg)
        # Relay all messages to python modifiers, as in case of python
        # modifiers any message may result in deleting dependent attributes
        self._revise_python_attr_dependents(msg)

    # Affector-related methods
    def __generate_local_affector_specs(self, item, effect_ids):
        """Get local affector specs for passed item and effects."""
        affector_specs = set()
        item_effects = item._type_effects
        for effect_id in effect_ids:
            effect = item_effects[effect_id]
            for modifier in effect.local_modifiers:
                affector_spec = AffectorSpec(item, effect, modifier)
                affector_specs.add(affector_spec)
        return affector_specs

    def __generate_projected_affectors(self, item, effect_ids):
        """Get projected affector specs for passed item and effects."""
        affector_specs = set()
        item_effects = item._type_effects
        for effect_id in effect_ids:
            effect = item_effects[effect_id]
            projector = Projector(item, effect)
            if projector in self.__warfare_buffs:
                affector_specs.update(self.__warfare_buffs[projector])
            for modifier in effect.projected_modifiers:
                affector_spec = AffectorSpec(item, effect, modifier)
                affector_specs.add(affector_spec)
        return affector_specs

    def __subscribe_python_affector_spec(self, fit, affector_spec):
        """Subscribe affector spec with python modifier."""
        to_subscribe = set()
        for msg_type in affector_spec.modifier.revise_msg_types:
            # Subscribe service to new message type only if there's no such
            # subscription yet
            if (msg_type not in self._handler_map
                    and msg_type not in self.__subscribed_affectors):
                to_subscribe.add(msg_type)
            # Add affector spec to subscriber map to let it receive messages
            self.__subscribed_affectors.add_data_entry(
                msg_type, affector_spec)
        if to_subscribe:
            fit._subscribe(self, to_subscribe)

    def __unsubscribe_python_affector_spec(self, fit, affector_spec):
        """Unsubscribe affector spec with python modifier."""
        to_unsubscribe = set()
        for msg_type in affector_spec.modifier.revise_msg_types:
            # Make sure affector spec will not receive messages anymore
            self.__subscribed_affectors.rm_data_entry(msg_type, affector_spec)
            # Unsubscribe service from message type if there are no recipients
            # anymore
            if (msg_type not in self._handler_map
                    and msg_type not in self.__subscribed_affectors):
                to_unsubscribe.add(msg_type)
        if to_unsubscribe:
            fit._unsubscribe(self, to_unsubscribe)

    # Projector-related methods
    def __generate_projectors(self, item, effect_ids):
        """Get projectors spawned by the item."""
        projectors = set()
        item_effects = item._type_effects
        for effect_id in effect_ids:
            effect = item_effects[effect_id]
            if (effect.category_id == EffectCategoryId.target
                    or isinstance(effect, WarfareBuffEffect)):
                projector = Projector(item, effect)
                projectors.add(projector)
        return projectors

    # Auxiliary methods
    def __publish_attr_changes(self, attr_changes):
        # Format: {fit: {item: {attr_ids}}}
        fit_changes_regular = {}
        # Format: {fit: {item: {attr_ids}}}
        fit_changes_masked = {}
        for item, attr_ids in attr_changes.items():
            item_fit = item._fit
            item_attr_overrides = item.attrs._override_callbacks
            item_changes_regular = attr_ids.difference(item_attr_overrides)
            item_changes_masked = attr_ids.intersection(item_attr_overrides)
            if item_changes_regular:
                fit_changes_regular.setdefault(
                    item_fit, {})[item] = item_changes_regular
            if item_changes_masked:
                fit_changes_masked.setdefault(
                    item_fit, {})[item] = item_changes_masked
        # Format: {fit: [messages]}
        fits_msgs = {}
        for fit, attr_changes in fit_changes_regular.items():
            msg = AttrsValueChanged(attr_changes)
            fits_msgs.setdefault(fit, []).append(msg)
        for fit, attr_changes in fit_changes_masked.items():
            msg = AttrsValueChangedMasked(attr_changes)
            fits_msgs.setdefault(fit, []).append(msg)
        for fit, msgs in fits_msgs.items():
            fit._publish_bulk(msgs)
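# get_modifications above looks up the resistance ("resonance") attribute on
# the target's carrier and returns it alongside each modification; the
# attribute map later multiplies the normalized modification by it. Resonance
# 1 means 0% resistance, resonance 0 means 100% resistance. A standalone
# sketch of that arithmetic (the function name is illustrative only):
def apply_resonance(reduced_multiplier, resonance):
    """Scale a reduced multiplier (e.g. -0.2 for -20%) by target resonance."""
    return reduced_multiplier * resonance

# e.g. a -20% modification against a target with resonance 0.7 (30%
# resistance) becomes apply_resonance(-0.2, 0.7) == -0.14, i.e. -14%.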
class SlotIndexRestrictionRegister(BaseRestrictionRegister, metaclass=ABCMeta):
    """Base class for all slot index restrictions.

    It doesn't allow multiple items to take the same numbered slot.
    """

    def __init__(self, fit):
        # All items which possess index of slot are stored in this container
        # Format: {slot index: {items}}
        self.__index_item_map = KeyedStorage()
        fit._subscribe(self, self._handler_map.keys())

    @property
    @abstractmethod
    def _slot_index_attr_id(self):
        """This attribute's value on item represents index of slot."""
        ...

    @property
    @abstractmethod
    def _item_class(self):
        """Items belonging to this class are restricted."""
        ...

    def _handle_item_loaded(self, msg):
        item = msg.item
        # Skip items which do not belong to the specified class. Initially
        # there was no such check, but there was an issue with the Amarr
        # Battlecruisers skill. As of 2018-09-19, it still has the
        # subSystemSlot attribute with value 125, which overlaps with t3c core
        # subsystems
        if not isinstance(item, self._item_class):
            return
        # Skip items which don't have index specified
        slot_index = item._type_attrs.get(self._slot_index_attr_id)
        if slot_index is None:
            return
        self.__index_item_map.add_data_entry(slot_index, item)

    def _handle_item_unloaded(self, msg):
        item = msg.item
        if not isinstance(item, self._item_class):
            return
        slot_index = item._type_attrs.get(self._slot_index_attr_id)
        if slot_index is None:
            return
        self.__index_item_map.rm_data_entry(slot_index, item)

    _handler_map = {
        ItemLoaded: _handle_item_loaded,
        ItemUnloaded: _handle_item_unloaded}

    def validate(self):
        tainted_items = {}
        for slot_index, slot_index_items in self.__index_item_map.items():
            # If more than one item occupies the same slot, all items in this
            # slot are tainted
            if len(slot_index_items) > 1:
                for item in slot_index_items:
                    tainted_items[item] = SlotIndexErrorData(
                        slot_index=slot_index)
        if tainted_items:
            raise RestrictionValidationError(tainted_items)
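# A minimal sketch of a concrete slot index restriction built on the base
# class above. AttrId.subsystem_slot and the Subsystem item class are
# assumptions used for illustration; actual subclasses may use different
# constants and classes.
class SubsystemIndexRestrictionRegister(SlotIndexRestrictionRegister):
    """Sketch: multiple subsystems cannot share the same subsystem slot."""

    _slot_index_attr_id = AttrId.subsystem_slot  # hypothetical constant
    _item_class = Subsystem  # hypothetical item class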
class ProjectionRegister:
    """Keeps track of various projection-related connections."""

    def __init__(self):
        # Format: {projectors}
        self.__projectors = set()
        # Projectors residing on solar system item
        # Format: {carrier item: {projectors}}
        self.__carrier_projectors = KeyedStorage()
        # Projectors whose carrying solar system item is not present
        # Format: {projectors}
        self.__carrierless_projectors = set()
        # Solar system items affected by projector
        # Format: {projector: {target items}}
        self.__projector_tgts = KeyedStorage()
        # Projectors affecting solar system item
        # Format: {target item: {projectors}}
        self.__tgt_projectors = KeyedStorage()

    # Query methods
    def get_projector_tgts(self, projector):
        """Get solar system items which are under effect of passed projector."""
        return self.__projector_tgts.get(projector, ())

    def get_tgt_projectors(self, tgt_item):
        """Get projectors influencing passed solar system item."""
        return self.__tgt_projectors.get(tgt_item, ())

    def get_carrier_projectors(self, carrier_item):
        """Get projectors which are exerted by passed carrier's items."""
        return self.__carrier_projectors.get(carrier_item, ())

    def get_projectors(self):
        """Get all known projectors."""
        return self.__projectors

    # Maintenance methods
    def register_projector(self, projector):
        self.__projectors.add(projector)
        carrier_item = projector.item._solsys_carrier
        if carrier_item is not None:
            self.__carrier_projectors.add_data_entry(carrier_item, projector)
        else:
            self.__carrierless_projectors.add(projector)

    def unregister_projector(self, projector):
        self.__projectors.discard(projector)
        carrier_item = projector.item._solsys_carrier
        if carrier_item is not None:
            self.__carrier_projectors.rm_data_entry(carrier_item, projector)
        else:
            self.__carrierless_projectors.discard(projector)

    def apply_projector(self, projector, tgt_items):
        self.__projector_tgts.add_data_set(projector, tgt_items)
        for tgt_item in tgt_items:
            self.__tgt_projectors.add_data_entry(tgt_item, projector)

    def unapply_projector(self, projector, tgt_items):
        self.__projector_tgts.rm_data_set(projector, tgt_items)
        for tgt_item in tgt_items:
            self.__tgt_projectors.rm_data_entry(tgt_item, projector)

    def register_solsys_item(self, solsys_item):
        projectors = set()
        for projector in self.__carrierless_projectors:
            if projector.item._solsys_carrier is solsys_item:
                projectors.add(projector)
        if projectors:
            self.__carrierless_projectors.difference_update(projectors)
            self.__carrier_projectors.add_data_set(solsys_item, projectors)

    def unregister_solsys_item(self, solsys_item):
        projectors = self.__carrier_projectors.get(solsys_item, ())
        if projectors:
            self.__carrierless_projectors.update(projectors)
            self.__carrier_projectors.rm_data_set(solsys_item, projectors)
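# apply_projector/unapply_projector keep two mirrored containers in sync so
# that both "which items does this projector affect" and "which projectors
# affect this item" are cheap lookups. A standalone sketch of the same
# bookkeeping using plain dicts of sets (illustrative only, not the
# register's API):
class MirroredProjectionSketch:
    """Illustrative only: mirrors ProjectionRegister's two-way bookkeeping."""

    def __init__(self):
        self.projector_tgts = {}   # {projector: {target items}}
        self.tgt_projectors = {}   # {target item: {projectors}}

    def apply(self, projector, tgt_items):
        self.projector_tgts.setdefault(projector, set()).update(tgt_items)
        for tgt_item in tgt_items:
            self.tgt_projectors.setdefault(tgt_item, set()).add(projector)

    def unapply(self, projector, tgt_items):
        self.projector_tgts.get(projector, set()).difference_update(tgt_items)
        for tgt_item in tgt_items:
            self.tgt_projectors.get(tgt_item, set()).discard(projector)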
class MutableAttrMap:
    """Map which contains modified attribute values.

    It provides some of the facilities which help to calculate, store and
    provide access to modified attribute values.
    """

    def __init__(self, item):
        self.__item = item
        # Actual container of calculated attributes
        # Format: {attribute ID: value}
        self.__modified_attrs = {}
        # Override and cap maps are initialized as None to save memory, as
        # they are not needed most of the time
        self.__override_callbacks = None
        self.__cap_map = None

    def __getitem__(self, attr_id):
        # Overridden values are priority. Access 'private' override callbacks
        # map directly due to performance reasons
        if (self.__override_callbacks is not None
                and attr_id in self.__override_callbacks):
            callback, args, kwargs = self.__override_callbacks[attr_id]
            return callback(*args, **kwargs)
        # If no override is set, use modified value. If value is stored in
        # modified map, it's considered valid
        try:
            value = self.__modified_attrs[attr_id]
        # Else, we have to run full calculation process
        except KeyError:
            try:
                value = self.__calculate(attr_id)
            except CALCULATE_RAISABLE_EXCEPTIONS as e:
                raise KeyError(attr_id) from e
            else:
                self.__modified_attrs[attr_id] = value
        return value

    def __len__(self):
        return len(self.keys())

    def __contains__(self, attr_id):
        return attr_id in self.keys()

    def __iter__(self):
        for k in self.keys():
            yield k

    def _force_recalc(self, attr_id):
        """Force recalculation of attribute with passed ID.

        Returns:
            True if attribute was calculated, False if it wasn't.
        """
        try:
            del self.__modified_attrs[attr_id]
        except KeyError:
            return False
        else:
            return True

    def get(self, attr_id, default=None):
        # Almost a copy-paste of __getitem__ due to performance reasons -
        # attribute getters should make as few calls as possible, especially
        # when attribute is already calculated
        if (self.__override_callbacks is not None
                and attr_id in self.__override_callbacks):
            callback, args, kwargs = self.__override_callbacks[attr_id]
            return callback(*args, **kwargs)
        try:
            value = self.__modified_attrs[attr_id]
        except KeyError:
            try:
                value = self.__calculate(attr_id)
            except CALCULATE_RAISABLE_EXCEPTIONS:
                return default
            else:
                self.__modified_attrs[attr_id] = value
        return value

    def keys(self):
        # Return union of attributes from base, modified and override
        # dictionaries
        return set(chain(
            self.__item._type_attrs, self.__modified_attrs,
            self.__override_callbacks or {}))

    def items(self):
        return set((attr_id, self.get(attr_id)) for attr_id in self.keys())

    def _clear(self):
        """Reset map to its initial state.

        Overrides are not removed. Messages for cleared attributes are not
        sent.
        """
        self.__modified_attrs.clear()
        self.__cap_map = None

    def __calculate(self, attr_id):
        """Run calculations to find the actual value of attribute.

        Args:
            attr_id: ID of attribute to be calculated.

        Returns:
            Calculated attribute value.

        Raises:
            AttrMetadataError: If metadata of attribute being calculated
                cannot be fetched.
            BaseValueError: If base value for attribute being calculated
                cannot be found.
        """
        item = self.__item
        # Attribute object for attribute being calculated
        try:
            attr = item._fit.solar_system.source.cache_handler.get_attr(
                attr_id)
        # Raise error if we can't get metadata for requested attribute
        except (AttributeError, AttrFetchError) as e:
            msg = (
                'unable to fetch metadata for attribute {}, '
                'requested for item type {}'
            ).format(attr_id, item._type_id)
            logger.warning(msg)
            raise AttrMetadataError(attr_id) from e
        # Base attribute value which we'll use for modification
        try:
            value = item._type_attrs[attr_id]
        # If attribute isn't available on item type, base off its default
        # value
        except KeyError:
            value = attr.default_value
            # If item type attribute is not specified and default value isn't
            # available, raise error - without valid base we can't keep going
            if value is None:
                msg = (
                    'unable to find base value for attribute {} on item '
                    'type {}'
                ).format(attr_id, item._type_id)
                logger.info(msg)
                raise BaseValueError(attr_id)
        # Format: {operator: [values]}
        stack = {}
        # Format: {operator: [values]}
        stack_penalized = {}
        # Format: {(operator, aggregate key): [(value, penalize)]}
        aggregate_min = {}
        # Format: {(operator, aggregate key): [(value, penalize)]}
        aggregate_max = {}
        # Now, go through all affectors affecting our item
        for (mod_operator, mod_value, resist_value, mod_aggregate_mode,
                mod_aggregate_key, affector_item) in (
                item._fit.solar_system._calculator.get_modifications(
                    item, attr_id)):
            # Normalize operations to just three types: assignments,
            # additions, reduced multiplications
            try:
                normalization_func = NORMALIZATION_MAP[mod_operator]
            # Log error on any unknown operator types
            except KeyError:
                msg = (
                    'malformed modifier on item type {}: unknown operator {}'
                ).format(affector_item._type_id, mod_operator)
                logger.warning(msg)
                continue
            # Resistance attribute actually defines resonance, where 1 means
            # 0% resistance and 0 means 100% resistance
            mod_value = normalization_func(mod_value) * resist_value
            # Decide if modification should be stacking penalized or not
            penalize = (
                not attr.stackable
                and affector_item._type.category_id
                not in PENALTY_IMMUNE_CATEGORY_IDS
                and mod_operator in PENALIZABLE_OPERATORS)
            if mod_aggregate_mode == ModAggregateMode.stack:
                if penalize:
                    stack_penalized.setdefault(mod_operator, []).append(
                        mod_value)
                else:
                    stack.setdefault(mod_operator, []).append(mod_value)
            elif mod_aggregate_mode == ModAggregateMode.minimum:
                aggregate_min.setdefault(
                    (mod_operator, mod_aggregate_key), []).append(
                    (mod_value, penalize))
            elif mod_aggregate_mode == ModAggregateMode.maximum:
                aggregate_max.setdefault(
                    (mod_operator, mod_aggregate_key), []).append(
                    (mod_value, penalize))
        for container, aggregate_func, sort_func in (
                (aggregate_min, min, lambda i: (i[0], i[1])),
                (aggregate_max, max, lambda i: (i[0], not i[1]))):
            for k, v in container.items():
                mod_operator = k[0]
                mod_value, penalize = aggregate_func(v, key=sort_func)
                if penalize:
                    stack_penalized.setdefault(mod_operator, []).append(
                        mod_value)
                else:
                    stack.setdefault(mod_operator, []).append(mod_value)
        # When data gathering is complete, process penalized modifications.
        # They are penalized on per-operator basis
        for mod_operator, mod_values in stack_penalized.items():
            penalized_value = self.__penalize_values(mod_values)
            stack.setdefault(mod_operator, []).append(penalized_value)
        # Calculate value of non-penalized modifications, according to
        # operator order
        for mod_operator in sorted(stack):
            mod_values = stack[mod_operator]
            # Pick best modification for assignments, based on high_is_good
            # value
            if mod_operator in ASSIGNMENT_OPERATORS:
                if attr.high_is_good:
                    value = max(mod_values)
                else:
                    value = min(mod_values)
            elif mod_operator in ADDITION_OPERATORS:
                for mod_value in mod_values:
                    value += mod_value
            elif mod_operator in MULTIPLICATION_OPERATORS:
                for mod_value in mod_values:
                    value *= 1 + mod_value
        # If attribute has an upper cap, do not let its value grow above it
        if attr.max_attr_id is not None:
            try:
                max_value = self[attr.max_attr_id]
            # If max value isn't available, don't cap anything
            except KeyError:
                pass
            else:
                value = min(value, max_value)
                # Let map know that capping attribute restricts current
                # attribute
                self._cap_set(attr.max_attr_id, attr_id)
        # Some attributes are rounded for whatever reason, deal with it after
        # all the calculations
        if attr_id in LIMITED_PRECISION_ATTR_IDS:
            value = round(value, 2)
        return value

    def __penalize_values(self, mod_values):
        """Calculate aggregated reduced multiplier.

        Assuming all multipliers received should be stacking penalized, and
        that they are normalized to reduced multiplier form, calculate final
        reduced multiplier.

        Args:
            mod_values: Iterable with reduced multipliers.

        Returns:
            Final aggregated reduced multiplier.
        """
        # Gather positive multipliers into one chain, negative into another
        chain_positive = []
        chain_negative = []
        for mod_value in mod_values:
            if mod_value >= 0:
                chain_positive.append(mod_value)
            else:
                chain_negative.append(mod_value)
        # Strongest modifications always go first
        chain_positive.sort(reverse=True)
        chain_negative.sort()
        # Base final multiplier on 1
        value = 1
        for penalization_chain in (chain_positive, chain_negative):
            # Same for intermediate per-chain value
            chain_value = 1
            for pos, mod_value in enumerate(penalization_chain):
                # Ignore 12th modification and further as non-significant
                if pos > 10:
                    break
                # Apply stacking penalty based on modification position
                chain_value *= 1 + mod_value * PENALTY_BASE ** (pos ** 2)
            value *= chain_value
        return value - 1

    # Override-related methods
    @property
    def _override_callbacks(self):
        return self.__override_callbacks or {}

    def _set_override_callback(self, attr_id, callback):
        """Set override for the attribute in the form of callback."""
        if self.__override_callbacks is None:
            self.__override_callbacks = {}
        # If the same callback is set, do nothing
        if self.__override_callbacks.get(attr_id) == callback:
            return
        self.__override_callbacks[attr_id] = callback
        # Exposed attribute value may change after setting/resetting override
        self.__publish(AttrsValueChanged({self.__item: {attr_id}}))

    def _del_override_callback(self, attr_id):
        """Remove override callback from attribute."""
        overrides = self.__override_callbacks or {}
        if attr_id not in overrides:
            return
        del overrides[attr_id]
        # Set overrides map to None if there are none left to save some memory
        if not overrides:
            self.__override_callbacks = None
        # Exposed attribute value may change after removing override
        self.__publish(AttrsValueChanged({self.__item: {attr_id}}))

    def _override_value_may_change(self, attr_id):
        """Notify everyone that callback value may change.

        When originator of callback knows that callback return value may (or
        will) change for an attribute, it should invoke this method.
        """
        self.__publish(AttrsValueChanged({self.__item: {attr_id}}))

    def _get_without_overrides(self, attr_id, default=None):
        """Get attribute value without using overrides."""
        # Partially borrowed from get() method
        try:
            value = self.__modified_attrs[attr_id]
        except KeyError:
            try:
                value = self.__calculate(attr_id)
            except CALCULATE_RAISABLE_EXCEPTIONS:
                return default
            else:
                self.__modified_attrs[attr_id] = value
        return value

    # Cap-related methods
    @property
    def _cap_map(self):
        """Returns map which defines value caps.

        It includes attributes which cap something, and attributes being
        capped by them.
        """
        # Format: {capping attribute ID: {capped attribute IDs}}
        return self.__cap_map or {}

    def _cap_set(self, capping_attr_id, capped_attr_id):
        if self.__cap_map is None:
            self.__cap_map = KeyedStorage()
        self.__cap_map.add_data_entry(capping_attr_id, capped_attr_id)

    def _cap_del(self, capping_attr_id, capped_attr_id):
        self.__cap_map.rm_data_entry(capping_attr_id, capped_attr_id)
        if not self.__cap_map:
            self.__cap_map = None

    # Auxiliary methods
    def __publish(self, msg):
        try:
            publish_func = self.__item._fit._publish
        except AttributeError:
            pass
        else:
            publish_func(msg)
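# A standalone sketch of the stacking penalty math used by __penalize_values
# above, for a single same-sign chain. The exact PENALTY_BASE constant lives
# elsewhere in the package; the value below (exp(-(1 / 2.67) ** 2), roughly
# 0.869) is the commonly cited one and is an assumption here.
import math

PENALTY_BASE_SKETCH = math.exp(-(1 / 2.67) ** 2)  # assumed value

def penalize_sketch(reduced_multipliers):
    """Combine same-sign reduced multipliers with stacking penalties."""
    # Strongest modifications first, only the first 11 positions matter
    chain = sorted(reduced_multipliers, key=abs, reverse=True)
    value = 1
    for pos, mod_value in enumerate(chain[:11]):
        value *= 1 + mod_value * PENALTY_BASE_SKETCH ** (pos ** 2)
    return value - 1

# e.g. three +10% bonuses: penalize_sketch([0.1, 0.1, 0.1]) is roughly 0.264
# rather than the unpenalized 0.331, since the 2nd and 3rd bonuses are
# attenuated by the penalty factor.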