def Handle_Thread_Finished(self, return_value):
    '''
    Clean up after a Run Script thread has finished.
    '''
    super().Handle_Thread_Finished()
    # Turn the button back on.
    self.window.action_Run_Script.setEnabled(True)

    # When done, restore Settings back to the gui values, in case
    # the script temporarily modified them.
    #-Removed; signals handle this.
    #self.window.Store_Settings()

    # Close any transform log that might be open, to flush it
    # out and also reset it for a later run.
    Plugin_Log.Close()

    # TODO: detect errors in the script and note them; for now, the
    # thread or framework will tend to print them out.
    self.window.Print('Script run completed')

    # Tell any live edit tables to refresh their current values,
    # since the script may have changed them.
    Live_Editor.Reset_Current_Item_Values()
    #-Removed; signals handle this.
    #self.window.Soft_Refresh()

    # Send out some signalling flags.
    self.window.Send_Signal('script_completed',
                            'files_modified',
                            'files_loaded')
    return

def Print_Gate_Distances(title):
    lines = [f'{title} sector gate distances:']
    distances = [x.Get_Gate_Distance()
                 for x in galaxy.class_macros['sectors'].values()]
    lines.append(', '.join([str(int(x)) for x in sorted(distances)]))
    # Pick out those of significant separation, eg. over 50 km.
    distances_over_50 = [x for x in distances if x > 50000]
    # Guard against an empty list, to avoid a divide-by-zero.
    if distances_over_50:
        lines.append(f'Average (>50km): {sum(distances_over_50) / len(distances_over_50)}')
    Plugin_Log.Print('\n'.join(lines))
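
# A minimal standalone sketch of the summary math used above, on plain
# distance values in meters. The helper name and sample numbers are
# hypothetical, for illustration only.
def _example_summarize_gate_distances():
    distances = [12000.0, 48000.0, 75000.0, 260000.0]
    # Distances over 50 km count as significant separation.
    significant = [x for x in distances if x > 50000]
    average = sum(significant) / len(significant) if significant else 0
    return sorted(int(x) for x in distances), average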

def Safe_Update_MD(xml_root, xpath, attr, old_text, new_text):
    'Helper function for editing md nodes.'
    # Note: add some safety in case the lookups fail.
    nodes = xml_root.xpath(xpath)
    if not nodes:
        msg = ('Scale_Sector_Size failed to find a target MD script node;'
               ' skipping this node.')
        Plugin_Log.Print(msg)
        Print(msg)
    else:
        nodes[0].set(attr, nodes[0].get(attr).replace(old_text, new_text))
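
# A hedged usage sketch for Safe_Update_MD. The xpath, attribute, and text
# values below are hypothetical placeholders, not taken from an actual md
# script file.
def _example_safe_update_md(xml_root):
    Safe_Update_MD(
        xml_root,
        xpath    = './/cue[@name="Example_Cue"]/actions/set_value',
        attr     = 'exact',
        old_text = '50km',
        new_text = '25km',
        )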

def Rebalance_Engines(
        race_speed_mults = {
            'argon'   : {'thrust' : 1,    'boost' : 1,    'boost_time' : 1,   'travel' : 1    },
            'paranid' : {'thrust' : 1.03, 'boost' : 1.03, 'boost_time' : 1.2, 'travel' : 0.90 },
            'split'   : {'thrust' : 1.35, 'boost' : 1.08, 'boost_time' : 1.2, 'travel' : 0.843},
            'teladi'  : {'thrust' : 0.97, 'boost' : 0.97, 'boost_time' : 1,   'travel' : 0.97 },
            },
        purpose_speed_mults = {
            'allround' : {'thrust' : 1,    'boost' : 1,    'boost_time' : 1,    'travel' : 1    },
            'combat'   : {'thrust' : 1.05, 'boost' : 1.05, 'boost_time' : 1.43, 'travel' : 0.933},
            'travel'   : {'thrust' : 1,    'boost' : 0.75, 'boost_time' : 0.57, 'travel' : 1.33 },
            },
        adjust_cargo = False,
    ):
    '''
    Rebalances engine speed related properties across purposes and maker
    races. Race balance is set relative to argon engines of a corresponding
    size, purpose, and mark 1; higher marks receive the same scaling as their
    mark 1 counterpart. Purpose balance is set relative to allround engines
    of a corresponding size and mark.

    * race_speed_mults
      - Dict, keyed by race name, with relative multipliers for engine
        speed properties: 'thrust', 'boost', 'boost_time', 'travel'.
      - Relative to corresponding argon engines.
      - Set to None to disable race rebalancing.
      - Defaults tuned to vanilla medium mark 1 combat engines, and will
        nearly reproduce the vanilla medium engine values (with
        discrepancies for other sizes):
        ```
        race_speed_mults = {
            'argon'   : {'thrust' : 1,    'boost' : 1,    'boost_time' : 1,   'travel' : 1    },
            'paranid' : {'thrust' : 1.03, 'boost' : 1.03, 'boost_time' : 1.2, 'travel' : 0.90 },
            'split'   : {'thrust' : 1.35, 'boost' : 1.08, 'boost_time' : 1.2, 'travel' : 0.843},
            'teladi'  : {'thrust' : 0.97, 'boost' : 0.97, 'boost_time' : 1,   'travel' : 0.97 },
            }
        ```
    * purpose_speed_mults
      - Dict, keyed by engine purpose name, with relative multipliers for
        engine speed properties: 'thrust', 'boost', 'boost_time', 'travel'.
      - Purposes are 'combat', 'allround', and 'travel'.
      - Set to None to disable purpose rebalancing.
      - Defaults tuned to vanilla medium mark 1 argon engines, and will
        nearly reproduce the vanilla medium engine values (with
        discrepancies for other sizes):
        ```
        purpose_speed_mults = {
            'allround' : {'thrust' : 1,    'boost' : 1,    'boost_time' : 1,    'travel' : 1    },
            'combat'   : {'thrust' : 1.05, 'boost' : 1.05, 'boost_time' : 1.43, 'travel' : 0.933},
            'travel'   : {'thrust' : 1,    'boost' : 0.75, 'boost_time' : 0.57, 'travel' : 1.33 },
            }
        ```
    * adjust_cargo
      - Bool, if True then trader and miner ship cargo bays will be adjusted
        in inverse of the ship's travel thrust change, to maintain roughly
        the same transport of cargo/time.
      - Assumes trade ships spend 50% of their time in travel mode, and
        mining ships spend 10%.
      - Defaults False.
      - May cause oddities when applied to an existing save.
    '''
    '''
    Notes:
    mk2 engines cost 5x more than mk1, with 21% more speed.
    mk3 engines cost 5x more than mk2, with 11% more speed.
    mk3 engines cost 25x more than mk1, with 34% more speed.

    Medium combat mk1, thrust / boost / travel / boost duration (as % vs arg):
        arg: 1052 / 8   / 8  / 10  ( 1    / 1    / 1     / 1  )
        kha: 1400 / 4   / 7  / 6   ( 1.33 / 0.66 / 1.16  / 0.6)
        par: 1084 / 8   / 7  / 12  ( 1.03 / 1.03 / 0.90  / 1.2)
        spl: 1420 / 6.4 / 5  / 12  ( 1.35 / 1.08 / 0.843 / 1.2)
        tel: 1021 / 8   / 8  / 10  ( 0.97 / 0.97 / 0.97  / 1  )
        xen: 1286 / 8   / 8  / 8   ( 1.22 / 1.22 / 1.22  / 0.8)
    (Prices are about the same, 16k all but 15k split.)

    Note: in practice, the above is pretty accurate across engines with the
    exception of split travel drives, which are significantly slower in
    l and xl (0.52), and slightly slower in other M and S engines.
    The base and boost speeds are pretty exact.
    TODO: maybe per-size multipliers, to recreate vanilla.

    Note: large/xl has no combat engine, to avoid using combat as center point.

    Medium arg mk1, thrust / boost / travel / boost duration (as % vs allrnd):
        alr: 1002 / 8 / 9  / 7   ( 1    / 1    / 1     / 1    )
        cbt: 1052 / 8 / 8  / 10  ( 1.05 / 1.05 / 0.933 / 1.43 )
        trv: 1002 / 6 / 12 / 4   ( 1    / 0.75 / 1.33  / 0.57 )
    '''
    database = Database()
    engine_macros = database.Get_Macros('engine_*') + database.Get_Macros('generic_engine_*')

    # Match up names of properties to engine get/set methods.
    property_gets = {
        'thrust'     : lambda e: getattr(e, 'Get_Forward_Thrust')(),
        'boost'      : lambda e: getattr(e, 'Get_Boost_Thrust'  )(),
        'travel'     : lambda e: getattr(e, 'Get_Travel_Thrust' )(),
        'boost_time' : lambda e: getattr(e, 'Get_Boost_Time'    )(),
        }
    property_sets = {
        # Forward thrust changes will also rescale reverse thrust.
        'thrust'     : lambda e,v: getattr(e, 'Set_Forward_Thrust_And_Rescale')(v),
        'boost'      : lambda e,v: getattr(e, 'Set_Boost_Thrust' )(v),
        'travel'     : lambda e,v: getattr(e, 'Set_Travel_Thrust')(v),
        'boost_time' : lambda e,v: getattr(e, 'Set_Boost_Time'   )(v),
        }

    # Classify the engines in a heavily nested dict, list at the bottom.
    # Only expect one entry at the bottom list, but mods might add more.
    # The first version is designed for race balancing.
    size_purpose_mark_race_engines = defaultdict(
        lambda: defaultdict(
            lambda: defaultdict(
                lambda: defaultdict(list))))
    # The second version is designed for purpose balancing.
    size_race_mark_purpose_engines = defaultdict(
        lambda: defaultdict(
            lambda: defaultdict(
                lambda: defaultdict(list))))

    for macro in engine_macros:
        mk      = macro.Get_mk()
        race    = macro.Get_makerrace()
        purpose = macro.Get_Purpose()
        size    = macro.Get_Size()
        # Skip if any entry missing.
        if not size or not purpose or not race or not mk:
            continue
        # Store it.
        size_purpose_mark_race_engines[size][purpose][mk][race].append(macro)
        size_race_mark_purpose_engines[size][race][mk][purpose].append(macro)

    # Record originals for debug.
    orig_engine_speeds = defaultdict(dict)

    # Reuse this code between race and purpose rebalance.
    for mode, group_speed_mults, nested_engine_dict in [
            ('race'   , race_speed_mults   , size_purpose_mark_race_engines),
            ('purpose', purpose_speed_mults, size_race_mark_purpose_engines),
        ]:
        # Skip if no adjustments specified.
        if group_speed_mults == None:
            continue

        # Work through each group that will be balanced.
        for size, subdict in nested_engine_dict.items():
            for unused, subsubdict in subdict.items():

                # There is an issue with mk4 engines: there are no references
                # in either case, argon or allround.
                # So, break this into two stages.
                # Stage 1: search through marks until finding a ref (probably
                # mk1), determine scaling multipliers per race/purpose and
                # engine property.
                # Stage 2: loop again, applying the above multipliers.

                # Gather multipliers.
                group_prop_mults = defaultdict(dict)
                for mk, group_engines in sorted(subsubdict.items()):

                    # Pick out the argon/allround engine (or sample the first).
                    base_engines = group_engines['argon' if mode == 'race' else 'allround']
                    # Skip if not found.
                    if not base_engines:
                        continue
                    base_engine = base_engines[0]

                    # Pick out base properties.
                    base_values = {}
                    for prop, get in property_gets.items():
                        base_values[prop] = get(base_engine)

                    # Check the other group engines.
                    for group, speed_mults in group_speed_mults.items():
                        if not group_engines[group]:
                            continue
                        # Sample the first engine.
                        engine = group_engines[group][0]

                        # Go through properties.
                        for prop, get in property_gets.items():
                            # Skip properties without scalings, or just at 1x.
                            if prop in speed_mults and speed_mults[prop] != 1:
                                # Ratio is between actual value and wanted value.
                                value  = get(engine)
                                wanted = base_values[prop] * speed_mults[prop]
                                # Note: value/wanted may be 0, for travel
                                # drives after the Remove_Engine_Travel_Bonus
                                # transform. Specially handle that case.
                                if value == 0:
                                    mult = 0
                                else:
                                    mult = wanted / value
                                group_prop_mults[group][prop] = mult

                # Apply the multipliers.
                for mk, group_engines in subsubdict.items():
                    for group, engines in group_engines.items():
                        prop_mults = group_prop_mults[group]

                        for engine in engines:
                            # Go through properties.
                            for prop, get in property_gets.items():
                                # Skip if no mult was found.
                                if prop not in prop_mults:
                                    continue

                                orig = get(engine)
                                # Record the unmodified value for debug.
                                # Only do this when first seen.
                                if prop not in orig_engine_speeds[engine]:
                                    orig_engine_speeds[engine][prop] = orig

                                mult = prop_mults[prop]
                                new  = orig * mult
                                # Write it back.
                                property_sets[prop](engine, new)

    # Printout of results.
    lines = ['\nRebalance_Engines:']
    for size, subdict in sorted(size_purpose_mark_race_engines.items()):
        for purpose, subsubdict in sorted(subdict.items()):
            for mk, race_engines in sorted(subsubdict.items()):

                # Sort engines by name.
                name_engine_dict = {}
                for engines in race_engines.values():
                    for engine in engines:
                        if engine in orig_engine_speeds:
                            name_engine_dict[engine.Get_Game_Name()] = engine

                for name, engine in sorted(name_engine_dict.items()):
                    line = f'{name:<30} : '
                    not_first = False
                    for prop, get in property_gets.items():
                        # Note: original is not recorded if this value
                        # was not changed.
                        orig = orig_engine_speeds[engine].get(prop)
                        new  = get(engine)
                        if orig == None:
                            # TODO: maybe indicate equivalence below.
                            orig = new
                        line += '{}{:<6}: {:6.0f} -> {:6.0f}'.format(
                            ' , ' if not_first else '',
                            prop,
                            orig,
                            new,
                            )
                        not_first = True
                    lines.append(line)
    Plugin_Log.Print('\n'.join(lines) + '\n')

    # Load all ships, categorize by race and size, check the engine
    # travel speed change (original to new), and rescale the ship cargo
    # for mine/trade to balance it somewhat.
    # TODO: maybe scrap this in favor of a ship transport rate balancer
    # that can run after this and similar transforms.
    if adjust_cargo:
        ship_macros = database.Get_Ship_Macros()

        # Ships don't spend all of their time in travel mode.
        # As a quick estimate, assume traders spend 50%, miners 10%.
        purpose_travel_ratio_dict = {'mine' : 0.1, 'trade' : 0.5}

        # Prep rules to send to Adjust_Ship_Cargo_Capacity.
        cargo_scaling_rules = []

        for ship in ship_macros:
            purpose = ship.Get_Primary_Purpose()
            if purpose not in purpose_travel_ratio_dict:
                continue

            # Look up the engine speeds.
            engine = ship.Select_Engine(engine_macros = engine_macros)
            # If no engine was found, skip; something weird happened.
            if not engine:
                continue
            # If this engine wasn't modified, skip.
            if engine not in orig_engine_speeds:
                continue

            # Add the base and bonus thrusts.
            orig = orig_engine_speeds[engine]['thrust'] + orig_engine_speeds[engine]['travel']
            new  = engine.Get_Forward_Thrust() + engine.Get_Travel_Thrust()
            speed_ratio = new / orig

            # Calc the estimated overall trip time ratio.
            # <1 means the ship takes less time, and should hold less cargo.
            ratio = purpose_travel_ratio_dict[purpose]
            trip_time_ratio = (1 - ratio) + ratio * orig / new

            cargo_scaling_rules.append({
                'match_all'  : [f'name {ship.name}'],
                'multiplier' : trip_time_ratio,
                })

        Adjust_Ship_Cargo_Capacity(*cargo_scaling_rules)

    database.Update_XML()
    return
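
# A hedged usage sketch for Rebalance_Engines: keep the default race
# rebalance, disable the purpose rebalance, and let cargo compensation run.
# Only arguments documented in the signature above are used; no new
# parameters are assumed.
def _example_rebalance_engines():
    Rebalance_Engines(
        purpose_speed_mults = None,
        adjust_cargo = True,
        )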

def Scale_Sector(galaxy, sector, scaling_factor, debug, precision_steps):
    '''
    Scale a single sector to roughly match the scaling factor.
    '''
    # Basic idea:
    # - Collect all objects associated with the sector that will be moved,
    #   eg. zones, resource patches, highways, etc.
    # - Pack these into a standardized class object that includes a sector
    #   position.
    # - Establish rules on how close objects of various types can be
    #   to each other.
    # - Shift objects progressively together in a series of small steps.
    # - When objects reach a min distance to each other, merge them into
    #   a grouped object that can itself be moved further.
    # - When done, unpeel the grouped objects to determine their final
    #   positions, then push those positions back to the original objects
    #   in some way (eg. cluster-level objects will need to know their
    #   final cluster-level position).

    # Attach the scaling factor to the sector. Used in sector Size()
    # calculations to select a minimum.
    sector.Set_Scaling_Factor(scaling_factor)

    # Collect objects.
    objects = []

    # Collect connections in the sector (mostly zones).
    for name, conn in sector.conns.items():

        # Zone highways are a little tricky.
        # The entry/exit points are associated with specific zones, and will
        # be moved naturally later (when zone connection offsets are
        # back-applied to the entry/exit). However, the spline positions
        # are not otherwise represented.
        # The approach here will treat splines as their own objects to be
        # moved around.
        if isinstance(conn.macro, Zone_Highway):
            for i, spline_pos in enumerate(conn.macro.Get_Splines_With_Dummies()):
                # The spline position is relative to the zone center
                # in the sector.
                sector_pos = conn.position + spline_pos
                objects.append( Object(
                    sector_pos = sector_pos,
                    name       = conn.name + f'_spline[{i}]',
                    connection = conn,
                    spline_pos = spline_pos,
                    type       = type(conn.macro),
                    ))
        else:
            # If this is a zone, adjust its sector position to re-center it
            # to be in the middle of the zone objects.
            if isinstance(conn.macro, Zone):
                # Eg. if zone is at x=5 in sector, but internally objects
                # are skewed x=6 further, then this will return x=11 in sector.
                sector_pos = conn.position + conn.macro.Get_Center()
            else:
                sector_pos = conn.position

            objects.append( Object(
                sector_pos = sector_pos,
                name       = conn.name,
                connection = conn,
                type       = type(conn.macro),
                ))

    # Look up the position of this sector in its cluster.
    assert len(sector.parent_conns) == 1
    sector_in_cluster_pos = sector.parent_conns[0].position

    # Collect cluster objects that align with this sector.
    for macro in sector.cluster_connected_macros:

        # Explicitly reject audio regions, else they end up clustering
        # with everything and preventing movement. Also, they have no
        # physical objects to care about.
        # TODO: are there any audio regions that aren't simple spheres?
        if 'audio' in macro.region.name:
            continue

        # These cases have only a cluster position normally, and need
        # a sector position generated.
        cluster_pos = macro.parent_conns[0].position
        # If the cluster object is at x=6, and sector is at x=7, then
        # the cluster object is at x=-1 in sector.
        sector_pos = cluster_pos - sector_in_cluster_pos

        # If this has splines, split those out into separate objects,
        # else pack the thing as-is. These include dummies, to help
        # with locations along the path.
        splinepositions = macro.region.Get_Splines_With_Dummies()
        if splinepositions:
            for i, spline_pos in enumerate(splinepositions):
                # The spline position is relative to the region center
                # in the sector.
                spline_sector_pos = sector_pos + spline_pos
                # As well as relative to the cluster position.
                spline_cluster_pos = cluster_pos + spline_pos
                objects.append( Object(
                    cluster_pos = spline_cluster_pos,
                    sector_pos  = spline_sector_pos,
                    name        = macro.name + f'_spline[{i}]',
                    connection  = macro.parent_conns[0],
                    spline_pos  = spline_pos,
                    type        = type(macro),
                    ))
        else:
            objects.append( Object(
                cluster_pos = cluster_pos,
                sector_pos  = sector_pos,
                name        = macro.name,
                connection  = macro.parent_conns[0],
                type        = type(macro),
                ))

    # Collect md objects.
    for md_object in sector.md_objects:
        objects.append( Object(
            sector_pos = md_object.position,
            name       = md_object.name,
            md_object  = md_object,
            type       = type(md_object),
            ))

    # Collect god objects.
    for god_object in sector.god_objects:
        # Handle based on whether this has a position or not.
        if god_object.position:
            objects.append( Object(
                sector_pos = god_object.position,
                name       = god_object.name,
                god_object = god_object,
                type       = type(god_object),
                ))
        else:
            # It is randomized; scale its rand range.
            god_object.Scale(scaling_factor)

    # Calculate the sector center (may be offset from 0).
    sector_center = sector.Get_Center()

    # Start by adjusting all objects to be centered around 0.
    # -Removed for now; unsure if this would have any benefit, but it
    #  has high risk of detriment, makes it a little harder to debug
    #  movements (comparing start/end), and may lead to more skew in
    #  highways that are otherwise often well centered.
    #for object in objects:
    #    # Eg. if object is at x=10, center is x=4, then new object pos is x=6.
    #    object.sector_pos -= sector_center

    # Determine the sector size limit, based on gate distances.
    # This is used to prevent gates from getting too close, if they
    # weren't already closer.
    sector_size = sector.Get_Size()
    target_sector_size = sector_size * scaling_factor

    # Debug printout.
    if debug:
        lines = [
            '',
            'Sector   : {}'.format(sector.name),
            '  center : {}'.format(sector_center),
            '  size   : {}'.format(sector.Get_Size()),
            '  target : {}'.format(target_sector_size),
            '  initial objects (centered): ',
            ]
        for object in objects:
            lines.append('    ' + str(object))
        Plugin_Log.Print('\n'.join(lines))

    # Put all objects into groups, starting with one per group.
    # Do this after sector center adjustment, to avoid the group
    # sector_pos being off.
    object_groups = [Object_Group([x]) for x in objects]

    # TODO: any special case forced groupings.
    # - Spline endpoints with their highway entry/exit zones (maybe handled
    #   by radius).
    # - Hazard regions with objects inside them.
    # - Superhighway entry/exit pairs (probably handled by radius).
    # TODO: more detailed algorithm that deals with damage regions better.

    # Do a series of progressive steppings toward the scaling_factor.
    # When reducing size, assuming the furthest object is 400 km out and
    # the scaling is 0.25, it will move 300 km, so steppings of 1/300
    # would move only 1 km per step in this case.
    # Alternatively, can do a series of multipliers whose total reaches
    # the scaling_factor:
    #   step_scale ^ num_steps = scaling
    #   step_scale = scaling ^ (1 / num_steps)
    # Note: 100 steps is a little slow to run in debug mode; try something
    # smaller during testing.
    step_scaling = scaling_factor ** (1 / precision_steps)

    for step in range(precision_steps):
        # Start by looking for groups that can/should be merged (since this
        # may occur on the first iteration for objects that are already
        # at or below the min allowed distance).
        if debug:
            Plugin_Log.Print('Starting step {} of {}'.format(step + 1, precision_steps))

        # Each loop may do a merge of two groups, but to allow chain merging,
        # the loops will keep checking until no changes occur.
        groups_to_check = [x for x in object_groups]
        while groups_to_check:
            this_group = groups_to_check.pop(0)

            # Check against all existing groups.
            for other_group in object_groups:
                # Skip self.
                if this_group is other_group:
                    continue

                # TODO: tweak merging rules when expanding the sector,
                # since only highway splines need to be kept together.

                # Are they close enough that they should merge?
                # TODO: force merging of region and object on first pass
                # if they are at the same position.
                if this_group.Should_Merge_With(other_group, target_sector_size, step_scaling):

                    # Prune both original groups out.
                    object_groups.remove(this_group)
                    object_groups.remove(other_group)
                    if other_group in groups_to_check:
                        groups_to_check.remove(other_group)

                    # Add in the merged group.
                    new_group = Object_Group(
                        objects = this_group.objects + other_group.objects)
                    object_groups.append(new_group)
                    # Set the new_group to be checked.
                    groups_to_check.append(new_group)

                    if debug:
                        lines = ['', 'merging: ']
                        for object in this_group.objects:
                            lines.append('  ' + str(object))
                        lines.append('with:')
                        for object in other_group.objects:
                            lines.append('  ' + str(object))
                        lines.append('center: {}'.format(new_group.sector_pos))
                        lines.append('')
                        Plugin_Log.Print('\n'.join(lines))
                    break

        # Increment everything to be closer (apply change).
        for group in object_groups:
            group.Scale_Pos(step_scaling)

    if debug:
        lines = [
            '',
            '  final objects (before decentering/recentering): ',
            ]
        for object in objects:
            lines.append('    {}'.format(object))
        Plugin_Log.Print('\n'.join(lines))

    # De-center the objects.
    for object in objects:
        # Put the sector center offset back, if not keeping a global recenter.
        # -Removed; the initial offset was commented out above.
        #if not galaxy.recenter_sectors:
        #    object.sector_pos += sector_center

        # Put the zone offset back.
        if object.connection and isinstance(object.connection.macro, Zone):
            object.sector_pos -= object.connection.macro.Get_Center()

    # If requested, center objects around 0ish.
    # This may help other scripts that do not account for coreposition to
    # avoid putting things too far out.
    if galaxy.recenter_sectors:
        for object in objects:
            # Eg. if object is at x=10, center is x=4, then new object pos is x=6.
            object.sector_pos -= sector_center

    # Since god has trouble placing zones in tightly packed sectors,
    # when scaling down add some new manual zones to help out.
    # Note: could do this earlier, but it is probably faster to add these
    # post-scaling to avoid them getting position checked a bunch of
    # times in the above loop.
    # Do this before the following code that pushes object positions
    # back to their connections.
    Create_Zones(galaxy, sector, objects, scaling_factor)

    # Push the new sector positions back to the original zones/etc.
    for object in objects:

        # Adjust cluster-level objects.
        if object.cluster_pos:
            # Get the offset.
            offset = object.sector_pos - object.orig_sector_pos
            # This could be the base object (connection in cluster), or
            # a spline of it.
            if object.spline_pos:
                # Apply the offset to the spline_pos.
                object.spline_pos.Update(object.spline_pos + offset)
            else:
                # Apply the offset to the cluster position.
                object.connection.position += offset

        # Adjust highway splines back to their offset position.
        elif object.spline_pos:
            # The connection is the highway center point.
            # Subtract it off to get the spline offset.
            object.spline_pos.Update(object.sector_pos - object.connection.position)

        # Record md object movements.
        elif object.md_object:
            if object.md_object.position is not object.sector_pos:
                object.md_object.position.Update(object.sector_pos)

        # Record god object movements.
        elif object.god_object:
            if object.god_object.position is not object.sector_pos:
                object.god_object.position.Update(object.sector_pos)

        # Everything else should be sector-level connections.
        elif object.connection and object.connection.parent is sector:
            object.connection.position.Update(object.sector_pos)

        # Shouldn't be here.
        else:
            raise Exception()

    if debug:
        lines = [
            '',
            '  final objects: ',
            ]
        for object in objects:
            lines.append('    {}'.format(object))
        Plugin_Log.Print('\n'.join(lines))

    final_size = sector.Get_Size()
    lines = [
        '',
        'final size: {:.0f}'.format(final_size),
        'reduction : {:.0f}%'.format((1 - final_size / sector_size) * 100),
        ]
    Plugin_Log.Print('\n'.join(lines))
    return
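
# A small self-contained sketch of the per-step scaling math used above:
# step_scaling compounds across precision_steps back to the overall scaling
# factor, so each step moves objects only a small fraction of the total.
# The argument values below are arbitrary examples.
def _example_step_scaling(scaling_factor = 0.4, precision_steps = 10):
    step_scaling = scaling_factor ** (1 / precision_steps)
    compounded = step_scaling ** precision_steps
    # compounded differs from scaling_factor only by float rounding error.
    assert abs(compounded - scaling_factor) < 1e-9
    return step_scaling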

def Adjust_Ship_Cargo_Capacity(*scaling_rules):
    '''
    Adjusts the cargo capacities of matching ships. If multiple ships use
    the same storage macro, it is modified by an average of the ship
    multipliers.

    Args are one or more dictionaries with these fields, where matching
    rules are applied in order, with a ship being grouped by the first
    rule it matches:

    * multiplier
      - Float, how much to multiply current cargo capacity by.
    * match_any
      - List of matching rules. Any ship matching any of these is included,
        if not part of match_none.
    * match_all
      - List of matching rules. Any ship matching all of these is included,
        if not part of match_none.
    * match_none
      - List of matching rules. Any ship matching any of these is excluded.
    * cargo_tag
      - Optional, tag name of cargo types to modify.
      - Expected to be one of: 'solid', 'liquid', 'container'.
      - If not given, all cargo types are modified.
    * skip
      - Optional, bool, if True then this group is not edited.
      - Can be used in early matching rules to remove ships from all
        later matching rules.

    Example:
    ```
    Adjust_Ship_Cargo_Capacity(
        {'match_all' : ['purpose mine' ], 'multiplier' : 2  },
        {'match_all' : ['purpose trade'], 'multiplier' : 1.5},
        )
    ```
    '''
    '''
    This one is tricky, since cargo is part of a separate storage macro.
    This macro needs to be looked up for each ship, along with the wanted
    multiplier. Since ships can share a storage macro, the first pass will
    find multipliers associated with each macro, and the second pass will
    resolve conflicts and make changes.
    '''
    # Polish the scaling rules with defaults.
    Fill_Defaults(scaling_rules, {
        'multiplier' : 1,
        'cargo_tag'  : None,
        })

    # Load the ships.
    database = Database()
    ship_macros = database.Get_Ship_Macros()

    # Group the ships according to rules.
    Group_Objects_To_Rules(ship_macros, scaling_rules, Is_Match)

    # Ships in different rules might use the same storage; average
    # them together.
    storage_macro_mults = defaultdict(list)

    # Loop over the rule/groups.
    for rule in scaling_rules:
        if rule['skip'] or rule['multiplier'] == 1:
            continue
        ship_macros = rule['matches']
        multiplier  = rule['multiplier']
        cargo_tag   = rule['cargo_tag']

        # Pass over them, collecting storage units.
        for ship in ship_macros:
            for storage in ship.Get_Storage_Macros():
                # Skip if not of the right type.
                if cargo_tag and cargo_tag not in storage.Get_Tags():
                    continue
                # Record the multiplier.
                storage_macro_mults[storage].append(multiplier)

    lines = ['Storage adjustments:']
    # Rescale them all.
    for storage, mults in storage_macro_mults.items():
        multiplier = sum(mults) / len(mults)
        volume = storage.Get_Volume()
        new_volume = int(volume * multiplier)
        # Round a bit to look nicer in game.
        if new_volume > 10000:
            new_volume = round(new_volume / 100) * 100
        else:
            new_volume = round(new_volume / 10) * 10
        storage.Set_Volume(new_volume)
        lines.append(f'  {storage.name:<45} : {volume:<6} -> {new_volume:<6}')

    if len(lines) > 1:
        Plugin_Log.Print('\n'.join(lines[0:1] + sorted(lines[1:])) + '\n')

    # Apply the xml changes.
    database.Update_XML()
    return
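
# A standalone sketch mirroring the shared-storage handling above: multipliers
# recorded against one storage macro are averaged, then the new volume is
# rounded for nicer in-game display. The volume and multiplier values below
# are made-up examples.
def _example_average_storage_mult(volume = 6400, mults = (2.0, 1.5)):
    multiplier = sum(mults) / len(mults)
    new_volume = int(volume * multiplier)
    # Round to the nearest 100 above 10k volume, else the nearest 10.
    if new_volume > 10000:
        new_volume = round(new_volume / 100) * 100
    else:
        new_volume = round(new_volume / 10) * 10
    return new_volume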

def Rescale_Ship_Speeds(*scaling_rules):
    '''
    Rescales the speeds of different ship classes, centering on the given
    target average speeds. Ships are assumed to be using their fastest race
    engines. Averaged across all ships of the rule match.

    Cargo capacity of traders and miners is adjusted to compensate for
    speed changes, so they move a similar amount of wares. If multiple
    ships use the same cargo macro, it is adjusted by an average of their
    speed adjustments.

    Args are one or more dictionaries with these fields, where matching
    rules are applied in order, with a ship being grouped by the first
    rule it matches:

    * average
      - Float, the new average speed to adjust to.
      - If None, keeps the original average.
    * variation
      - Float, less than 1, how much ship speeds are allowed to differ
        from the average relative to the average.
      - If None, keeps the original variation.
      - If original variation is less than this, it will not be changed.
      - Only applies strictly to 90% of ships; up to 10% are treated as
        outliers, and will have their speed scaled similarly but will be
        outside this band.
      - Eg. 0.5 means 90% of ships will be within +/- 50% of their group
        average speed.
    * match_any
      - List of matching rules. Any ship matching any of these is included,
        if not part of match_none.
    * match_all
      - List of matching rules. Any ship matching all of these is included,
        if not part of match_none.
    * match_none
      - List of matching rules. Any ship matching any of these is excluded.
    * use_arg_engine
      - Bool, if True then Argon engines will be assumed for all ships
        instead of their faction engines.
    * use_split_engine
      - Bool, if True then Split engines will be assumed.
      - This will tend to give high estimates for ship speeds, eg. mk4
        engines.
    * skip
      - Optional, bool, if True then this group is not edited.
      - Can be used in early matching rules to remove ships from all
        later matching rules.

    Example:
    ```
    Rescale_Ship_Speeds(
        {'match_any' : ['name ship_spl_xl_battleship_01_a_macro'], 'skip' : True},
        {'match_all' : ['type scout'   ], 'average' : 500, 'variation' : 0.2},
        {'match_all' : ['class ship_s' ], 'average' : 400, 'variation' : 0.5},
        {'match_all' : ['class ship_m' ], 'average' : 300, 'variation' : 0.5},
        {'match_all' : ['class ship_l' ], 'average' : 200, 'variation' : 0.5},
        {'match_all' : ['class ship_xl'], 'average' : 150, 'variation' : 0.5})
    ```
    '''
    '''
    Note:
    Vanilla ship speeds have these approximate averages (race engines):
    xs: 130 (58 to 152)
    s : 328 (71 to 612)
    m : 319 (75 to 998)
    l : 146 (46 to 417)
    xl: 102 (55 to 164)

    Split ships exaggerate the high end, eg. alligator(gas) hitting 998.
    The xs includes boarding pods (29 speed), drones, mass traffic.

    TODO: option to also bring down variance? Important to keep the ships
    in tighter bands by class.
    '''
    # Polish the scaling rules with defaults.
    Fill_Defaults(scaling_rules, {
        'average'          : None,
        'variation'        : None,
        'use_arg_engine'   : False,
        'use_split_engine' : False,
        })

    database = Database()
    ship_macros = database.Get_Ship_Macros()
    engine_macros = database.Get_Macros('engine_*') + database.Get_Macros(
        'generic_engine_*')

    # Remove mk4 engines, since they throw things off a bit.
    engine_macros = [x for x in engine_macros if x.Get_mk() != '4']

    # Group the ships according to rules.
    Group_Objects_To_Rules(ship_macros, scaling_rules, Is_Match)

    # Gather speed mult factors for all ships, to be used later to adjust cargo.
    ship_mults = {}

    # Loop over the rule/groups.
    for rule in scaling_rules:
        if rule['skip']:
            continue

        # Unpack the fields for convenience.
        ship_macros      = rule['matches']
        average          = rule['average']
        variation        = rule['variation']
        use_arg_engine   = rule['use_arg_engine']
        use_split_engine = rule['use_split_engine']

        # If neither average nor variation is given, this rule has nothing
        # to do; move on to the next rule (a plain return here would also
        # skip later rules and the final xml update).
        if average == None and variation == None:
            continue

        # If there were no matches, skip.
        if not ship_macros:
            continue

        # Select an engine for every ship, to use in speed estimation.
        for ship in ship_macros:
            ship.Select_Engine(
                engine_macros = engine_macros,
                owner = ('argon' if use_arg_engine
                         else 'split' if use_split_engine
                         else None),
                )
        # TODO: maybe filter out 0 speed ships here, instead of below.

        # Collect ship speeds into a dict.
        ship_orig_speeds = {}
        for ship in ship_macros:
            ship_orig_speeds[ship] = ship.Get_Speed()
        orig_speeds = [x for x in ship_orig_speeds.values() if x > 0]
        orig_avg = sum(orig_speeds) / len(orig_speeds)

        # Figure out adjustment based on original and wanted averages.
        if average != None:
            # Collect speeds of the ships. Ignore 0s for later stats.
            # Reuse stored values above.
            ratio = average / orig_avg
            # Apply this ratio to all individual ships.
            for ship in ship_macros:
                ship.Adjust_Speed(ratio)

        # Variation adjustment will be per-ship.
        if variation != None:
            # Gather the speeds of each ship (maybe with above average change).
            ship_speed_dict = {}
            for ship in ship_macros:
                ship_speed_dict[ship] = ship.Get_Speed()

            # Compute current average, again tossing 0s.
            speeds = [x for x in ship_speed_dict.values() if x > 0]
            current_avg = sum(speeds) / len(speeds)

            # Gather the differences in ship speeds from average, per ship,
            # ignoring those with 0 speed.
            ship_delta_dict = {}
            for ship in ship_macros:
                if ship_speed_dict[ship] != 0:
                    ship_delta_dict[ship] = abs(ship_speed_dict[ship] - current_avg)

            # Sort the ships by deltas.
            sorted_ships = [
                k for k, v in sorted(ship_delta_dict.items(), key = lambda x: x[1])
                ]

            # Get the ship count that covers 90% of the ships.
            # Note: this may include all ships for small ship counts.
            ship_count_90p = math.ceil(len(sorted_ships) * 0.9)

            # From the above, can pick the ship at the 90% cutoff.
            # Eg. if 10 ships cover 90%, then the 10th is the cutoff (index 9).
            cutoff_ship = sorted_ships[ship_count_90p - 1]
            cutoff_delta = ship_delta_dict[cutoff_ship]

            # If delta is 0, this indicates there is just one ship in the
            # group, or all ships up to 90% have the same speed as the
            # average. Either way, only continue adjustments if there is
            # a non-0 delta.
            if cutoff_delta > 0:

                # Knowing this original delta, can determine the rescaling
                # of deltas that is needed to match the wanted variation.
                ratio = (variation * current_avg) / cutoff_delta

                # Only continue if tightening the range.
                if ratio < 1:

                    # For each ship, rescale its delta, and translate back
                    # into a new ship speed.
                    for ship, orig_delta in ship_delta_dict.items():
                        orig_speed = ship_speed_dict[ship]
                        new_delta = orig_delta * ratio

                        # Handle based on whether this was faster or slower
                        # than average.
                        if orig_speed > current_avg:
                            new_speed = current_avg + new_delta
                        else:
                            # TODO: safety clamp to something reasonable, if
                            # the variation was set too high.
                            new_speed = current_avg - new_delta
                            if new_speed <= 0:
                                raise Exception(
                                    'Variation set too large; ship speed went negative')

                        # Apply back this speed.
                        speed_ratio = new_speed / orig_speed
                        ship.Adjust_Speed(speed_ratio)

        # Report changes.
        # List pairing ship macro to display name.
        # Note: names can be reused.
        ship_name_macro_list = [(x, x.Get_Game_Name()) for x in ship_macros]
        lines = ['\nRescale_Ship_Speeds:']
        new_speeds = []

        # Loop over sorted names.
        for ship, game_name in sorted(ship_name_macro_list, key = lambda x: x[1]):
            orig_speed = ship_orig_speeds[ship]
            new_speed = ship.Get_Speed()
            if new_speed > 0:
                new_speeds.append(new_speed)

            # Record the multiplier.
            # If a ship is in multiple groups, multipliers will stack.
            # (Note: currently don't expect ships to be in multiple groups.)
            if orig_speed > 0:
                if ship not in ship_mults:
                    ship_mults[ship] = 1
                ship_mults[ship] *= new_speed / orig_speed

            lines.append(
                '  {:<65}: {:>3.0f} -> {:>3.0f} ( {:>2.1f}% ) (using {})'.format(
                    f'{game_name} ({ship.name})',
                    orig_speed,
                    new_speed,
                    new_speed / orig_speed * 100 if orig_speed > 0 else 0,
                    ship.engine_macro.Get_Game_Name(),
                    ))

        # Also give some overall stats.
        new_average = sum(new_speeds) / len(new_speeds)
        lines.append(
            f'  Orig average: {orig_avg:.0f} ({min(orig_speeds):.0f} to {max(orig_speeds):.0f})')
        lines.append(
            f'  New average: {new_average:.0f} ({min(new_speeds):.0f} to {max(new_speeds):.0f})')
        Plugin_Log.Print('\n'.join(lines) + '\n')

    # Adjust ship storage as well.
    storage_macro_mults = defaultdict(list)
    for ship, speed_mult in ship_mults.items():
        # If a trade or mining ship, adjust cargo.
        purpose = ship.Get_Primary_Purpose()
        if purpose in ['mine', 'trade']:

            # Full adjustment to traders.
            if purpose == 'trade':
                cargo_mult = (1 / speed_mult)
                tags = ['container']
            # Reduced adjustment to miners, since they spend more time
            # collecting rocks or waiting on drones.
            # TODO: maybe just full adjustment as well.
            if purpose == 'mine':
                cargo_mult = (1 / speed_mult)
                cargo_mult = 1 + (cargo_mult - 1) * 0.75
                tags = ['solid', 'liquid']

            for storage in ship.Get_Storage_Macros():
                # Filter unwanted storage types (not expected to catch
                # anything in vanilla).
                storage_tags = storage.Get_Tags()
                if not any(x in storage_tags for x in tags):
                    continue
                # Record the multiplier.
                storage_macro_mults[storage].append(cargo_mult)

    # Adjust cargo bays by average mult.
    # Report adjustments.
    # TODO: reuse Adjust_Ship_Cargo_Capacity somehow.
    # TODO: maybe scrap this in favor of a ship transport rate balancer
    # that can run after this and similar transforms.
    lines = ['Storage adjustments:']
    for storage, mults in storage_macro_mults.items():
        multiplier = sum(mults) / len(mults)
        volume = storage.Get_Volume()
        new_volume = int(volume * multiplier)
        # Round a bit to look nicer in game.
        if new_volume > 10000:
            new_volume = round(new_volume / 100) * 100
        else:
            new_volume = round(new_volume / 10) * 10
        storage.Set_Volume(new_volume)
        lines.append(f'  {storage.name:<45} : {volume:<6} -> {new_volume:<6}')

    if len(lines) > 1:
        Plugin_Log.Print('\n'.join(lines[0:1] + sorted(lines[1:])) + '\n')

    # Apply the xml changes.
    database.Update_XML()
    return
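
# A self-contained sketch of the variation-tightening math above: the delta of
# the 90th-percentile ship from the group average is scaled so it lands at
# (variation * average), and every other delta is scaled by the same ratio.
# The speed values below are arbitrary illustrations, and ships with 0 speed
# are not modeled here.
def _example_tighten_variation(speeds = (100, 200, 300, 400, 500), variation = 0.5):
    average = sum(speeds) / len(speeds)
    deltas = sorted(abs(x - average) for x in speeds)
    # Pick the delta at the 90% cutoff point.
    cutoff_delta = deltas[math.ceil(len(deltas) * 0.9) - 1]
    ratio = (variation * average) / cutoff_delta
    if ratio >= 1:
        # Already within the requested band; no change.
        return list(speeds)
    # Shrink every delta by the same ratio, keeping the same average.
    return [average + (x - average) * ratio for x in speeds]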

def Print_Ship_Speeds(
        use_arg_engine = False,
        use_split_engine = False,
    ):
    '''
    Prints out speeds of various ships, under given engine assumptions,
    to the plugin log.

    * use_arg_engine
      - Bool, if True then Argon engines will be assumed for all ships
        instead of their faction engines.
    * use_split_engine
      - Bool, if True then Split engines will be assumed.
      - This will tend to give high estimates for ship speeds, eg. mk4
        engines.
    '''
    '''
    Note:
    Vanilla ship speeds have these approximate averages (race engines):
    xs: 130 (58 to 152)
    s : 328 (71 to 612)
    m : 319 (75 to 998)
    l : 146 (46 to 417)
    xl: 102 (55 to 164)

    Split ships exaggerate the high end, eg. alligator(gas) hitting 998.
    The xs includes boarding pods (29 speed), drones, mass traffic.
    '''
    database = Database(read_only = True)
    ship_macros = database.Get_Macros('ship_*') + database.Get_Macros(
        'units_*')
    engine_macros = database.Get_Macros('engine_*') + database.Get_Macros(
        'generic_engine_*')

    # Select an engine for every ship, to use in speed estimation.
    for ship in ship_macros:
        ship.Select_Engine(
            engine_macros = engine_macros,
            owner = ('argon' if use_arg_engine
                     else 'split' if use_split_engine
                     else None),
            )

    # Organize ships by class.
    lines = ['Ship speeds: ']
    for ship_class in ['ship_xs', 'ship_s', 'ship_m', 'ship_l', 'ship_xl']:
        lines.append(f'\n {ship_class}:')

        this_dict = {}
        # Fill in speeds.
        speeds = []
        for macro in ship_macros:
            if macro.class_name == ship_class:
                speed = macro.Get_Speed()
                this_dict[macro.Get_Game_Name()] = macro
                # Ignore mismatches and unfinished ships for stats.
                # Ignore the python.
                if speed > 5 and macro.name != 'ship_spl_xl_battleship_01_a_macro':
                    speeds.append(speed)

        # Sort by name.
        lines += [
            f'  {k:<35}:{v.Get_Speed():>4.0f} ({v.name}, {v.engine_macro.name})'
            for k, v in sorted(this_dict.items())
            ]

        # Average of the group.
        lines.append('\n Average: {:.0f} ({:.0f} to {:.0f})'.format(
            sum(speeds) / len(speeds),
            min(speeds),
            max(speeds),
            ))

    Plugin_Log.Print('\n'.join(lines))
    return
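
# A hedged usage sketch: log speeds twice, once per engine assumption, so the
# baseline (argon) and optimistic (split) estimates can be compared in the
# plugin log. Only the parameters documented above are used.
def _example_print_ship_speeds():
    Print_Ship_Speeds(use_arg_engine = True)
    Print_Ship_Speeds(use_split_engine = True)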

def Apply_Live_Editor_Patches(file_name = None):
    '''
    This will apply all patches created by hand through the live editor
    in the GUI.

    This should be called no more than once per script, and currently
    should be called before any other transforms which might read the
    edited values.
    Pending support for running some transforms prior to hand edits.

    * file_name
      - Optional, alternate name of a json file holding the Live_Editor
        generated patches file.
      - Default uses the name in Settings.
    '''
    # Make sure the live editor is up to date with patches.
    # TODO: think about how safe this is, or if it could overwrite
    # meaningful existing state.
    Live_Editor.Load_Patches(file_name)

    # TODO: fork the xml game files at this point, keeping a copy
    # of the pre-patch state, so that live editor pages loaded after
    # this point properly display the xml version from before the hand
    # edits and later transforms.
    # This may need to be done wherever pre-edit transform testing
    # is handled.

    # Work through the patches.
    # To do a cleaner job loading/saving game files, categorize
    # the patches by virtual_path first.
    path_patches_dict = defaultdict(list)
    for patch in Live_Editor.Get_Patches():
        path_patches_dict[patch.virtual_path].append(patch)

    for virtual_path, patch_list in path_patches_dict.items():
        # Note: if patches get out of date, they may end up failing at
        # any of these steps.

        # Load the file.
        game_file = Load_File(virtual_path)
        if game_file == None:
            Plugin_Log.Print(('Warning: Apply_Live_Editor_Patches could'
                              ' not find file "{}"').format(virtual_path))
            continue

        # Modify it in one pass.
        root = game_file.Get_Root()

        for patch in patch_list:
            # Look up the edited node; assume just one xpath match.
            nodes = root.xpath(patch.xpath)
            if not nodes:
                Plugin_Log.Print(('Warning: Apply_Live_Editor_Patches could'
                                  ' not find node "{}" in file "{}"').format(
                                      patch.xpath, virtual_path))
                continue
            node = nodes[0]

            # Either update or remove the attribute.
            # Assume it is safe to delete if the value is an empty string.
            if patch.value == '':
                if patch.attribute in node.keys():
                    node.attrib.pop(patch.attribute)
            else:
                node.set(patch.attribute, patch.value)

        # Put changes back.
        # TODO: maybe delay this until all patches get applied, putting
        # back before returning.
        game_file.Update_Root(root)
    return
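
# A hedged usage sketch: apply hand edits from an alternate patch file before
# any other transforms run, per the docstring above. The file name below is a
# hypothetical example.
def _example_apply_live_editor_patches():
    Apply_Live_Editor_Patches(file_name = 'my_hand_edits.json')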