def initialise(self):
    self.dataCache = findInstalledMods(self.asset_path)
    self.modNameToIds = {data['name'].lower(): data['id'] for data in self.dataCache.values()}

    # Register the official mods from config alongside the installed ones
    official_mods = get_global_config().official_mods
    for modid in official_mods.ids():
        name = official_mods.tag_from_id(modid)
        self.dataCache[modid] = dict(id=modid, name=name, official=True)
        self.modNameToIds[name.lower()] = modid

    return self
def get_separate_mods() -> Iterable[str]:
    '''A list of mods that should be extracted into separate files.'''
    config = get_global_config()
    mods = config.extract_mods if config.extract_mods is not None else config.mods
    extract_mods = sorted(set(mods), key=_mod_sorter)
    return extract_mods
def update_manifest(config=get_global_config()):
    '''Update manifest file in the exports directory.'''
    outdir = config.settings.PublishDir
    manifest = outdir / '_manifest.json'

    logger.info('Updating manifest file')
    generate_manifest(outdir, manifest, ignores=['_manifest.json'])
def create_parser() -> argparse.ArgumentParser:
    parser = argparse.ArgumentParser("automate", description=DESCRIPTION, epilog=EPILOG)

    exclusive = parser.add_mutually_exclusive_group()
    exclusive.add_argument('--live', action='store_true', help='enable live mode [requires git identity]')

    parser.add_argument('--remove-cache', action='store_true', help='remove the (dev only) asset tree cache')
    parser.add_argument('--skip-install', action='store_true', help='skip install/update of game and mods')
    parser.add_argument('--skip-extract', action='store_true', help='skip extracting all data completely')
    parser.add_argument('--skip-commit', action='store_true', help='skip git commit of the output repo (use dry-run mode)')
    parser.add_argument('--skip-pull', action='store_true', help='skip git pull or reset of the output repo')
    parser.add_argument('--skip-push', action='store_true', help='skip git push of the output repo')
    parser.add_argument('--notify', action='store_true', help='enable sending error notifications')
    parser.add_argument('--list-stages', action='store_true', help='display extraction stage options and exit')
    parser.add_argument('--maps', action='store', type=maplist, help='override which maps to export (comma-separated)')
    parser.add_argument('--mods',
                        action=VerifyModsAction,
                        mods=get_global_config().mods,
                        help='override which mods to export (comma-separated)')
    parser.add_argument('sections',
                        action=VerifySectionsAction,
                        default=parse_runlist('all'),
                        roots=ROOT_TYPES,
                        metavar='SECTIONS',
                        nargs='?',
                        help='override extraction sections to be run (format: `all,-root1,-root2.stage1`, default: `all`)')

    return parser
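# Hypothetical usage sketch for the parser above (the flag values are invented
# for illustration; handle_args is defined later in this module family):
def _example_parse_args():
    parser = create_parser()
    args = parser.parse_args(['--skip-install', '--maps', 'TheIsland', 'all,-maps'])
    return handle_args(args)  # applies the parsed flags to the global config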
def _generate_hierarchy(loader: AssetLoader):
    config = get_global_config()

    core_excludes = set(['/Game/Mods/.*', *config.optimisation.SearchIgnore])
    mod_excludes = set(config.optimisation.SearchIgnore)

    # Always load the internal hierarchy
    ue.hierarchy.tree.clear()
    ue.hierarchy.load_internal_hierarchy(Path('config') / 'hierarchy.yaml')

    # Scan /Game, excluding /Game/Mods and any excludes from config
    ue.hierarchy.explore_path('/Game', loader, core_excludes, disable_debug=True)

    # Scan /Game/Mods/<modid> for each of the 'core' (built-in) mods
    for modid in get_official_mods():
        ue.hierarchy.explore_path(f'/Game/Mods/{modid}/', loader, mod_excludes, disable_debug=True)

    # Scan /Game/Mods/<modid> for each installed mod
    for modid in get_managed_mods():
        ue.hierarchy.explore_path(f'/Game/Mods/{modid}/', loader, mod_excludes, disable_debug=True)

    return ue.hierarchy.tree
def discover_vanilla_species(self) -> Iterator[str]:
    config = get_global_config()
    official_modids = set(config.official_mods.ids())
    official_modids -= set(config.settings.SeparateOfficialMods)
    official_mod_prefixes = tuple(f'/Game/Mods/{modid}/' for modid in official_modids)

    for cls_name in ue.hierarchy.find_sub_classes(CHR_CLS):
        assetname = cls_name[:cls_name.rfind('.')]

        # Skip anything in the mods directory that isn't one of the listed official mods
        if assetname.startswith('/Game/Mods') and not any(
                assetname.startswith(prefix) for prefix in official_mod_prefixes):
            continue

        # Do a full check that this is a species asset
        if not is_species(cls_name, self.loader, skip_character_check=True):
            continue

        modid = self.loader.get_mod_id(assetname) or ''
        if get_overrides_for_species(assetname, modid).skip_export:
            continue

        yield assetname
def __init__(self, loader: AssetLoader):
    self.loader = loader
    self.testByRawData = ByRawData(loader)
    self.testByInheriance = ByInheritance(loader)
    self.global_excludes = tuple(set(get_global_config().optimisation.SearchIgnore))
def automate():
    lock = cache.lock(PURLOVIA_LOCK_NAME)
    acquired = lock.acquire(blocking=True, timeout=1)
    if not acquired:
        raise AlreadyRunning()

    try:
        # Make sure config is loaded at runtime
        # pylint: disable=import-outside-toplevel
        from automate.run import run
        import ue.context

        ue.context.disable_metadata()

        config = get_global_config()
        logger.info("DEV mode enabled")
        config.git.UseIdentity = False
        config.git.SkipCommit = True
        config.git.SkipPush = True
        config.errors.SendNotifications = False
        config.dev.DevMode = True

        if DO_SIMPLE_RUN:
            config.run_sections = {"asb.species": True}

        run(config)
    finally:
        lock.release()
def get_equipment_output(self):
    global_config = config.get_global_config()
    equipment_output = global_config['EQUIPMENT_OUTPUT']
    initial_warehouse_size = global_config['INITIAL_WAREHOUSE_SIZE']

    dirname = os.path.dirname(__file__)
    path = os.path.join(dirname, '../../solution/' + equipment_output)
    print('\nGenerating equipment csv output...')

    with open(path, 'w', newline='') as csvfile:
        writer = csv.writer(csvfile)

        # Write final truck list
        for truck in self.truck_list:
            writer.writerow([truck.truck_number, truck.truck_size, truck.purchase_date, truck.truck_size])

        # Write final warehouse list, sorted by warehouse number
        self.path.sort(key=lambda item: int(item.warehouse_number[1:]))
        for warehouse in self.path:
            if warehouse.warehouse_number == 'D1':
                continue
            writer.writerow([
                warehouse.warehouse_number,
                min(initial_warehouse_size, warehouse.size_limit),
                warehouse.purchase_date,
                min(initial_warehouse_size, warehouse.size_limit),
            ])
            # One row per additional capacity purchase
            for i, record in enumerate(warehouse.added_warehouse_size):
                writer.writerow([warehouse.warehouse_number, record, warehouse.additional_purchase_date[i], 0])

    print('Finish {} output generation at {}'.format(equipment_output, path))
def main():
    arkman = ArkSteamManager()
    loader = arkman.getLoader()
    config = get_global_config()

    assetname = sys.argv[1] if len(sys.argv) > 1 else None
    if not assetname:
        print('Usage: python ueexport.py <assetname>')
        sys.exit(1)

    assetname = find_asset(assetname, loader)
    if not assetname:
        print("Not found")
        sys.exit(1)

    asset = loader[assetname]
    # (A bare `assert asset.default_export` here would make this check dead code)
    if not asset.default_export:
        print("Asset has no default export")
        sys.exit(2)

    export = asset.default_export
    data = sanitise_output(export.properties)
    pprint(data)
    save_as_json(data, f'output/{asset.name}.{export.name}.json', pretty=True)
def discover_vanilla_levels(self) -> Iterator[str]:
    config = get_global_config()
    official_modids = set(config.official_mods.ids())
    official_modids -= set(config.settings.SeparateOfficialMods)
    official_mod_prefixes = tuple(f'/Game/Mods/{modid}/' for modid in official_modids)

    all_cls_names = list(ue.hierarchy.find_sub_classes(WORLD_CLS))
    all_cls_names += ue.hierarchy.find_sub_classes(LEVEL_SCRIPT_ACTOR_CLS)

    for cls_name in all_cls_names:
        assetname = cls_name[:cls_name.rfind('.')]

        # Check if this asset is meant to be skipped
        overrides = get_overrides_for_map(assetname, '')
        if overrides.skip_export:
            continue

        # Skip anything in the mods directory that isn't one of the listed official mods
        if assetname.startswith('/Game/Mods') and not any(
                assetname.startswith(prefix) for prefix in official_mod_prefixes):
            continue

        yield assetname
def setup_logger_to_logfile(logger, log_file):
    log_dir = config.get_global_config().log_dir
    mkdir_p(log_dir)
    log_path = os.path.join(log_dir, log_file)
    handler = logging.handlers.RotatingFileHandler(log_path, maxBytes=1024 * 1024, encoding='utf-8')
    _setup_log_handler(logger, handler)
def update_current_inventory(self, timestamp):
    global_config = config.get_global_config()
    demand_growth_period = int(global_config['DEMAND_GROWTH_PERIOD'])

    # Compare which growth period each time falls in (truncated division;
    # plain `/` would only ever match when the two timestamps are equal)
    if int(self.last_loading_time / demand_growth_period) == int(timestamp / demand_growth_period):
        # Both times are within the same 30-day period
        current_demand = self.get_current_demand(self.last_loading_time)
        self.inventory = self.inventory - ((timestamp - self.last_loading_time) * current_demand)
    else:
        # If timestamp is 31.4, the remaining value is 1.4;
        # that remainder should be charged at the grown demand
        remaining_val = timestamp % demand_growth_period
        current_demand = self.get_current_demand(timestamp)
        self.inventory = self.inventory - (remaining_val * current_demand)

        current_demand = self.get_current_demand(self.last_loading_time)
        self.inventory = self.inventory - ((timestamp - remaining_val - self.last_loading_time) * current_demand)

    self.last_loading_time = timestamp

    # If inventory drops below zero, terminate the program
    if self.inventory < 0:
        raise RuntimeError('Warehouse {} inventory dropped below zero: {}'.format(
            self.warehouse_number, self.inventory))

    return self.inventory
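# Worked example of the period-crossing branch above, assuming a 30-day
# DEMAND_GROWTH_PERIOD (hypothetical numbers):
#   last_loading_time = 25.0, timestamp = 31.4  ->  6.4 elapsed days
#   remaining_val = 31.4 % 30 = 1.4 days, charged at the newly grown demand
#   31.4 - 1.4 - 25.0 = 5.0 days, charged at the demand in effect at day 25
# Only the portion past the period boundary pays the higher demand rate.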
def discover_item_assets(progress=False) -> Iterator[str]:
    '''Discover assets that are likely to be item assets.'''
    # `loader` is expected to be available at module level
    name_checker = ark.discovery.ByRawData(loader)

    if progress:
        num_assets = 0
        num_found = 0

    # Collect ignore paths
    search_ignores: List[str] = get_global_config().optimisation.SearchIgnore
    excludes = tuple(search_ignores)

    # Step through all candidate asset files
    for assetname in loader.find_assetnames('.*', exclude=excludes):
        if progress:
            num_assets += 1
            if not (num_assets % 500):
                print(f'Scanned {num_assets}, found {num_found}')
                print(assetname)

        # Is it likely to be an item?
        try:
            if name_checker.is_inventory_item(assetname):
                if progress:
                    num_found += 1
                yield assetname
        except (ModNotFound, AssetNotFound):
            pass

    if progress:
        print(f'Completed: Scanned {num_assets}, found {num_found}')
def __init__(self, arkman: ArkSteamManager, git: GitManager, config=get_global_config()):
    self.config: ConfigFile = config
    self.arkman: ArkSteamManager = arkman
    self.loader: AssetLoader = arkman.getLoader()
    self.git = git
    self.roots: List[ExportRoot] = []
def _generate_hierarchy(loader: AssetLoader):
    config = get_global_config()

    core_excludes = set(['/Game/Mods/.*', *config.optimisation.SearchIgnore])
    mod_excludes = set(config.optimisation.SearchIgnore)

    # Always load the internal hierarchy
    ue.hierarchy.tree.clear()
    ue.hierarchy.load_internal_hierarchy(Path('config') / 'hierarchy.yaml')

    # Scan /Game, excluding /Game/Mods and any excludes from config
    ue.hierarchy.explore_path('/Game', loader, core_excludes, disable_debug=True)

    # Scan /Game/Mods/<modid> for each of the official mods, skipping ones in SeparateOfficialMods
    official_modids = set(config.official_mods.ids())
    official_modids -= set(config.settings.SeparateOfficialMods)
    for modid in official_modids:
        ue.hierarchy.explore_path(f'/Game/Mods/{modid}/', loader, mod_excludes, disable_debug=True)

    # Scan /Game/Mods/<modid> for each configured mod
    for modid in config.mods:
        ue.hierarchy.explore_path(f'/Game/Mods/{modid}/', loader, mod_excludes, disable_debug=True)

    return ue.hierarchy.tree
def handle_exception(logfile: str, loglines=3, config: ConfigFile = get_global_config()):
    if not config.errors.SendNotifications:
        return
    log: List[str] = get_log_tail(logfile, loglines)
    exception: List[str] = format_exc()  # type: ignore
    send_to_discord(log, exception, config.errors.MessageHeader)
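# Hypothetical call-site sketch: handle_exception formats the *current*
# exception via traceback.format_exc, so it is only meaningful inside an
# `except` block. `do_export` and the logfile path are invented placeholders.
def _example_guarded_run():
    try:
        do_export()
    except Exception:  # pylint: disable=broad-except
        handle_exception(logfile='logs/errors.log', loglines=10)
        raise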
def discover_vanilla_species(self) -> Iterator[str]:
    # Scan /Game, excluding /Game/Mods and any excludes from config
    for species in self.loader.find_assetnames('.*', '/Game', exclude=('/Game/Mods/.*', *self.global_excludes)):
        if self._filter_species(species):
            yield species

    # Scan /Game/Mods/<modid> for each of the official mods, skipping ones in SeparateOfficialMods
    config = get_global_config()
    official_modids = set(config.official_mods.ids())
    official_modids -= set(config.settings.SeparateOfficialMods)
    for modid in official_modids:
        for species in self.loader.find_assetnames('.*', f'/Game/Mods/{modid}', exclude=self.global_excludes):
            if self._filter_species(species):
                yield species
def _export_biome_zone_volume(world: WorldData, proxy: BiomeZoneVolume):
    if not get_global_config().export_wiki.ExportBiomeData or proxy.bHidden[0].value:
        return
    data = extract_biome_zone_volume(world, proxy)
    if data:
        world.biomes.append(data)
def find_asset(assetname, loader):
    if not assetname:
        from tkinter import filedialog
        assetname = filedialog.askopenfilename(title='Select asset file...',
                                               filetypes=(('uasset files', '*.uasset'), ("All files", "*.*")),
                                               initialdir=loader.asset_path)
        assert assetname

    # Attempt to work around MingW hijacking /Game as a root path
    if assetname.startswith('//'):
        assetname = assetname[1:]
    if 'MINGW_PREFIX' in os.environ:
        mingw_base = Path(os.environ['MINGW_PREFIX']).parent
        try:
            path = Path(assetname).relative_to(mingw_base)
            assetname = str(PurePosixPath(path))
        except ValueError:
            pass

    # Try it as-is first
    try:
        clean_path = loader.clean_asset_name(assetname)
        asset = loader[clean_path]
        return asset.assetname
    except Exception:  # pylint: disable=broad-except
        pass

    # Try a combination of possible roots
    asset_path_options = (
        Path(assetname),
        Path(assetname).absolute(),
        Path(assetname).resolve(),
    )
    search_paths = (
        '.',
        Path(get_global_config().settings.DataDir / 'game/ShooterGame'),
        loader.asset_path,
        loader.absolute_asset_path,
    )
    for asset_path in asset_path_options:
        for search_path in search_paths:
            clean_path = relative_path(asset_path, search_path)
            if not clean_path:
                continue
            clean_path = clean_path.with_suffix('')
            assetname = str(PurePosixPath(clean_path))
            try:
                asset = loader[assetname]
                return asset.assetname
            except AssetNotFound:
                continue

    print(f'Not found: {assetname}', file=sys.stderr)
    sys.exit(404)
def _export_supply_crate_volume(world: WorldData, proxy: SupplyCrateSpawningVolume):
    if not get_global_config().export_wiki.ExportSupplyCrateData or proxy.bHidden[0].value:
        return
    data = extract_supply_crate_volume(world, proxy)
    if data:
        world.lootCrates.append(data)
def get_managed_mods() -> Iterable[str]:
    '''A list of mods that should be installed and managed.'''
    config = get_global_config()
    official_mods = set(config.official_mods.ids())
    separate_mods = set(config.settings.SeparateOfficialMods)
    extract_mods = set(config.mods) | set(config.extract_mods or ())
    managed_mods = sorted(extract_mods - separate_mods - official_mods, key=_mod_sorter)
    return managed_mods
def get_truck_purchase_cost(self):
    global_config = config.get_global_config()
    simulation_days = int(global_config['SIMULATION_DAYS'])
    days_used = simulation_days - self.purchase_date

    # Purchase price follows a log curve over capacity, prorated by days in service
    truck_purchase_cost = (8350.6 * math.log(self.truck_size) - 14542.5) * days_used / simulation_days
    print('Truck {} purchase cost [ capacity: {}, days used: {} ]: {}'.format(
        self.truck_number, self.truck_size, days_used, truck_purchase_cost))
    return truck_purchase_cost
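# Sketch of the prorated cost formula above with made-up numbers: a truck of
# capacity 40 purchased on day 100 of a 365-day simulation.
#   days_used = 365 - 100 = 265
#   cost = (8350.6 * ln(40) - 14542.5) * 265 / 365  ~= 11806
# The log curve prices capacity, and the final factor charges only for the
# fraction of the simulation the truck is actually in service.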
def get_core_mods() -> Iterable[str]:
    '''A list of mods that should be included in 'core' extraction data.'''
    config = get_global_config()
    official_mods = set(config.official_mods.ids())
    separate_mods = set(config.settings.SeparateOfficialMods)
    extract_mods = set(config.extract_mods if config.extract_mods is not None else config.mods)
    core_mods = sorted(official_mods - separate_mods - extract_mods, key=_mod_sorter)
    return core_mods
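# Illustrative sketch of the set algebra shared by get_separate_mods,
# get_managed_mods and get_core_mods, using invented mod ids:
#   official = {'111111111', '222222222'}   # config.official_mods.ids()
#   separate = {'111111111'}                # settings.SeparateOfficialMods
#   extract  = {'333333333'}                # extract_mods (or mods, if unset)
#   managed  = extract - separate - official   -> {'333333333'}
#   core     = official - separate - extract   -> {'222222222'}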
def test_petscan_timeout(self, mock_get):
    mock_get.side_effect = requests.exceptions.Timeout
    response = json.loads(
        self.app.post('/en/intersection',
                      data=json.dumps({'psid': '123456'}),
                      headers={'Content-Type': 'application/json'}).data)
    self.assertEqual(response['id'], '')
    self.assertEqual(response['page_ids'], [])
    self.assertEqual(response['ttl_days'], config.get_global_config().intersection_expiration_days)
def initialise_hierarchy(arkman: ArkSteamManager, config: ConfigFile = get_global_config()):
    version_key = _gather_version_data(arkman, config)
    loader = arkman.getLoader()
    gen_fn = lambda _: _generate_hierarchy(loader)
    output_path = f'{config.settings.DataDir}/asset_hierarchy'
    data = cache_data(version_key, output_path, gen_fn, force_regenerate=config.dev.ClearHierarchyCache)
    ue.hierarchy.tree = data
def test_petscan_no_articles(self, mock_get):
    mock_response = mock_get()
    mock_response.json.return_value = {'*': [{'a': {'*': []}}]}
    response = json.loads(
        self.app.post('/en/intersection',
                      data=json.dumps({'psid': '123456'}),
                      headers={'Content-Type': 'application/json'}).data)
    self.assertEqual(response['id'], '')
    self.assertEqual(response['page_ids'], [])
    self.assertEqual(response['ttl_days'], config.get_global_config().intersection_expiration_days)
def handle_args(args: Any) -> ConfigFile:
    config = get_global_config()

    # Action selections
    config.run_sections = args.sections

    # If stage list requested, skip everything else
    if args.list_stages:
        config.display_sections = True
        return config

    # Logging can be set up now we know we're not aborting
    setup_logging()

    if args.live:
        logger.info('LIVE mode enabled')
        config.settings.SkipGit = False
        config.git.UseReset = True
        config.git.UseIdentity = True
        config.errors.SendNotifications = True
    else:
        logger.info('DEV mode enabled')
        config.git.UseIdentity = False
        config.git.SkipCommit = True
        config.git.SkipPush = True
        config.errors.SendNotifications = False

    config.dev.DevMode = not args.live

    if args.notify:  # to enable notifications in dev mode
        config.errors.SendNotifications = True

    if args.remove_cache:
        config.dev.ClearHierarchyCache = True

    if args.skip_install:
        config.settings.SkipInstall = True
    if args.skip_extract:
        config.settings.SkipExtract = True

    if args.mods is not None:
        config.extract_mods = args.mods
    if args.maps is not None:
        config.extract_maps = args.maps

    # Git actions
    if args.skip_pull:
        config.git.SkipPull = True
    if args.skip_commit:
        config.git.SkipCommit = True
    if args.skip_push:
        config.git.SkipPush = True

    return config
def load_product_from_truck(self, amount, timestamp):
    global_config = config.get_global_config()
    standard_unit = int(global_config['STANDARD_UNIT'])

    if self.inventory + amount > self.size_limit:
        # Over the hard size limit: the load is rejected outright
        return
    elif self.inventory + amount > self.max_warehouse_size:
        # Over current capacity but under the limit: buy more space first
        self.purchase_additional_warehouse_size(standard_unit, timestamp)
        self.inventory += amount
    else:
        self.inventory += amount
def get_current_demand(self, timestamp):
    global_config = config.get_global_config()
    demand_growth_period = int(global_config['DEMAND_GROWTH_PERIOD'])

    if timestamp < demand_growth_period:
        return self.initial_demand

    current_demand = self.initial_demand
    for _ in range(int(timestamp / demand_growth_period)):
        current_demand = current_demand * (1 + self.demand_growth)
    return current_demand
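# The loop above is plain compound growth; an equivalent closed form
# (standalone sketch, same semantics):
def demand_closed_form(initial_demand, demand_growth, timestamp, period):
    # e.g. initial_demand=100, demand_growth=0.05, period=30, timestamp=65
    # -> 100 * 1.05 ** 2 == 110.25
    return initial_demand * (1 + demand_growth)**int(timestamp / period)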
def setup_logger_to_logfile(logger, logfile):
    log_dir = config.get_global_config().log_dir
    mkdir_p(log_dir)
    log_file = os.path.join(log_dir, logfile)
    log_handler = logging.handlers.RotatingFileHandler(log_file, maxBytes=1024 * 1024, encoding='utf-8')
    log_handler.setFormatter(logging.Formatter(
        '%(asctime)s %(levelname)s: %(message)s [%(pathname)s:%(lineno)d]'))
    log_handler.setLevel(logging.INFO)
    logger.addHandler(log_handler)
    logger.setLevel(logging.INFO)
def _export_npc_zone_manager(world: WorldData, proxy: NPCZoneManager):
    if not get_global_config().export_wiki.ExportSpawnData or not proxy.bEnabled[0].value:
        return

    data = extract_npc_zone_manager(world, proxy)
    if not data:
        return

    world.spawns.append(data)
    if data['spawnGroup'] not in world.spawnGroups:
        world.spawnGroups.append(data['spawnGroup'])
def test_petscan_ok(self, mock_create_intersection, mock_get):
    mock_response = mock_get()
    mock_response.json.return_value = {'*': [{'a': {'*': [{'id': i} for i in range(10)]}}]}
    mock_create_intersection.return_value = (self.inter, range(5))
    response = json.loads(
        self.app.post('/en/intersection',
                      data=json.dumps({'psid': '123456'}),
                      headers={'Content-Type': 'application/json'}).data)
    self.assertEqual(response['id'], self.inter)
    self.assertEqual(response['page_ids'], range(5))
    self.assertEqual(response['ttl_days'], config.get_global_config().intersection_expiration_days)
import os
import urllib
import urlparse
import traceback
import logging.handlers

import flask
import flask_compress

import config
import handlers

# Cache duration for snippets.
# Since each page contains a link to the next one, even when no category is
# selected, we risk users getting trapped circling among their cached pages
# sometimes. We do depend on caching for prefetching to work, but let's do
# it for only a short period to be safe.
# An alternative would be to never cache when no category is selected UNLESS
# when prefetching, but that's a bit more complex.
CACHE_DURATION_SNIPPET = 30

global_config = config.get_global_config()

app = flask.Flask(__name__)
flask_compress.Compress(app)

debug = 'DEBUG' in os.environ

@app.route('/')
@handlers.validate_lang_code
def index(lang_code):
    pass  # nothing to do but validate lang_code

app.add_url_rule('/<lang_code>', view_func=handlers.citation_hunt, strict_slashes=False)
app.add_url_rule('/<lang_code>/stats.html', view_func=handlers.stats)

if 'stats' not in global_config.flagged_off:
    app.after_request(handlers.log_request)
# -*- encoding: utf-8 -*-
from __future__ import unicode_literals

import os
os.environ['DEBUG'] = '1'

# Disable stats since it requires a database, and we're not
# testing it anyway
import config
config.get_global_config().flagged_off.append('stats')

import app

import mock
import requests
import time
import datetime
import json
import unittest

class CitationHuntTest(unittest.TestCase):
    def setUp(self):
        self.app = app.app.test_client()
        self.sid = '93b6f3cf'
        self.cat = 'b5e1a25d'
        self.inter = 'c4a1e27d'
        self.fake_snippet_info = (
            'Some snippet', 'Some section',
            'https://en.wikipedia.org/wiki/A', 'Some title')