コード例 #1
0
ファイル: game.py プロジェクト: kb173/srctools
    def get_filesystem(self) -> FileSystemChain:
        """Construct a FileSystemChain covering every search path.

        Directories are mounted raw, and any sequentially numbered
        pakNN_dir.vpk files inside them are mounted as VPKs.  Other
        entries are treated as VPK files directly.
        """
        vpk_paths = []
        folder_paths = []

        for search in self.search_paths:
            if search.is_dir():
                folder_paths.append(search)
                # Collect consecutive pakNN_dir.vpk files, stopping at
                # the first missing index.
                index = 1
                while True:
                    candidate = search / 'pak{:02}_dir.vpk'.format(index)
                    if not candidate.is_file():
                        break
                    vpk_paths.append(candidate)
                    index += 1
                continue

            # A bare name is assumed to be a VPK, and we always want the
            # _dir variant which holds the directory tree.
            if not search.suffix:
                search = search.with_suffix('.vpk')
            if not search.name.endswith('_dir.vpk'):
                search = search.with_name(search.name[:-4] + '_dir.vpk')

            if search.is_file() and search.suffix == '.vpk':
                vpk_paths.append(search)

        # VPKs get priority over the raw folders, matching the original order.
        chain = FileSystemChain()
        for vpk in vpk_paths:
            chain.add_sys(VPKFileSystem(vpk))
        for folder in folder_paths:
            chain.add_sys(RawFileSystem(folder))

        return chain
コード例 #2
0
ファイル: game.py プロジェクト: ENDERZOMBI102/BEE-manipulator
    def get_filesystem(self) -> FileSystemChain:
        """Build a chained filesystem from the search paths.

        Directories are mounted raw (plus their pak01_dir.vpk, when
        present); everything else is treated as a VPK file.
        """
        vpk_files = []
        folders = []

        for search in self.search_paths:
            if search.is_dir():
                folders.append(search)
                pak = search / 'pak01_dir.vpk'
                if pak.is_file():
                    vpk_files.append(pak)
                continue

            # A bare name is assumed to be a VPK, and we need the _dir
            # variant which holds the directory tree.
            if not search.suffix:
                search = search.with_suffix('.vpk')
            if not search.name.endswith('_dir.vpk'):
                search = search.with_name(search.name[:-4] + '_dir.vpk')

            if search.is_file() and search.suffix == '.vpk':
                vpk_files.append(search)

        # VPKs are added ahead of the raw folders, preserving priority order.
        chain = FileSystemChain()
        for vpk in vpk_files:
            chain.add_sys(VPKFileSystem(vpk))
        for folder in folders:
            chain.add_sys(RawFileSystem(folder))

        return chain
コード例 #3
0
def main(args: List[str]) -> None:
    """Scan every VMT in a game's filesystem, counting shader parameter
    and material-proxy usage, then print the tallies.

    :param args: Command-line arguments (the game location).
    """
    parser = argparse.ArgumentParser(description=__doc__)

    parser.add_argument(
        "game",
        help="Either location of a gameinfo.txt file, or a game folder.")

    result = parser.parse_args(args)
    try:
        fsys = Game(result.game).get_filesystem()
    except FileNotFoundError:
        # Not a gameinfo location - treat the argument as a plain folder.
        fsys = RawFileSystem(result.game)

    # Shader/proxy -> parameter -> use count
    shader_params = defaultdict(Counter)
    shader_proxies = defaultdict(Counter)

    with fsys:
        for file in fsys.walk_folder('materials/'):
            if not file.path.endswith('.vmt'):
                continue

            print('.', end='', flush=True)
            try:
                with file.open_str() as f:
                    mat = Material.parse(f)
                mat = mat.apply_patches(fsys)
            except Exception:
                # One bad material shouldn't abort the whole scan.
                traceback.print_exc()
                continue

            param_count = shader_params[mat.shader.casefold()]
            for name, _param_type, _value in mat:
                param_count[name.casefold()] += 1

            for prox in mat.proxies:
                proxy_count = shader_proxies[prox.name]
                for prop in prox:
                    proxy_count[prop.name] += 1

    # The two report sections are identical in shape, so share the printer.
    _print_counts('\n\nShaders:', shader_params)
    _print_counts('\n\nProxies:', shader_proxies)


def _print_counts(title: str, table: dict) -> None:
    """Print each name in the table with its sorted parameter use counts."""
    print(title)
    for name in sorted(table):
        print('"{}"\n\t{{'.format(name.title()))
        counts = table[name]
        for param in sorted(counts):
            print('\t{} = {}'.format(param, counts[param]))
        print('\t}')
コード例 #4
0
ファイル: unify_fgd.py プロジェクト: Wii2/HammerAddons
def load_database(dbase: Path, extra_loc: Path = None) -> FGD:
    """Load the entire database from disk.

    Every *.fgd file under dbase is parsed, then any extra file or
    folder of files, before base classes are resolved.
    """
    print('Loading database:')
    fgd = FGD()

    fgd.map_size_min = -16384
    fgd.map_size_max = 16384

    with RawFileSystem(str(dbase)) as fsys:
        for fgd_path in dbase.rglob("*.fgd"):
            rel = str(fgd_path.relative_to(dbase))
            fgd.parse_file(fsys, fsys[rel], eval_bases=False)
            print('.', end='', flush=True)

    if extra_loc is not None:
        print('\nLoading extra file:')
        if extra_loc.is_file():
            # One file.
            with RawFileSystem(str(extra_loc.parent)) as fsys:
                fgd.parse_file(fsys, fsys[extra_loc.name], eval_bases=False)
        else:
            print('\nLoading extra files:')
            with RawFileSystem(str(extra_loc)) as fsys:
                for fgd_path in extra_loc.rglob("*.fgd"):
                    rel = str(fgd_path.relative_to(extra_loc))
                    fgd.parse_file(fsys, fsys[rel], eval_bases=False)
                    print('.', end='', flush=True)
    print()

    # Bases were deferred above, so resolve them all now.
    fgd.apply_bases()
    print('\nDone!')
    return fgd
コード例 #5
0
def parse(path: Path) -> Tuple[
    Config,
    Game,
    FileSystemChain,
    Set[FileSystem],
    Set[Plugin],
]:
    """From some directory, locate and parse the config file.

    This then constructs and customises each object according to config
    options.

    The first srctools.vdf file found in a parent directory is parsed.
    If none can be found, it tries to find the first subfolder of 'common/' and
    writes a default copy there. FileNotFoundError is raised if none can be
    found.

    This returns:
        * The config.
        * Parsed gameinfo.
        * The chain of filesystems.
        * A packing blacklist.
        * A list of plugins.
    """
    conf = Config(OPTIONS)

    # If the path is a folder, add a dummy folder so parents yields it.
    # That way we check for a config in this folder.
    if not path.suffix:
        path /= 'unused'

    # Walk up the directory tree looking for an existing config.
    for folder in path.parents:
        conf_path = folder / CONF_NAME
        if conf_path.exists():
            LOGGER.info('Config path: "{}"', conf_path.absolute())
            with open(conf_path) as f:
                props = Property.parse(f, conf_path)
            conf.path = conf_path
            conf.load(props)
            break
    else:
        LOGGER.warning('Cannot find a valid config file!')
        # Apply all the defaults.
        conf.load(Property(None, []))

        # Try to write out a default file in the game folder.
        for folder in path.parents:
            if folder.parent.stem == 'common':
                break
        else:
            # Give up, write to working directory.
            folder = Path()
        conf.path = folder / CONF_NAME

        LOGGER.warning('Writing default to "{}"', conf.path)

    # Re-save, so defaults for new options are written out too.
    with AtomicWriter(str(conf.path)) as f:
        conf.save(f)

    # NOTE: `folder` is bound by whichever for-loop above broke/completed.
    game = Game((folder / conf.get(str, 'gameinfo')).resolve())

    fsys_chain = game.get_filesystem()

    blacklist = set()  # type: Set[FileSystem]

    # If VPK packing is disabled, blacklist every VPK in the chain.
    if not conf.get(bool, 'pack_vpk'):
        for fsys, prefix in fsys_chain.systems:
            if isinstance(fsys, VPKFileSystem):
                blacklist.add(fsys)

    game_root = game.root

    for prop in conf.get(Property, 'searchpaths'):  # type: Property
        if prop.has_children():
            raise ValueError('Config "searchpaths" value cannot have children.')
        assert isinstance(prop.value, str)

        # VPK values get a VPK filesystem, anything else is a raw folder.
        if prop.value.endswith('.vpk'):
            fsys = VPKFileSystem(str((game_root / prop.value).resolve()))
        else:
            fsys = RawFileSystem(str((game_root / prop.value).resolve()))

        # The key name determines how the path participates in packing.
        if prop.name in ('prefix', 'priority'):
            fsys_chain.add_sys(fsys, priority=True)
        elif prop.name == 'nopack':
            blacklist.add(fsys)
        elif prop.name in ('path', 'pack'):
            fsys_chain.add_sys(fsys)
        else:
            raise ValueError(
                'Unknown searchpath '
                'key "{}"!'.format(prop.real_name)
            )

    plugins = set()  # type: Set[Plugin]

    # find all the plugins and make plugin objects out of them
    for prop in conf.get(Property, 'plugins'):  # type: Property
        if prop.has_children():
            raise ValueError('Config "plugins" value cannot have children.')
        assert isinstance(prop.value, str)

        path = (game_root / Path(prop.value)).resolve()
        if prop.name in ("path", "recursive"):
            if not path.is_dir():
                raise ValueError("'{}' is not a directory".format(path))

            # want to recursive glob if key is recursive
            pattern = "*.py" if prop.name == "path" else "**/*.py"

            # find all .py files, make Plugins
            for p in path.glob(pattern):
                # NOTE(review): glob on an absolute base yields absolute
                # paths, so `path / p` resolves to just `p` - the join is
                # redundant but harmless.
                plugins.add(Plugin(path / p))

        elif prop.name == "single":
            plugins.add(Plugin(path))
        else:
            raise ValueError("Unknown plugins key {}".format(prop.real_name))

    return conf, game, fsys_chain, blacklist, plugins
コード例 #6
0
ファイル: vrad.py プロジェクト: Hiden1/BEE2.4
def main(argv: List[str]) -> None:
    """BEE2 VRAD hook entry point.

    Packs BEE2 resources into the BSP's pakfile, runs entity
    transformations, then invokes the real VRAD with either fast or
    full lighting arguments depending on whether this is a PeTI
    preview compile.

    :param argv: The raw VRAD command line (last element is the map path).
    """
    LOGGER.info('BEE2 VRAD hook started!')

    # Joined string form, used only for substring flag checks below.
    args = " ".join(argv)
    fast_args = argv[1:]
    full_args = argv[1:]

    if not fast_args:
        # No arguments!
        LOGGER.info(
            'No arguments!\n'
            "The BEE2 VRAD takes all the regular VRAD's "
            'arguments, with some extra arguments:\n'
            '-force_peti: Force enabling map conversion. \n'
            "-force_hammer: Don't convert the map at all.\n"
            "If not specified, the map name must be \"preview.bsp\" to be "
            "treated as PeTI.")
        sys.exit()

    # The path is the last argument to vrad
    # P2 adds wrong slashes sometimes, so fix that.
    fast_args[-1] = path = os.path.normpath(argv[-1])  # type: str

    LOGGER.info("Map path is " + path)

    load_config()

    # Iterate over a copy, since we mutate fast_args/full_args in the loop.
    for a in fast_args[:]:
        if a.casefold() in (
                "-both",
                "-final",
                "-staticproplighting",
                "-staticproppolys",
                "-textureshadows",
        ):
            # remove final parameters from the modified arguments
            fast_args.remove(a)
        elif a in ('-force_peti', '-force_hammer', '-no_pack'):
            # we need to strip these out, otherwise VRAD will get confused
            fast_args.remove(a)
            full_args.remove(a)

    fast_args = ['-bounce', '2', '-noextra'] + fast_args

    # Fast args: -bounce 2 -noextra -game $gamedir $path\$file
    # Final args: -both -final -staticproplighting -StaticPropPolys
    # -textureshadows  -game $gamedir $path\$file

    if not path.endswith(".bsp"):
        path += ".bsp"

    if not os.path.exists(path):
        raise ValueError('"{}" does not exist!'.format(path))
    if not os.path.isfile(path):
        raise ValueError('"{}" is not a file!'.format(path))

    # If VBSP thinks it's hammer, trust it.
    if CONF.bool('is_hammer', False):
        is_peti = edit_args = False
    else:
        is_peti = True
        # Detect preview via knowing the bsp name. If we are in preview,
        # check the config file to see what was specified there.
        if os.path.basename(path) == "preview.bsp":
            edit_args = not CONF.bool('force_full', False)
        else:
            # publishing - always force full lighting.
            edit_args = False

    # NOTE(review): these are substring tests against the joined command
    # line, not exact argument matches - confirm that is intended.
    if '-force_peti' in args or '-force_hammer' in args:
        # we have override commands!
        if '-force_peti' in args:
            LOGGER.warning('OVERRIDE: Applying cheap lighting!')
            is_peti = edit_args = True
        else:
            LOGGER.warning('OVERRIDE: Preserving args!')
            is_peti = edit_args = False

    LOGGER.info('Final status: is_peti={}, edit_args={}', is_peti, edit_args)

    # Grab the currently mounted filesystems in P2.
    game = find_gameinfo(argv)
    root_folder = game.path.parent
    fsys = game.get_filesystem()

    # Optionally mount Aperture Tag / Mel content for PeTI maps.
    fsys_tag = fsys_mel = None
    if is_peti and 'mel_vpk' in CONF:
        fsys_mel = VPKFileSystem(CONF['mel_vpk'])
        fsys.add_sys(fsys_mel)
    if is_peti and 'tag_dir' in CONF:
        fsys_tag = RawFileSystem(CONF['tag_dir'])
        fsys.add_sys(fsys_tag)

    LOGGER.info('Reading BSP')
    bsp_file = BSP(path)

    bsp_ents = bsp_file.read_ent_data()

    # Work on an in-memory copy of the pakfile lump as a zip.
    zip_data = BytesIO()
    zip_data.write(bsp_file.get_lump(BSP_LUMPS.PAKFILE))
    zipfile = ZipFile(zip_data, mode='a')

    # Mount the existing packfile, so the cubemap files are recognised.
    fsys.systems.append((ZipFileSystem('', zipfile), ''))

    fsys.open_ref()

    LOGGER.info('Done!')

    LOGGER.info('Reading our FGD files...')
    fgd = load_fgd()

    packlist = PackList(fsys)
    packlist.load_soundscript_manifest(
        str(root_folder / 'bin/bee2/sndscript_cache.vdf'))

    # We need to add all soundscripts in scripts/bee2_snd/
    # This way we can pack those, if required.
    for soundscript in fsys.walk_folder('scripts/bee2_snd/'):
        if soundscript.path.endswith('.txt'):
            packlist.load_soundscript(soundscript, always_include=False)

    if is_peti:
        LOGGER.info('Adding special packed files:')
        music_data = CONF.find_key('MusicScript', [])
        if music_data:
            packlist.pack_file('scripts/BEE2_generated_music.txt',
                               PackType.SOUNDSCRIPT,
                               data=generate_music_script(
                                   music_data, packlist))

        for filename, arcname in inject_files():
            LOGGER.info('Injecting "{}" into packfile.', arcname)
            with open(filename, 'rb') as f:
                packlist.pack_file(arcname, data=f.read())

    LOGGER.info('Run transformations...')
    run_transformations(bsp_ents, fsys, packlist)

    LOGGER.info('Scanning map for files to pack:')
    packlist.pack_from_bsp(bsp_file)
    packlist.pack_fgd(bsp_ents, fgd)
    packlist.eval_dependencies()
    LOGGER.info('Done!')

    if is_peti:
        packlist.write_manifest()
    else:
        # Write with the map name, so it loads directly.
        packlist.write_manifest(os.path.basename(path)[:-4])

    # We need to disallow Valve folders.
    pack_whitelist = set()  # type: Set[FileSystem]
    pack_blacklist = set()  # type: Set[FileSystem]
    if is_peti:
        pack_blacklist |= {
            RawFileSystem(root_folder / 'portal2_dlc2'),
            RawFileSystem(root_folder / 'portal2_dlc1'),
            RawFileSystem(root_folder / 'portal2'),
            RawFileSystem(root_folder / 'platform'),
            RawFileSystem(root_folder / 'update'),
        }
        if fsys_mel is not None:
            pack_whitelist.add(fsys_mel)
        if fsys_tag is not None:
            pack_whitelist.add(fsys_tag)

    if '-no_pack' not in args:
        # Cubemap files packed into the map already.
        existing = set(zipfile.infolist())

        LOGGER.info('Writing to BSP...')
        packlist.pack_into_zip(
            zipfile,
            ignore_vpk=True,
            whitelist=pack_whitelist,
            blacklist=pack_blacklist,
        )

        LOGGER.info(
            'Packed files:\n{}', '\n'.join([
                zipinfo.filename for zipinfo in zipfile.infolist()
                if zipinfo.filename not in existing
            ]))

    dump_files(zipfile)

    zipfile.close()  # Finalise the zip modification

    # Copy the zipfile into the BSP file, and adjust the headers.
    bsp_file.lumps[BSP_LUMPS.PAKFILE].data = zip_data.getvalue()
    # Copy new entity data.
    bsp_file.lumps[BSP_LUMPS.ENTITIES].data = BSP.write_ent_data(bsp_ents)

    bsp_file.save()
    LOGGER.info(' - BSP written!')

    if is_peti:
        mod_screenshots()

    if edit_args:
        LOGGER.info("Forcing Cheap Lighting!")
        run_vrad(fast_args)
    else:
        if is_peti:
            LOGGER.info(
                "Publishing - Full lighting enabled! (or forced to do so)")
        else:
            LOGGER.info("Hammer map detected! Not forcing cheap lighting..")
        run_vrad(full_args)

    LOGGER.info("BEE2 VRAD hook finished!")
コード例 #7
0
"""Compile the files in the fgd/ folder into a binary blob."""
from srctools import FGD
from srctools.filesys import RawFileSystem

from lzma import LZMAFile

fgd = FGD()

with RawFileSystem('fgd/') as fs:
    for file in fs:
        fgd.parse_file(fs, file)

with open('srctools/fgd.lzma', 'wb') as f:
    with LZMAFile(f, mode='w') as cf:
        fgd.serialise(cf)
コード例 #8
0
# Type aliases for the Tk widgets which can display an image.
tkImgWidgets = Union[tk.Label, ttk.Label, tk.Button, ttk.Button]
tkImgWidgetsT = TypeVar('tkImgWidgetsT', tk.Label, ttk.Label, tk.Button, ttk.Button)
# Each forward reference must be quoted individually - quoting the whole
# union as one string makes it a single (invalid) forward reference.
WidgetWeakRef = Union['WeakRef[tk.Label]', 'WeakRef[ttk.Label]', 'WeakRef[tk.Button]', 'WeakRef[ttk.Button]']

ArgT = TypeVar('ArgT')

# Used to keep track of the used handles, so we can deduplicate them.
_handles: dict[tuple, Handle] = {}
# Matches widgets to the handle they use.
_wid_tk: dict[WidgetWeakRef, Handle] = {}

# TK images have unique IDs, so preserve discarded image objects.
_unused_tk_img: dict[tuple[int, int], list[tk.PhotoImage]] = {}

LOGGER = srctools.logger.get_logger('img')
FSYS_BUILTIN = RawFileSystem(str(utils.install_path('images')))
PACK_SYSTEMS: dict[str, FileSystem] = {}

# Silence DEBUG messages from Pillow, they don't help.
logging.getLogger('PIL').setLevel(logging.INFO)

# Colour of the palette item background
PETI_ITEM_BG = (229, 232, 233)
# {:02X} zero-pads each channel; {:2X} would space-pad values < 0x10,
# producing an invalid colour string.
PETI_ITEM_BG_HEX = '#{:02X}{:02X}{:02X}'.format(*PETI_ITEM_BG)


def _load_special(path: str) -> Image.Image:
    """Various special images we have to load."""
    img: Image.Image
    try:
        img = Image.open(utils.install_path(f'images/BEE2/{path}.png'))
コード例 #9
0
async def find_packages(nursery: trio.Nursery, pak_dir: Path) -> None:
    """Search a folder for packages, recursing if necessary.

    Valid packages (dirs, zips, bee_packs or VPKs containing an
    info.txt) are registered into the module-level packages mapping;
    subfolders without one are scheduled for their own search on the
    nursery.
    """
    found_pak = False
    try:
        contents = list(pak_dir.iterdir())
    except FileNotFoundError:
        LOGGER.warning('Package search location "{}" does not exist!', pak_dir)
        return

    for name in contents:  # Both files and dirs
        folded = name.stem.casefold()
        # NOTE(review): .stem strips the final suffix, so for a file like
        # 'pak01_000.vpk' this endswith('.vpk') test never matches unless
        # the name has a doubled extension - confirm .name wasn't intended.
        if folded.endswith('.vpk') and not folded.endswith('_dir.vpk'):
            # _000.vpk files, useless without the directory
            continue

        if name.is_dir():
            filesys = RawFileSystem(name)
        else:
            ext = name.suffix.casefold()
            # Archive parsing can block, so push it onto a worker thread.
            if ext in ('.bee_pack', '.zip'):
                filesys = await trio.to_thread.run_sync(ZipFileSystem,
                                                        name,
                                                        cancellable=True)
            elif ext == '.vpk':
                filesys = await trio.to_thread.run_sync(VPKFileSystem,
                                                        name,
                                                        cancellable=True)
            else:
                LOGGER.info('Extra file: {}', name)
                continue

        LOGGER.debug('Reading package "{}"', name)

        # Valid packages must have an info.txt file!
        try:
            info = await trio.to_thread.run_sync(filesys.read_prop,
                                                 'info.txt',
                                                 cancellable=True)
        except FileNotFoundError:
            if name.is_dir():
                # This isn't a package, so check the subfolders too...
                LOGGER.debug('Checking subdir "{}" for packages...', name)
                nursery.start_soon(find_packages, nursery, name)
            else:
                LOGGER.warning('ERROR: package "{}" has no info.txt!', name)
            # Don't continue to parse this "package"
            continue
        pak_id = info['ID']

        # IDs must be globally unique across all search locations.
        if pak_id.casefold() in packages:
            raise ValueError(
                f'Duplicate package with id "{pak_id}"!\n'
                'If you just updated the mod, delete any old files in packages/.'
            ) from None

        PACKAGE_SYS[pak_id.casefold()] = filesys

        packages[pak_id.casefold()] = Package(
            pak_id,
            filesys,
            info,
            name,
        )
        found_pak = True

    if not found_pak:
        LOGGER.info('No packages in folder {}!', pak_dir)
コード例 #10
0
ファイル: find_deps.py プロジェクト: DrMeepster/srctools
def main(args: List[str]) -> None:
    """Main script.

    Loads the given game/folder filesystem, packs the requested file(s)
    (VMF, BSP or generic resources), evaluates their dependencies, then
    prints either the dependency list or the unused files in the
    filtered folders.
    """
    parser = argparse.ArgumentParser(description=__doc__)

    parser.add_argument(
        "-f",
        "--filter",
        help="filter output to only display resources in this subfolder. "
        "This can be used multiple times.",
        type=str.casefold,
        action='append',
        metavar='folder',
        dest='filters',
    )
    parser.add_argument(
        "-u",
        "--unused",
        help="Instead of showing depenencies, show files in the filtered "
        "folders that are unused.",
        action='store_true',
    )
    parser.add_argument(
        "game",
        help="either location of a gameinfo.txt file, or any root folder.",
    )
    parser.add_argument(
        "path",
        help="the files to load. The path can have a single * in the "
        "filename to match files with specific extensions and a prefix.",
    )

    result = parser.parse_args(args)

    if result.unused and not result.filters:
        raise ValueError(
            'At least one filter must be provided in "unused" mode.')

    if result.game:
        try:
            fsys = Game(result.game).get_filesystem()
        except FileNotFoundError:
            # Not a gameinfo location - treat it as a plain folder.
            fsys = FileSystemChain(RawFileSystem(result.game))
    else:
        fsys = FileSystemChain()

    packlist = PackList(fsys)

    file_path: str = result.path
    print('Finding files...')
    with fsys:
        if '*' in file_path:  # Multiple files
            if file_path.count('*') > 1:
                raise ValueError('Multiple * in path!')
            # Split "folder/prefix*suffix" into its three parts.
            prefix, suffix = file_path.split('*')
            folder, prefix = os.path.split(prefix)
            prefix = prefix.casefold()
            suffix = suffix.casefold()
            print(f'Prefix: {prefix!r}, suffix: {suffix!r}')
            print(f'Searching folder {folder}...')

            files = []
            for file in fsys.walk_folder(folder):
                file_path = file.path.casefold()
                if not os.path.basename(file_path).startswith(prefix):
                    continue
                if file_path.endswith(suffix):
                    print(' ' + file.path)
                    files.append(file)
        else:  # Single file
            files = [fsys[file_path]]
        for file in files:
            ext = file.path[-4:].casefold()
            # NOTE(review): `fgd` is not defined in this function - it is
            # presumably a module-level FGD database; confirm at file scope.
            if ext == '.vmf':
                with file.open_str() as f:
                    vmf_props = Property.parse(f)
                    vmf = VMF.parse(vmf_props)
                packlist.pack_fgd(vmf, fgd)
                del vmf, vmf_props  # Hefty, don't want to keep.
            elif ext == '.bsp':
                # BSP parsing needs a real on-disk path, not a VPK entry.
                child_sys = fsys.get_system(file)
                if not isinstance(child_sys, RawFileSystem):
                    raise ValueError('Cannot inspect BSPs in VPKs!')
                bsp = BSP(os.path.join(child_sys.path, file.path))
                packlist.pack_from_bsp(bsp)
                packlist.pack_fgd(bsp.read_ent_data(), fgd)
                del bsp
            else:
                packlist.pack_file(file.path)
        print('Evaluating dependencies...')
        packlist.eval_dependencies()
        print('Done.')

        if result.unused:
            # Report files in the filter folders nothing depends on.
            print('Unused files:')
            used = set(packlist.filenames())
            for folder in result.filters:
                for file in fsys.walk_folder(folder):
                    if file.path.casefold() not in used:
                        print(' ' + file.path)
        else:
            print('Dependencies:')
            for filename in packlist.filenames():
                if not result.filters or any(
                        map(filename.casefold().startswith, result.filters)):
                    print(' ' + filename)
コード例 #11
0
ファイル: config.py プロジェクト: DrMeepster/srctools
def parse(
    path: Path
) -> Tuple[Config, Game, FileSystemChain, Set[FileSystem], PluginFinder, ]:
    """From some directory, locate and parse the config file.

    This then constructs and customises each object according to config
    options.

    The first srctools.vdf file found in a parent directory is parsed.
    If none can be found, it tries to find the first subfolder of 'common/' and
    writes a default copy there. FileNotFoundError is raised if none can be
    found.

    This returns:
        * The config.
        * Parsed gameinfo.
        * The chain of filesystems.
        * A packing blacklist.
        * The plugin loader.
    """
    conf = Config(OPTIONS)

    # If the path is a folder, add a dummy folder so parents yields it.
    # That way we check for a config in this folder.
    if not path.suffix:
        path /= 'unused'

    # Walk up the directory tree looking for an existing config.
    for folder in path.parents:
        conf_path = folder / CONF_NAME
        if conf_path.exists():
            LOGGER.info('Config path: "{}"', conf_path.absolute())
            with open(conf_path) as f:
                props = Property.parse(f, conf_path)
            conf.path = conf_path
            conf.load(props)
            break
    else:
        LOGGER.warning('Cannot find a valid config file!')
        # Apply all the defaults.
        conf.load(Property(None, []))

        # Try to write out a default file in the game folder.
        for folder in path.parents:
            if folder.parent.stem == 'common':
                break
        else:
            # Give up, write to working directory.
            folder = Path()
        conf.path = folder / CONF_NAME

        LOGGER.warning('Writing default to "{}"', conf.path)

    # Re-save, so defaults for new options are written out too.
    with AtomicWriter(str(conf.path)) as f:
        conf.save(f)

    # NOTE: `folder` is bound by whichever for-loop above broke/completed.
    game = Game((folder / conf.get(str, 'gameinfo')).resolve())

    fsys_chain = game.get_filesystem()

    blacklist = set()  # type: Set[FileSystem]

    # If VPK packing is disabled, blacklist every VPK in the chain.
    if not conf.get(bool, 'pack_vpk'):
        for fsys, prefix in fsys_chain.systems:
            if isinstance(fsys, VPKFileSystem):
                blacklist.add(fsys)

    game_root = game.root

    for prop in conf.get(Property, 'searchpaths'):  # type: Property
        if prop.has_children():
            raise ValueError(
                'Config "searchpaths" value cannot have children.')
        assert isinstance(prop.value, str)

        # VPK values get a VPK filesystem, anything else is a raw folder.
        if prop.value.endswith('.vpk'):
            fsys = VPKFileSystem(str((game_root / prop.value).resolve()))
        else:
            fsys = RawFileSystem(str((game_root / prop.value).resolve()))

        # The key name determines how the path participates in packing.
        if prop.name in ('prefix', 'priority'):
            fsys_chain.add_sys(fsys, priority=True)
        elif prop.name == 'nopack':
            blacklist.add(fsys)
        elif prop.name in ('path', 'pack'):
            fsys_chain.add_sys(fsys)
        else:
            raise ValueError('Unknown searchpath '
                             'key "{}"!'.format(prop.real_name))

    # Folder -> source description, merged across config entries.
    sources: Dict[Path, PluginSource] = {}

    builtin_transforms = (Path(sys.argv[0]).parent / 'transforms').resolve()

    # find all the plugins and make plugin objects out of them
    for prop in conf.get(Property, 'plugins'):
        if prop.has_children():
            raise ValueError('Config "plugins" value cannot have children.')
        assert isinstance(prop.value, str)

        path = (game_root / Path(prop.value)).resolve()
        if prop.name in ('path', "recursive", 'folder'):
            if not path.is_dir():
                raise ValueError("'{}' is not a directory".format(path))

            is_recursive = prop.name == "recursive"

            try:
                source = sources[path]
            except KeyError:
                sources[path] = PluginSource(path, is_recursive)
            else:
                if is_recursive and not source.recursive:
                    # Upgrade to recursive.
                    source.recursive = True

        elif prop.name in ('single', 'file'):
            # Single files are grouped under their parent folder's source.
            parent = path.parent
            try:
                source = sources[parent]
            except KeyError:
                source = sources[parent] = PluginSource(parent, False)
            source.autoload_files.add(path)

        elif prop.name == "_builtin_":
            # For development purposes, redirect builtin folder.
            builtin_transforms = path
        else:
            raise ValueError("Unknown plugins key {}".format(prop.real_name))

    for source in sources.values():
        LOGGER.debug('Plugin path: "{}", recursive={}, files={}',
                     source.folder, source.recursive,
                     sorted(source.autoload_files))
    LOGGER.debug('Builtin plugin path is {}', builtin_transforms)
    if builtin_transforms not in sources:
        sources[builtin_transforms] = PluginSource(builtin_transforms, True)

    # Register the finder so plugin modules can be imported.
    plugin_finder = PluginFinder('srctools.bsp_transforms.plugin',
                                 sources.values())
    sys.meta_path.append(plugin_finder)

    return conf, game, fsys_chain, blacklist, plugin_finder
コード例 #12
0
from typing import Iterable, Union, Dict, Tuple

LOGGER = srctools.logger.get_logger('img')

# (filename, width, height) -> cached image.
cached_img = {}  # type: Dict[Tuple[str, int, int], ImageTk.PhotoImage]
# r, g, b, size -> image
cached_squares = {
}  # type: Dict[Union[Tuple[float, float, float, int], Tuple[str, int]], ImageTk.PhotoImage]

# Colour of the palette item background
PETI_ITEM_BG = Vec(229, 232, 233)

filesystem = FileSystemChain(
    # Highest priority is the in-built UI images.
    RawFileSystem(os.path.join(os.getcwd(), '../', 'images')), )


def load_filesystems(systems: Iterable[FileSystem]):
    """Load in the filesystems used in packages.

    Each one is mounted under the resources/BEE2/ prefix.
    """
    for package_sys in systems:
        filesystem.add_sys(package_sys, 'resources/BEE2/')


def tuple_size(size: Union[Tuple[int, int], int]) -> Tuple[int, int]:
    """Return an xy tuple given a size or tuple."""
    return size if isinstance(size, tuple) else (size, size)

コード例 #13
0
import srctools.logger
import logging
import utils

from typing import Iterable, Union, Dict, Tuple

LOGGER = srctools.logger.get_logger('img')

# (filename, width, height) -> cached image.
cached_img = {}  # type: Dict[Tuple[str, int, int], ImageTk.PhotoImage]
# r, g, b, size -> image
cached_squares = {
}  # type: Dict[Union[Tuple[float, float, float, int], Tuple[str, int]], ImageTk.PhotoImage]

filesystem = FileSystemChain(
    # Highest priority is the in-built UI images.
    RawFileSystem(str(utils.install_path('images'))), )

# Silence DEBUG messages from Pillow, they don't help.
logging.getLogger('PIL').setLevel(logging.INFO)


def load_filesystems(systems: Iterable[FileSystem]):
    """Load in the filesystems used in packages.

    Each one is mounted under the resources/BEE2/ prefix.
    """
    for pak_fsys in systems:
        filesystem.add_sys(pak_fsys, 'resources/BEE2/')


def tuple_size(size: Union[Tuple[int, int], int]) -> Tuple[int, int]:
    """Return an xy tuple given a size or tuple."""
    if isinstance(size, tuple):
        return size
コード例 #14
0
ファイル: unify_fgd.py プロジェクト: braem/HammerAddons
def load_database(dbase: Path,
                  extra_loc: Path = None,
                  fgd_vis: bool = False) -> Tuple[FGD, EntityDef]:
    """Load the entire database from disk. This returns the FGD, plus the CBaseEntity definition.

    :param dbase: Folder holding the unified FGD files (searched recursively).
    :param extra_loc: Optional extra FGD file, or folder of FGD files, parsed on top.
    :param fgd_vis: If set, also merge the per-file automatic visgroup definitions.
    :raises ValueError: If the same classname is defined in two files.
    """
    print(f'Loading database {dbase}:')
    fgd = FGD()

    fgd.map_size_min = -16384
    fgd.map_size_max = 16384

    # Classname -> filename, so duplicate errors can report both sources.
    ent_source: Dict[str, str] = {}

    with RawFileSystem(str(dbase)) as fsys:
        for file in dbase.rglob("*.fgd"):
            # Use a temp FGD class, to allow us to verify no overwrites.
            file_fgd = FGD()
            rel_loc = str(file.relative_to(dbase))
            file_fgd.parse_file(
                fsys,
                fsys[rel_loc],
                eval_bases=False,
                encoding='utf8',
            )
            for clsname, ent in file_fgd.entities.items():
                if clsname in fgd.entities:
                    raise ValueError(
                        f'Duplicate "{clsname}" class '
                        f'in {rel_loc} and {ent_source[clsname]}!')
                fgd.entities[clsname] = ent
                ent_source[clsname] = rel_loc

            if fgd_vis:
                for parent, visgroup in file_fgd.auto_visgroups.items():
                    try:
                        existing_group = fgd.auto_visgroups[parent]
                    except KeyError:
                        fgd.auto_visgroups[parent] = visgroup
                    else:  # Need to merge
                        existing_group.ents.update(visgroup.ents)

            fgd.mat_exclusions.update(file_fgd.mat_exclusions)
            for tags, mat_list in file_fgd.tagged_mat_exclusions.items():
                fgd.tagged_mat_exclusions[tags] |= mat_list

            # Progress marker, one dot per parsed file.
            print('.', end='', flush=True)

    load_visgroup_conf(fgd, dbase)

    if extra_loc is not None:
        # BUGFIX: the singular header used to print unconditionally, so a
        # folder printed both "extra file" and "extra files". Print inside
        # the branch instead.
        if extra_loc.is_file():
            # One file.
            print('\nLoading extra file:')
            with RawFileSystem(str(extra_loc.parent)) as fsys:
                fgd.parse_file(
                    fsys,
                    fsys[extra_loc.name],
                    eval_bases=False,
                )
        else:
            print('\nLoading extra files:')
            with RawFileSystem(str(extra_loc)) as fsys:
                for file in extra_loc.rglob("*.fgd"):
                    fgd.parse_file(
                        fsys,
                        fsys[str(file.relative_to(extra_loc))],
                        eval_bases=False,
                    )
                    print('.', end='', flush=True)
    print()

    fgd.apply_bases()
    print('\nDone!')

    print('Entities without visgroups:')
    vis_ents = {
        name.casefold()
        for group in fgd.auto_visgroups.values() for name in group.ents
    }
    vis_count = ent_count = 0
    for ent in fgd:
        # Base ents, worldspawn, or engine-only ents don't need visgroups.
        if ent.type is EntityTypes.BASE or ent.classname == 'worldspawn':
            continue
        applies_to = get_appliesto(ent)
        if '+ENGINE' in applies_to or 'ENGINE' in applies_to:
            continue
        ent_count += 1
        if ent.classname.casefold() not in vis_ents:
            print(ent.classname, end=', ')
        else:
            vis_count += 1
    # Guard the percentage against an empty database (ZeroDivisionError).
    if ent_count:
        print(
            f'\nVisgroup count: {vis_count}/{ent_count} ({vis_count*100/ent_count:.2f}%) done!'
        )
    else:
        print('\nVisgroup count: 0/0 done!')

    try:
        # The shared base entity is pulled out and returned separately.
        base_entity_def = fgd.entities.pop(BASE_ENTITY.casefold())
        base_entity_def.type = EntityTypes.BASE
    except KeyError:
        base_entity_def = EntityDef(EntityTypes.BASE)
    return fgd, base_entity_def
コード例 #15
0
ファイル: template_brush.py プロジェクト: Thedoczek/BEE2.4
def _parse_template(loc: UnparsedTemplate) -> Template:
    """Parse a template VMF.

    The template may live in a loose folder, a .bee_pack/.zip archive,
    or a VPK. Raises ValueError for malformed templates (unknown package
    format, missing/duplicate config entity, mismatched template ID,
    or brushes/overlays assigned to multiple visgroups).
    """
    # Pick a filesystem implementation matching the package's format.
    filesys: FileSystem
    if os.path.isdir(loc.pak_path):
        filesys = RawFileSystem(loc.pak_path)
    else:
        ext = os.path.splitext(loc.pak_path)[1].casefold()
        if ext in ('.bee_pack', '.zip'):
            filesys = ZipFileSystem(loc.pak_path)
        elif ext == '.vpk':
            filesys = VPKFileSystem(loc.pak_path)
        else:
            raise ValueError(f'Unknown filesystem type for "{loc.pak_path}"!')

    with filesys[loc.path].open_str() as f:
        props = Property.parse(f, f'{loc.pak_path}:{loc.path}')
    vmf = srctools.VMF.parse(props, preserve_ids=True)
    del props, filesys, f  # Discard all this data.

    # visgroup -> list of brushes/overlays
    detail_ents: dict[str, list[Solid]] = defaultdict(list)
    world_ents: dict[str, list[Solid]] = defaultdict(list)
    overlay_ents: dict[str, list[Entity]] = defaultdict(list)

    color_pickers: list[ColorPicker] = []
    tile_setters: list[TileSetter] = []
    voxel_setters: list[VoxelSetter] = []

    # Exactly one configuration entity must be present.
    conf_ents = vmf.by_class['bee2_template_conf']
    if len(conf_ents) > 1:
        raise ValueError(
            f'Multiple configuration entities in template "{loc.id}"!')
    elif not conf_ents:
        # BUGFIX: corrected "configration" typo in the error message.
        raise ValueError(f'No configuration entity for template "{loc.id}"!')
    else:
        [conf] = conf_ents

    if conf['template_id'].upper() != loc.id:
        raise ValueError(
            f'Mismatch in template IDs for {conf["template_id"]} and {loc.id}')

    def yield_world_detail() -> Iterator[tuple[list[Solid], bool, set[str]]]:
        """Yield all world/detail solids in the map.

        This also indicates if it's a func_detail, and the visgroup IDs.
        (Those are stored in the ent for detail, and the solid for world.)
        """
        for brush in vmf.brushes:
            yield [brush], False, brush.visgroup_ids
        for detail in vmf.by_class['func_detail']:
            yield detail.solids, True, detail.visgroup_ids

    # The config can force all brushes to world or detail.
    force = conf['temp_type']
    force_is_detail: Optional[bool]
    if force.casefold() == 'detail':
        force_is_detail = True
    elif force.casefold() == 'world':
        force_is_detail = False
    else:
        force_is_detail = None

    visgroup_names = {vis.id: vis.name.casefold() for vis in vmf.vis_tree}
    conf_auto_visgroup = 1 if srctools.conv_bool(
        conf['detail_auto_visgroup']) else 0

    if not srctools.conv_bool(conf['discard_brushes']):
        for brushes, is_detail, vis_ids in yield_world_detail():
            visgroups = list(map(visgroup_names.__getitem__, vis_ids))
            if len(visgroups) > 1:
                raise ValueError('Template "{}" has brush with two '
                                 'visgroups! ({})'.format(
                                     loc.id, ', '.join(visgroups)))
            # No visgroup = ''
            visgroup = visgroups[0] if visgroups else ''

            # Auto-visgroup puts func_detail ents in unique visgroups.
            if is_detail and not visgroup and conf_auto_visgroup:
                visgroup = '__auto_group_{}__'.format(conf_auto_visgroup)
                # Reuse as the unique index, >0 are True too..
                conf_auto_visgroup += 1

            # Check this after auto-visgroup, so world/detail can be used to
            # opt into the grouping, then overridden to be the same.
            if force_is_detail is not None:
                is_detail = force_is_detail

            if is_detail:
                detail_ents[visgroup].extend(brushes)
            else:
                world_ents[visgroup].extend(brushes)

    for ent in vmf.by_class['info_overlay']:
        visgroups = list(map(visgroup_names.__getitem__, ent.visgroup_ids))
        if len(visgroups) > 1:
            raise ValueError('Template "{}" has overlay with two '
                             'visgroups! ({})'.format(loc.id,
                                                      ', '.join(visgroups)))
        # No visgroup = ''
        overlay_ents[visgroups[0] if visgroups else ''].append(ent)

    for ent in vmf.by_class['bee2_template_colorpicker']:
        # Parse the colorpicker data, falling back to safe defaults
        # (with a warning) on malformed keyvalues.
        try:
            priority = Decimal(ent['priority'])
        except ArithmeticError:
            LOGGER.warning(
                'Bad priority for colorpicker in "{}" template!',
                loc.id,
            )
            priority = Decimal(0)

        try:
            remove_after = AfterPickMode(ent['remove_brush', '0'])
        except ValueError:
            LOGGER.warning(
                'Bad remove-brush mode for colorpicker in "{}" template!',
                loc.id,
            )
            remove_after = AfterPickMode.NONE

        color_pickers.append(
            ColorPicker(
                priority=priority,
                name=ent['targetname'],
                visgroups=set(map(visgroup_names.__getitem__,
                                  ent.visgroup_ids)),
                offset=Vec.from_str(ent['origin']),
                # Rotate the +X axis by the entity's angles to get its normal.
                normal=Vec(x=1) @ Angle.from_str(ent['angles']),
                sides=ent['faces'].split(' '),
                grid_snap=srctools.conv_bool(ent['grid_snap']),
                after=remove_after,
                use_pattern=srctools.conv_bool(ent['use_pattern']),
                force_tex_white=ent['tex_white'],
                force_tex_black=ent['tex_black'],
            ))

    for ent in vmf.by_class['bee2_template_voxelsetter']:
        tile_type = TILE_SETTER_SKINS[srctools.conv_int(ent['skin'])]

        voxel_setters.append(
            VoxelSetter(
                offset=Vec.from_str(ent['origin']),
                normal=Vec(z=1) @ Angle.from_str(ent['angles']),
                visgroups=set(map(visgroup_names.__getitem__,
                                  ent.visgroup_ids)),
                tile_type=tile_type,
                force=srctools.conv_bool(ent['force']),
            ))

    for ent in vmf.by_class['bee2_template_tilesetter']:
        tile_type = TILE_SETTER_SKINS[srctools.conv_int(ent['skin'])]
        color = ent['color']
        if color == 'tile':
            try:
                color = tile_type.color
            except ValueError:
                # Non-tile types.
                color = None
        elif color == 'invert':
            color = 'INVERT'
        elif color == 'match':
            color = None
        elif color != 'copy':
            raise ValueError('Invalid TileSetter color '
                             '"{}" for "{}"'.format(color, loc.id))

        tile_setters.append(
            TileSetter(
                offset=Vec.from_str(ent['origin']),
                normal=Vec(z=1) @ Angle.from_str(ent['angles']),
                visgroups=set(map(visgroup_names.__getitem__,
                                  ent.visgroup_ids)),
                color=color,
                tile_type=tile_type,
                picker_name=ent['color_picker'],
                force=srctools.conv_bool(ent['force']),
            ))

    coll: list[CollisionDef] = []
    for ent in vmf.by_class['bee2_collision_bbox']:
        visgroups = set(map(visgroup_names.__getitem__, ent.visgroup_ids))
        for bbox in collisions.BBox.from_ent(ent):
            coll.append(CollisionDef(bbox, visgroups))

    return Template(
        loc.id,
        set(visgroup_names.values()),
        world_ents,
        detail_ents,
        overlay_ents,
        conf['skip_faces'].split(),
        conf['realign_faces'].split(),
        conf['overlay_faces'].split(),
        conf['vertical_faces'].split(),
        color_pickers,
        tile_setters,
        voxel_setters,
        coll,
    )
コード例 #16
0
ファイル: unify_fgd.py プロジェクト: braem/HammerAddons
def action_import(
    dbase: Path,
    engine_tag: str,
    fgd_paths: List[Path],
) -> None:
    """Import an FGD file, adding differences to the unified files.

    dbase is the unified database folder, engine_tag is the tag applied
    to definitions unique to the imported game, and fgd_paths are the
    FGD files to read in.
    """
    new_fgd = FGD()
    print('Using tag "{}"'.format(engine_tag))

    expanded = expand_tags(frozenset({engine_tag}))

    print('Reading {} FGDs:'.format(len(fgd_paths)))
    for path in fgd_paths:
        print(path)
        with RawFileSystem(str(path.parent)) as fsys:
            new_fgd.parse_file(fsys, fsys[path.name], eval_bases=False)

    print('\nImporting {} entiti{}...'.format(
        len(new_fgd),
        "y" if len(new_fgd) == 1 else "ies",
    ))
    for new_ent in new_fgd:
        path = dbase / ent_path(new_ent)
        path.parent.mkdir(parents=True, exist_ok=True)

        if path.exists():
            # Entity already exists in the database - merge the new
            # definition into the existing one.
            old_fgd = FGD()
            with RawFileSystem(str(path.parent)) as fsys:
                old_fgd.parse_file(fsys, fsys[path.name], eval_bases=False)
            try:
                ent = old_fgd[new_ent.classname]
            except KeyError:
                raise ValueError("Classname not present in FGD!")
            # Now merge the two.

            if new_ent.desc not in ent.desc:
                # Temporary, append it.
                ent.desc += '|||' + new_ent.desc

            # Merge helpers. We just combine overall...
            for new_base in new_ent.bases:
                if new_base not in ent.bases:
                    ent.bases.append(new_base)

            for helper in new_ent.helpers:
                # Sorta ew, quadratic search. But helper sizes shouldn't
                # get too big.
                if helper not in ent.helpers:
                    ent.helpers.append(helper)

            # Merge each of the three keyed categories the same way.
            for cat in ('keyvalues', 'inputs', 'outputs'):
                cur_map = getattr(
                    ent, cat
                )  # type: Dict[str, Dict[FrozenSet[str], Union[KeyValues, IODef]]]
                new_map = getattr(new_ent, cat)
                new_names = set()
                for name, tag_map in new_map.items():
                    new_names.add(name)
                    try:
                        orig_tag_map = cur_map[name]
                    except KeyError:
                        # Not present in the old file.
                        cur_map[name] = {
                            add_tag(tag, engine_tag): value
                            for tag, value in tag_map.items()
                        }
                        continue
                    # Otherwise merge, if unequal add the new ones.
                    # TODO: Handle tags in "new" files.
                    for tag, new_value in tag_map.items():
                        for old_tag, old_value in orig_tag_map.items():
                            if old_value == new_value:
                                if tag:
                                    # Already present, modify this tag.
                                    # (del-then-insert during iteration is
                                    # safe only because we break right after.)
                                    del orig_tag_map[old_tag]
                                    orig_tag_map[add_tag(
                                        old_tag, engine_tag)] = new_value
                                # else: Blank tag, keep blank.
                                break
                        else:
                            # Otherwise, we need to add this.
                            orig_tag_map[add_tag(tag, engine_tag)] = new_value

                # Make sure removed items don't apply to the new tag.
                for name, tag_map in cur_map.items():
                    if name not in new_names:
                        cur_map[name] = {
                            add_tag(tag, '!' + engine_tag): value
                            for tag, value in tag_map.items()
                        }

        else:
            # No existing one, just set appliesto.
            ent = new_ent

        applies_to = get_appliesto(ent)
        if not match_tags(expanded, applies_to):
            applies_to.append(engine_tag)
        # Strip any AppliesTo helpers before exporting.
        # NOTE(review): presumably export() regenerates them from
        # applies_to - confirm against the FGD exporter.
        ent.helpers[:] = [
            helper for helper in ent.helpers
            if not isinstance(helper, HelperExtAppliesTo)
        ]

        with open(path, 'w') as f:
            ent.export(f)

        # Progress marker, one dot per entity written.
        print('.', end='', flush=True)
    print()
コード例 #17
0
def find_packages(pak_dir: str) -> None:
    """Search a folder for packages, recursing if necessary.

    Anything with an info.txt (a folder, .bee_pack/.zip, or _dir.vpk) is
    registered in the module-level `packages` and `PACKAGE_SYS` mappings.

    Raises ValueError if two packages share an ID, and propagates the
    lookup error if a package's info.txt lacks an ID key.
    """
    found_pak = False
    for name in os.listdir(pak_dir):  # Both files and dirs
        name = os.path.join(pak_dir, name)
        folded = name.casefold()
        if folded.endswith('.vpk') and not folded.endswith('_dir.vpk'):
            # _000.vpk files, useless without the directory
            continue

        # Pick a filesystem implementation matching the package format.
        if os.path.isdir(name):
            filesys = RawFileSystem(name)
        else:
            ext = os.path.splitext(folded)[1]
            if ext in ('.bee_pack', '.zip'):
                filesys = ZipFileSystem(name)
            elif ext == '.vpk':
                filesys = VPKFileSystem(name)
            else:
                LOGGER.info('Extra file: {}', name)
                continue

        # Use the lazy {}-placeholder style like every other LOGGER call
        # here, instead of eagerly concatenating the message.
        LOGGER.debug('Reading package "{}"', name)

        # Gain a persistent hold on the filesystem's handle.
        # That means we don't need to reopen the zip files constantly.
        filesys.open_ref()

        # Valid packages must have an info.txt file!
        try:
            info = filesys.read_prop('info.txt')
        except FileNotFoundError:
            # Close the ref we've gotten, since it's not in the dict
            # it won't be done by load_packages().
            filesys.close_ref()

            if os.path.isdir(name):
                # This isn't a package, so check the subfolders too...
                LOGGER.debug('Checking subdir "{}" for packages...', name)
                find_packages(name)
            else:
                LOGGER.warning('ERROR: package "{}" has no info.txt!', name)
            # Don't continue to parse this "package"
            continue
        try:
            # NOTE(review): the property lookup appears to raise IndexError
            # (not KeyError) on a missing key - confirm against srctools.
            pak_id = info['ID']
        except IndexError:
            # Close the ref we've gotten, since it's not in the dict
            # it won't be done by load_packages().
            filesys.close_ref()
            raise

        if pak_id in packages:
            raise ValueError(
                f'Duplicate package with id "{pak_id}"!\n'
                'If you just updated the mod, delete any old files in packages/.'
            ) from None

        PACKAGE_SYS[pak_id] = filesys

        packages[pak_id] = Package(
            pak_id,
            filesys,
            info,
            name,
        )
        found_pak = True

    if not found_pak:
        LOGGER.info('No packages in folder {}!', pak_dir)