Example 1
# Module-level imports assumed by this snippet; srctools ships the
# compressed database as package data.
from importlib.resources import open_binary
import srctools
from srctools.fgd import FGD


def load_fgd() -> FGD:
    """Extract the local copy of FGD data.

    This allows the analysis to run without depending on local files.
    """

    from lzma import LZMAFile
    with LZMAFile(open_binary(srctools, 'fgd.lzma')) as f:
        return FGD.unserialise(f)
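A minimal usage sketch for this version (it assumes srctools is installed; the 'prop_physics' lookup is illustrative, not guaranteed by the snippet):

fgd = load_fgd()
ent = fgd['prop_physics']  # look up an EntityDef by classname
print(ent.classname, 'has', len(ent.keyvalues), 'keyvalues')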
Example 2
# Module-level import assumed by this snippet:
from srctools.fgd import FGD


def load_fgd() -> FGD:
    """Extract the local copy of FGD data.

    This allows the analysis to run without depending on local files.
    """
    import warnings
    warnings.warn(
        'Use FGD.engine_dbase() instead; '
        'this has been moved there.',
        DeprecationWarning,
        stacklevel=2,
    )
    return FGD.engine_dbase()
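This version just forwards to FGD.engine_dbase(), so calls still work but emit a DeprecationWarning. A small sketch using the standard warnings machinery to confirm that:

import warnings

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter('always')  # ensure the warning isn't filtered out
    fgd = load_fgd()

print(caught[0].category is DeprecationWarning)  # True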
Example 3
def action_import(
    dbase: Path,
    engine_tag: str,
    fgd_paths: List[Path],
) -> None:
    """Import an FGD file, adding differences to the unified files."""
    new_fgd = FGD()
    print('Using tag "{}"'.format(engine_tag))

    expanded = expand_tags(frozenset({engine_tag}))

    print('Reading {} FGDs:'.format(len(fgd_paths)))
    for path in fgd_paths:
        print(path)
        with RawFileSystem(str(path.parent)) as fsys:
            new_fgd.parse_file(fsys, fsys[path.name], eval_bases=False)

    print('\nImporting {} entit{}...'.format(
        len(new_fgd),
        "y" if len(new_fgd) == 1 else "ies",
    ))
    for new_ent in new_fgd:
        path = dbase / ent_path(new_ent)
        path.parent.mkdir(parents=True, exist_ok=True)

        if path.exists():
            old_fgd = FGD()
            with RawFileSystem(str(path.parent)) as fsys:
                old_fgd.parse_file(fsys, fsys[path.name], eval_bases=False)
            try:
                ent = old_fgd[new_ent.classname]
            except KeyError:
                raise ValueError(
                    f'Classname "{new_ent.classname}" not present '
                    f'in {path}!'
                ) from None
            # Now merge the two.

            if new_ent.desc not in ent.desc:
                # Temporary, append it.
                ent.desc += '|||' + new_ent.desc

            # Merge helpers. We just combine overall...
            for new_base in new_ent.bases:
                if new_base not in ent.bases:
                    ent.bases.append(new_base)

            for helper in new_ent.helpers:
                # Sorta ew, quadratic search. But helper sizes shouldn't
                # get too big.
                if helper not in ent.helpers:
                    ent.helpers.append(helper)

            for cat in ('keyvalues', 'inputs', 'outputs'):
                cur_map: Dict[
                    str, Dict[FrozenSet[str], Union[KeyValues, IODef]]
                ] = getattr(ent, cat)
                new_map = getattr(new_ent, cat)
                new_names = set()
                for name, tag_map in new_map.items():
                    new_names.add(name)
                    try:
                        orig_tag_map = cur_map[name]
                    except KeyError:
                        # Not present in the old file.
                        cur_map[name] = {
                            add_tag(tag, engine_tag): value
                            for tag, value in tag_map.items()
                        }
                        continue
                    # Otherwise merge: if a matching value exists, extend its
                    # tag; if none match, add the new value.
                    # TODO: Handle tags in "new" files.
                    for tag, new_value in tag_map.items():
                        for old_tag, old_value in orig_tag_map.items():
                            if old_value == new_value:
                                if tag:
                                    # Already present, extend this tag.
                                    # Deleting mid-iteration is safe here,
                                    # since we break out immediately after.
                                    del orig_tag_map[old_tag]
                                    orig_tag_map[add_tag(
                                        old_tag, engine_tag)] = new_value
                                # else: Blank tag, keep blank.
                                break
                        else:
                            # No matching value, add it as new.
                            orig_tag_map[add_tag(tag, engine_tag)] = new_value

                # Make sure removed items don't apply to the new tag.
                for name, tag_map in cur_map.items():
                    if name not in new_names:
                        cur_map[name] = {
                            add_tag(tag, '!' + engine_tag): value
                            for tag, value in tag_map.items()
                        }

        else:
            # No existing one, just set appliesto.
            ent = new_ent

        applies_to = get_appliesto(ent)
        if not match_tags(expanded, applies_to):
            applies_to.append(engine_tag)
        ent.helpers[:] = [
            helper for helper in ent.helpers
            if not isinstance(helper, HelperExtAppliesTo)
        ]

        with open(path, 'w') as f:
            ent.export(f)

        print('.', end='', flush=True)
    print()
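A hypothetical invocation, under the assumption that the database root and the FGD path exist on disk (both names are illustrative):

from pathlib import Path

action_import(
    Path('db'),             # root of the unified entity database
    'HL2',                  # engine tag applied to the imported differences
    [Path('hl2/hl2.fgd')],  # FGD files to merge in
)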
Example 4
def load_database(dbase: Path,
                  extra_loc: Optional[Path] = None,
                  fgd_vis: bool = False) -> Tuple[FGD, EntityDef]:
    """Load the entire database from disk. This returns the FGD, plus the CBaseEntity definition."""
    print(f'Loading database {dbase}:')
    fgd = FGD()

    fgd.map_size_min = -16384
    fgd.map_size_max = 16384

    # Classname -> filename
    ent_source: Dict[str, str] = {}

    with RawFileSystem(str(dbase)) as fsys:
        for file in dbase.rglob("*.fgd"):
            # Use a temp FGD class, to allow us to verify no overwrites.
            file_fgd = FGD()
            rel_loc = str(file.relative_to(dbase))
            file_fgd.parse_file(
                fsys,
                fsys[rel_loc],
                eval_bases=False,
                encoding='utf8',
            )
            for clsname, ent in file_fgd.entities.items():
                if clsname in fgd.entities:
                    raise ValueError(
                        f'Duplicate "{clsname}" class '
                        f'in {rel_loc} and {ent_source[clsname]}!')
                fgd.entities[clsname] = ent
                ent_source[clsname] = rel_loc

            if fgd_vis:
                for parent, visgroup in file_fgd.auto_visgroups.items():
                    try:
                        existing_group = fgd.auto_visgroups[parent]
                    except KeyError:
                        fgd.auto_visgroups[parent] = visgroup
                    else:  # Need to merge
                        existing_group.ents.update(visgroup.ents)

            fgd.mat_exclusions.update(file_fgd.mat_exclusions)
            for tags, mat_list in file_fgd.tagged_mat_exclusions.items():
                fgd.tagged_mat_exclusions[tags] |= mat_list

            print('.', end='', flush=True)

    load_visgroup_conf(fgd, dbase)

    if extra_loc is not None:
        if extra_loc.is_file():
            # One file.
            print('\nLoading extra file:')
            with RawFileSystem(str(extra_loc.parent)) as fsys:
                fgd.parse_file(
                    fsys,
                    fsys[extra_loc.name],
                    eval_bases=False,
                )
        else:
            print('\nLoading extra files:')
            with RawFileSystem(str(extra_loc)) as fsys:
                for file in extra_loc.rglob("*.fgd"):
                    fgd.parse_file(
                        fsys,
                        fsys[str(file.relative_to(extra_loc))],
                        eval_bases=False,
                    )
                    print('.', end='', flush=True)
    print()

    fgd.apply_bases()
    print('\nDone!')

    print('Entities without visgroups:')
    vis_ents = {
        name.casefold()
        for group in fgd.auto_visgroups.values() for name in group.ents
    }
    vis_count = ent_count = 0
    for ent in fgd:
        # Base ents, worldspawn, or engine-only ents don't need visgroups.
        if ent.type is EntityTypes.BASE or ent.classname == 'worldspawn':
            continue
        applies_to = get_appliesto(ent)
        if '+ENGINE' in applies_to or 'ENGINE' in applies_to:
            continue
        ent_count += 1
        if ent.classname.casefold() not in vis_ents:
            print(ent.classname, end=', ')
        else:
            vis_count += 1
    print(
        f'\nVisgroup count: {vis_count}/{ent_count} ({vis_count*100/ent_count:.2f}%) done!'
    )

    try:
        base_entity_def = fgd.entities.pop(BASE_ENTITY.casefold())
        base_entity_def.type = EntityTypes.BASE
    except KeyError:
        base_entity_def = EntityDef(EntityTypes.BASE)
    return fgd, base_entity_def
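A hypothetical call (the directory name is illustrative); passing fgd_vis=True also merges each file's auto-visgroups into the combined FGD:

from pathlib import Path

fgd, base_ent = load_database(Path('db'), fgd_vis=True)
print(len(fgd.entities), 'entities loaded')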
Example 5
def main(argv: List[str]) -> None:
    """Main entry point for the postcompiler hook."""
    parser = argparse.ArgumentParser(
        description="Modifies the BSP file, allowing additional entities "
                    "and bugfixes.",
    )

    parser.add_argument("--nopack",
                        dest="allow_pack",
                        action="store_false",
                        help="Prevent packing of files found in the map.")
    parser.add_argument(
        "--propcombine",
        action="store_true",
        help="Allow merging static props together.",
    )
    parser.add_argument(
        "--showgroups",
        action="store_true",
        help="Show propcombined props, by setting their tint to random groups",
    )
    parser.add_argument(
        "--dumpgroups",
        action="store_true",
        help="Write all props without propcombine groups to a new VMF.",
    )

    parser.add_argument(
        "map",
        help="The path to the BSP file.",
    )

    args = parser.parse_args(argv)

    # The path is the last argument to the compiler.
    # Hammer adds wrong slashes sometimes, so fix that.
    # Also if it's the VMF file, make it the BSP.
    path = Path(args.map).with_suffix('.bsp')

    # Open and start writing to the map's log file.
    handler = FileHandler(path.with_suffix('.log'))
    handler.setFormatter(
        Formatter(
            # Record the level name plus the calling module and function.
            '[{levelname}] {module}.{funcName}(): {message}',
            style='{',
        ))
    LOGGER.addHandler(handler)

    LOGGER.info('Srctools postcompiler hook started at {}!',
                datetime.datetime.now().isoformat())
    LOGGER.info("Map path is {}", path)

    conf, game_info, fsys, pack_blacklist, plugin = config.parse(path)

    fsys.open_ref()

    packlist = PackList(fsys)

    LOGGER.info('Gameinfo: {}', game_info.path)
    LOGGER.info(
        'Search paths: \n{}',
        '\n'.join([subsys.path for subsys, prefix in fsys.systems]),
    )

    fgd = FGD.engine_dbase()

    LOGGER.info('Loading soundscripts...')
    packlist.load_soundscript_manifest(
        conf.path.with_name('srctools_sndscript_data.vdf'))
    LOGGER.info('Done! ({} sounds)', len(packlist.soundscripts))

    LOGGER.info('Reading BSP...')
    bsp_file = BSP(path)

    LOGGER.info('Reading entities...')
    vmf = bsp_file.read_ent_data()
    LOGGER.info('Done!')

    # Mount the existing packfile, so the cubemap files are recognised.
    LOGGER.info('Mounting BSP packfile...')
    zipfile = ZipFile(BytesIO(bsp_file.get_lump(BSP_LUMPS.PAKFILE)))
    fsys.add_sys(ZipFileSystem('<BSP pakfile>', zipfile))

    studiomdl_path = conf.get(str, 'studiomdl')
    if studiomdl_path:
        studiomdl_loc = (game_info.root / studiomdl_path).resolve()
        if not studiomdl_loc.exists():
            LOGGER.warning('No studiomdl found at "{}"!', studiomdl_loc)
            studiomdl_loc = None
    else:
        LOGGER.warning('No studiomdl path provided.')
        studiomdl_loc = None

    LOGGER.info('Loading plugins...')
    plugin.load_all()

    use_comma_sep = conf.get(bool, 'use_comma_sep')
    if use_comma_sep is None:
        # Guess the format by picking whatever the first output uses.
        for ent in vmf.entities:
            if ent.outputs:
                use_comma_sep = ent.outputs[0].comma_sep
                break
        if use_comma_sep is None:
            LOGGER.warning(
                'No outputs in map, could not determine BSP I/O format!')
            LOGGER.warning('Set "use_comma_sep" in srctools.vdf.')
            use_comma_sep = False

    LOGGER.info('Running transforms...')
    run_transformations(vmf, fsys, packlist, bsp_file, game_info,
                        studiomdl_loc)

    if studiomdl_loc is not None and args.propcombine:
        decomp_cache_loc = conf.get(str, 'propcombine_cache')
        if decomp_cache_loc is not None:
            decomp_cache_loc = (game_info.root / decomp_cache_loc).resolve()
            decomp_cache_loc.mkdir(parents=True, exist_ok=True)
        if conf.get(bool, 'propcombine_crowbar'):
            # argv[0] is the location of our script/exe, which lets us locate
            # Crowbar from there.
            crowbar_loc = Path(sys.argv[0], '../Crowbar.exe').resolve()
        else:
            crowbar_loc = None

        LOGGER.info('Combining props...')
        propcombine.combine(
            bsp_file,
            vmf,
            packlist,
            game_info,
            studiomdl_loc=studiomdl_loc,
            qc_folders=[
                game_info.root / folder for folder in conf.get(
                    Property, 'propcombine_qc_folder').as_array(conv=Path)
            ],
            decomp_cache_loc=decomp_cache_loc,
            crowbar_loc=crowbar_loc,
            auto_range=conf.get(int, 'propcombine_auto_range'),
            min_cluster=conf.get(int, 'propcombine_min_cluster'),
            debug_tint=args.showgroups,
            debug_dump=args.dumpgroups,
        )
        LOGGER.info('Done!')
    else:  # Strip these if they're present.
        for ent in vmf.by_class['comp_propcombine_set']:
            ent.remove()

    bsp_file.lumps[BSP_LUMPS.ENTITIES].data = bsp_file.write_ent_data(
        vmf, use_comma_sep)

    if conf.get(bool, 'auto_pack') and args.allow_pack:
        LOGGER.info('Analysing packable resources...')
        packlist.pack_fgd(vmf, fgd)

        packlist.pack_from_bsp(bsp_file)

        packlist.eval_dependencies()
        if conf.get(bool, 'soundscript_manifest'):
            packlist.write_manifest()

    packlist.pack_into_zip(bsp_file,
                           blacklist=pack_blacklist,
                           ignore_vpk=False)

    with bsp_file.packfile() as pak_zip:
        # List out all the files, but group together files with the same extension.
        ext_for_name: Dict[str, List[str]] = defaultdict(list)
        for file in pak_zip.infolist():
            filename = Path(file.filename)
            if '.' in filename.name:
                stem, ext = filename.name.split('.', 1)
                file_path = str(filename.parent / stem)
            else:
                file_path = file.filename
                ext = ''

            ext_for_name[file_path].append(ext)

        LOGGER.info('Packed files: \n{}'.format('\n'.join([
            (f'{name}.{exts[0]}'
             if len(exts) == 1 else f'{name}.({"/".join(exts)})')
            for name, exts in sorted(ext_for_name.items())
        ])))

    LOGGER.info('Writing BSP...')
    bsp_file.save()

    LOGGER.info("srctools VRAD hook finished!")
Example 6
def load_database(dbase: Path, extra_loc: Optional[Path] = None) -> FGD:
    """Load the entire database from disk."""
    print('Loading database:')
    fgd = FGD()

    fgd.map_size_min = -16384
    fgd.map_size_max = 16384

    with RawFileSystem(str(dbase)) as fsys:
        for file in dbase.rglob("*.fgd"):
            fgd.parse_file(
                fsys,
                fsys[str(file.relative_to(dbase))],
                eval_bases=False,
            )
            print('.', end='', flush=True)

    if extra_loc is not None:
        if extra_loc.is_file():
            # One file.
            print('\nLoading extra file:')
            with RawFileSystem(str(extra_loc.parent)) as fsys:
                fgd.parse_file(
                    fsys,
                    fsys[extra_loc.name],
                    eval_bases=False,
                )
        else:
            print('\nLoading extra files:')
            with RawFileSystem(str(extra_loc)) as fsys:
                for file in extra_loc.rglob("*.fgd"):
                    fgd.parse_file(
                        fsys,
                        fsys[str(file.relative_to(extra_loc))],
                        eval_bases=False,
                    )
                    print('.', end='', flush=True)
    print()

    fgd.apply_bases()
    print('\nDone!')
    return fgd
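A hypothetical call of this earlier variant, supplying a single supplemental FGD alongside the database directory (both paths are illustrative):

from pathlib import Path

fgd = load_database(Path('db'), extra_loc=Path('extra/custom.fgd'))
print(len(fgd), 'entity definitions')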
Example 7
def main(argv: List[str]) -> None:
    """Main entry point for the postcompiler hook."""
    parser = argparse.ArgumentParser(
        description="Modifies the BSP file, allowing additional entities "
                    "and bugfixes.",
    )

    parser.add_argument("--nopack",
                        dest="allow_pack",
                        action="store_false",
                        help="Prevent packing of files found in the map.")
    parser.add_argument(
        "--propcombine",
        action="store_true",
        help="Allow merging static props together.",
    )
    parser.add_argument(
        "--showgroups",
        action="store_true",
        help="Show propcombined props, by setting their tint to 0 255 0",
    )

    parser.add_argument(
        "map",
        help="The path to the BSP file.",
    )

    args = parser.parse_args(argv)

    # The path is the last argument to the compiler.
    # Hammer adds wrong slashes sometimes, so fix that.
    # Also if it's the VMF file, make it the BSP.
    path = Path(args.map).with_suffix('.bsp')

    # Open and start writing to the map's log file.
    handler = FileHandler(path.with_suffix('.log'))
    handler.setFormatter(
        Formatter(
            # Record the level name plus the calling module and function.
            '[{levelname}] {module}.{funcName}(): {message}',
            style='{',
        ))
    LOGGER.addHandler(handler)

    LOGGER.info('Srctools postcompiler hook started at {}!',
                datetime.datetime.now().isoformat())
    LOGGER.info("Map path is {}", path)

    conf, game_info, fsys, pack_blacklist, plugins = config.parse(path)

    fsys.open_ref()

    packlist = PackList(fsys)

    LOGGER.info('Gameinfo: {}', game_info.path)
    LOGGER.info(
        'Search paths: \n{}',
        '\n'.join([subsys.path for subsys, prefix in fsys.systems]),
    )

    fgd = FGD.engine_dbase()

    LOGGER.info('Loading soundscripts...')
    packlist.load_soundscript_manifest(
        conf.path.with_name('srctools_sndscript_data.vdf'))
    LOGGER.info('Done! ({} sounds)', len(packlist.soundscripts))

    LOGGER.info('Reading BSP...')
    bsp_file = BSP(path)

    LOGGER.info('Reading entities...')
    vmf = bsp_file.read_ent_data()
    LOGGER.info('Done!')

    studiomdl_path = conf.get(str, 'studiomdl')
    if studiomdl_path:
        studiomdl_loc = (game_info.root / studiomdl_path).resolve()
        if not studiomdl_loc.exists():
            LOGGER.warning('No studiomdl found at "{}"!', studiomdl_loc)
            studiomdl_loc = None
    else:
        LOGGER.warning('No studiomdl path provided.')
        studiomdl_loc = None

    for plugin in plugins:
        plugin.load()

    use_comma_sep = conf.get(bool, 'use_comma_sep')
    if use_comma_sep is None:
        # Guess the format by picking whatever the first output uses.
        for ent in vmf.entities:
            if ent.outputs:
                use_comma_sep = ent.outputs[0].comma_sep
                break
        if use_comma_sep is None:
            LOGGER.warning(
                'No outputs in map, could not determine BSP I/O format!')
            LOGGER.warning('Set "use_comma_sep" in srctools.vdf.')
            use_comma_sep = False

    run_transformations(vmf, fsys, packlist, bsp_file, game_info,
                        studiomdl_loc)

    if studiomdl_loc is not None and args.propcombine:
        LOGGER.info('Combining props...')
        propcombine.combine(
            bsp_file,
            vmf,
            packlist,
            game_info,
            studiomdl_loc,
            [
                game_info.root / folder for folder in conf.get(
                    Property, 'propcombine_qc_folder').as_array(conv=Path)
            ],
            conf.get(int, 'propcombine_auto_range'),
            conf.get(int, 'propcombine_min_cluster'),
            debug_tint=args.showgroups,
        )
        LOGGER.info('Done!')
    else:  # Strip these if they're present.
        for ent in vmf.by_class['comp_propcombine_set']:
            ent.remove()

    bsp_file.lumps[BSP_LUMPS.ENTITIES].data = bsp_file.write_ent_data(
        vmf, use_comma_sep)

    if conf.get(bool, 'auto_pack') and args.allow_pack:
        LOGGER.info('Analysing packable resources...')
        packlist.pack_fgd(vmf, fgd)

        packlist.pack_from_bsp(bsp_file)

        packlist.eval_dependencies()

    packlist.pack_into_zip(bsp_file,
                           blacklist=pack_blacklist,
                           ignore_vpk=False)

    with bsp_file.packfile() as pak_zip:
        LOGGER.info('Packed files: \n{}'.format('\n'.join(pak_zip.namelist())))

    LOGGER.info('Writing BSP...')
    bsp_file.save()

    LOGGER.info("srctools VRAD hook finished!")