Example #1
def prepare_nmm(context):
    sln = VisualStudio2015Solution()
    sln.LoadFromFile(os.path.join(build_path, "Nexus-Mod-Manager", 'NexusClient.sln'))
    ncc_csproj = os.path.join(build_path, 'NexusClientCLI', 'NexusClientCLI', 'NexusClientCLI.csproj')
    if not os.path.isfile(ncc_csproj):
        log.critical('NOT FOUND: %s', ncc_csproj)
    else:
        log.info('FOUND: %s', ncc_csproj)
    changed = False
    projfile = VS2015Project()
    projfile.LoadFromFile(ncc_csproj)
    projguid = projfile.PropertyGroups[0].element.find('ProjectGuid').text
    log.info('ProjectGuid = %s', projguid)
    if "NexusClientCli" not in sln.projectsByName:
        newproj = sln.AddProject('NexusClientCli', ProjectType.CSHARP_PROJECT, ncc_csproj, guid=projguid)
        log.info('Adding project %s (%s) to NexusClient.sln', newproj.name, newproj.guid)
        changed = True
    else:
        newproj = sln.projectsByName['NexusClientCli']
        log.info('Project %s (%s) already exists in NexusClient.sln', newproj.name, newproj.guid)
        if newproj.projectfile != ncc_csproj:
            log.info('Changing projectfile: %s -> %s', newproj.projectfile, ncc_csproj)
            newproj.projectfile = ncc_csproj
            changed = True
    if changed:
        log.info('Writing NexusClientCli.sln')
        sln.SaveToFile(os.path.relpath(os.path.join(build_path, "Nexus-Mod-Manager", 'NexusClientCli.sln')))  # So we don't get conflicts when pulling
        return True
Example #2
def dlPackagesIn(pkgdefs, superrepo="build"):
    os_utils.ensureDirExists("download")
    for destination, retrievalData in pkgdefs.items():
        rebuild = args.rebuild_all or destination in args.rebuild
        destination = os.path.join(superrepo, destination)
        dlType = retrievalData["type"]
        if dlType == "git":
            remote = retrievalData.get("remote", "origin")
            branch = retrievalData.get("branch", "master")
            commit = retrievalData.get("commit")
            submodules = retrievalData.get("submodules", False)
            submodules_remote = retrievalData.get("submodules_remote", False)
            tag = retrievalData.get("tag")
            if "uri" not in retrievalData:
                log.critical("uri not in def for %s", destination)
            git = GitRepository(destination, retrievalData["uri"], quiet=True, noisy_clone=True)
            with log.info("Checking for updates to %s...", destination):
                if rebuild or not os.path.isdir(destination):
                    if rebuild or git.CheckForUpdates(remote, branch, tag=tag, commit=commit):
                        log.info("Updates detecting, pulling...")
                        git.Pull(remote, branch, tag=tag, commit=commit, cleanup=True)
                    if submodules:
                        if rebuild:
                            with os_utils.Chdir(destination):
                                os_utils.cmd(
                                    ["git", "submodule", "foreach", "--recursive", "git clean -dfx"],
                                    echo=True,
                                    show_output=True,
                                    critical=True,
                                )
                        git.UpdateSubmodules(submodules_remote)
        elif dlType == "hg":
            remote = retrievalData.get("remote", "default")
            branch = retrievalData.get("branch", "master")
            commit = retrievalData.get("commit", retrievalData.get("tag"))
            if "uri" not in retrievalData:
                log.critical("uri not in def for %s", destination)
            hg = HgRepository(destination, retrievalData["uri"], quiet=True, noisy_clone=True)
            with log.info("Checking for updates to %s...", destination):
                if rebuild or not os.path.isdir(destination):
                    if rebuild or hg.CheckForUpdates(remote, branch):
                        log.info("Updates detecting, pulling...")
                        hg.Pull(remote, branch, commit, cleanup=True)
        elif dlType == "http":
            url = retrievalData["url"]
            ext = retrievalData.get("ext", url[url.rfind(".") :])
            filename = os.path.join(
                script_dir, "download", retrievalData.get("filename", hashlib.md5(url).hexdigest() + ext)
            )
            if not os.path.isfile(filename):
                with log.info("Downloading %s...", url):
                    http.DownloadFile(url, filename)
            if (rebuild or not os.path.isdir(destination)) and not retrievalData.get("download-only", False):
                if rebuild:
                    os_utils.safe_rmtree(destination)
                os_utils.ensureDirExists(destination)
                with os_utils.Chdir(destination):
                    os_utils.decompressFile(filename)
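
The shape of the `pkgdefs` mapping this function consumes is not part of the excerpt. Below is a minimal sketch inferred from the keys read above; the destination names, URIs, and option values are assumptions, not from the source.

# Hypothetical package definitions for dlPackagesIn(); every key mirrors a
# retrievalData[...] / retrievalData.get(...) access in the function above.
EXAMPLE_PKGDEFS = {
    "Nexus-Mod-Manager": {            # cloned into build/Nexus-Mod-Manager
        "type": "git",
        "uri": "https://example.com/Nexus-Mod-Manager.git",
        "branch": "master",
        "submodules": True,
    },
    "seven-zip": {                    # downloaded and unpacked into build/seven-zip
        "type": "http",
        "url": "https://example.com/7z-extra.zip",
        "ext": ".zip",
    },
}
# dlPackagesIn(EXAMPLE_PKGDEFS, superrepo="build")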
Example #3
File: lobbytool.py Project: N3X15/vgws
def _cmd_set(args=None):
    pooldir = os.path.join('lobbyscreens', args.poolID)

    data = None
    datafile = os.path.join(pooldir, '__POOL__.yml')
    if os.path.isfile(datafile):
        with open(datafile, 'r') as f:
            data = yaml.safe_load(f)
            readfrom = f.name
    if data is None:
        log.critical('Could not find __POOL__.yml')
        sys.exit(1)
    pool = Pool()
    pool.ID = args.poolID
    pool.deserialize(data)
    poolfilesdir = os.path.join(pooldir, 'files')

    if args.animID in pool.animationsByID.keys():
        anim = pool.animationsByID[args.animID]
        if args.set_filename is not None:
            anim.filename = args.set_filename
    else:
        anim = Animation()
        anim.ID = args.animID
        anim.filename = args.set_filename or f'{args.animID}.gif'
        pool.animationsByID[args.animID] = anim
    if args.override_playlist is not None:
        anim.overridePlaylist = args.override_playlist
    for script in args.add_scripts:
        anim.scripts += [script]
    for script in args.rm_scripts:
        anim.scripts.remove(script)
    if args.clear_scripts:
        anim.scripts = []

    try:
        # Write to a temp file first, then replace the original pool file only
        # if the dump succeeded.
        with open('__POOL__.tmp.yml', 'w') as f:
            yaml.dump(pool.serialize(), f, default_flow_style=False)
        os.remove(readfrom)
        os_utils.single_copy('__POOL__.tmp.yml', readfrom)
    finally:
        if os.path.isfile('__POOL__.tmp.yml'):
            os.remove('__POOL__.tmp.yml')
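
The argparse wiring for this handler is not shown in the excerpt. Here is a minimal sketch of a subcommand that supplies the attributes `_cmd_set` reads; the option names and defaults are assumptions derived from the `args.<attr>` accesses above, not the actual lobbytool.py CLI.

import argparse

argp = argparse.ArgumentParser(description='Hypothetical lobbytool CLI sketch.')
subp = argp.add_subparsers()
p_set = subp.add_parser('set', help='Add or update an animation in a pool.')
p_set.add_argument('poolID')
p_set.add_argument('animID')
p_set.add_argument('--set-filename', dest='set_filename', default=None)
p_set.add_argument('--override-playlist', dest='override_playlist', default=None)
p_set.add_argument('--add-script', dest='add_scripts', action='append', default=[])
p_set.add_argument('--rm-script', dest='rm_scripts', action='append', default=[])
p_set.add_argument('--clear-scripts', action='store_true')
p_set.set_defaults(func=_cmd_set)
# args = argp.parse_args(); args.func(args)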
Example #4
def main():
    argp = argparse.ArgumentParser(description='Generation script for ss13-vox.')
    #argp.add_argument('--codebase', choices=['vg', 'tg'], default='vg', help='Which codebase to generate for. (Affects output code and paths.)')
    argp.add_argument('--threads', '-j', type=int, default=multiprocessing.cpu_count(), help='How many threads to use in ffmpeg.')
    #argp.add_argument('phrasefiles', nargs='+', type=str, help='A list of phrase files.')
    args = argp.parse_args()

    if not os.path.isdir('tmp'):
        os.makedirs('tmp')

    DIST_DIR = 'dist'
    PREEX_SOUND = 'sound/vox/{ID}.wav'
    NUVOX_SOUND = 'sound/vox_{SEX}/{ID}.wav'
    voices = []
    vox_sounds_path = ''
    templatefile = ''

    config = BaseConfig()
    config.cfg = YAMLConfig('config.yml')
    pathcfg = BaseConfig()
    pathcfg.cfg = YAMLConfig('paths.yml').cfg[config.get('codebase', 'vg')]

    PREEX_SOUND = pathcfg.get('sound.old-vox', PREEX_SOUND)
    NUVOX_SOUND = pathcfg.get('sound.new-vox', NUVOX_SOUND)

    voice_assignments = {}
    all_voices = []
    default_voice: Voice = VoiceRegistry.Get(USSLTFemale.ID)
    sfx_voice: SFXVoice = SFXVoice()
    configured_voices: Dict[str, dict] = {}
    for sexID, voiceid in config.get('voices', {'fem': USSLTFemale.ID}).items():
        voice = VoiceRegistry.Get(voiceid)
        assert sexID != ''
        voice.assigned_sex = sexID
        if sexID in ('fem', 'mas'):
            sex = EVoiceSex(sexID)
            assert voice.SEX == sex
            voices += [voice]
        elif sexID == 'default':
            default_voice = voice
        voice_assignments[voice.assigned_sex] = []
        all_voices += [voice]
        configured_voices[sexID] = voice.serialize()

    voice_assignments[sfx_voice.assigned_sex] = []
    all_voices += [sfx_voice]
    configured_voices[sfx_voice.assigned_sex] = sfx_voice.serialize()

    vox_sounds_path = os.path.join(DIST_DIR, pathcfg.get('vox_sounds.path'))
    templatefile = pathcfg.get('vox_sounds.template')
    vox_data_path = os.path.join(DIST_DIR, pathcfg.get('vox_data'))

    DATA_DIR = os.path.join(DIST_DIR, 'data')
    os_utils.ensureDirExists(DATA_DIR)
    with log.info('Parsing lexicon...'):
        lexicon = ParseLexiconText('lexicon.txt')

    phrases = []
    phrasesByID = {}
    broked = False
    for filename in config.get('phrasefiles', ['announcements.txt', 'voxwords.txt']):
        for p in ParsePhraseListFrom(filename):
            if p.id in phrasesByID:
                duplicated = phrasesByID[p.id]
                log.critical('Duplicate phrase with ID %s in file %s on line %d! First instance in file %s on line %d.', p.id, p.deffile, p.defline, duplicated.deffile, duplicated.defline)
                broked = True
                continue
            phrases += [p]
            phrasesByID[p.id] = p
        if broked:
            sys.exit(1)

    soundsToKeep = set()
    for sound in OTHERSOUNDS:
        soundsToKeep.add(os.path.join(DIST_DIR, sound + '.ogg'))

    phrases.sort(key=lambda x: x.id)

    overrides = config.get('overrides', {})
    for phrase in phrases:
        if phrase.id in overrides:
            phrase.fromOverrides(overrides.get(phrase.id))
        phrase_voices = list(voices)
        # If it has a path, it's being manually specified.
        if '/' in phrase.id:
            phrase.filename = phrase.id + '.ogg'
            phrase_voices = [default_voice]
            soundsToKeep.add(os.path.abspath(os.path.join(DIST_DIR, phrase.filename)))
        else:
            phrase.filename = NUVOX_SOUND
            if phrase.hasFlag(EPhraseFlags.OLD_VOX):
                phrase_voices = [default_voice]
                phrase.filename = PREEX_SOUND.format(ID=phrase.id)
                for voice in ['fem', 'mas']:
                    phrase.files[voice] = FileData()
                    phrase.files[voice].filename = phrase.filename
                    phrase.files[voice].checksum = ''
                    phrase.files[voice].duration = phrase.override_duration or -1
                    phrase.files[voice].size     = phrase.override_size or -1
                    #voice_assignments[voice].append(phrase)
                soundsToKeep.add(os.path.abspath(os.path.join(DIST_DIR, phrase.filename)))
                continue

        if phrase.hasFlag(EPhraseFlags.SFX):
            phrase_voices = [sfx_voice]

        if not phrase.hasFlag(EPhraseFlags.OLD_VOX):
            log.info('%s - %r', phrase.id, [x.assigned_sex for x in phrase_voices])
            for v in phrase_voices:
                voice_assignments[v.assigned_sex].append(phrase)
                #phrase.files[v.assigned_sex] = fd
    #sys.exit(1)
    for voice in all_voices:
        print(voice.ID, voice.assigned_sex)
        DumpLexiconScript(voice.FESTIVAL_VOICE_ID, lexicon.values(), 'tmp/VOXdict.lisp')
        for phrase in voice_assignments[voice.assigned_sex]:
            GenerateForWord(phrase, voice, soundsToKeep, args)
            for vk, fd in phrase.files.items():
                soundsToKeep.add(os.path.abspath(os.path.join(DIST_DIR, fd.filename)))

    jenv = jinja2.Environment(loader=jinja2.FileSystemLoader(['./templates']))
    jenv.add_extension('jinja2.ext.do') # {% do ... %}
    templ = jenv.get_template(templatefile)
    with log.info('Writing sound list to %s...', vox_sounds_path):
        os_utils.ensureDirExists(os.path.dirname(vox_sounds_path))
        assetcache = {}
        sound2id = {}
        with open(vox_sounds_path, 'w') as f:
            sexes = {
                'fem': [],
                'mas': [],
                'default': [],
                #'sfx': [],
            }
            for p in phrases:
                for k in p.files.keys():
                    assetcache[p.getAssetKey(k)] = p.files[k].filename
                    sound2id[p.files[k].filename] = p.getAssetKey(k)
                if p.hasFlag(EPhraseFlags.NOT_VOX):
                    continue
                for k in p.files.keys():
                    if p.hasFlag(EPhraseFlags.SFX):
                        for sid in ('fem', 'mas'):
                            if p not in sexes[sid]:
                                sexes[sid].append(p)
                    else:
                        sexes[k].append(p)
            f.write(templ.render(
                InitClass=InitClass,
                SEXES=sexes,
                ASSETCACHE=assetcache,
                SOUND2ID=sound2id,
                PHRASES=[p for p in phrases if not p.hasFlag(EPhraseFlags.NOT_VOX)]))
    soundsToKeep.add(os.path.abspath(vox_sounds_path))

    os_utils.ensureDirExists(DATA_DIR)
    with open(os.path.join(DATA_DIR, 'vox_data.json'), 'w') as f:
        data = {
            'version': 2,
            'compiled': time.time(),
            'voices': configured_voices,
            'words': collections.OrderedDict({w.id: w.serialize() for w in phrases if '/' not in w.id}),
        }
        json.dump(data, f, indent=2)
    soundsToKeep.add(os.path.abspath(os.path.join(DATA_DIR, 'vox_data.json')))

    with open('tmp/written.txt', 'w') as f:
        for filename in sorted(soundsToKeep):
            f.write(f'{filename}\n')

    for root, _, files in os.walk(DIST_DIR, topdown=False):
        for name in files:
            filename = os.path.abspath(os.path.join(root, name))
            if filename not in soundsToKeep:
                log.warning('Removing %s (no longer defined)', filename)
                os.remove(filename)
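
config.yml and paths.yml are not included in this example. Below is a minimal sketch of the structure main() appears to expect, mirrored as Python dicts; the keys come from the config.get()/pathcfg.get() calls above, while all values (voice IDs, paths, template names) are placeholders.

# Hypothetical contents of config.yml, as a dict.
EXAMPLE_CONFIG = {
    'codebase': 'vg',
    'voices': {'fem': 'us-slt-hts', 'default': 'us-slt-hts'},  # voice IDs are placeholders
    'phrasefiles': ['announcements.txt', 'voxwords.txt'],
    'overrides': {},
}
# Hypothetical contents of paths.yml, keyed by codebase; dotted lookups such as
# pathcfg.get('sound.old-vox', ...) are assumed to walk this nesting.
EXAMPLE_PATHS = {
    'vg': {
        'sound': {
            'old-vox': 'sound/vox/{ID}.wav',
            'new-vox': 'sound/vox_{SEX}/{ID}.wav',
        },
        'vox_sounds': {
            'path': 'code/defines/vox_sounds.dm',    # placeholder path
            'template': 'vox_sounds.dm.jinja',       # placeholder template name
        },
        'vox_data': 'data/vox_data.json',
    },
}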
Example #5
    def build(self):
        gitmodules = {}
        with open(self.gitmodulesfile, 'r') as tomlf:
            smid = None
            for line in tomlf:
                line = line.strip()
                m = REG_SUBMODULE_SECTION.match(line)
                if m is not None:
                    smid = m.group(1).strip()
                    gitmodules[smid] = {}
                if smid is not None and '=' in line:
                    k, v = line.split('=', 1)
                    gitmodules[smid][k.strip()] = v.strip()
        gitconfig = {}
        with open(self.gitconfigfile, 'r') as tomlf:
            smid = None
            for line in tomlf:
                line = line.strip()
                #print(line)
                m = REG_SUBMODULE_SECTION.match(line)
                if m is not None:
                    smid = m.group(1).strip()
                    gitconfig[smid] = {}
                if smid is not None and '=' in line:
                    #print(line)
                    k, v = line.split('=', 1)
                    gitconfig[smid][k.strip()] = v.strip()
        '''
        with open(self.gitmodulesfile + '.yml', 'w') as f:
            yaml.dump(gitmodules, f, default_flow_style=False)
        with open('.gitconfig.yml', 'w') as f:
            yaml.dump(gitconfig, f, default_flow_style=False)
        '''
        for repoID, repoconf in gitconfig.items():
            if repoID not in gitmodules.keys():
                with log.warn('Submodule %s is present in .git/config but not .gitmodules!', repoID):
                    pathspec = repoconf.get('path', repoID)
                    path = os.path.abspath(pathspec)
                    tag = repoconf.get('tag', None)
                    branch = repoconf.get('branch', 'HEAD' if tag is None else None)
                    log.info('path = %s', pathspec)
        for repoID, repoconf in gitmodules.items():
            if repoID not in gitconfig.keys():
                with log.warn('Submodule %s is present in .gitmodules but not .git/config!', repoID):
                    pathspec = repoconf.get('path', repoID)
                    path = os.path.abspath(pathspec)
                    tag = repoconf.get('tag', None)
                    branch = repoconf.get('branch', 'HEAD' if tag is None else None)
                    opts = []
                    if branch != 'HEAD':
                        opts += ['-b', branch]
                    log.info('path = %s', pathspec)
                    if os.path.isdir(path):
                        log.warn('Removing existing %s directory.', path)
                        shutil.rmtree(path)
                    cmd = ['git', 'submodule', 'add']+opts+['-f', '--name', repoID, '--', repoconf.get('url'), pathspec]
                    os_utils.cmd(cmd, critical=True, echo=self.should_echo_commands(), show_output=True)
                    #log.error('Would exec: %s', ' '.join(cmd))

        for repoID, repoconf in gitmodules.items():
            with log.info('Checking %s...', repoID):
                pathspec = repoconf.get('path', repoID)
                path = os.path.abspath(pathspec)
                tag = repoconf.get('tag', None)
                branch = repoconf.get('branch', 'HEAD' if tag is None else None)
                if os.path.isdir(path):
                    desired_commit = ''
                    cmdline = ['git', 'ls-tree', Git.GetBranch(), pathspec]
                    stdout, stderr = os_utils.cmd_output(cmdline, echo=self.should_echo_commands(), critical=True)
                    skip_this = False
                    for line in (stdout+stderr).decode('utf-8').splitlines():
                        if line.startswith('error:') or line.startswith('fatal:'):
                            log.critical(line)
                            raise error.SubprocessThrewError(cmdline, line)
                        # ls-tree output: "<mode> <type> <object>\t<path>"
                        line, _path = line.strip().split('\t')
                        _, _, desired_commit = line.split(' ')
                    if not skip_this:
                        with os_utils.Chdir(path, quiet=not self.should_echo_commands()):
                            cur_commit = Git.GetCommit(short=False, quiet=not self.should_echo_commands())
                            #log.info(desired_commit)
                            #log.info(cur_commit)
                            if cur_commit == desired_commit:
                                log.info('Commits are synced, skipping.')
                                continue

                repo = GitRepository(path, origin_uri=repoconf['url'], submodule=True)
                if repo.CheckForUpdates(branch=branch, quiet=False):
                    if os.path.isdir(path):
                        os_utils.cmd(['git', 'submodule', 'sync', '--', pathspec], critical=True, echo=self.should_echo_commands(), show_output=True)
                    os_utils.cmd(['git', 'submodule', 'update', '--init', '--recursive', pathspec], critical=True, echo=self.should_echo_commands(), show_output=True)
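
For reference, here is a short sketch of the ini-style input this parser walks and the dict it builds, assuming REG_SUBMODULE_SECTION captures the quoted submodule name; the sample submodule and URL are made up.

# Given a .gitmodules entry like:
#
#   [submodule "libs/buildtools"]
#       path = libs/buildtools
#       url = https://example.com/buildtools.git
#
# the first loop in build() yields:
#
#   gitmodules == {'libs/buildtools': {'path': 'libs/buildtools',
#                                      'url': 'https://example.com/buildtools.git'}}
#
# .git/config is parsed the same way, and the two dicts are then compared in
# both directions to find submodules missing from either file.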
Example #6
with log.info("Building NCC..."):
    if not args.rebuild_all and filesAllExist(
        [os.path.join(script_dir, "install", "bin", "ncc", "NexusClientCLI.exe")]
    ):
        log.info("Skipping; All needed files built.")
    else:
        # We patch it LIVE now.
        # with os_utils.Chdir(ncc_dir):
        #    os_utils.single_copy(os.path.join(ncc_dir,'NexusClient.sln'), nmm_dir, ignore_mtime=True)
        with os_utils.Chdir(nmm_dir):
            # And this is why I use buildtools everywhere: Because it has shit like this.
            sln = VisualStudio2015Solution()
            sln.LoadFromFile("NexusClient.sln")
            ncc_csproj = os.path.relpath(os.path.join(ncc_dir, "NexusClientCLI", "NexusClientCLI.csproj"))
            if not os.path.isfile(ncc_csproj):
                log.critical("NOT FOUND: %s", ncc_csproj)
            else:
                log.info("FOUND: %s", ncc_csproj)
            changed = False
            projfile = VS2015Project()
            projfile.LoadFromFile(ncc_csproj)
            projguid = projfile.PropertyGroups[0].element.find("ProjectGuid").text
            log.info("ProjectGuid = %s", projguid)
            if "NexusClientCli" not in sln.projectsByName:
                newproj = sln.AddProject("NexusClientCli", ProjectType.CSHARP_PROJECT, ncc_csproj, guid=projguid)
                log.info("Adding project %s (%s) to NexusClient.sln", newproj.name, newproj.guid)
                changed = True
            else:
                newproj = sln.projectsByName["NexusClientCli"]
                log.info("Project %s (%s) already exists in NexusClient.sln", newproj.name, newproj.guid)
                if newproj.projectfile != ncc_csproj: