Example #1
File: lobbytool.py Project: N3X15/vgws
def _cmd_create(args):
    data = {
        'id': args.ID,
        'playlist': 'lobby',
        'template': 'main'
    }
    pooldir = os.path.join('lobbyscreens', args.ID)
    os_utils.ensureDirExists(pooldir, noisy=True)
    os_utils.ensureDirExists(os.path.join(pooldir, 'files'), noisy=True)
    written = []
    with open(os.path.join(pooldir, '__POOL__.yml'), 'w') as f:
        yaml.dump(data, f, default_flow_style=False)
        log.info('Wrote %s.', f.name)
        written += [os.path.basename(f.name)]

    with open(os.path.join(pooldir, '.gitignore'), 'w') as f:
        f.write('/parsed.yml\n')
        written += [os.path.basename(f.name)]

    with os_utils.Chdir(pooldir):
        if not os.path.isdir('.git'):
            os_utils.cmd(['git', 'init'], echo=True, show_output=True, critical=True)
        os_utils.cmd(['git', 'lfs', 'install'], echo=True, show_output=True, critical=True)
        os_utils.cmd(['git', 'lfs', 'track', '*.png', '*.gif', '*.jpg', '*.webm', '*.webp'], echo=True, show_output=True, critical=True)
        os_utils.cmd(['git', 'add', '.gitattributes']+written, echo=True, show_output=True, critical=True)
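A minimal sketch of how this subcommand might be wired to argparse; the real CLI layout of lobbytool.py is not shown above, so the parser structure here is an assumption:

import argparse

# Hypothetical wiring (parser layout assumed, not taken from lobbytool.py).
argp = argparse.ArgumentParser()
subp = argp.add_subparsers()
create = subp.add_parser('create')
create.add_argument('ID')              # consumed as args.ID in _cmd_create()
create.set_defaults(func=_cmd_create)

args = argp.parse_args()
args.func(args)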
Example #2
    def shouldBuild(self):
        if args.rebuild_all:
            return True

        if self.name in args.get_snapshot:
            log.info("Getting snapshot of %s (%s pre-build)...", script_dir, self.name)
            for root, _, files in os.walk(script_dir):
                for filename in files:
                    self.preExisting.append(os.path.relpath(os.path.abspath(os.path.join(root, filename)), script_dir))
        if self.name in args.rebuild:
            return True

        if not os.path.isdir(self.build_dir):
            return True
        if not os.path.isfile(self.builder_meta_file):
            return True

        manifest = {}
        with open(self.builder_meta_file, "r") as f:
            data = yaml.safe_load(f)
            manifest = data.get("manifest", {})

        for expectedFile in self.expected:
            if not os.path.isfile(expectedFile):
                return True
            relfilepath = os.path.relpath(expectedFile, script_dir)
            if relfilepath not in manifest:
                return True
            if os.stat(expectedFile).st_mtime != manifest[relfilepath]:
                return True
        return False
Example #3
 def parseWord(self, line):
     """
     walkers: noun "w oo" 'k @ z'
     present: verb 'p r e' "z @ n t"
     monument: noun "mo" 'n y u' 'm @ n t'
     """
     global REGEX_SEARCH_STRINGS
     lineChunks = line.split(' ')
     self.name = lineChunks[0].strip(':')
     self.type = lineChunks[1].strip()
     pronunciation = ' '.join(lineChunks[2:])
     for match in REGEX_SEARCH_STRINGS.finditer(pronunciation):
         stressLevel = 0
         if match.group(1) == '"':
             stressLevel = 1
         phonemes = []
         for phoneme in match.group(2).split(' '):
             if phoneme not in self.VALID_PHONEMES:
                 log.error(
                     'INVALID PHONEME "{0}" IN LEX ENTRY "{1}"'.format(
                         phoneme, self.name))
                 sys.exit(1)
             if self.phoneset in self.PHONE_CONVERSIONS:
                 phoneset = self.PHONE_CONVERSIONS[self.phoneset]
                 if phoneme in phoneset:
                     phoneme = phoneset[phoneme]
             phonemes += [phoneme]
         self.syllables += [(phonemes, stressLevel)]
     log.info('Parsed {0} as {1}.'.format(pronunciation,
                                          repr(self.syllables)))
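REGEX_SEARCH_STRINGS is defined elsewhere in the project. A plausible reconstruction, based only on how parseWord() uses it (group 1 is the quote character that encodes stress, group 2 is the phoneme string inside the quotes):

import re

# Assumed pattern: "..." marks a stressed syllable, '...' an unstressed one.
REGEX_SEARCH_STRINGS = re.compile(r'(["\'])([^"\']+)\1')

# For the docstring line: walkers: noun "w oo" 'k @ z'
# finditer() would yield ('"', 'w oo') and ("'", 'k @ z').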
Example #4
 def build(self):
     os_utils.ensureDirExists(os.path.dirname(self.target))
     # BUGFIX: Coffeescript sometimes doesn't want to overwrite shit. - N3X
     if os.path.isfile(self.target):
         os.remove(self.target)
     coffeefile = self.files[0]
     if len(self.files) > 1:
         coffeefile = self.getCoffeeFile()
         if os.path.isfile(coffeefile):
             os.remove(coffeefile)
         with codecs.open(coffeefile, 'w', encoding='utf-8-sig') as outf:
             tq = tqdm(self.files, desc='Concatenating...', leave=False)
             for infilename in tq:
                 #outf.write('\n`// FILE: {}`\n'.format(infilename))
                 with codecs.open(infilename, 'r', encoding='utf-8-sig') as inf:
                     for line in inf:
                         outf.write(line.rstrip() + "\n")
                 #outf.write('\n`//# sourceURL={}\n`\n'.format(infilename))
             tq.close()
     coffeefile_basename, _ = os.path.splitext(os.path.basename(coffeefile))
     os_utils.cmd([self.coffee_executable] + self.coffee_opts + ['-o', os.path.dirname(self.target), coffeefile], critical=True, echo=self.should_echo_commands(), show_output=True)
     coffee_output_file = os.path.join(os.path.dirname(self.target), coffeefile_basename+'.js')
     if coffee_output_file != self.target:
         log.info('Renaming %s to %s...', coffee_output_file, self.target)
         os.rename(coffee_output_file, self.target)
Example #5
File: lobbytool.py Project: N3X15/vgws
def _cmd_collect(args=None):
    allpools = {}
    for pooldirname in os.listdir('lobbyscreens'):
        pooldir = os.path.join('lobbyscreens', pooldirname)
        data = None
        datafile = os.path.join(pooldir, '__POOL__.yml')
        if os.path.isfile(datafile):
            with open(datafile, 'r') as f:
                data = yaml.safe_load(f)
        if data is None:
            continue
        pool = Pool()
        pool.ID = pooldirname
        pool.deserialize(data)
        poolfilesdir = os.path.join(pooldir, 'files')
        for imagebasename in os.listdir(poolfilesdir):
            basename, ext = os.path.splitext(imagebasename)
            #print(basename, ext)
            if ext not in ('.jpg', '.png', '.gif', '.svg', '.webm', '.webp', '.mp4', '.ogv'):
                #print('  SKIPPED')
                continue
            anim = Animation()
            anim.ID = basename
            data = None
            filedatapath = os.path.join(poolfilesdir, basename+'.yml')
            if os.path.isfile(filedatapath):
                with open(filedatapath, 'r') as f:
                    data = yaml.safe_load(f)
            filedatapath = os.path.join(poolfilesdir, basename+'.toml')
            if os.path.isfile(filedatapath):
                with open(filedatapath, 'r') as f:
                    data = toml.load(f)
            filedatapath = os.path.join(poolfilesdir, basename+'.json')
            if os.path.isfile(filedatapath):
                with open(filedatapath, 'r') as f:
                    data = json.load(f)
            if data is not None:
                anim.deserialize(data)
            anim.url = imagebasename
            fullpath = os.path.join(poolfilesdir, imagebasename)
            destfile = os.path.join('htdocs', 'img', 'lobby', pool.ID, anim.url)
            os_utils.ensureDirExists(os.path.dirname(destfile), noisy=False)
            os_utils.single_copy(fullpath, destfile, as_file=True, noisy=False)
            pool.add(anim)
        with open(os.path.join(pooldir, 'parsed.yml'), 'w') as f:
            yaml.dump(pool.serialize(suppress_id=True), f, default_flow_style=False)
        log.info('Found pool %r: %d animations', pool.ID, len(pool.animations))
        allpools[pool.ID] = pool.serialize()
    os_utils.ensureDirExists('data')
    with open('data/lobby.json', 'w') as f:
        json.dump(allpools, f, indent=2)
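Note the metadata precedence: each format check overwrites data, so .json beats .toml, which beats .yml when several files exist for one image. The same logic, condensed (behavior identical to the loop body above):

# Later loaders overwrite earlier results: YAML < TOML < JSON (last match wins).
loaders = [('.yml', yaml.safe_load), ('.toml', toml.load), ('.json', json.load)]
data = None
for ext, load in loaders:
    candidate = os.path.join(poolfilesdir, basename + ext)
    if os.path.isfile(candidate):
        with open(candidate, 'r') as f:
            data = load(f)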
Example #6
    def is_stale(self):
        self._updateCacheInfo()
        if not os.path.isfile(self.target):
            return True

        etag = ''
        if os.path.isfile(self.etagfile):
            with open(self.etagfile, 'r') as f:
                etag = f.read()
        with log.info('Checking for changes to %s...', self.url):
            res = requests.head(self.url,
                                allow_redirects=True,
                                headers={'If-None-Match': etag})
            if res.status_code == 304:
                log.info('304 - Not Modified')
                return False
            if etag == res.headers.get('ETag'):
                return False
            res.raise_for_status()
            with log.info('Response headers:'):
                for k, v in res.headers.items():
                    log.info('%s: %s', k, v)
            log.info('HTTP %d', res.status_code)
            with open(self.etagfile, 'w') as f:
                f.write(res.headers.get('ETag', ''))
        return True
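A hypothetical caller (object and attribute names assumed) showing how is_stale() might gate the actual download:

# Only fetch when the ETag check reports a change.
if tgt.is_stale():
    res = requests.get(tgt.url, allow_redirects=True)
    res.raise_for_status()
    with open(tgt.target, 'wb') as f:
        f.write(res.content)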
Example #7
File: BUILD.py Project: N3X15/ONI-mods
def mkproject(bm: BuildMaestro, project: str, depends: List[str] = []):
    with log.info('Configuring %s...', project):
        proj_dir = os.path.join('src', project)

        projin = bm.add(
            ReplaceTextTarget(os.path.join(proj_dir, f'{project}.csproj'),
                              os.path.join(proj_dir, f'{project}.csproj.in'),
                              replacements={
                                  re.escape('$(ONIPath)'):
                                  CONFIG.get('paths.oni').replace(
                                      '\\', '\\\\')
                              }))

        dll = os.path.join('src', project, 'bin', project + '.dll')

        csfiles = [
            f for f in os_utils.get_file_list(os.path.join(proj_dir, 'Source'),
                                              prefix=os.path.join(
                                                  proj_dir, 'Source'))
            if f.endswith('.cs')
        ]
        csfiles.sort()
        for csfile in csfiles:
            log.info(csfile)
        csp = bm.add(
            MSBuildTarget(dll,
                          os.path.join(proj_dir, f'{project}.sln'),
                          files=csfiles,
                          dependencies=[projin.target] + depends))
        csp.msb.properties['ONIPath'] = CONFIG.get('paths.oni')
        #csp.msb.configuration = 'Debug'

        deploydir = os.path.join(LOCALMODS, project)
        bm.add(CopyFileTarget(deploydir, dll, dependencies=[csp.target]))
        os_utils.ensureDirExists(deploydir, noisy=True)
        for basefilename in os.listdir(os.path.join('Mods', project)):
            filename = os.path.join('Mods', project, basefilename)
            _, ext = os.path.splitext(basefilename)
            if ext in ('.json', '.txt'):
                cf = bm.add(
                    CopyFileTarget(os.path.join(deploydir, basefilename),
                                   filename,
                                   dependencies=[csp.target],
                                   verbose=True))
                log.info('Found config: %s', basefilename)
        return csp
Example #8
    def updateManifest(self):
        currentFiles = []
        newfiles = []
        if self.name in args.get_snapshot:
            log.info("Getting snapshot of %s (%s post-build)...", script_dir, self.name)
            for root, _, files in os.walk(script_dir):
                for filename in files:
                    currentFiles.append(os.path.relpath(os.path.abspath(os.path.join(root, filename)), script_dir))
            log.info("Comparing...")
            newfiles = [fn.replace("\\", "/") for fn in currentFiles if fn not in self.preExisting]

        newmanifest = {}
        with log.info("Checking for %d expected files...", len(self.expected)):
            for expectedFile in self.expected:
                if not os.path.isfile(expectedFile):
                    log.error("MISSING %s", expectedFile)
                    return False
                relfilepath = os.path.relpath(expectedFile, self.build_dir)
                newmanifest[relfilepath] = os.stat(expectedFile).st_mtime
            log.info("All check out!")

        with open(self.builder_meta_file, "w") as f:
            yaml.dump(
                {"configuration": self.configuration, "manifest": newmanifest, "newfiles": newfiles},
                f,
                default_flow_style=False,
            )

        return True
Example #9
def main():
    argp = argparse.ArgumentParser()
    argp.add_argument('--go', action='store_true')
    args = argp.parse_args()

    files_to_proc = []
    for root, _, files in os.walk(IN_DIR):
        for bfn in files:
            fullpath = os.path.abspath(os.path.join(root, bfn))
            if bfn.endswith('.bak'):
                log.info('rm %s', fullpath)
                os.remove(fullpath)
            if bfn.endswith('.php'):
                files_to_proc += [fullpath]

    for filename in tqdm.tqdm(files_to_proc, desc='Moving files...', unit='file'):
        namespace = None
        outpath = None
        with open(filename, 'r') as f:
            for line in f:
                m = re.match(REG_NS, line)
                if m is not None:
                    namespace = m.group(1)
                    break
        if namespace is None:
            continue
        nschunks = namespace.split('\\')
        if nschunks[0] == '':
            nschunks = nschunks[1:]
        nschunks = nschunks[1:]

        nschunks += [os.path.basename(filename).replace('.class', '').replace('.interface','')]
        outpath = os.path.abspath(os.path.join(OUT_DIR, *nschunks))
        if outpath == filename:
            continue

        cmd = [os_utils.which('git'), 'mv', os.path.relpath(filename), os.path.relpath(outpath)]
        if args.go:
            os_utils.ensureDirExists(os.path.dirname(outpath), noisy=True)
            os_utils.cmd([os_utils.which('git'), 'add', os.path.relpath(filename)], echo=True, show_output=True)
            os_utils.cmd(cmd, echo=True, critical=True)
        else:
            log.info(' '.join(cmd))
    os_utils.del_empty_dirs(IN_DIR, quiet=False)
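REG_NS is defined elsewhere in the script. Since the code takes group(1) and splits it on backslashes, a plausible definition would be:

import re

# Assumed pattern for PHP namespace declarations, e.g. 'namespace Foo\Bar;'.
REG_NS = re.compile(r'^\s*namespace\s+([^;]+)\s*;')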
Example #10
def dlPackagesIn(pkgdefs, superrepo="build"):
    os_utils.ensureDirExists("download")
    for destination, retrievalData in pkgdefs.items():
        rebuild = args.rebuild_all or destination in args.rebuild
        destination = os.path.join(superrepo, destination)
        dlType = retrievalData["type"]
        if dlType == "git":
            remote = retrievalData.get("remote", "origin")
            branch = retrievalData.get("branch", "master")
            commit = retrievalData.get("commit")
            submodules = retrievalData.get("submodules", False)
            submodules_remote = retrievalData.get("submodules_remote", False)
            tag = retrievalData.get("tag")
            if "uri" not in retrievalData:
                log.critical("uri not in def for %s", destination)
            git = GitRepository(destination, retrievalData["uri"], quiet=True, noisy_clone=True)
            with log.info("Checking for updates to %s...", destination):
                if rebuild or not os.path.isdir(destination):
                    if rebuild or git.CheckForUpdates(remote, branch, tag=tag, commit=commit):
                        log.info("Updates detecting, pulling...")
                        git.Pull(remote, branch, tag=tag, commit=commit, cleanup=True)
                    if submodules:
                        if rebuild:
                            with os_utils.Chdir(destination):
                                os_utils.cmd(
                                    ["git", "submodule", "foreach", "--recursive", "git clean -dfx"],
                                    echo=True,
                                    show_output=True,
                                    critical=True,
                                )
                        git.UpdateSubmodules(submodules_remote)
        elif dlType == "hg":
            remote = retrievalData.get("remote", "default")
            branch = retrievalData.get("branch", "master")
            commit = retrievalData.get("commit", retrievalData.get("tag"))
            if "uri" not in retrievalData:
                log.critical("uri not in def for %s", destination)
            hg = HgRepository(destination, retrievalData["uri"], quiet=True, noisy_clone=True)
            with log.info("Checking for updates to %s...", destination):
                if rebuild or not os.path.isdir(destination):
                    if rebuild or hg.CheckForUpdates(remote, branch):
                        log.info("Updates detecting, pulling...")
                        hg.Pull(remote, branch, commit, cleanup=True)
        elif dlType == "http":
            url = retrievalData["url"]
            ext = retrievalData.get("ext", url[url.rfind(".") :])
            filename = os.path.join(
                script_dir, "download", retrievalData.get("filename", hashlib.md5(url).hexdigest() + ext)
            )
            if not os.path.isfile(filename):
                with log.info("Downloading %s...", url):
                    http.DownloadFile(url, filename)
            if (rebuild or not os.path.isdir(destination)) and not retrievalData.get("download-only", False):
                if rebuild:
                    os_utils.safe_rmtree(destination)
                os_utils.ensureDirExists(destination)
                with os_utils.Chdir(destination):
                    os_utils.decompressFile(filename)
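A sketch of the pkgdefs shapes dlPackagesIn() accepts, with keys taken from the code above and URLs that are purely hypothetical:

dlPackagesIn({
    'zlib': {
        'type': 'http',
        'url': 'https://example.com/zlib.zip',           # hypothetical
    },
    'uibase': {
        'type': 'git',
        'uri': 'https://github.com/example/uibase.git',  # hypothetical
        'branch': 'master',
        'submodules': True,
    },
})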
Example #11
 def build(self):
     linebuf = ''
     nlines = 0
     lastbytecount = 0
     lastcheck = 0
     longest_line = 0
     os_utils.ensureDirExists(os.path.dirname(self.target))
     total_bytes = os.path.getsize(self.subject)
     nbytes = 0  # bytes processed so far; total_bytes is the full file size
     with codecs.open(self.subject, 'r',
                      encoding=self.read_encoding) as inf:
         with codecs.open(self.target + '.out',
                          'w',
                          encoding=self.write_encoding) as outf:
             progBar = tqdm.tqdm(
                 total=total_bytes, unit='B',
                 leave=False) if self.display_progress else None
             outf.write(self.text)
             while True:
                 block = inf.read(4096)
                 block = block.replace('\r\n', '\n')
                 block = block.replace('\r', '\n')
                 if not block:  # EOF
                     outf.write(linebuf)
                     nlines += 1
                     charsInLine = len(linebuf)
                     if charsInLine > longest_line:
                         longest_line = charsInLine
                     break
                 for c in block:
                     nbytes += 1
                     if self.display_progress:
                         # if nbytes % 10 == 1:
                         cms = utils.current_milli_time()
                         if cms - lastcheck >= 250:
                             progBar.set_postfix({
                                 'linebuf': len(linebuf),
                                 'nlines': nlines
                             })
                             progBar.update(nbytes - lastbytecount)
                             lastcheck = cms
                             lastbytecount = nbytes
                     linebuf += c
                     if c in '\r\n':
                         outf.write(linebuf)
                         nlines += 1
                         charsInLine = len(linebuf)
                         if charsInLine > longest_line:
                             longest_line = charsInLine
                         linebuf = ''
             if self.display_progress:
                 progBar.close()
                 with log.info('Completed.'):
                     log.info('Lines.......: %d', nlines)
                     log.info('Chars.......: %d', nbytes)
                     log.info('Longest line: %d chars', longest_line)
     shutil.move(self.target + '.out', self.target)
Example #12
 def __init__(self,
              sources,
              destination,
              rsync_executable=None,
              progress=False,
              delete=False,
              opts=['-Rruhavp'],
              chmod=0o755,
              chown=None,
              chgrp=None,
              show_output=False,
              dependencies=[],
              provides=[],
              name='rsync',
              keyfile=None):
     self.rsync_executable = rsync_executable or os_utils.which('rsync')
     self.opts = opts
     self.progress = progress
     self.chmod = chmod
     self.chown = chown
     self.chgrp = chgrp
     self.show_output = show_output
     self.sources = sources
     self.delete = delete
     self.destination = destination
     self.keyfile = keyfile
     files = []
     for source in sources:
         with log.info('Scanning %s...', source):
             if os.path.isdir(source):
                 files += os_utils.get_file_list(source)
             if os.path.isfile(source):
                 files += [source]
     super().__init__(target=str(
         Path('.build',
              hashlib.sha256(name.encode('utf-8')).hexdigest() + '.target')),
                      files=files,
                      dependencies=dependencies,
                      provides=provides,
                      name=name)
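Hypothetical usage of this constructor (the class name RSyncTarget is an assumption; only the signature above comes from the source):

sync = RSyncTarget(sources=['htdocs/'],
                   destination='deploy@web01:/var/www/htdocs',  # hypothetical host/path
                   progress=True,
                   delete=True,
                   dependencies=['build-htdocs'])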
Example #13
 def __init__(self,
              sources,
              destination,
              rsync_executable=None,
              progress=False,
              delete=False,
              opts=['-Rruavp'],
              chmod=0o755,
              chown=None,
              chgrp=None,
              show_output=False,
              dependencies=[],
              provides=[],
              name='rsync',
              keyfile=None):
     self.rsync_executable = rsync_executable or os_utils.which('rsync')
     self.opts = opts
     self.progress = progress
     self.chmod = chmod
     self.chown = chown
     self.chgrp = chgrp
     self.show_output = show_output
     self.sources = sources
     self.delete = delete
     self.destination = destination
     self.keyfile = keyfile
     files = []
     for source in sources:
         with log.info('Scanning %s...', source):
             if os.path.isdir(source):
                 files += os_utils.get_file_list(source)
             if os.path.isfile(source):
                 files += [source]
     super().__init__(target=self.genVirtualTarget(
         name.replace('\\', '_').replace('/', '_')),
                      files=files,
                      dependencies=dependencies,
                      provides=provides,
                      name=name)
Example #14

import os, sys, tempfile

script_dir = os.path.dirname(os.path.realpath(__file__))
sys.path.append(os.path.join(script_dir, 'buildtools'))

from buildtools import log, Config, Properties, replace_vars
from buildtools.wrapper import Ant

DefaultConfig = {
    'ant': {
        'properties': {
            'files': ['build.properties'],
            'keys': {},
            'secret-keys': [],
            'outfile': '/tmp/jenkins-%%JOB_NAME%%.properties'
        }
    }
}

config = Config('build-eclipse.yml', DefaultConfig)

outfile = replace_vars(config.get('outfile', '/tmp/jenkins-%%JOB_NAME%%.properties'), os.environ)
with log.info('Building %s...', outfile):
    properties = Properties()
    properties.properties = config.get('ant.properties.keys', {})
    for filename in config.get('ant.properties.files', []):
        filename = replace_vars(filename, os.environ)
        properties.Load(filename, expand_vars=os.environ)
    properties.Save(outfile)
Example #15
 def build(self):
     if self.archive.endswith('.zip'):
         with log.info('Extracting %r as ZIP archive...', self.archive):
             with zipfile.ZipFile(self.archive) as z:
                 z.extractall(path=self.target_dir)
Example #16
ENV.set("QMAKESPEC", config.get("qt-makespec", "win32-msvc2013"))

#: x64 or x86
short_arch = "x64" if config["architecture"] == "x86_64" else "x86"
#: 64 or 32
nbits = "64" if config["architecture"] == "x86_64" else "32"

superrepo = os.path.join("build", "modorganizer_super")
if not os.path.isdir(superrepo):
    os_utils.ensureDirExists(superrepo)
    with os_utils.Chdir(superrepo):
        os_utils.cmd([EXECUTABLES["git"], "init"], show_output=True, critical=True)

ymlvars = {"nbits": nbits, "script_dir": script_dir}
prerequisites = YAMLConfig("prerequisites.yml", variables=ymlvars).cfg
with log.info("Downloading prerequisites..."):
    dlPackagesIn(prerequisites)

# Copied from Unimake.
projs = [
    ("modorganizer-archive", "archive", "master", ["7zip", "Qt5"]),
    ("modorganizer-uibase", "uibase", "new_vfs_library", ["Qt5", "boost"]),
    ("modorganizer-lootcli", "lootcli", "master", ["LootApi", "boost"]),
    ("modorganizer-esptk", "esptk", "master", ["boost"]),
    ("modorganizer-bsatk", "bsatk", "master", ["zlib"]),
    ("modorganizer-nxmhandler", "nxmhandler", "master", ["Qt5"]),
    ("modorganizer-helper", "helper", "master", ["Qt5"]),
    (
        "modorganizer-game_gamebryo",
        "game_gamebryo",
        "new_vfs_library",
Example #17
    def build(self):
        definition = {}
        with open(self.filename, 'r') as r:
            definition = yaml.safe_load(r)['enum']

        if 'auto-value' in definition:
            autoval = definition['auto-value']
            i = autoval.get('start', 0)
            for k in definition['values'].keys():
                if definition['values'][k].get('auto', True):
                    definition['values'][k]['value'] = 1 << i if definition.get('flags', False) else i
                    i += 1

        flags = False
        if 'flags' in definition and definition['flags']:
            flags = True
            definition['tests'] = definition.get('tests', {})
            definition['tests']['unique'] = definition['tests'].get('unique', True)
            definition['tests']['single-bit-only'] = definition['tests'].get('single-bit-only', True)

        default = definition.get('default', 0)
        for k, vpak in definition['values'].items():
            val = self._get_value_for(vpak)
            if self._get_for(vpak, 'default', False):
                if flags:
                    default |= val
                else:
                    default = val

        if flags or 'tests' in definition:
            with log.info('Testing %s...', definition['name']):
                tests = definition.get('tests', {})
                if 'increment' in tests:
                    incrdef = tests['increment']
                    start = incrdef.get('start', 0)
                    stop = incrdef.get('stop', len(definition['values']))

                    vals = []
                    for k, vpak in definition['values'].items():
                        vals += [self._get_value_for(vpak)]

                    for i in range(start, stop):
                        if i not in vals:
                            log.error('increment: Missing value %d!', i)
                if 'unique' in tests and tests['unique']:
                    vals = {}
                    for k, vpak in definition['values'].items():
                        val = self._get_value_for(vpak)
                        if val in vals:
                            log.error('unique: Entry %s is not using a unique value!', k)
                        vals[val] = True
                if flags:
                    if 'single-bit-only' in tests and tests['single-bit-only']:
                        for k, vpak in definition['values'].items():
                            val = self._get_value_for(vpak)
                            c = 0
                            while val > 0:
                                c += val & 1
                                val >>= 1
                                if c > 1:
                                    log.error('single-bit-only: Entry %s has too many bits!', k)
                                    break
        definition['default'] = default
        os_utils.ensureDirExists(os.path.dirname(self.target), noisy=True)
        with open(self.target, 'w') as w:
            self.writer.write(w, definition)
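A sketch of the definition this build() consumes after yaml.safe_load(r)['enum'], using only keys referenced above; the names and values are illustrative:

definition = {
    'name': 'ExampleFlags',         # used by the test log message
    'flags': True,                  # enables bit-shifted auto-values and flag tests
    'auto-value': {'start': 0},
    'values': {
        'FOO': {'auto': True, 'default': True},  # receives value 1 << 0
        'BAR': {'auto': True},                   # receives value 1 << 1
    },
    # When flags is set, tests defaults to {'unique': True, 'single-bit-only': True}.
}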
Example #18
import os

script_dir = os.path.dirname(__file__)

from buildtools import log
from buildtools.buildsystem.visualstudio import VisualStudio2015Solution, ProjectType

target = os.path.join(script_dir, '..', 'NMM', 'NexusClient.sln')
csprojfile = os.path.join('..', 'NexusClientCli', 'NexusClientCLI',
                          'NexusClientCLI.csproj')

sln = VisualStudio2015Solution()
sln.LoadFromFile(target)
changed = False
if 'NexusClientCli' not in sln.projectsByName:
    proj = sln.AddProject('NexusClientCli', ProjectType.CSHARP_PROJECT,
                          csprojfile)
    log.info('Added %s (%s) to %s', proj.name, proj.guid, target)
    changed = True
else:
    proj = sln.projectsByName['NexusClientCli']
    log.info('Project %s (%s) already exists in %s', proj.name, proj.guid,
             target)
    if proj.projectfile != csprojfile:
        oprojfile = proj.projectfile
        proj.projectfile = csprojfile
        log.info('Fixed CSPROJ location: %s -> %s', oprojfile, csprojfile)
        changed = True
if changed:
    sln.SaveToFile(target + '.patched')
Example #19
    def build(self):
        gitmodules = {}
        with open(self.gitmodulesfile, 'r') as tomlf:
            smid = None
            for line in tomlf:
                line = line.strip()
                m = REG_SUBMODULE_SECTION.match(line)
                if m is not None:
                    smid = m.group(1).strip()
                    gitmodules[smid] = {}
                if '=' in line:
                    k, v = line.split('=', 1)
                    gitmodules[smid][k.strip()] = v.strip()
        gitconfig = {}
        with open(self.gitconfigfile, 'r') as tomlf:
            smid = None
            for line in tomlf:
                line = line.strip()
                #print(line)
                m = REG_SUBMODULE_SECTION.match(line)
                if m is not None:
                    smid = m.group(1).strip()
                    gitconfig[smid] = {}
                if smid is not None and '=' in line:
                    #print(line)
                    k, v = line.split('=', 1)
                    gitconfig[smid][k.strip()] = v.strip()
        '''
        with open(self.gitmodulesfile + '.yml', 'w') as f:
            yaml.dump(gitmodules, f, default_flow_style=False)
        with open('.gitconfig.yml', 'w') as f:
            yaml.dump(gitconfig, f, default_flow_style=False)
        '''
        for repoID, repoconf in gitconfig.items():
            if repoID not in gitmodules.keys():
                with log.warn('Submodule %s is present in .git/config but not .gitmodules!', repoID):
                    pathspec = repoconf.get('path', repoID)
                    path = os.path.abspath(pathspec)
                    tag = repoconf.get('tag', None)
                    branch = repoconf.get('branch', 'HEAD' if tag is None else None)
                    log.info('path = %s', pathspec)
        for repoID, repoconf in gitmodules.items():
            if repoID not in gitconfig.keys():
                with log.warn('Submodule %s is present in .gitmodules but not .git/config!', repoID):
                    pathspec = repoconf.get('path', repoID)
                    path = os.path.abspath(pathspec)
                    tag = repoconf.get('tag', None)
                    branch = repoconf.get('branch', 'HEAD' if tag is None else None)
                    opts = []
                    if branch != 'HEAD':
                        opts += ['-b', branch]
                    log.info('path = %s', pathspec)
                    if os.path.isdir(path):
                        log.warn('Removing existing %s directory.', path)
                        shutil.rmtree(path)
                    cmd = ['git', 'submodule', 'add']+opts+['-f', '--name', repoID, '--', repoconf.get('url'), pathspec]
                    os_utils.cmd(cmd, critical=True, echo=self.should_echo_commands(), show_output=True)
                    #log.error('Would exec: %s', ' '.join(cmd))

        for repoID, repoconf in gitmodules.items():
            with log.info('Checking %s...', repoID):
                pathspec = repoconf.get('path', repoID)
                path = os.path.abspath(pathspec)
                tag = repoconf.get('tag', None)
                branch = repoconf.get('branch', 'HEAD' if tag is None else None)
                if os.path.isdir(path):
                    desired_commit = ''
                    cmdline = ['git', 'ls-tree', Git.GetBranch(), pathspec]
                    stdout, stderr = os_utils.cmd_output(cmdline, echo=self.should_echo_commands(), critical=True)
                    skip_this = False
                    for line in (stdout+stderr).decode('utf-8').splitlines():
                        if line.startswith('error:') or line.startswith('fatal:'):
                            log.critical(line)
                            raise error.SubprocessThrewError(cmdline, line)
                        line, _path = line.strip().split('\t')
                        _, _, desired_commit = line.split(' ')
                    if not skip_this:
                        with os_utils.Chdir(path, quiet=not self.should_echo_commands()):
                            cur_commit = Git.GetCommit(short=False, quiet=not self.should_echo_commands())
                            #log.info(desired_commit)
                            #log.info(cur_commit)
                            if cur_commit == desired_commit:
                                log.info('Commits are synced, skipping.')
                                continue

                repo = GitRepository(path, origin_uri=repoconf['url'], submodule=True)
                if repo.CheckForUpdates(branch=branch, quiet=False):
                    if os.path.isdir(path):
                        os_utils.cmd(['git', 'submodule', 'sync', '--', pathspec], critical=True, echo=self.should_echo_commands(), show_output=True)
                    os_utils.cmd(['git', 'submodule', 'update', '--init', '--recursive', pathspec], critical=True, echo=self.should_echo_commands(), show_output=True)
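REG_SUBMODULE_SECTION is defined elsewhere. Since each line is stripped before matching and group(1) is used as the submodule name, a plausible definition is:

import re

# Assumed pattern for INI-style headers such as: [submodule "path/to/module"]
REG_SUBMODULE_SECTION = re.compile(r'^\[submodule "([^"]+)"\]$')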
Example #20
import os, sys, tempfile

script_dir = os.path.dirname(os.path.realpath(__file__))
sys.path.append(os.path.join(script_dir, 'buildtools'))

from buildtools import log, Config, Properties, replace_vars
from buildtools.wrapper import Ant

DefaultConfig = {
    'ant': {
        'properties': {
            'files': ['build.properties'],
            'keys': {},
            'secret-keys': [],
            'outfile': '/tmp/jenkins-%%JOB_NAME%%.properties'
        }
    }
}

config = Config('build-eclipse.yml', DefaultConfig)

outfile = replace_vars(
    config.get('outfile', '/tmp/jenkins-%%JOB_NAME%%.properties'), os.environ)
with log.info('Building %s...', outfile):
    properties = Properties()
    properties.properties = config.get('ant.properties.keys', {})
    for filename in config.get('ant.properties.files', []):
        filename = replace_vars(filename, os.environ)
        properties.Load(filename, expand_vars=os.environ)
    properties.Save(outfile)
Example #21
def main():
    argp = argparse.ArgumentParser(description='Generation script for ss13-vox.')
    #argp.add_argument('--codebase', choices=['vg', 'tg'], default='vg', help='Which codebase to generate for. (Affects output code and paths.)')
    argp.add_argument('--threads', '-j', type=int, default=multiprocessing.cpu_count(), help='How many threads to use in ffmpeg.')
    #argp.add_argument('phrasefiles', nargs='+', type=str, help='A list of phrase files.')
    args = argp.parse_args()

    if not os.path.isdir('tmp'):
        os.makedirs('tmp')

    DIST_DIR = 'dist'
    PREEX_SOUND = 'sound/vox/{ID}.wav'
    NUVOX_SOUND = 'sound/vox_{SEX}/{ID}.wav'
    voices = []
    vox_sounds_path = ''
    templatefile = ''

    config = BaseConfig()
    config.cfg = YAMLConfig('config.yml')
    pathcfg = BaseConfig()
    pathcfg.cfg = YAMLConfig('paths.yml').cfg[config.get('codebase', 'vg')]

    PREEX_SOUND = pathcfg.get('sound.old-vox', PREEX_SOUND)
    NUVOX_SOUND = pathcfg.get('sound.new-vox', NUVOX_SOUND)

    voice_assignments = {}
    all_voices = []
    default_voice: Voice = VoiceRegistry.Get(USSLTFemale.ID)
    sfx_voice: SFXVoice = SFXVoice()
    configured_voices: Dict[str, dict] = {}
    for sexID, voiceid in config.get('voices', {'fem': USSLTFemale.ID}).items():
        voice = VoiceRegistry.Get(voiceid)
        assert sexID != ''
        voice.assigned_sex = sexID
        if sexID in ('fem', 'mas'):
            sex = EVoiceSex(sexID)
            assert voice.SEX == sex
            voices += [voice]
        elif sexID == 'default':
            default_voice = voice
        voice_assignments[voice.assigned_sex] = []
        all_voices += [voice]
        configured_voices[sexID] = voice.serialize()

    voice_assignments[sfx_voice.assigned_sex] = []
    all_voices += [sfx_voice]
    configured_voices[sfx_voice.assigned_sex] = sfx_voice.serialize()

    vox_sounds_path = os.path.join(DIST_DIR, pathcfg.get('vox_sounds.path'))
    templatefile = pathcfg.get('vox_sounds.template')
    vox_data_path = os.path.join(DIST_DIR, pathcfg.get('vox_data'))

    DATA_DIR = os.path.join(DIST_DIR, 'data')
    os_utils.ensureDirExists(DATA_DIR)
    with log.info('Parsing lexicon...'):
        lexicon = ParseLexiconText('lexicon.txt')

    phrases = []
    phrasesByID = {}
    broked = False
    for filename in config.get('phrasefiles', ['announcements.txt', 'voxwords.txt']):
        for p in ParsePhraseListFrom(filename):
            if p.id in phrasesByID:
                duplicated = phrasesByID[p.id]
                log.critical('Duplicate phrase with ID %s in file %s on line %d! First instance in file %s on line %d.', p.id, p.deffile, p.defline, duplicated.deffile, duplicated.defline)
                broked = True
                continue
            phrases += [p]
            phrasesByID[p.id] = p
        if broked:
            sys.exit(1)

    soundsToKeep = set()
    for sound in OTHERSOUNDS:
        soundsToKeep.add(os.path.join(DIST_DIR, sound + '.ogg'))

    phrases.sort(key=lambda x: x.id)

    overrides = config.get('overrides', {})
    for phrase in phrases:
        if phrase.id in overrides:
            phrase.fromOverrides(overrides.get(phrase.id))
        phrase_voices = list(voices)
        # If it has a path, it's being manually specified.
        if '/' in phrase.id:
            phrase.filename = phrase.id + '.ogg'
            phrase_voices = [default_voice]
            soundsToKeep.add(os.path.abspath(os.path.join(DIST_DIR, phrase.filename)))
        else:
            phrase.filename = ''+NUVOX_SOUND
            if phrase.hasFlag(EPhraseFlags.OLD_VOX):
                phrase_voices = [default_voice]
                phrase.filename = PREEX_SOUND.format(ID=phrase.id)
                for voice in ['fem', 'mas']:
                    phrase.files[voice] = FileData()
                    phrase.files[voice].filename = phrase.filename
                    phrase.files[voice].checksum = ''
                    phrase.files[voice].duration = phrase.override_duration or -1
                    phrase.files[voice].size     = phrase.override_size or -1
                    #voice_assignments[voice].append(phrase)
                soundsToKeep.add(os.path.abspath(os.path.join(DIST_DIR, phrase.filename)))
                continue

        if phrase.hasFlag(EPhraseFlags.SFX):
            phrase_voices = [sfx_voice]

        if not phrase.hasFlag(EPhraseFlags.OLD_VOX):
            log.info('%s - %r', phrase.id, [x.assigned_sex for x in phrase_voices])
            for v in phrase_voices:
                voice_assignments[v.assigned_sex].append(phrase)
                #phrase.files[v.assigned_sex] = fd
    #sys.exit(1)
    for voice in all_voices:
        print(voice.ID, voice.assigned_sex)
        DumpLexiconScript(voice.FESTIVAL_VOICE_ID, lexicon.values(), 'tmp/VOXdict.lisp')
        for phrase in voice_assignments[voice.assigned_sex]:
            GenerateForWord(phrase, voice, soundsToKeep, args)
            sexes = set()
            for vk, fd in phrase.files.items():
                soundsToKeep.add(os.path.abspath(os.path.join(DIST_DIR, fd.filename)))

    jenv = jinja2.Environment(loader=jinja2.FileSystemLoader(['./templates']))
    jenv.add_extension('jinja2.ext.do') # {% do ... %}
    templ = jenv.get_template(templatefile)
    with log.info('Writing sound list to %s...', vox_sounds_path):
        os_utils.ensureDirExists(os.path.dirname(vox_sounds_path))
        assetcache = {}
        sound2id = {}
        with open(vox_sounds_path, 'w') as f:
            sexes = {
                'fem': [],
                'mas': [],
                'default': [],
                #'sfx': [],
            }
            for p in phrases:
                for k in p.files.keys():
                    assetcache[p.getAssetKey(k)] = p.files[k].filename
                    sound2id[p.files[k].filename] = p.getAssetKey(k)
                if p.hasFlag(EPhraseFlags.NOT_VOX):
                    continue
                for k in p.files.keys():
                    if p.hasFlag(EPhraseFlags.SFX):
                        for sid in ('fem', 'mas'):
                            if p not in sexes[sid]:
                                sexes[sid].append(p)
                    else:
                        sexes[k].append(p)
            f.write(templ.render(
                InitClass=InitClass,
                SEXES=sexes,
                ASSETCACHE=assetcache,
                SOUND2ID=sound2id,
                PHRASES=[p for p in phrases if not p.hasFlag(EPhraseFlags.NOT_VOX)]))
    soundsToKeep.add(os.path.abspath(vox_sounds_path))

    os_utils.ensureDirExists(DATA_DIR)
    with open(os.path.join(DATA_DIR, 'vox_data.json'), 'w') as f:
        data = {
            'version': 2,
            'compiled': time.time(),
            'voices': configured_voices,
            'words': collections.OrderedDict({w.id: w.serialize() for w in phrases if '/' not in w.id}),
        }
        json.dump(data, f, indent=2)
    soundsToKeep.add(os.path.abspath(os.path.join(DATA_DIR, 'vox_data.json')))

    with open('tmp/written.txt', 'w') as f:
        for filename in sorted(soundsToKeep):
            f.write(f'{filename}\n')

    for root, _, files in os.walk(DIST_DIR, topdown=False):
        for name in files:
            filename = os.path.abspath(os.path.join(root, name))
            if filename not in soundsToKeep:
                log.warning('Removing {0} (no longer defined)'.format(filename))
                os.remove(filename)
Example #22
def prepare_nmm(context):
    sln = VisualStudio2015Solution()
    sln.LoadFromFile(os.path.join(build_path, "Nexus-Mod-Manager", 'NexusClient.sln'))
    ncc_csproj = os.path.join(build_path, 'NexusClientCLI', 'NexusClientCLI', 'NexusClientCLI.csproj')
    if not os.path.isfile(ncc_csproj):
        log.critical('NOT FOUND: %s', ncc_csproj)
    else:
        log.info('FOUND: %s', ncc_csproj)
    changed = False
    projfile = VS2015Project()
    projfile.LoadFromFile(ncc_csproj)
    projguid = projfile.PropertyGroups[0].element.find('ProjectGuid').text
    log.info('ProjectGuid = %s', projguid)
    if "NexusClientCli" not in sln.projectsByName:
        newproj = sln.AddProject('NexusClientCli', ProjectType.CSHARP_PROJECT, ncc_csproj, guid=projguid)
        log.info('Adding project %s (%s) to NexusClient.sln', newproj.name, newproj.guid)
        changed = True
    else:
        newproj = sln.projectsByName['NexusClientCli']
        log.info('Project %s (%s) already exists in NexusClient.sln', newproj.name, newproj.guid)
        if newproj.projectfile != ncc_csproj:
            log.info('Changing projectfile: %s -> %s', newproj.projectfile, ncc_csproj)
            newproj.projectfile = ncc_csproj
            changed = True
    if changed:
        log.info('Writing NexusClientCli.sln')
        sln.SaveToFile(os.path.relpath(os.path.join(build_path, "Nexus-Mod-Manager", 'NexusClientCli.sln'))) # So we don't get conflicts when pulling
        return True
Example #23
import os

script_dir = os.path.dirname(__file__)

from buildtools import log
from buildtools.buildsystem.visualstudio import VisualStudio2015Solution, ProjectType

target = os.path.join(script_dir, '..', 'NMM', 'NexusClient.sln')
csprojfile = os.path.join('..', 'NexusClientCli', 'NexusClientCLI', 'NexusClientCLI.csproj')

sln = VisualStudio2015Solution()
sln.LoadFromFile(target)
changed = False
if 'NexusClientCli' not in sln.projectsByName:
    proj = sln.AddProject('NexusClientCli', ProjectType.CSHARP_PROJECT, csprojfile)
    log.info('Added %s (%s) to %s', proj.name, proj.guid, target)
    changed = True
else:
    proj = sln.projectsByName['NexusClientCli']
    log.info('Project %s (%s) already exists in %s', proj.name, proj.guid, target)
    if proj.projectfile != csprojfile:
        oprojfile = proj.projectfile
        proj.projectfile = csprojfile
        log.info('Fixed CSPROJ location: %s -> %s', oprojfile, csprojfile)
        changed = True
if changed:
    sln.SaveToFile(target + '.patched')
Example #24
def GenerateForWord(phrase: Phrase, voice: Voice, writtenfiles: set, args: Optional[argparse.Namespace] = None):
    global PHRASELENGTHS, OLD_SFX, KNOWN_PHONEMES, OTHERSOUNDS
    my_phonemes = {}
    if phrase.hasFlag(EPhraseFlags.OLD_VOX):
        log.info('Skipping %s.ogg (Marked as OLD_VOX)', phrase.id)
        return
    if phrase.hasFlag(EPhraseFlags.NOT_VOX):
        OTHERSOUNDS += [phrase.id]

    if phrase.parsed_phrase is not None:
        for _word in phrase.parsed_phrase:
            _word = _word.lower()
            if _word in KNOWN_PHONEMES:
                my_phonemes[_word] = KNOWN_PHONEMES[_word].toLisp().replace('\n', '')


    filename = phrase.filename.format(ID=phrase.id, SEX=voice.assigned_sex)
    sox_args = voice.genSoxArgs(args)

    md5 = json.dumps(phrase.serialize())
    md5 += '\n'.join(my_phonemes.values())
    md5 += ''.join(sox_args) + PRE_SOX_ARGS + ''.join(RECOMPRESS_ARGS)
    md5 += voice.ID
    md5 += filename

    #filename = os.path.join('sound', 'vox_fem', phrase.id + '.ogg')
    #if '/' in phrase.id:
    #    filename = os.path.join(phrase.id + '.ogg')
    oggfile = os.path.abspath(os.path.join('dist', filename))
    cachebase = os.path.abspath(os.path.join('cache', phrase.id.replace(os.sep, '_').replace('.', '')))
    checkfile = cachebase + voice.ID + '.dat'
    cachefile = cachebase + voice.ID + '.json'

    fdata = FileData()
    fdata.voice = voice.ID
    fdata.filename = os.path.relpath(oggfile, 'dist')

    def commitWritten():
        nonlocal phrase, voice, oggfile, writtenfiles, fdata
        if voice.ID == SFXVoice.ID:
            # Both masculine and feminine voicepacks link to SFX.
            for sex in ['fem', 'mas']:
                phrase.files[sex] = fdata
        else:
            phrase.files[voice.assigned_sex] = fdata
        writtenfiles.add(os.path.abspath(oggfile))

    parent = os.path.dirname(oggfile)
    if not os.path.isdir(parent):
        os.makedirs(parent)

    parent = os.path.dirname(cachefile)
    if not os.path.isdir(parent):
        os.makedirs(parent)

    if os.path.isfile(oggfile) and os.path.isfile(cachefile):
        old_md5 = ''
        if os.path.isfile(checkfile):
            with open(checkfile, 'r') as md5f:
                old_md5 = md5f.read()
        if old_md5 == md5:
            cachedata = {}
            with open(cachefile, 'r') as cachef:
                cachedata = json.load(cachef)
            fdata.deserialize(cachedata)

            log.info('Skipping {0} for {1} (exists)'.format(filename, voice.ID))
            commitWritten()
            return
    log.info('Generating {0} for {1} ({2!r})'.format(filename, voice.ID, phrase.phrase))
    text2wave = None
    if phrase.hasFlag(EPhraseFlags.SFX):
        text2wave = 'ffmpeg -i '+phrase.phrase+' tmp/VOX-word.wav'
    else:
        with open('tmp/VOX-word.txt', 'w') as wf:
            wf.write(phrase.phrase)

        text2wave = 'text2wave tmp/VOX-word.txt -o tmp/VOX-word.wav'
        if os.path.isfile('tmp/VOXdict.lisp'):
            text2wave = 'text2wave -eval tmp/VOXdict.lisp tmp/VOX-word.txt -o tmp/VOX-word.wav'
    with open(checkfile, 'w') as wf:
        wf.write(md5)
    for fn in ('tmp/VOX-word.wav', 'tmp/VOX-soxpre-word.wav', 'tmp/VOX-sox-word.wav', 'tmp/VOX-encoded.ogg'):
        if os.path.isfile(fn):
            os.remove(fn)

    cmds = []
    cmds += [(text2wave.split(' '), 'tmp/VOX-word.wav')]
    if not phrase.hasFlag(EPhraseFlags.NO_PROCESS) or not phrase.hasFlag(EPhraseFlags.NO_TRIM):
        cmds += [(['sox', 'tmp/VOX-word.wav', 'tmp/VOX-soxpre-word.wav'] + PRE_SOX_ARGS.split(' '), 'tmp/VOX-soxpre-word.wav')]
    if not phrase.hasFlag(EPhraseFlags.NO_PROCESS):
        cmds += [(['sox', cmds[-1][1], 'tmp/VOX-sox-word.wav'] + sox_args, 'tmp/VOX-sox-word.wav')]
    cmds += [(['oggenc', cmds[-1][1], '-o', 'tmp/VOX-encoded.ogg'], 'tmp/VOX-encoded.ogg')]
    cmds += [(['ffmpeg', '-i', 'tmp/VOX-encoded.ogg'] + RECOMPRESS_ARGS + ['-threads', str(args.threads)] + [oggfile], oggfile)]
    for command_spec in cmds:
        (command, cfn) = command_spec
        with os_utils.TimeExecution(command[0]):
            os_utils.cmd(command, echo=True, critical=True, show_output=command[0] in ('text2wave',))

    command = ['ffprobe', '-v', 'quiet', '-print_format', 'json', '-show_format', '-show_streams', oggfile]
    with os_utils.TimeExecution(command[0]):
        captured = os_utils.cmd_out(command, echo=False, critical=True)
        fdata.fromJSON(json.loads(captured))
        fdata.checksum = md5sum(oggfile)

    for command_spec in cmds:
        (command, cfn) = command_spec
        if not os.path.isfile(cfn):
            log.error("File '{0}' doesn't exist, command '{1}' probably failed!".format(cfn, command))
            sys.exit(1)

    with open(cachefile, 'w') as f:
        json.dump(fdata.serialize(), f)

    commitWritten()
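For reference, ffprobe's -print_format json output nests its fields under 'format'; fdata.fromJSON() presumably reads something like the following (which fields it actually keeps is an assumption):

info = json.loads(captured)
duration = float(info['format']['duration'])  # ffprobe reports this as a string
size = int(info['format']['size'])            # total size in bytes, also a string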