def build(self):
    os_utils.ensureDirExists(os.path.dirname(self.target))
    os_utils.cmd([self.svgo_cmd, '-i', self.source, '-o', self.target] + self.svgo_opts,
                 echo=self.should_echo_commands(), show_output=True, critical=True)
def build(self):
    os_utils.ensureDirExists(os.path.dirname(self.target), noisy=False)
    os_utils.single_copy(self.files[0], self.target, verbose=False, as_file=True)
    self.touch(self.target)
def build(self):
    sourcefilerel, reloutfile, absoutfile = self.calcFilename()
    sourcefilerel = sourcefilerel.replace(os.sep, '/')
    outfile = reloutfile.replace(os.sep, '/')
    manifest_data = {sourcefilerel: outfile}
    if os.path.isfile(self.manifest):
        with open(self.manifest, 'r') as f:
            manifest_data = json.load(f)
        if sourcefilerel in manifest_data.keys():
            oldfilename = os.path.normpath(os.path.join(self.destdir, manifest_data[sourcefilerel]))
            #log.info(oldfilename)
            #log.info(absoutfile)
            if oldfilename != absoutfile:
                self.removeFile(oldfilename)
    os_utils.ensureDirExists(os.path.dirname(absoutfile), noisy=True)
    os_utils.single_copy(self.source, absoutfile, verbose=False)
    manifest_data[sourcefilerel] = outfile
    os_utils.ensureDirExists(os.path.dirname(self.manifest), noisy=True)
    #print(self.manifest)
    with open(self.manifest, 'w') as f:
        json.dump(manifest_data, f, indent=2)
    self.touch(absoutfile)
    self.touch(self.target)
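# Sketch of the manifest JSON the method above maintains (paths are
# illustrative, not from the source): keys are slash-normalized source paths,
# values are the current output paths, which may change between builds
# (e.g. cache-busted names), letting the stale output be found and removed.
#
#   {
#     "scss/main.scss": "css/main.abc123.css"
#   }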
def _cmd_create(args):
    data = {
        'id': args.ID,
        'playlist': 'lobby',
        'template': 'main'
    }
    pooldir = os.path.join('lobbyscreens', args.ID)
    os_utils.ensureDirExists(pooldir, noisy=True)
    os_utils.ensureDirExists(os.path.join(pooldir, 'files'), noisy=True)
    written = []
    with open(os.path.join(pooldir, '__POOL__.yml'), 'w') as f:
        yaml.dump(data, f, default_flow_style=False)
        log.info('Wrote %s.', f.name)
        written += [f.name]
    with open('.gitignore', 'w') as f:
        f.write('/parsed.yml\n')
        written += [f.name]
    with os_utils.Chdir(pooldir):
        if not os.path.isdir('.git'):
            os_utils.cmd(['git', 'init'], echo=True, show_output=True, critical=True)
        os_utils.cmd(['git', 'lfs', 'install'], echo=True, show_output=True, critical=True)
        os_utils.cmd(['git', 'lfs', 'track', '*.png', '*.gif', '*.jpg', '*.webm', '*.webp'],
                     echo=True, show_output=True, critical=True)
        os_utils.cmd(['git', 'add', '.gitattributes'] + written, echo=True, show_output=True, critical=True)
def build(self):
    os_utils.ensureDirExists(os.path.dirname(self.target))
    stdout, stderr = os_utils.cmd_output([self.js2coffee_path] + self.files + self.js2coffee_opts,
                                         echo=self.should_echo_commands(), critical=True)
    if stderr.strip() != '':
        log.error(stderr)
    with codecs.open(self.target, 'w', encoding='utf-8-sig') as outf:
        outf.write(stdout)
def getCoffeeMapFile(self):
    os_utils.ensureDirExists(os.path.join('tmp', os.path.dirname(self.target)))
    coffeefile = os.path.join('tmp', self.target)
    coffeefile, _ = os.path.splitext(coffeefile)
    coffeefile += '.yml'
    coffeefile = os.path.abspath(coffeefile)
    return coffeefile
def build(self, clean=False):
    with Chdir(self.repo_dir):
        with log.info('Checking repo for updates...'):
            self.repo.quiet = False
            self.repo.CheckForUpdates(remote='origin', branch=self.branch, quiet=False)
            self.repo.Pull(remote='origin', branch=self.branch, cleanup=clean)
            self.repo.UpdateSubmodules()
        if len(self.patches) > 0:
            os_utils.ensureDirExists(self.patch_dir, quiet=False)
            with log.info('Applying patches...'):
                for patchID, patchURL in self.patches.items():
                    patchPath = os.path.join(self.patch_dir, patchID + '.patch')
                    http.DownloadFile(patchURL, patchPath)
                    if not cmd(['git', 'apply', '--check', patchPath], echo=True, critical=True):
                        sys.exit(1)
                    if not cmd(['git', 'apply', patchPath], echo=True, critical=True):
                        sys.exit(1)
        with log.info('Running hphpize...'):
            if not cmd([cfg.get('bin.hphpize', 'hphpize')], critical=True, echo=True):
                sys.exit(1)
        with log.info('Configuring...'):
            if not cmake.run(cfg.get('bin.cmake', 'cmake')):
                sys.exit(1)
        with log.info('Compiling...'):
            if not cmd([cfg.get('bin.make', 'make')] + MAKE_FLAGS, critical=True, echo=True):
                sys.exit(1)
def build(self):
    os_utils.ensureDirExists(os.path.dirname(self.target))
    # BUGFIX: Coffeescript sometimes doesn't want to overwrite shit. - N3X
    if os.path.isfile(self.target):
        os.remove(self.target)
    coffeefile = self.files[0]
    if len(self.files) > 1:
        # Multiple inputs: concatenate into one intermediate .coffee file
        # before compiling. (Single inputs are compiled directly, so the
        # source is never deleted and re-read here.)
        coffeefile = self.getCoffeeFile()
        if os.path.isfile(coffeefile):
            os.remove(coffeefile)
        with codecs.open(coffeefile, 'w', encoding='utf-8-sig') as outf:
            tq = tqdm(self.files, desc='Concatenating...', leave=False)
            for infilename in tq:
                #outf.write('\n`// FILE: {}`\n'.format(infilename))
                with codecs.open(infilename, 'r', encoding='utf-8-sig') as inf:
                    for line in inf:
                        outf.write(line.rstrip() + "\n")
                #outf.write('\n`//# sourceURL={}\n`\n'.format(infilename))
            tq.close()
    coffeefile_basename, _ = os.path.splitext(os.path.basename(coffeefile))
    os_utils.cmd([self.coffee_executable] + self.coffee_opts + ['-o', os.path.dirname(self.target), coffeefile],
                 critical=True, echo=self.should_echo_commands(), show_output=True)
    coffee_output_file = os.path.join(os.path.dirname(self.target), coffeefile_basename + '.js')
    if coffee_output_file != self.target:
        log.info('Renaming %s to %s...', coffee_output_file, self.target)
        os.rename(coffee_output_file, self.target)
def build(self):
    os_utils.ensureDirExists(os.path.dirname(self.destination), noisy=True)
    os_utils.single_copy(self.source, self.intermediate_filename, verbose=True)
    os_utils.cmd([self.webify, self.intermediate_filename], echo=True, critical=True)
def extract(self, archive_file_path, output_file_path, progress):
    def progress_func(pos, size):
        progress.value = int(pos * 100 / size)
    logging.info("Extracting {0}".format(self.__url))
    progress.value = 0
    progress.job = "Extracting"
    #output_file_path = u"\\\\?\\" + os.path.abspath(output_file_path)
    if os.path.isdir(output_file_path):
        shutil.rmtree(output_file_path, False)
    try:
        os.makedirs(output_file_path)
    except Exception:
        # doesn't matter if the directory already exists.
        pass
    with on_failure(lambda: shutil.rmtree(output_file_path)):
        filename, extension = os.path.splitext(self.__file_name)
        if extension == ".gz" or extension == ".tgz":
            archive_file = ProgressFile(archive_file_path, progress_func)
            with tarfile.open(fileobj=archive_file, mode='r:gz') as arch:
                arch.extractall(output_file_path)
            archive_file.close()
        elif extension == ".bz2":
            archive_file = ProgressFile(archive_file_path, progress_func)
            with tarfile.open(fileobj=archive_file, mode='r:bz2') as arch:
                arch.extractall(output_file_path)
            archive_file.close()
        elif extension == ".zip":
            archive_file = ProgressFile(archive_file_path, progress_func)
            with zipfile.ZipFile(archive_file) as arch:
                arch.extractall(output_file_path)
            archive_file.close()
        elif extension == ".7z":
            os_utils.ensureDirExists(output_file_path)
            proc = subprocess.Popen([config['paths']['7z'], "x", '-aoa',
                                     os_utils.cygpath(os.path.abspath(archive_file_path)),
                                     "-o{}".format(os_utils.cygpath(os.path.abspath(output_file_path)))])
            if proc.wait() != 0:
                return False
        elif extension in [".exe", ".msi"]:
            # installers need to be handled by the caller
            return True
        else:
            logging.error("unsupported file extension {0}".format(extension))
            return False
        for i in range(self.__tree_depth):
            sub_dirs = os.listdir(output_file_path)
            if len(sub_dirs) != 1:
                raise ValueError("unexpected archive structure,"
                                 " expected exactly one directory in {}".format(output_file_path))
            source_dir = os.path.join(output_file_path, sub_dirs[0])
            for src in os.listdir(source_dir):
                shutil.move(os.path.join(source_dir, src), output_file_path)
            shutil.rmtree(source_dir)
    return True
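# ProgressFile is referenced above but not defined in this snippet. A minimal
# sketch of such a wrapper (hypothetical, not the actual implementation): a
# binary file object that reports its read position to a callback, so that
# tarfile/zipfile reads drive the progress bar.
import io
import os

class ProgressFileSketch(io.FileIO):
    """Hypothetical: calls progress_func(pos, size) after every read."""

    def __init__(self, path, progress_func):
        super().__init__(path, 'r')  # FileIO is always binary
        self._progress_func = progress_func
        self._size = os.path.getsize(path)

    def read(self, size=-1):
        data = super().read(size)
        self._progress_func(self.tell(), self._size)
        return data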
def dlPackagesIn(pkgdefs, superrepo="build"): os_utils.ensureDirExists("download") for destination, retrievalData in pkgdefs.items(): rebuild = args.rebuild_all or destination in args.rebuild destination = os.path.join(superrepo, destination) dlType = retrievalData["type"] if dlType == "git": remote = retrievalData.get("remote", "origin") branch = retrievalData.get("branch", "master") commit = retrievalData.get("commit") submodules = retrievalData.get("submodules", False) submodules_remote = retrievalData.get("submodules_remote", False) tag = retrievalData.get("tag") if "uri" not in retrievalData: log.critical("uri not in def for %s", destination) git = GitRepository(destination, retrievalData["uri"], quiet=True, noisy_clone=True) with log.info("Checking for updates to %s...", destination): if rebuild or not os.path.isdir(destination): if rebuild or git.CheckForUpdates(remote, branch, tag=tag, commit=commit): log.info("Updates detecting, pulling...") git.Pull(remote, branch, tag=tag, commit=commit, cleanup=True) if submodules: if rebuild: with os_utils.Chdir(destination): os_utils.cmd( ["git", "submodule", "foreach", "--recursive", "git clean -dfx"], echo=True, show_output=True, critical=True, ) git.UpdateSubmodules(submodules_remote) elif dlType == "hg": remote = retrievalData.get("remote", "default") branch = retrievalData.get("branch", "master") commit = retrievalData.get("commit", retrievalData.get("tag")) if "uri" not in retrievalData: log.critical("uri not in def for %s", destination) hg = HgRepository(destination, retrievalData["uri"], quiet=True, noisy_clone=True) with log.info("Checking for updates to %s...", destination): if rebuild or not os.path.isdir(destination): if rebuild or hg.CheckForUpdates(remote, branch): log.info("Updates detecting, pulling...") hg.Pull(remote, branch, commit, cleanup=True) elif dlType == "http": url = retrievalData["url"] ext = retrievalData.get("ext", url[url.rfind(".") :]) filename = os.path.join( script_dir, "download", retrievalData.get("filename", hashlib.md5(url).hexdigest() + ext) ) if not os.path.isfile(filename): with log.info("Downloading %s...", url): http.DownloadFile(url, filename) if (rebuild or not os.path.isdir(destination)) and not retrievalData.get("download-only", False): if rebuild: os_utils.safe_rmtree(destination) os_utils.ensureDirExists(destination) with os_utils.Chdir(destination): os_utils.decompressFile(filename)
def build(self):
    os_utils.ensureDirExists(os.path.dirname(self.target))
    command_line = [self.convert_executable]
    command_line += [os.path.relpath(x, os.getcwd()) for x in self.files]
    command_line += [os.path.relpath(self.target, os.getcwd())]
    os_utils.cmd(command_line, critical=True, echo=self.should_echo_commands(), show_output=True)
def _write_targets(self):
    alltargets = set()
    for bt in self.alltargets:
        if bt.built:
            for targetfile in bt.provides():
                alltargets.add(targetfile)
    os_utils.ensureDirExists(os.path.dirname(self.all_targets_file))
    with open(self.all_targets_file, 'w', encoding='utf-8') as f:
        yaml.dump(list(alltargets), f)
def checkMTimes(self, inputs, targets, config=None):
    inputs = callLambda(inputs)
    for target in targets:
        if not os.path.isfile(target):
            log.debug('%s does not exist.', target)
            return True
    if config is not None:
        s = StringIO()
        yaml.dump(config, s)
        configHash = hashlib.md5(s.getvalue().encode('utf-8')).hexdigest()
        targetHash = hashlib.md5(';'.join(targets).encode('utf-8')).hexdigest()

        def writeHash():
            with open(configcachefile, 'w') as f:
                f.write(configHash)

        os_utils.ensureDirExists('.build')
        configcachefile = os.path.join('.build', targetHash)
        if not os.path.isfile(configcachefile):
            writeHash()
            log.debug('%s: Target cache doesn\'t exist.', self.name)
            return True
        oldConfigHash = ''
        with open(configcachefile, 'r') as f:
            oldConfigHash = f.readline().strip()
        if oldConfigHash != configHash:
            writeHash()
            log.debug('%s: Target config changed.', self.name)
            return True
    target_mtime = 0  # must be higher
    newest_target = None
    inputs_mtime = 0
    newest_input = None
    for infilename in targets:
        infilename = callLambda(infilename)
        if os.path.isfile(infilename):
            c_mtime = os.path.getmtime(infilename)
            # log.info("%d",input_mtime-target_mtime)
            if c_mtime > target_mtime:
                target_mtime = c_mtime
                newest_target = infilename
    for infilename in inputs:
        infilename = callLambda(infilename)
        if os.path.isfile(infilename):
            c_mtime = os.path.getmtime(infilename)
            # log.info("%d",input_mtime-target_mtime)
            if c_mtime > inputs_mtime:
                inputs_mtime = c_mtime
                newest_input = infilename
    if newest_input is None or target_mtime <= inputs_mtime:
        log.debug("%s is newer than %s by %ds!", newest_input, newest_target, inputs_mtime - target_mtime)
        return True
    else:
        log.debug("%s is older than %s by %ds!", newest_input, newest_target, target_mtime - inputs_mtime)
        return False
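# checkMTimes() answers "does this target need rebuilding?": True when any
# target file is missing, when the config hash cached under .build/ differs
# from the current config, or when the newest input is at least as new as the
# newest target. A hypothetical call site (filenames illustrative):
#
#   if self.checkMTimes(['src/app.scss'], ['htdocs/css/app.css'],
#                       config={'output_style': 'compressed'}):
#       self.build()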
def build(self):
    linebuf = ''
    nlines = 0
    nchars = 0
    lastbytecount = 0
    lastcheck = 0
    longest_line = 0
    os_utils.ensureDirExists(os.path.dirname(self.target))
    nbytes = os.path.getsize(self.subject)
    with codecs.open(self.subject, 'r', encoding=self.read_encoding) as inf:
        with codecs.open(self.target + '.out', 'w', encoding=self.write_encoding) as outf:
            progBar = tqdm.tqdm(total=nbytes, unit='B', leave=False) if self.display_progress else None
            outf.write(self.text)
            while True:
                block = inf.read(4096)
                # Normalize CRLF and bare CR line endings to LF.
                block = block.replace('\r\n', '\n')
                block = block.replace('\r', '\n')
                if not block:
                    # EOF: flush whatever remains in the line buffer.
                    outf.write(linebuf)
                    nlines += 1
                    charsInLine = len(linebuf)
                    if charsInLine > longest_line:
                        longest_line = charsInLine
                    break
                for c in block:
                    nchars += 1
                    if self.display_progress:
                        # if nchars % 10 == 1:
                        cms = utils.current_milli_time()
                        if cms - lastcheck >= 250:
                            progBar.set_postfix({'linebuf': len(linebuf), 'nlines': nlines})
                            progBar.update(nchars - lastbytecount)
                            lastcheck = cms
                            lastbytecount = nchars
                    linebuf += c
                    if c in '\r\n':
                        outf.write(linebuf)
                        nlines += 1
                        charsInLine = len(linebuf)
                        if charsInLine > longest_line:
                            longest_line = charsInLine
                        linebuf = ''
    if self.display_progress:
        progBar.close()
    with log.info('Completed.'):
        log.info('Lines.......: %d', nlines)
        log.info('Chars.......: %d', nchars)
        log.info('Longest line: %d chars', longest_line)
    shutil.move(self.target + '.out', self.target)
def build(self):
    os_utils.ensureDirExists(os.path.dirname(self.target))
    # Note: -z/-e are the legacy (pre-1.0) Inkscape CLI flags
    # (--without-gui / --export-png).
    os_utils.cmd([
        self.inkscape,
        '-z',
        '-e', self.target,
        '-h', str(self.height),
        '-w', str(self.width),
        self.files[0]
    ], critical=True, echo=self.should_echo_commands(), show_output=True)
def _cmd_collect(args=None):
    allpools = {}
    for pooldirname in os.listdir('lobbyscreens'):
        pooldir = os.path.join('lobbyscreens', pooldirname)
        data = None
        datafile = os.path.join(pooldir, '__POOL__.yml')
        if os.path.isfile(datafile):
            with open(datafile, 'r') as f:
                data = yaml.safe_load(f)
        if data is None:
            continue
        pool = Pool()
        pool.ID = pooldirname
        pool.deserialize(data)
        poolfilesdir = os.path.join(pooldir, 'files')
        for imagebasename in os.listdir(poolfilesdir):
            basename, ext = os.path.splitext(imagebasename)
            #print(basename, ext)
            if ext not in ('.jpg', '.png', '.gif', '.svg', '.webm', '.webp', '.mp4', '.ogv'):
                #print('  SKIPPED')
                continue
            anim = Animation()
            anim.ID = basename
            data = None
            # Sidecar metadata may be YAML, TOML, or JSON; later formats win.
            filedatapath = os.path.join(poolfilesdir, basename + '.yml')
            if os.path.isfile(filedatapath):
                with open(filedatapath, 'r') as f:
                    data = yaml.safe_load(f)
            filedatapath = os.path.join(poolfilesdir, basename + '.toml')
            if os.path.isfile(filedatapath):
                with open(filedatapath, 'r') as f:
                    data = toml.load(f)
            filedatapath = os.path.join(poolfilesdir, basename + '.json')
            if os.path.isfile(filedatapath):
                with open(filedatapath, 'r') as f:
                    data = json.load(f)
            if data is not None:
                anim.deserialize(data)
            anim.url = imagebasename
            fullpath = os.path.join(poolfilesdir, imagebasename)
            destfile = os.path.join('htdocs', 'img', 'lobby', pool.ID, anim.url)
            os_utils.ensureDirExists(os.path.dirname(destfile), noisy=False)
            os_utils.single_copy(fullpath, destfile, as_file=True, noisy=False)
            pool.add(anim)
        with open(os.path.join(pooldir, 'parsed.yml'), 'w') as f:
            yaml.dump(pool.serialize(suppress_id=True), f, default_flow_style=False)
        log.info('Found pool %r: %d animations', pool.ID, len(pool.animations))
        allpools[pool.ID] = pool.serialize()
    os_utils.ensureDirExists('data')
    with open('data/lobby.json', 'w') as f:
        json.dump(allpools, f, indent=2)
def build(self):
    os_utils.ensureDirExists(os.path.dirname(self.cached_dl))
    http.DownloadFile(self.url, self.cached_dl, log_after=True, print_status=True, log_before=True)
    os_utils.ensureDirExists(os.path.dirname(self.target))
    os_utils.single_copy(self.cached_dl, self.target, as_file=True, verbose=True)
    if not self.cache:
        os.remove(self.cached_dl)
def Load(self, filename, merge=False, defaults=None, variables={}, verbose=False):
    lh = NullIndenter()
    if self.actual_filename is None:
        self.actual_filename = filename if self.template_dir is None else os.path.join(self.template_dir, filename)
    if verbose:
        lh = log.info("Loading %s...", filename)
    with lh:
        if not self.skip_defaults and not os.path.isfile(filename):
            if defaults is None:
                if verbose:
                    log.error('Failed to load %s.', filename)
                return False
            else:
                if verbose:
                    log.warn('File not found, loading defaults.')
                ensureDirExists(os.path.dirname(filename))
                self.dump_to_file(filename, defaults)
        rendered = ''
        template: jinja2.Template = None
        #print(repr(self.template_dir))
        try:
            if self.template_dir is not None:
                template = self.environment.get_template(
                    os.path.basename(filename) if self.template_dir is None else filename)
            else:
                with open(filename) as f:
                    template = self.environment.from_string(f.read())
            rendered = template.render(variables)
        except jinja2.exceptions.TemplateNotFound as tnf:
            if verbose:
                log.warn('Jinja2 failed to load %s (TemplateNotFound). Failing over to plain string.', filename)
                log.exception(tnf)
            with codecs.open(filename, 'r', encoding=self.encoding) as f:
                rendered = f.read()
        #print(rendered)
        newcfg = self.load_from_string(rendered)
        if merge:
            self.cfg = dict_merge(self.cfg, newcfg)
        else:
            self.cfg = newcfg
    return True
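# Usage as seen elsewhere in this listing (see the YAMLConfig setup below):
# a missing file falls back to `defaults` (written to disk first), and the
# file body is rendered through Jinja2 with `variables` before parsing.
#
#   config = YAMLConfig("build.yml", config, variables={"nbits": "32"})
#   config.Load("user-config.yml", merge=True, defaults=userconfig)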
def build(self):
    sass_cmd = [self.sass_path]
    args = ['--no-color', '-q', '--stop-on-error', '-s', self.output_style]
    if self.source_map:
        args += ['--embed-sources', '--embed-source-map']
    for import_path in self.import_paths:
        args += ['-I', import_path]
    #os_utils.ensureDirExists(os.path.join('tmp', os.path.dirname(self.target)))
    os_utils.ensureDirExists(os.path.dirname(self.target))
    os_utils.cmd(sass_cmd + args + self.files + [self.target],
                 critical=True, echo=self.should_echo_commands(), show_output=True)
def mkproject(bm: BuildMaestro, project: str, depends: List[str] = []):
    with log.info('Configuring %s...', project):
        proj_dir = os.path.join('src', project)
        projin = bm.add(ReplaceTextTarget(
            os.path.join(proj_dir, f'{project}.csproj'),
            os.path.join(proj_dir, f'{project}.csproj.in'),
            replacements={
                re.escape('$(ONIPath)'): CONFIG.get('paths.oni').replace('\\', '\\\\')
            }))
        dll = os.path.join('src', project, 'bin', project + '.dll')
        csfiles = [
            f for f in os_utils.get_file_list(os.path.join(proj_dir, 'Source'),
                                              prefix=os.path.join(proj_dir, 'Source'))
            if f.endswith('.cs')
        ]
        csfiles.sort()
        for csfile in csfiles:
            log.info(csfile)
        csp = bm.add(MSBuildTarget(dll,
                                   os.path.join(proj_dir, f'{project}.sln'),
                                   files=csfiles,
                                   dependencies=[projin.target] + depends))
        csp.msb.properties['ONIPath'] = CONFIG.get('paths.oni')
        #csp.msb.configuration = 'Debug'
        deploydir = os.path.join(LOCALMODS, project)
        bm.add(CopyFileTarget(deploydir, dll, dependencies=[csp.target]))
        os_utils.ensureDirExists(deploydir, noisy=True)
        for basefilename in os.listdir(os.path.join('Mods', project)):
            filename = os.path.join('Mods', project, basefilename)
            _, ext = os.path.splitext(basefilename)
            if ext in ('.json', '.txt'):
                cf = bm.add(CopyFileTarget(os.path.join(deploydir, basefilename),
                                           filename,
                                           dependencies=[csp.target],
                                           verbose=True))
                log.info('Found config: %s', basefilename)
        return csp
def _updateCacheInfo(self) -> str:
    self.cache_dir = os.path.join(self.maestro.builddir, 'DownloadFileTarget.cache')
    fileid = hashlib.md5(self.urlchunks.path.encode('utf-8')).hexdigest()
    self.etagdir = os.path.join(self.cache_dir, self.urlchunks.hostname, fileid[0:2], fileid[2:4])
    #self.old_uri_id = hashlib.sha256(self.urlchunks.path.encode('utf-8')).hexdigest()+'.etags'
    uri_id = fileid[4:] + '.etags'
    self.cached_dl = os.path.join(self.etagdir, fileid[4:] + '.dat')
    #if os.path.isfile(os.path.join(etagdir, old_uri_id)):
    #    shutil.move(os.path.join(etagdir, old_uri_id), os.path.join(etagdir, uri_id))
    self.etagfile = os.path.join(self.etagdir, uri_id)
    os_utils.ensureDirExists(self.etagdir)
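# Illustration of the fan-out layout produced above, assuming
# builddir='build' and url='https://example.org/pkg/file.tar.gz'
# (md5('/pkg/file.tar.gz') abbreviated as ab cd ef...):
#
#   build/DownloadFileTarget.cache/example.org/ab/cd/ef....dat    (cached payload)
#   build/DownloadFileTarget.cache/example.org/ab/cd/ef....etags  (ETag bookkeeping)
#
# Splitting on the first two byte-pairs of the hash keeps any single cache
# directory from accumulating thousands of entries.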
def build(self):
    if self.sass_convert_path.endswith('.bat') or self.sass_convert_path.endswith('.BAT'):
        # Windows: invoke the Ruby script directly instead of the .bat shim.
        RUBYDIR = os.path.dirname(self.sass_convert_path)
        sass_cmd = [
            os.path.join(RUBYDIR, 'ruby.exe'),
            os.path.join(RUBYDIR, 'sass-convert')
        ]
    else:
        sass_cmd = [self.sass_convert_path]
    args = ['-F', 'css', '-T', 'scss', '-C']
    #os_utils.ensureDirExists(os.path.join('tmp', os.path.dirname(self.target)))
    os_utils.ensureDirExists(os.path.dirname(self.target))
    os_utils.cmd(sass_cmd + args + self.files + [self.target],
                 critical=True, echo=self.should_echo_commands(), show_output=True)
def main():
    argp = argparse.ArgumentParser()
    argp.add_argument('--go', action='store_true')
    args = argp.parse_args()
    files_to_proc = []
    for root, _, files in os.walk(IN_DIR):
        for bfn in files:
            fullpath = os.path.abspath(os.path.join(root, bfn))
            if bfn.endswith('.bak'):
                log.info('rm %s', fullpath)
                os.remove(fullpath)
            if bfn.endswith('.php'):
                files_to_proc += [fullpath]
    for filename in tqdm.tqdm(files_to_proc, desc='Moving files...', unit='file'):
        namespace = None
        outpath = None
        with open(filename, 'r') as f:
            for line in f:
                m = re.match(REG_NS, line)
                if m is not None:
                    namespace = m.group(1)
                    break
        if namespace is None:
            continue
        nschunks = namespace.split('\\')
        if nschunks[0] == '':
            nschunks = nschunks[1:]
        nschunks = nschunks[1:]
        nschunks += [os.path.basename(filename).replace('.class', '').replace('.interface', '')]
        outpath = os.path.abspath(os.path.join(OUT_DIR, *nschunks))
        if outpath == filename:
            continue
        cmd = [os_utils.which('git'), 'mv', os.path.relpath(filename), os.path.relpath(outpath)]
        if args.go:
            os_utils.ensureDirExists(os.path.dirname(outpath), noisy=True)
            os_utils.cmd([os_utils.which('git'), 'add', os.path.relpath(filename)], echo=True, show_output=True)
            os_utils.cmd(cmd, echo=True, critical=True)
        else:
            log.info(' '.join(cmd))
    os_utils.del_empty_dirs(IN_DIR, quiet=False)
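# REG_NS is referenced above but not defined in this snippet. A plausible
# definition (an assumption, not the original) that captures the namespace
# from a PHP "namespace Foo\Bar;" declaration, including an optional leading
# backslash (which explains the empty-first-chunk handling above):
import re

REG_NS_EXAMPLE = re.compile(r'^namespace\s+([A-Za-z0-9_\\]+)\s*;')

m = REG_NS_EXAMPLE.match('namespace \\App\\Models;')
assert m is not None and m.group(1) == '\\App\\Models'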
def build(self):
    if self.sass_path.endswith('.bat') or self.sass_path.endswith('.BAT'):
        RUBYDIR = os.path.dirname(self.sass_path)
        sass_cmd = [
            os.path.join(RUBYDIR, 'ruby.exe'),
            os.path.join(RUBYDIR, 'sass')
        ]
    else:
        sass_cmd = [self.sass_path]
    args = ['--scss', '--force', '-C', '-t', self.output_style]
    if self.compass:
        args += ['--compass']
    for import_path in self.import_paths:
        args += ['-I=' + import_path]
    #os_utils.ensureDirExists(os.path.join('tmp', os.path.dirname(self.target)))
    os_utils.ensureDirExists(os.path.dirname(self.target))
    os_utils.cmd(sass_cmd + args + self.files + [self.target],
                 critical=True, echo=self.should_echo_commands(), show_output=True)
def Initialize(self) -> None:
    self.preex_sound = self.pathcfg.get('sound.old-vox', os.path.join(self.cwd, 'sound', 'vox', '{ID}.wav'))
    self.nuvox_sound = self.pathcfg.get('sound.new-vox', os.path.join(self.cwd, 'sound', 'vox_{SEX}', '{ID}.wav'))
    self.vox_sounds_path = os.path.join(self.dist_dir, self.pathcfg.get('vox_sounds.path'))
    self.templatefile = self.pathcfg.get('vox_sounds.template')
    self.vox_data_path = os.path.join(self.dist_dir, self.pathcfg.get('vox_data'))
    self.default_voice = VoiceRegistry.Get(USSLTFemale.ID)
    os_utils.ensureDirExists(self.tmp_dir)
    os_utils.ensureDirExists(self.data_dir)
    for sexID, voiceid in self.config.get('voices', {'fem': USSLTFemale.ID}).items():
        voice = VoiceRegistry.Get(voiceid)
        assert sexID != ''
        voice.assigned_sex = sexID
        if sexID in ('fem', 'mas'):
            sex = EVoiceSex(sexID)
            assert voice.SEX == sex
            self.voices += [voice]
        elif sexID == 'default':
            self.default_voice = voice
        self.voice_assignments[voice.assigned_sex] = []
        self.all_voices += [voice]
        self.configured_voices[sexID] = voice.serialize()
    self.voice_assignments[self.sfx_voice.assigned_sex] = []
    self.all_voices += [self.sfx_voice]
    self.configured_voices[self.sfx_voice.assigned_sex] = self.sfx_voice.serialize()
def build(self):
    os_utils.ensureDirExists(os.path.dirname(self.target))
    data = {}
    with open(self.files[0], 'r', encoding='utf-8-sig') as inf:
        if self.from_type == EDataType.YAML:
            data = yaml.load(inf)
        if self.from_type == EDataType.JSON:
            data = json.load(inf)
        if self.from_type == EDataType.TOML:
            data = toml.load(inf)
    with open(self.target, 'w', encoding='utf-8') as outf:
        if self.to_type == EDataType.YAML:
            kwargs = {}
            kwargs['default_flow_style'] = not self.pretty_print
            if self.indent_chars is not None and self.pretty_print:
                kwargs['indent'] = self.indent_chars
            yaml.dump(data, outf, **kwargs)
        if self.to_type == EDataType.JSON:
            json.dump(data, outf,
                      indent=self.indent_chars if self.pretty_print and self.indent_chars is not None else None)
        if self.to_type == EDataType.TOML:
            toml.dump(data, outf)
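# Hypothetical registration of this converter target (the class name and
# maestro API are assumptions based on the surrounding build targets, not
# confirmed by this snippet):
#
#   bm.add(ConvertDataTarget('htdocs/data/config.json', ['data/config.yml'],
#                            from_type=EDataType.YAML,
#                            to_type=EDataType.JSON,
#                            pretty_print=True,
#                            indent_chars=2))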
config = YAMLConfig("build.yml", config, variables={"nbits": "32"}) config.Load("user-config.yml", merge=True, defaults=userconfig) EXECUTABLES = config.get("paths.executables") ENV.appendTo("PATH", os.path.dirname(EXECUTABLES["7za"])) ENV.set("QMAKESPEC", config.get("qt-makespec", "win32-msvc2013")) #: x64 or x86 short_arch = "x64" if config["architecture"] == "x86_64" else "x86" #: 64 or 32 nbits = "64" if config["architecture"] == "x86_64" else "32" superrepo = os.path.join("build", "modorganizer_super") if not os.path.isdir(superrepo): os_utils.ensureDirExists(superrepo) with os_utils.Chdir(superrepo): os_utils.cmd([EXECUTABLES["git"], "init"], show_output=True, critical=True) ymlvars = {"nbits": nbits, "script_dir": script_dir} prerequisites = YAMLConfig("prerequisites.yml", variables=ymlvars).cfg with log.info("Downloading prerequisites..."): dlPackagesIn(prerequisites) # Copied from Unimake. projs = [ ("modorganizer-archive", "archive", "master", ["7zip", "Qt5"]), ("modorganizer-uibase", "uibase", "new_vfs_library", ["Qt5", "boost"]), ("modorganizer-lootcli", "lootcli", "master", ["LootApi", "boost"]), ("modorganizer-esptk", "esptk", "master", ["boost"]), ("modorganizer-bsatk", "bsatk", "master", ["zlib"]),
def writeCache(self):
    configHash = self.getConfigHash()
    targetHash = self.getTargetHash()
    os_utils.ensureDirExists(os.path.dirname(self.getCacheFile()))
    with open(self.getCacheFile(), 'w') as f:
        yaml.dump_all([self.CACHE_VER, configHash, targetHash,
                       self.serialize_file_times(), self.serialize_file_hashes(),
                       self.get_config()], f)
def touch(self, filename):
    os_utils.ensureDirExists(os.path.dirname(filename))
    Path(filename).touch(exist_ok=True)
def build(self):
    os_utils.ensureDirExists(os.path.dirname(self.target), noisy=False)
    shutil.move(self.files[0], self.target)
def build(self):
    cmdline = [self.uglifyjs_executable] + self.options + ['-o', self.target, self.files[0]]
    os_utils.ensureDirExists(os.path.dirname(self.target))
    os_utils.cmd(cmdline, critical=True, echo=self.should_echo_commands())
def main():
    argp = argparse.ArgumentParser(description='Generation script for ss13-vox.')
    #argp.add_argument('--codebase', choices=['vg', 'tg'], default='vg', help='Which codebase to generate for. (Affects output code and paths.)')
    argp.add_argument('--threads', '-j', type=int, default=multiprocessing.cpu_count(), help='How many threads to use in ffmpeg.')
    #argp.add_argument('phrasefiles', nargs='+', type=str, help='A list of phrase files.')
    args = argp.parse_args()

    if not os.path.isdir('tmp'):
        os.makedirs('tmp')

    DIST_DIR = 'dist'
    PREEX_SOUND = 'sound/vox/{ID}.wav'
    NUVOX_SOUND = 'sound/vox_{SEX}/{ID}.wav'
    voices = []
    vox_sounds_path = ''
    templatefile = ''

    config = BaseConfig()
    config.cfg = YAMLConfig('config.yml')
    pathcfg = BaseConfig()
    pathcfg.cfg = YAMLConfig('paths.yml').cfg[config.get('codebase', 'vg')]

    PREEX_SOUND = pathcfg.get('sound.old-vox', PREEX_SOUND)
    NUVOX_SOUND = pathcfg.get('sound.new-vox', NUVOX_SOUND)

    voice_assignments = {}
    all_voices = []
    default_voice: Voice = VoiceRegistry.Get(USSLTFemale.ID)
    sfx_voice: SFXVoice = SFXVoice()
    configured_voices: Dict[str, dict] = {}
    for sexID, voiceid in config.get('voices', {'fem': USSLTFemale.ID}).items():
        voice = VoiceRegistry.Get(voiceid)
        assert sexID != ''
        voice.assigned_sex = sexID
        if sexID in ('fem', 'mas'):
            sex = EVoiceSex(sexID)
            assert voice.SEX == sex
            voices += [voice]
        elif sexID == 'default':
            default_voice = voice
        voice_assignments[voice.assigned_sex] = []
        all_voices += [voice]
        configured_voices[sexID] = voice.serialize()

    voice_assignments[sfx_voice.assigned_sex] = []
    all_voices += [sfx_voice]
    configured_voices[sfx_voice.assigned_sex] = sfx_voice.serialize()

    vox_sounds_path = os.path.join(DIST_DIR, pathcfg.get('vox_sounds.path'))
    templatefile = pathcfg.get('vox_sounds.template')
    vox_data_path = os.path.join(DIST_DIR, pathcfg.get('vox_data'))

    DATA_DIR = os.path.join(DIST_DIR, 'data')
    os_utils.ensureDirExists(DATA_DIR)

    with log.info('Parsing lexicon...'):
        lexicon = ParseLexiconText('lexicon.txt')

    phrases = []
    phrasesByID = {}
    broked = False
    for filename in config.get('phrasefiles', ['announcements.txt', 'voxwords.txt']):
        for p in ParsePhraseListFrom(filename):
            if p.id in phrasesByID:
                duplicated = phrasesByID[p.id]
                log.critical('Duplicate phrase with ID %s in file %s on line %d! First instance in file %s on line %d.',
                             p.id, p.deffile, p.defline, duplicated.deffile, duplicated.defline)
                broked = True
                continue
            phrases += [p]
            phrasesByID[p.id] = p
    if broked:
        sys.exit(1)

    soundsToKeep = set()
    for sound in OTHERSOUNDS:
        soundsToKeep.add(os.path.join(DIST_DIR, sound + '.ogg'))

    phrases.sort(key=lambda x: x.id)

    overrides = config.get('overrides', {})
    for phrase in phrases:
        if phrase.id in overrides:
            phrase.fromOverrides(overrides.get(phrase.id))
        phrase_voices = list(voices)
        # If it has a path, it's being manually specified.
        if '/' in phrase.id:
            phrase.filename = phrase.id + '.ogg'
            phrase_voices = [default_voice]
            soundsToKeep.add(os.path.abspath(os.path.join(DIST_DIR, phrase.filename)))
        else:
            phrase.filename = '' + NUVOX_SOUND

        if phrase.hasFlag(EPhraseFlags.OLD_VOX):
            phrase_voices = [default_voice]
            phrase.filename = PREEX_SOUND.format(ID=phrase.id)
            for voice in ['fem', 'mas']:
                phrase.files[voice] = FileData()
                phrase.files[voice].filename = phrase.filename
                phrase.files[voice].checksum = ''
                phrase.files[voice].duration = phrase.override_duration or -1
                phrase.files[voice].size = phrase.override_size or -1
                #voice_assignments[voice].append(phrase)
            soundsToKeep.add(os.path.abspath(os.path.join(DIST_DIR, phrase.filename)))
            continue

        if phrase.hasFlag(EPhraseFlags.SFX):
            phrase_voices = [sfx_voice]

        if not phrase.hasFlag(EPhraseFlags.OLD_VOX):
            log.info('%s - %r', phrase.id, [x.assigned_sex for x in phrase_voices])
            for v in phrase_voices:
                voice_assignments[v.assigned_sex].append(phrase)
                #phrase.files[v.assigned_sex] = fd
    #sys.exit(1)

    for voice in all_voices:
        print(voice.ID, voice.assigned_sex)
        DumpLexiconScript(voice.FESTIVAL_VOICE_ID, lexicon.values(), 'tmp/VOXdict.lisp')
        for phrase in voice_assignments[voice.assigned_sex]:
            GenerateForWord(phrase, voice, soundsToKeep, args)
            sexes = set()
            for vk, fd in phrase.files.items():
                soundsToKeep.add(os.path.abspath(os.path.join(DIST_DIR, fd.filename)))

    jenv = jinja2.Environment(loader=jinja2.FileSystemLoader(['./templates']))
    jenv.add_extension('jinja2.ext.do')  # {% do ... %}
    templ = jenv.get_template(templatefile)
    with log.info('Writing sound list to %s...', vox_sounds_path):
        os_utils.ensureDirExists(os.path.dirname(vox_sounds_path))
        assetcache = {}
        sound2id = {}
        with open(vox_sounds_path, 'w') as f:
            sexes = {
                'fem': [],
                'mas': [],
                'default': [],
                #'sfx': [],
            }
            for p in phrases:
                for k in p.files.keys():
                    assetcache[p.getAssetKey(k)] = p.files[k].filename
                    sound2id[p.files[k].filename] = p.getAssetKey(k)
                if p.hasFlag(EPhraseFlags.NOT_VOX):
                    continue
                for k in p.files.keys():
                    if p.hasFlag(EPhraseFlags.SFX):
                        for sid in ('fem', 'mas'):
                            if p not in sexes[sid]:
                                sexes[sid].append(p)
                    else:
                        sexes[k].append(p)
            f.write(templ.render(
                InitClass=InitClass,
                SEXES=sexes,
                ASSETCACHE=assetcache,
                SOUND2ID=sound2id,
                PHRASES=[p for p in phrases if not p.hasFlag(EPhraseFlags.NOT_VOX)]))
    soundsToKeep.add(os.path.abspath(vox_sounds_path))

    os_utils.ensureDirExists(DATA_DIR)
    with open(os.path.join(DATA_DIR, 'vox_data.json'), 'w') as f:
        data = {
            'version': 2,
            'compiled': time.time(),
            'voices': configured_voices,
            'words': collections.OrderedDict({w.id: w.serialize() for w in phrases if '/' not in w.id}),
        }
        json.dump(data, f, indent=2)
    soundsToKeep.add(os.path.abspath(os.path.join(DATA_DIR, 'vox_data.json')))

    with open('tmp/written.txt', 'w') as f:
        for filename in sorted(soundsToKeep):
            f.write(f'{filename}\n')

    for root, _, files in os.walk(DIST_DIR, topdown=False):
        for name in files:
            filename = os.path.abspath(os.path.join(root, name))
            if filename not in soundsToKeep:
                log.warning('Removing {0} (no longer defined)'.format(filename))
                os.remove(filename)
def build(self):
    definition = {}
    with open(self.filename, 'r') as r:
        definition = yaml.load(r)['enum']
    if 'auto-value' in definition:
        autoval = definition['auto-value']
        i = autoval.get('start', 0)
        for k in definition['values'].keys():
            if definition['values'][k].get('auto', True):
                # Flag enums get power-of-two values (1 << i); plain enums count up.
                definition['values'][k]['value'] = 1 << i if definition.get('flags', False) else i
                i += 1
    flags = False
    if 'flags' in definition and definition['flags']:
        flags = True
    definition['tests'] = definition.get('tests', {})
    definition['tests']['unique'] = definition['tests'].get('unique', True)
    definition['tests']['single-bit-only'] = definition['tests'].get('single-bit-only', True)
    default = definition.get('default', 0)
    for k, vpak in definition['values'].items():
        val = self._get_value_for(vpak)
        if self._get_for(vpak, 'default', False):
            if flags:
                default |= val
            else:
                default = val
    if flags or 'tests' in definition:
        with log.info('Testing %s....', definition['name']):
            tests = definition.get('tests', {})
            if 'increment' in tests:
                incrdef = tests['increment']
                start = incrdef.get('start', 0)
                stop = incrdef.get('stop', len(definition['values']))
                vals = []
                for k, vpak in definition['values'].items():
                    vals += [self._get_value_for(vpak)]
                for i in range(start, stop):
                    if i not in vals:
                        log.error('increment: Missing value %d!', i)
            if 'unique' in tests and tests['unique']:
                vals = {}
                for k, vpak in definition['values'].items():
                    val = self._get_value_for(vpak)
                    if val in vals:
                        log.error('unique: Entry %s is not using a unique value!', k)
                    vals[val] = True
            if flags:
                if 'single-bit-only' in tests and tests['single-bit-only']:
                    for k, vpak in definition['values'].items():
                        val = self._get_value_for(vpak)
                        # Count set bits; more than one means it isn't a single flag.
                        c = 0
                        while val > 0:
                            c += val & 1
                            val >>= 1
                            if c > 1:
                                log.error('single-bit-only: Entry %s has too many bits!', k)
                                break
    definition['default'] = default
    os_utils.ensureDirExists(os.path.dirname(self.target), noisy=True)
    with open(self.target, 'w') as w:
        self.writer.write(w, definition)
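# A hypothetical input file for this enum builder, inferred from the keys the
# code reads ('enum', 'name', 'values', 'flags', 'auto-value', 'tests'); the
# exact schema is not confirmed by this snippet:
#
#   enum:
#     name: MyFlags
#     flags: true
#     auto-value:
#       start: 0
#     tests:
#       unique: true
#       single-bit-only: true
#     values:
#       FOO: {}          # auto -> 1 << 0
#       BAR: {}          # auto -> 1 << 1
#       BAZ:
#         auto: false
#         value: 4       # explicit value
#         default: true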