def process(self, progress):
    """Configure (CMake/NMake) and compile the package in an out-of-source
    build directory, feeding nmake's "[ NN%]" progress markers into *progress*.

    Returns True on success, False on failure.
    """
    if "build_path" not in self._context:
        logging.error("source path not known for {},"
                      " are you missing a matching retrieval script?".format(self._context.name))
        return False

    # prepare for out-of-source build
    build_path = os.path.join(self._context["build_path"], "build")
    try:
        os.mkdir(build_path)
    except OSError:
        # Directory left over from a previous run; reuse it.
        pass

    soutpath = os.path.join(self._context["build_path"], "stdout.log")
    serrpath = os.path.join(self._context["build_path"], "stderr.log")
    try:
        with on_exit(lambda: progress.finish()):
            with open(soutpath, "w") as sout:
                with open(serrpath, "w") as serr:
                    with os_utils.Chdir(self.build_path):
                        os_utils.cmd([config["paths"]["cmake"], "-G", "NMake Makefiles", ".."] + self.__arguments,
                                     echo=True, show_output=True, critical=True)
                        # BUGFIX: universal_newlines makes stdout yield str lines.
                        # The original read bytes, so `line != ''` was always true
                        # at EOF and the str regex below raised TypeError.
                        proc = Popen([config['tools']['make'], "verbose=1"],
                                     shell=True, env=config["__environment"],
                                     cwd=build_path, stdout=PIPE, stderr=serr,
                                     universal_newlines=True)
                        progress.job = "Compiling"
                        progress.maximum = 100
                        while proc.poll() is None:
                            while True:
                                line = proc.stdout.readline()
                                if line != '':
                                    # nmake emits "[ 42%]"-style progress markers.
                                    match = re.search("^\\[([0-9 ][0-9 ][0-9])%\\]", line)
                                    if match is not None:
                                        progress.value = int(match.group(1))
                                    sout.write(line)
                                else:
                                    break
                        if proc.returncode != 0:
                            raise Exception("failed to build (returncode %s), see %s and %s" % (proc.returncode, soutpath, serrpath))
                        if self.__install:
                            proc = Popen([config['tools']['make'], "install"],
                                         shell=True, env=config["__environment"],
                                         cwd=build_path, stdout=sout, stderr=serr)
                            proc.communicate()
                            if proc.returncode != 0:
                                raise Exception("failed to install (returncode %s), see %s and %s" % (proc.returncode, soutpath, serrpath))
        # BUGFIX: the original returned False here, reporting success as failure.
        return True
    except Exception as e:
        # BUGFIX: Python 3 syntax; original used "except Exception, e" and
        # the long-removed e.message attribute.
        logging.error(e)
        return False
def build(self):
    """Assemble and run an rsync command line from the configured options.

    Exit code 23 (partial transfer) is tolerated in addition to 0, and the
    target marker file is touched afterwards.
    """
    # e.g.: rsync -Rrav --progress *.mp3 user@host:/path/to/htdocs/media/
    cmd = [self.rsync_executable]
    cmd += self.opts
    if self.progress:
        cmd += ['--progress']
    if self.delete:
        cmd += ['--delete', '--delete-before']
    # PEP 8: identity comparison with None, not equality.
    if self.chmod is not None:
        cmd += [f'--chmod={self.chmod:o}']  # chmod value is rendered octal
    if self.chown is not None:
        chown = self.chown
        if self.chgrp is not None:
            chown += ':' + self.chgrp
        cmd += [f'--chown={chown}']
    if self.keyfile is not None:
        keypath = self.keyfile
        # ssh refuses keys with loose permissions.
        os_utils.cmd(['chmod', '400', keypath], echo=True, show_output=True, critical=True)
        if os_utils.is_windows() and ':' in keypath:
            keypath = keypath.replace('\\', '/')
        cmd += ['-e', f"ssh -i {keypath}"]
    cmd += [x.replace('\\', '/') for x in self.sources]
    cmd += [self.destination]
    os_utils.cmd(cmd, show_output=self.show_output, echo=self.should_echo_commands(), critical=True, acceptable_exit_codes=[0, 23])
    self.touch(self.target)
def build(self):
    """Optimize the source SVG with svgo, writing the result to the target."""
    os_utils.ensureDirExists(os.path.dirname(self.target))
    cmdline = [self.svgo_cmd, '-i', self.source, '-o', self.target]
    cmdline += self.svgo_opts
    os_utils.cmd(cmdline, echo=self.should_echo_commands(), show_output=True, critical=True)
def build(self):
    """Compile CoffeeScript to the target JS file.

    Multiple inputs are first concatenated into a single intermediate
    .coffee file; the compiler's output is renamed onto the target when
    the names differ.
    """
    os_utils.ensureDirExists(os.path.dirname(self.target))
    # BUGFIX: Coffeescript sometimes doesn't want to overwrite shit. - N3X
    if os.path.isfile(self.target):
        os.remove(self.target)
    coffeefile = self.files[0]
    if len(self.files) > 1:
        # Several inputs: concatenate them into one intermediate file first.
        coffeefile = self.getCoffeeFile()
        if os.path.isfile(coffeefile):
            os.remove(coffeefile)
        with codecs.open(coffeefile, 'w', encoding='utf-8-sig') as outf:
            progbar = tqdm(self.files, desc='Concatenating...', leave=False)
            for infilename in progbar:
                with codecs.open(infilename, 'r', encoding='utf-8-sig') as inf:
                    for srcline in inf:
                        outf.write(srcline.rstrip() + "\n")
            progbar.close()
    coffeefile_basename, _ = os.path.splitext(os.path.basename(coffeefile))
    os_utils.cmd([self.coffee_executable] + self.coffee_opts + ['-o', os.path.dirname(self.target), coffeefile],
                 critical=True, echo=self.should_echo_commands(), show_output=True)
    # coffee names its output after the input file; move it onto the target.
    coffee_output_file = os.path.join(os.path.dirname(self.target), coffeefile_basename + '.js')
    if coffee_output_file != self.target:
        log.info('Renaming %s to %s...', coffee_output_file, self.target)
        os.rename(coffee_output_file, self.target)
def build(self):
    """Copy the source to the intermediate path, then run webify on it."""
    os_utils.ensureDirExists(os.path.dirname(self.destination), noisy=True)
    os_utils.single_copy(self.source, self.intermediate_filename, verbose=True)
    cmdline = [self.webify, self.intermediate_filename]
    os_utils.cmd(cmdline, echo=True, critical=True)
def dlPackagesIn(pkgdefs, superrepo="build"):
    """Retrieve every package described in *pkgdefs* into *superrepo*.

    Supported retrieval types: "git", "hg", "http". A package is
    (re)fetched when --rebuild-all/--rebuild requests it or its checkout
    is missing or behind the remote.
    """
    os_utils.ensureDirExists("download")
    for destination, retrievalData in pkgdefs.items():
        rebuild = args.rebuild_all or destination in args.rebuild
        destination = os.path.join(superrepo, destination)
        dlType = retrievalData["type"]
        if dlType == "git":
            remote = retrievalData.get("remote", "origin")
            branch = retrievalData.get("branch", "master")
            commit = retrievalData.get("commit")
            submodules = retrievalData.get("submodules", False)
            submodules_remote = retrievalData.get("submodules_remote", False)
            tag = retrievalData.get("tag")
            if "uri" not in retrievalData:
                log.critical("uri not in def for %s", destination)
            git = GitRepository(destination, retrievalData["uri"], quiet=True, noisy_clone=True)
            with log.info("Checking for updates to %s...", destination):
                if rebuild or not os.path.isdir(destination):
                    if rebuild or git.CheckForUpdates(remote, branch, tag=tag, commit=commit):
                        # BUGFIX: typo "Updates detecting" in log message.
                        log.info("Updates detected, pulling...")
                        git.Pull(remote, branch, tag=tag, commit=commit, cleanup=True)
                    if submodules:
                        if rebuild:
                            # Scrub submodule work trees before re-syncing.
                            with os_utils.Chdir(destination):
                                os_utils.cmd(
                                    ["git", "submodule", "foreach", "--recursive", "git clean -dfx"],
                                    echo=True, show_output=True, critical=True,
                                )
                        git.UpdateSubmodules(submodules_remote)
        elif dlType == "hg":
            remote = retrievalData.get("remote", "default")
            branch = retrievalData.get("branch", "master")
            commit = retrievalData.get("commit", retrievalData.get("tag"))
            if "uri" not in retrievalData:
                log.critical("uri not in def for %s", destination)
            hg = HgRepository(destination, retrievalData["uri"], quiet=True, noisy_clone=True)
            with log.info("Checking for updates to %s...", destination):
                if rebuild or not os.path.isdir(destination):
                    if rebuild or hg.CheckForUpdates(remote, branch):
                        # BUGFIX: typo "Updates detecting" in log message.
                        log.info("Updates detected, pulling...")
                        hg.Pull(remote, branch, commit, cleanup=True)
        elif dlType == "http":
            url = retrievalData["url"]
            ext = retrievalData.get("ext", url[url.rfind("."):])
            # BUGFIX: hashlib.md5 requires bytes under Python 3; encode the URL.
            filename = os.path.join(
                script_dir, "download",
                retrievalData.get("filename", hashlib.md5(url.encode("utf-8")).hexdigest() + ext)
            )
            if not os.path.isfile(filename):
                with log.info("Downloading %s...", url):
                    http.DownloadFile(url, filename)
            if (rebuild or not os.path.isdir(destination)) and not retrievalData.get("download-only", False):
                if rebuild:
                    os_utils.safe_rmtree(destination)
                os_utils.ensureDirExists(destination)
                with os_utils.Chdir(destination):
                    os_utils.decompressFile(filename)
def build(self):
    """Run the convert executable over the input files to make the target."""
    os_utils.ensureDirExists(os.path.dirname(self.target))
    here = os.getcwd()
    command_line = [self.convert_executable]
    command_line += [os.path.relpath(filename, here) for filename in self.files]
    command_line.append(os.path.relpath(self.target, here))
    os_utils.cmd(command_line, critical=True, echo=self.should_echo_commands(), show_output=True)
def build(self):
    """Run the tool in its working directory, then ensure the target exists
    (touching it if the tool did not create it)."""
    opts = self.buildOpts()
    with os_utils.Chdir(self.working_dir):
        # BUGFIX: the original computed buildOpts() into a local but then
        # passed self.opts, ignoring the freshly built option list.
        os_utils.cmd([self.exe_path] + opts, show_output=True, echo=self.should_echo_commands(), critical=True)
    if not os.path.isfile(self.target):
        self.touch(self.target)
def build(self):
    """Install JS dependencies with yarn, then run the grunt build tasks."""
    with os_utils.Chdir(os.path.join(self.base_path)):
        yarn = os_utils.which('yarn')
        os_utils.cmd([yarn], echo=True, show_output=True, critical=True)
        grunt = os_utils.which('grunt')
        os_utils.cmd([grunt, 'requirejs', 'uglify:main'], echo=True, show_output=True, critical=True)
def build(self):
    """Run the configured command in its working directory, then touch every
    declared output."""
    echo = self.should_echo_commands() if self.echo is None else self.echo
    with os_utils.Chdir(self.cwd):
        os_utils.cmd(self.cmd, show_output=self.show_output, echo=echo,
                     critical=True, globbify=self.globbify)
    for provided in self.provides():
        self.touch(provided)
def build(self):
    """Rasterize the first input SVG to the target at the configured size
    using Inkscape."""
    os_utils.ensureDirExists(os.path.dirname(self.target))
    cmdline = [
        self.inkscape,
        '-z',                    # no GUI
        '-e', self.target,       # export target
        '-h', str(self.height),
        '-w', str(self.width),
        self.files[0],
    ]
    os_utils.cmd(cmdline, critical=True, echo=self.should_echo_commands(), show_output=True)
def Pull(self, remote='default', branch='default', cleanup=False):
    """Clone the repository if missing, optionally scrub local changes, and
    sync the working copy to the remote revision. Returns True."""
    noisy = not self.quiet or self.noisy_clone
    if not os.path.isdir(self.path):
        cmd(['hg', 'clone', self.remotes[remote], self.path],
            echo=noisy, critical=True, show_output=noisy)
    if self.IsDirty() and cleanup:
        # Drop uncommitted changes plus untracked files and directories.
        self._hgcmd(['clean', '--all', '--dirs', '--files'])
        self._hgcmd(['revert', '-C', '--all'])
    if self.current_branch != branch:
        self._hgcmd(['checkout', '-C', branch])
    if self.current_rev != self.remote_rev:
        self._hgcmd(['pull', '-r', self.remote_rev])
    return True
def UpdateSubmodules(self, remote=False):
    """Init/update all submodules recursively; with *remote* True, track each
    submodule's configured remote branch (--remote)."""
    with log.info('Updating submodules in %s...', self.path):
        with Chdir(self.path, quiet=self.quiet):
            if os.path.isfile('.gitmodules'):
                extra = ['--remote'] if remote else []
                cmd(['git', 'submodule', 'update', '--init', '--recursive'] + extra,
                    echo=not self.quiet, critical=True)
def _cmd_create(args):
    """Create a new lobbyscreen pool: write its __POOL__.yml and .gitignore,
    then initialize a git repository in it with LFS tracking for media files.
    """
    data = {
        'id': args.ID,
        'playlist': 'lobby',
        'template': 'main'
    }
    pooldir = os.path.join('lobbyscreens', args.ID)
    os_utils.ensureDirExists(pooldir, noisy=True)
    os_utils.ensureDirExists(os.path.join(pooldir, 'files'), noisy=True)
    # Paths in `written` are relative to pooldir, because `git add` below
    # runs with pooldir as the working directory.
    written = []
    with open(os.path.join(pooldir, '__POOL__.yml'), 'w') as f:
        yaml.dump(data, f, default_flow_style=False)
        log.info('Wrote %s.', f.name)
    written += ['__POOL__.yml']
    # BUGFIX: the original wrote .gitignore into the *current* directory and
    # recorded pooldir-prefixed paths, so the later `git add` inside pooldir
    # could not match them.
    with open(os.path.join(pooldir, '.gitignore'), 'w') as f:
        f.write('/parsed.yml\n')
        log.info('Wrote %s.', f.name)
    written += ['.gitignore']
    with os_utils.Chdir(pooldir):
        if not os.path.isdir('.git'):
            os_utils.cmd(['git', 'init'], echo=True, show_output=True, critical=True)
        os_utils.cmd(['git', 'lfs', 'install'], echo=True, show_output=True, critical=True)
        os_utils.cmd(['git', 'lfs', 'track', '*.png', '*.gif', '*.jpg', '*.webm', '*.webp'], echo=True, show_output=True, critical=True)
        os_utils.cmd(['git', 'add', '.gitattributes'] + written, echo=True, show_output=True, critical=True)
def build(self):
    """Compile the SCSS inputs into the target CSS with sass.

    Honors the configured output style, optional embedded source maps, and
    any extra import paths.
    """
    # Idiom: removed the dead `sass_cmd = []` that was immediately overwritten.
    sass_cmd = [self.sass_path]
    args = ['--no-color', '-q', '--stop-on-error', '-s', self.output_style]
    if self.source_map:
        args += ['--embed-sources', '--embed-source-map']
    for import_path in self.import_paths:
        args += ['-I', import_path]
    os_utils.ensureDirExists(os.path.dirname(self.target))
    os_utils.cmd(sass_cmd + args + self.files + [self.target],
                 critical=True, echo=self.should_echo_commands(), show_output=True)
def compile(self):
    """Compile each source to an .obj (skipping up-to-date objects), then
    archive all objects into the output library via the linker."""
    ofiles = []
    for filename in self.files:
        of = "{}.obj".format(os.path.splitext(filename)[0])
        if os_utils.canCopy(filename, of):  # Checks mtime.
            os_utils.cmd([self.compiler, '-c'] + self.cflags + ['-Fo:', of, filename],
                         critical=True, show_output=True, echo=True)
        # BUGFIX: every object must be linked, not only freshly recompiled
        # ones — the append was inside the mtime check.
        ofiles.append(of)
    os_utils.cmd([self.linker, '/lib', '/nologo', '/out:' + self.output] + ofiles,
                 critical=True, show_output=True, echo=True)
def build(self):
    """Convert the input CSS files to SCSS with sass-convert, writing the
    target file."""
    if self.sass_convert_path.endswith(('.bat', '.BAT')):
        # Windows: call the script through ruby.exe instead of the .bat shim.
        rubydir = os.path.dirname(self.sass_convert_path)
        sass_cmd = [
            os.path.join(rubydir, 'ruby.exe'),
            os.path.join(rubydir, 'sass-convert'),
        ]
    else:
        sass_cmd = [self.sass_convert_path]
    conv_args = ['-F', 'css', '-T', 'scss', '-C']
    os_utils.ensureDirExists(os.path.dirname(self.target))
    os_utils.cmd(sass_cmd + conv_args + self.files + [self.target],
                 critical=True, echo=self.should_echo_commands(), show_output=True)
def main():
    """Relocate PHP class files under OUT_DIR according to their declared
    namespace, via `git mv` (dry-run unless --go is given)."""
    argp = argparse.ArgumentParser()
    argp.add_argument('--go', action='store_true')
    args = argp.parse_args()

    # Pass 1: collect .php files; delete stray .bak files on sight.
    files_to_proc = []
    for root, _, files in os.walk(IN_DIR):
        for bfn in files:
            fullpath = os.path.abspath(os.path.join(root, bfn))
            if bfn.endswith('.bak'):
                log.info('rm %s', fullpath)
                os.remove(fullpath)
            if bfn.endswith('.php'):
                files_to_proc.append(fullpath)

    # Pass 2: map each file's namespace to its destination path.
    for filename in tqdm.tqdm(files_to_proc, desc='Moving files...', unit='file'):
        namespace = None
        with open(filename, 'r') as f:
            for line in f:
                m = re.match(REG_NS, line)
                if m is not None:
                    namespace = m.group(1)
                    break
        if namespace is None:
            continue
        nschunks = namespace.split('\\')
        if nschunks[0] == '':
            nschunks = nschunks[1:]  # leading backslash yields an empty chunk
        nschunks = nschunks[1:]      # drop the root/vendor chunk
        basename = os.path.basename(filename).replace('.class', '').replace('.interface', '')
        nschunks.append(basename)
        outpath = os.path.abspath(os.path.join(OUT_DIR, *nschunks))
        if outpath == filename:
            continue
        mv_cmd = [os_utils.which('git'), 'mv', os.path.relpath(filename), os.path.relpath(outpath)]
        if args.go:
            os_utils.ensureDirExists(os.path.dirname(outpath), noisy=True)
            os_utils.cmd([os_utils.which('git'), 'add', os.path.relpath(filename)], echo=True, show_output=True)
            os_utils.cmd(mv_cmd, echo=True, critical=True)
        else:
            log.info(' '.join(mv_cmd))
    os_utils.del_empty_dirs(IN_DIR, quiet=False)
def build(self):
    """Run composer in a cloned environment with COMPOSER, and optionally
    COMPOSER_VENDOR_DIR / COMPOSER_BIN_DIR, pointed at our paths; touch the
    target afterwards when it exists."""
    env = os_utils.ENV.clone()
    noisy = self.should_echo_commands()
    env.set('COMPOSER', self.specfile, noisy=noisy)
    if self.modules_dir is not None:
        env.set('COMPOSER_VENDOR_DIR', self.modules_dir, noisy=noisy)
    if self.composer_bin_dir is not None:
        env.set('COMPOSER_BIN_DIR', self.composer_bin_dir, noisy=noisy)
    cmdline = self.processOpts()
    os_utils.cmd(cmdline, show_output=True, echo=noisy, critical=True, env=env)
    if os.path.isfile(self.target):
        self.touch(self.target)
def build(self, CMAKE, dir='.', env=None, target=None, moreflags=None):
    """Run `cmake --build` on *dir*, optionally restricted to *target*.

    Returns the result of cmd() (critical=True, so failures abort).
    """
    if env is None:
        env = ENV.env
    # BUGFIX: moreflags defaulted to a shared mutable list and was mutated
    # with +=, so '--target' flags accumulated across calls. Copy any
    # caller-supplied list so it is never mutated either.
    moreflags = [] if moreflags is None else list(moreflags)
    flags = ['--build', dir]
    if target is not None:
        moreflags += ['--target', target]
    flags += moreflags
    with log.info('Running CMake --build:'):
        BuildEnv.dump(env)
        return cmd([CMAKE] + flags, env=env, critical=True, echo=True)
def Build(self, file, targets=['target'], ant='ant'):
    """Invoke ant on buildfile *file* with the configured -D defines and
    property files, building *targets*. Returns the result of cmd()."""
    cmdline = [ant]
    # BUGFIX: the original referenced an undefined name `target`; the
    # buildfile parameter is `file`.
    cmdline += ['-file', file]
    for k, v in sorted(self.defines.items()):
        cmdline += ['-D{}={}'.format(k, v)]
    for filename in self.propertyfiles:
        cmdline += ['-propertyfile', filename]
    cmdline += targets  # never mutated, so the shared default list is safe
    return cmd(cmdline, critical=True, echo=True)
def Build(self, file, targets=['target'], ant='ant'):
    """Invoke ant on buildfile *file* with the configured -D defines and
    property files, building *targets*. Returns the result of cmd()."""
    cmdline = [ant]
    # BUGFIX: the original referenced an undefined name `target`; the
    # buildfile parameter is `file`.
    cmdline += ['-file', file]
    for k, v in sorted(self.defines.items()):
        cmdline += ['-D{}={}'.format(k, v)]
    for filename in self.propertyfiles:
        cmdline += ['-propertyfile', filename]
    cmdline += targets  # never mutated, so the shared default list is safe
    return cmd(cmdline, critical=True, echo=True)
def build(self):
    """Compile SCSS with Ruby sass (optionally through Compass) into the
    target file."""
    if self.sass_path.endswith(('.bat', '.BAT')):
        # Windows: run the sass script via ruby.exe rather than the .bat shim.
        rubydir = os.path.dirname(self.sass_path)
        sass_cmd = [
            os.path.join(rubydir, 'ruby.exe'),
            os.path.join(rubydir, 'sass'),
        ]
    else:
        sass_cmd = [self.sass_path]
    sass_args = ['--scss', '--force', '-C', '-t', self.output_style]
    if self.compass:
        sass_args.append('--compass')
    for import_path in self.import_paths:
        sass_args.append('-I=' + import_path)
    os_utils.ensureDirExists(os.path.dirname(self.target))
    os_utils.cmd(sass_cmd + sass_args + self.files + [self.target],
                 critical=True, echo=self.should_echo_commands(), show_output=True)
def Pull(self, remote='origin', branch='master', cleanup=False):
    """Clone if missing, optionally clean/reset the work tree, switch to
    *branch*, and hard-reset to the remote commit when behind. Returns True."""
    noisy = not self.quiet or self.noisy_clone
    if not os.path.isdir(self.path):
        cmd(['git', 'clone', self.remotes[remote], self.path],
            echo=noisy, critical=True, show_output=noisy)
    with Chdir(self.path, quiet=self.quiet):
        if cleanup:
            # Discard untracked files and any local modifications.
            cmd(['git', 'clean', '-fdx'], echo=not self.quiet, critical=True)
            cmd(['git', 'reset', '--hard'], echo=not self.quiet, critical=True)
        if self.current_branch != branch:
            ref = 'remotes/{}/{}'.format(remote, branch)
            cmd(['git', 'checkout', '-B', branch, ref, '--'], echo=not self.quiet, critical=True)
        if self.current_commit != self.remote_commit:
            cmd(['git', 'reset', '--hard', '{}/{}'.format(remote, branch)],
                echo=not self.quiet, critical=True)
    return True
def process(self, progress):
    """Run the configured command in the working directory (or the build
    path). Returns True on success, False otherwise."""
    if "build_path" not in self._context:
        logging.error("source path not known for {}, are you missing a matching retrieval script?".format(self.name))
        # BUGFIX: the original fell through after logging and crashed with a
        # KeyError on the missing "build_path".
        return False
    environment = dict(self.__environment() if self.__environment() is not None else config["__environment"])
    cwd = str(self.__working_directory() if self.__working_directory() is not None else self._context["build_path"])
    with os_utils.Chdir(cwd):
        if not os_utils.cmd(self.__command, critical=False, env=environment, echo=True, show_output=True):
            return False
    return True
def run(self, CMAKE, env=None, dir='.', moreflags=None):
    """Run cmake in *dir* with the configured generator and -D definitions.

    Returns the result of cmd() (critical=True, so failures abort).
    """
    if env is None:
        env = ENV.env
    # BUGFIX: moreflags previously defaulted to a shared mutable list; use the
    # None-sentinel pattern (consistent with the other run() overload).
    if moreflags is None:
        moreflags = []
    flags = []
    if self.generator is not None:
        flags += ['-G', self.generator]
    for key, value in self.flags.items():
        flags += ['-D{0}={1}'.format(key, value)]
    flags += moreflags
    with log.info('Running CMake:'):
        BuildEnv.dump(env)
        # Note: removed the unreachable `return False` that followed this.
        return cmd([CMAKE] + flags + [dir], env=env, critical=True, echo=True)
def process(self, progress):
    """Regenerate the CMake project in a fresh edit directory.

    Returns False when the build path is unknown; otherwise returns the
    result of the cmake invocation.
    """
    if "build_path" not in self._context:
        logging.error("source path not known for {},"
                      " are you missing a matching retrieval script?".format(self._context.name))
        # BUGFIX: return after logging instead of falling through to a KeyError.
        return False
    # Always start from a clean edit directory.
    if os.path.exists(self._context['edit_path']):
        shutil.rmtree(self._context['edit_path'])
    os.mkdir(self._context['edit_path'])
    soutpath = os.path.join(self._context["build_path"], "stdout.log")
    serrpath = os.path.join(self._context["build_path"], "stderr.log")
    # The log files are opened (truncated) even though cmake output goes to
    # the console here — preserved from the original behavior.
    with open(soutpath, "w") as sout:
        with open(serrpath, "w") as serr:
            with os_utils.Chdir(self._context['edit_path']):
                # Note: removed the unreachable trailing `return True`.
                return os_utils.cmd([config["paths"]["cmake"], "-G", self.__generator_name(), ".."] + self.__arguments,
                                    echo=True, show_output=True, critical=False)
def run(self, CMAKE='cmake', env=None, dir='.', moreflags=None, env_dump=False):
    """Run cmake in *dir* with the configured generator and -D definitions,
    optionally dumping the environment first."""
    if env is None:
        env = ENV.env
    if moreflags is None:
        moreflags = []
    flags = []
    if self.generator is not None:
        flags.extend(['-G', self.generator])
    for key, value in self.flags.items():
        flags.append('-D{0}={1}'.format(key, value))
    flags.extend(moreflags)
    with log.info('Running CMake:'):
        if env_dump:
            BuildEnv.dump(env)
        return cmd([CMAKE] + flags + [dir], env=env, critical=True, echo=True)
def Build(self, target_file, fpm='fpm'):
    """Assemble an fpm command line from the configured package metadata and
    run it to produce *target_file*."""
    cmdline = [
        fpm,
        '-s', self.input_type,
        '-t', self.output_type,
        '-C', self.workdir,
        '-p', target_file,
        '-n', self.name,
        '-v', self.version,
        '-a', self.architecture,
    ]
    if self.maintainer != '':
        cmdline += ['-m', self.maintainer]
    if self.description != '':
        cmdline += ['--description', self.description]
    if self.iteration > 0:
        cmdline += ['--iteration', self.iteration]
    for dep in self.dependencies:
        cmdline += ['-d', dep]
    for provided in self.provides:
        cmdline += ['--provides', provided]
    for conflict in self.conflicts:
        cmdline += ['--conflicts', conflict]
    for replacee in self.replaces:
        cmdline += ['--replaces', replacee]
    # Loop variable renamed so it no longer shadows the module-level `config`.
    for cfgfile in self.configs:
        cmdline += ['--config-files', cfgfile]
    cmdline += self._BuildIfNotNone('--after-install', self.afterInstall)
    cmdline += self._BuildIfNotNone('--before-install', self.beforeInstall)
    cmdline += self._BuildIfNotNone('--after-remove', self.afterRemove)
    cmdline += self._BuildIfNotNone('--before-remove', self.beforeRemove)
    for inp in self.inputs:
        cmdline += [inp]
    return cmd(cmdline, critical=True, echo=True)
def Pull(self, remote='origin', branch='HEAD', commit=None, tag=None, cleanup=False):
    """Clone/sync the working copy to the requested remote state.

    branch='HEAD' resolves to the remote's default branch. A *tag*
    overrides *commit*; an explicit commit/tag is checked out directly,
    otherwise the branch is hard-reset to the remote commit. LFS content
    is pulled when the repository uses git-lfs. Returns True (or None for
    submodules, which must not call Pull).
    """
    if branch == 'HEAD':
        # Resolve the symbolic HEAD to the remote's default branch name.
        branch = self.remotes[remote].head_branch
    if self.submodule:
        # Submodules are synced by their superproject, not pulled directly.
        log.error('Submodules should not call Pull!')
        return
    if not os.path.isdir(self.path):
        # First contact: clone (noPasswordEnv prevents interactive auth prompts).
        cmd(['git', 'clone', self.remotes[remote].fetch_uri, self.path], echo=not self.quiet or self.noisy_clone, critical=True, show_output=not self.quiet or self.noisy_clone, env=self.noPasswordEnv)
    with Chdir(self.path, quiet=self.quiet):
        if cleanup:
            # Drop untracked files and local modifications before syncing.
            cmd(['git', 'clean', '-fdx'], echo=not self.quiet, critical=True)
            cmd(['git', 'reset', '--hard'], echo=not self.quiet, critical=True)
        if self.current_branch != branch:
            ref = 'remotes/{}/{}'.format(remote, branch)
            cmd(['git', 'checkout', '-B', branch, ref, '--'], echo=not self.quiet, critical=True)
        if tag is not None:
            # Tag wins over any explicit commit; resolve it to a hash.
            commit = self._resolveTagNoChdir(tag)
        if commit is not None:
            cmd(['git', 'checkout', commit], echo=not self.quiet, critical=True)
        else:
            if self.current_commit != self.remote_commit:
                cmd([
                    'git', 'reset', '--hard', '{}/{}'.format(
                        remote, branch)
                ], echo=not self.quiet, critical=True)
        if self.UsesLFS():
            log.info('git-lfs detected!')
            cmd(['git', 'lfs', 'pull'], echo=not self.quiet, critical=True)
    return True
config.Load("user-config.yml", merge=True, defaults=userconfig) EXECUTABLES = config.get("paths.executables") ENV.appendTo("PATH", os.path.dirname(EXECUTABLES["7za"])) ENV.set("QMAKESPEC", config.get("qt-makespec", "win32-msvc2013")) #: x64 or x86 short_arch = "x64" if config["architecture"] == "x86_64" else "x86" #: 64 or 32 nbits = "64" if config["architecture"] == "x86_64" else "32" superrepo = os.path.join("build", "modorganizer_super") if not os.path.isdir(superrepo): os_utils.ensureDirExists(superrepo) with os_utils.Chdir(superrepo): os_utils.cmd([EXECUTABLES["git"], "init"], show_output=True, critical=True) ymlvars = {"nbits": nbits, "script_dir": script_dir} prerequisites = YAMLConfig("prerequisites.yml", variables=ymlvars).cfg with log.info("Downloading prerequisites..."): dlPackagesIn(prerequisites) # Copied from Unimake. projs = [ ("modorganizer-archive", "archive", "master", ["7zip", "Qt5"]), ("modorganizer-uibase", "uibase", "new_vfs_library", ["Qt5", "boost"]), ("modorganizer-lootcli", "lootcli", "master", ["LootApi", "boost"]), ("modorganizer-esptk", "esptk", "master", ["boost"]), ("modorganizer-bsatk", "bsatk", "master", ["zlib"]), ("modorganizer-nxmhandler", "nxmhandler", "master", ["Qt5"]), ("modorganizer-helper", "helper", "master", ["Qt5"]),
def build(self):
    """Reconcile .gitmodules with .git/config and bring every submodule up
    to date.

    Parses both files into {section-id: {key: value}} dicts, warns about
    entries present in only one of them (re-adding submodules that are
    missing from .git/config), then checks each submodule's recorded
    commit against the superproject's tree and syncs/updates as needed.
    """
    # --- Parse .gitmodules (INI-ish; sections matched by REG_SUBMODULE_SECTION). ---
    gitmodules = {}
    with open(self.gitmodulesfile, 'r') as tomlf:
        smid = None
        for line in tomlf:
            line = line.strip()
            m = REG_SUBMODULE_SECTION.match(line)
            if m is not None:
                smid = m.group(1).strip()
                gitmodules[smid] = {}
            if '=' in line:
                k, v = line.split('=', 2)
                gitmodules[smid][k.strip()] = v.strip()
    # --- Parse .git/config the same way, but tolerate keys before any section. ---
    gitconfig = {}
    with open(self.gitconfigfile, 'r') as tomlf:
        smid = None
        for line in tomlf:
            line = line.strip()
            m = REG_SUBMODULE_SECTION.match(line)
            if m is not None:
                smid = m.group(1).strip()
                gitconfig[smid] = {}
            if smid is not None and '=' in line:
                k, v = line.split('=', 2)
                gitconfig[smid][k.strip()] = v.strip()
    '''
    with open(self.gitmodulesfile + '.yml', 'w') as f:
        yaml.dump(gitmodules, f, default_flow_style=False)
    with open('.gitconfig.yml', 'w') as f:
        yaml.dump(gitconfig, f, default_flow_style=False)
    '''
    # Report submodules configured locally but absent from .gitmodules.
    for repoID, repoconf in gitconfig.items():
        if repoID not in gitmodules.keys():
            with log.warn('Submodule %s is present in .git/config but not .gitmodules!', repoID):
                pathspec = repoconf.get('path', repoID)
                path = os.path.abspath(pathspec)
                tag = repoconf.get('tag', None)
                branch = repoconf.get('branch', 'HEAD' if tag is None else None)
                log.info('path = %s', pathspec)
    # Re-add submodules declared in .gitmodules but missing from .git/config.
    for repoID, repoconf in gitmodules.items():
        if repoID not in gitconfig.keys():
            with log.warn('Submodule %s is present in .gitmodules but not .git/config!', repoID):
                pathspec = repoconf.get('path', repoID)
                path = os.path.abspath(pathspec)
                tag = repoconf.get('tag', None)
                branch = repoconf.get('branch', 'HEAD' if tag is None else None)
                opts = []
                if branch != 'HEAD':
                    opts += ['-b', branch]
                log.info('path = %s', pathspec)
                if os.path.isdir(path):
                    # `git submodule add` refuses an existing directory.
                    log.warn('Removing existing %s directory.', path)
                    shutil.rmtree(path)
                cmd = ['git', 'submodule', 'add'] + opts + ['-f', '--name', repoID, '--', repoconf.get('url'), pathspec]
                os_utils.cmd(cmd, critical=True, echo=self.should_echo_commands(), show_output=True)
                #log.error('Would exec: %s', ' '.join(cmd))
    # Sync/update every declared submodule whose commit differs from the
    # commit recorded in the superproject's tree.
    for repoID, repoconf in gitmodules.items():
        with log.info('Checking %s...', repoID):
            pathspec = repoconf.get('path', repoID)
            path = os.path.abspath(pathspec)
            tag = repoconf.get('tag', None)
            branch = repoconf.get('branch', 'HEAD' if tag is None else None)
            if os.path.isdir(path):
                desired_commit = ''
                # `git ls-tree <branch> <path>` prints: "<mode> commit <sha>\t<path>".
                cmdline = ['git', 'ls-tree', Git.GetBranch(), pathspec]
                stdout, stderr = os_utils.cmd_output(cmdline, echo=self.should_echo_commands(), critical=True)
                skip_this = False
                for line in (stdout + stderr).decode('utf-8').splitlines():
                    if line.startswith('error:') or line.startswith('fatal:'):
                        log.critical(line)
                        raise error.SubprocessThrewError(cmdline, line)
                    # NOTE(review): this rebinds the loop variable `repoID` to the
                    # tab-separated path field — looks unintentional; confirm.
                    line, repoID = line.strip().split('\t')
                    _, _, desired_commit = line.split(' ')
                # NOTE(review): skip_this is never set True anywhere visible.
                if not skip_this:
                    with os_utils.Chdir(path, quiet=not self.should_echo_commands()):
                        cur_commit = Git.GetCommit(short=False, quiet=not self.should_echo_commands())
                        if cur_commit == desired_commit:
                            log.info('Commits are synced, skipping.')
                            continue
            repo = GitRepository(path, origin_uri=repoconf['url'], submodule=True)
            if repo.CheckForUpdates(branch=branch, quiet=False):
                if os.path.isdir(path):
                    os_utils.cmd(['git', 'submodule', 'sync', '--', pathspec], critical=True, echo=self.should_echo_commands(), show_output=True)
                os_utils.cmd(['git', 'submodule', 'update', '--init', '--recursive', pathspec], critical=True, echo=self.should_echo_commands(), show_output=True)
def GenerateForWord(phrase: Phrase, voice: Voice, writtenfiles: set, args: Optional[argparse.Namespace] = None):
    """Generate (or reuse from cache) the OGG file for *phrase* in *voice*.

    Skips OLD_VOX phrases; synthesizes audio with festival's text2wave (or
    ffmpeg for SFX phrases), post-processes with sox, encodes with
    oggenc + ffmpeg, records probe metadata into a FileData, and caches
    the result keyed by an MD5 of everything that influences the output.
    """
    global PHRASELENGTHS, OLD_SFX, KNOWN_PHONEMES, OTHERSOUNDS
    my_phonemes = {}
    if phrase.hasFlag(EPhraseFlags.OLD_VOX):
        log.info('Skipping %s.ogg (Marked as OLD_VOX)', phrase.id)
        return
    if phrase.hasFlag(EPhraseFlags.NOT_VOX):
        OTHERSOUNDS += [phrase.id]
    if phrase.parsed_phrase is not None:
        for _word in phrase.parsed_phrase:
            _word = _word.lower()
            if _word in KNOWN_PHONEMES:
                my_phonemes[_word] = KNOWN_PHONEMES[_word].toLisp().replace('\n', '')

    filename = phrase.filename.format(ID=phrase.id, SEX=voice.assigned_sex)
    sox_args = voice.genSoxArgs(args)
    # Cache key: anything that changes the generated audio must be folded in.
    md5 = json.dumps(phrase.serialize())
    md5 += '\n'.join(my_phonemes.values())
    md5 += ''.join(sox_args) + PRE_SOX_ARGS + ''.join(RECOMPRESS_ARGS)
    md5 += voice.ID
    md5 += filename

    oggfile = os.path.abspath(os.path.join('dist', filename))
    cachebase = os.path.abspath(os.path.join('cache', phrase.id.replace(os.sep, '_').replace('.', '')))
    checkfile = cachebase + voice.ID + '.dat'
    cachefile = cachebase + voice.ID + '.json'

    fdata = FileData()
    fdata.voice = voice.ID
    fdata.filename = os.path.relpath(oggfile, 'dist')

    def commitWritten():
        # Record the produced file on the phrase and in writtenfiles.
        nonlocal phrase, voice, oggfile, writtenfiles, fdata
        if voice.ID == SFXVoice.ID:
            # Both masculine and feminine voicepacks link to SFX.
            for sex in ['fem', 'mas']:
                phrase.files[sex] = fdata
        else:
            phrase.files[voice.assigned_sex] = fdata
        writtenfiles.add(os.path.abspath(oggfile))

    parent = os.path.dirname(oggfile)
    if not os.path.isdir(parent):
        os.makedirs(parent)
    parent = os.path.dirname(cachefile)
    if not os.path.isdir(parent):
        os.makedirs(parent)

    # Cache hit: identical MD5 means the existing OGG is still valid.
    if os.path.isfile(oggfile) and os.path.isfile(cachefile):
        old_md5 = ''
        if os.path.isfile(checkfile):
            with open(checkfile, 'r') as md5f:
                old_md5 = md5f.read()
        if old_md5 == md5:
            cachedata = {}
            with open(cachefile, 'r') as cachef:
                cachedata = json.load(cachef)
            fdata.deserialize(cachedata)
            log.info('Skipping {0} for {1} (exists)'.format(filename, voice.ID))
            commitWritten()
            return

    log.info('Generating {0} for {1} ({2!r})'.format(filename, voice.ID, phrase.phrase))
    text2wave = None
    if phrase.hasFlag(EPhraseFlags.SFX):
        # SFX phrases hold a path to an existing sound, not text to speak.
        text2wave = 'ffmpeg -i ' + phrase.phrase + ' tmp/VOX-word.wav'
    else:
        with open('tmp/VOX-word.txt', 'w') as wf:
            wf.write(phrase.phrase)
        text2wave = 'text2wave tmp/VOX-word.txt -o tmp/VOX-word.wav'
        if os.path.isfile('tmp/VOXdict.lisp'):
            text2wave = 'text2wave -eval tmp/VOXdict.lisp tmp/VOX-word.txt -o tmp/VOX-word.wav'
    with open(checkfile, 'w') as wf:
        wf.write(md5)
    # Remove intermediates from any previous run.
    for fn in ('tmp/VOX-word.wav', 'tmp/VOX-soxpre-word.wav', 'tmp/VOX-sox-word.wav', 'tmp/VOX-encoded.ogg'):
        if os.path.isfile(fn):
            os.remove(fn)

    # Each entry: (argv, file that command is expected to produce).
    cmds = []
    cmds += [(text2wave.split(' '), 'tmp/VOX-word.wav')]
    if not phrase.hasFlag(EPhraseFlags.NO_PROCESS) or not phrase.hasFlag(EPhraseFlags.NO_TRIM):
        cmds += [(['sox', 'tmp/VOX-word.wav', 'tmp/VOX-soxpre-word.wav'] + PRE_SOX_ARGS.split(' '), 'tmp/VOX-soxpre-word.wav')]
    if not phrase.hasFlag(EPhraseFlags.NO_PROCESS):
        cmds += [(['sox', cmds[-1][1], 'tmp/VOX-sox-word.wav'] + sox_args, 'tmp/VOX-sox-word.wav')]
    cmds += [(['oggenc', cmds[-1][1], '-o', 'tmp/VOX-encoded.ogg'], 'tmp/VOX-encoded.ogg')]
    # str() so a numeric --threads value still forms a valid argv element.
    cmds += [(['ffmpeg', '-i', 'tmp/VOX-encoded.ogg'] + RECOMPRESS_ARGS + ['-threads', str(args.threads)] + [oggfile], oggfile)]

    for command_spec in cmds:
        (command, cfn) = command_spec
        with os_utils.TimeExecution(command[0]):
            os_utils.cmd(command, echo=True, critical=True, show_output=command[0] in ('text2wave',))

    command = ['ffprobe', '-v', 'quiet', '-print_format', 'json', '-show_format', '-show_streams', oggfile]
    with os_utils.TimeExecution(command[0]):
        captured = os_utils.cmd_out(command, echo=False, critical=True)
        fdata.fromJSON(json.loads(captured))
        fdata.checksum = md5sum(oggfile)

    for command_spec in cmds:
        (command, cfn) = command_spec
        # BUGFIX: the original tested the stale loop variable `fn` (left over
        # from the cleanup loop above) instead of this command's output `cfn`.
        if not os.path.isfile(cfn):
            log.error("File '{0}' doesn't exist, command '{1}' probably failed!".format(cfn, command))
            sys.exit(1)

    with open(cachefile, 'w') as f:
        json.dump(fdata.serialize(), f)
    commitWritten()
import argparse
import os

from buildtools import os_utils

# Play previously generated VOX words through the `play` utility.
argp = argparse.ArgumentParser()
argp.add_argument('--voice', '-V', choices=['fem', 'mas'], default='mas')
argp.add_argument('words', nargs='+', help='The words you wish to play.')
args = argp.parse_args()

playcmd = [os_utils.which('play')]
for word in args.words:
    playcmd.append(os.path.join('dist', 'sound', f'vox_{args.voice}', word.strip() + '.ogg'))
os_utils.cmd(playcmd, echo=True, show_output=True, globbify=False)
def build(self):
    """Install node dependencies with npm, then run the grunt build tasks."""
    with os_utils.Chdir(os.path.join(self.base_path)):
        npm = os_utils.which('npm')
        os_utils.cmd([npm, 'install'], echo=True, show_output=True, critical=True)
        grunt = os_utils.which('grunt')
        os_utils.cmd([grunt, 'requirejs', 'uglify:main'], echo=True, show_output=True, critical=True)
def build(self):
    """Minify the first input file with UglifyJS, writing the target."""
    os_utils.ensureDirExists(os.path.dirname(self.target))
    cmdline = [self.uglifyjs_executable]
    cmdline += self.options
    cmdline += ['-o', self.target, self.files[0]]
    os_utils.cmd(cmdline, critical=True, echo=self.should_echo_commands())