Example #1
 def build(self):
     os_utils.ensureDirExists(os.path.dirname(self.target))
     stdout, stderr = os_utils.cmd_output([self.js2coffee_path]+self.files+self.js2coffee_opts, echo=self.should_echo_commands(), critical=True)
     if stderr.strip() != '':
         log.error(stderr)
     with codecs.open(self.target, 'w', encoding='utf-8-sig') as outf:
         outf.write(stdout)
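
A rough standard-library equivalent of the same pattern, for readers without the buildtools os_utils helpers; the function name, tool path, file list, and options below are placeholders, not values from the example.

import codecs
import os
import subprocess

def convert_js(js2coffee_path, files, opts, target):
    # Mirror os_utils.ensureDirExists / cmd_output with the standard library.
    os.makedirs(os.path.dirname(target), exist_ok=True)
    proc = subprocess.run([js2coffee_path] + files + opts,
                          capture_output=True, text=True, check=True)
    if proc.stderr.strip():
        print(proc.stderr)
    # utf-8-sig prepends a BOM, matching the encoding used above.
    with codecs.open(target, 'w', encoding='utf-8-sig') as outf:
        outf.write(proc.stdout)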
Example #2
    def updateManifest(self):
        currentFiles = []
        newfiles = []
        if self.name in args.get_snapshot:
            log.info("Getting snapshot of %s (%s post-build)...", script_dir, self.name)
            for root, _, files in os.walk(script_dir):
                for filename in files:
                    currentFiles.append(os.path.relpath(os.path.abspath(os.path.join(root, filename)), script_dir))
            log.info("Comparing...")
            newfiles = [fn.replace("\\", "/") for fn in currentFiles if fn not in self.preExisting]

        newmanifest = {}
        with log.info("Checking for %d expected files...", len(self.expected)):
            for expectedFile in self.expected:
                if not os.path.isfile(expectedFile):
                    log.error("MISSING %s", expectedFile)
                    return False
                relfilepath = os.path.relpath(expectedFile, self.build_dir)
                newmanifest[relfilepath] = os.stat(expectedFile).st_mtime
            log.info("All check out!")

        with open(self.builder_meta_file, "w") as f:
            yaml.dump(
                {"configuration": self.configuration, "manifest": newmanifest, "newfiles": newfiles},
                f,
                default_flow_style=False,
            )

        return True
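
The core of updateManifest, reduced to a standalone sketch: record a relative-path to mtime map for every expected file and dump it as YAML. The names here (write_manifest, build_dir) are illustrative, not taken from the original class.

import os
import yaml

def write_manifest(expected_files, build_dir, out_path):
    manifest = {}
    for path in expected_files:
        if not os.path.isfile(path):
            return False  # a missing expected file fails the build, as above
        manifest[os.path.relpath(path, build_dir)] = os.stat(path).st_mtime
    with open(out_path, 'w') as f:
        yaml.dump({'manifest': manifest}, f, default_flow_style=False)
    return True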
Example #3
 def parseWord(self, line):
     """
     walkers: noun "w oo" 'k @ z'
     present: verb 'p r e' "z @ n t"
     monument: noun "mo" 'n y u' 'm @ n t'
     """
     global REGEX_SEARCH_STRINGS
     lineChunks = line.split(' ')
     self.name = lineChunks[0].strip(':')
     self.type = lineChunks[1].strip()
     pronunciation = ' '.join(lineChunks[2:])
     for match in REGEX_SEARCH_STRINGS.finditer(pronunciation):
         stressLevel = 0
         if match.group(1) == '"':
             stressLevel = 1
         phonemes = []
         for phoneme in match.group(2).split(' '):
             if phoneme not in self.VALID_PHONEMES:
                 log.error(
                     'INVALID PHONEME "{0}" IN LEX ENTRY "{1}"'.format(
                         phoneme, self.name))
                 sys.exit(1)
             if self.phoneset in self.PHONE_CONVERSIONS:
                 phoneset = self.PHONE_CONVERSIONS[self.phoneset]
                 if phoneme in phoneset:
                     phoneme = phoneset[phoneme]
             phonemes += [phoneme]
         self.syllables += [(phonemes, stressLevel)]
     log.info('Parsed {0} as {1}.'.format(pronunciation,
                                          repr(self.syllables)))
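
REGEX_SEARCH_STRINGS is defined outside this excerpt; judging from the docstring format, a pattern along the following lines would work (this regex is an assumption, not the original definition). Double quotes mark a stressed syllable group, single quotes an unstressed one.

import re

# Hypothetical stand-in for REGEX_SEARCH_STRINGS: group 1 is the quote
# character (the stress marker), group 2 the phonemes inside the quotes.
REGEX_SEARCH_STRINGS = re.compile(r'(["\'])([^"\']+)\1')

pronunciation = '"w oo" \'k @ z\''  # from the "walkers" docstring example
for m in REGEX_SEARCH_STRINGS.finditer(pronunciation):
    stress = 1 if m.group(1) == '"' else 0
    print(m.group(2).split(' '), stress)
# prints: ['w', 'oo'] 1   then   ['k', '@', 'z'] 0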
Example #4
    def build(self):
        definition = {}
        with open(self.filename, 'r') as r:
            definition = yaml.safe_load(r)['enum']

        if 'auto-value' in definition:
            autoval = definition['auto-value']
            i=autoval.get('start',0)
            for k in definition['values'].keys():
                if definition['values'][k].get('auto', True):
                    definition['values'][k]['value'] = (1 << i) if definition.get('flags', False) else i
                    i += 1

        flags = False
        if 'flags' in definition and definition['flags']:
            flags=True
            definition['tests']=definition.get('tests',{})
            definition['tests']['unique']=definition['tests'].get('unique',True)
            definition['tests']['single-bit-only']=definition['tests'].get('single-bit-only',True)

        default = definition.get('default', 0)
        for k,vpak in definition['values'].items():
            val = self._get_value_for(vpak)
            if self._get_for(vpak, 'default', False):
                if flags:
                    default |= val
                else:
                    default = val

        if flags or 'tests' in definition:
            with log.info('Testing %s....', definition['name']):
                tests = definition.get('tests',{})
                if 'increment' in tests:
                    incrdef = tests['increment']
                    start = incrdef.get('start',0)
                    stop = incrdef.get('stop', len(definition['values']))

                    vals = []
                    for k,vpak in definition['values'].items():
                        vals += [self._get_value_for(vpak)]

                    for i in range(start,stop):
                        if i not in vals:
                            log.error('increment: Missing value %d!', i)
                if 'unique' in tests and tests['unique']:
                    vals={}
                    for k,vpak in definition['values'].items():
                        val = self._get_value_for(vpak)
                        if val in vals:
                            log.error('unique: Entry %s is not using a unique value!', k)
                        vals[val]=True
                if flags:
                    if 'single-bit-only' in tests and tests['single-bit-only']:
                        for k,vpak in definition['values'].items():
                            val = self._get_value_for(vpak)
                            c = 0
                            while val > 0:
                                c += val & 1  # count set bits; more than one means it is not a single flag
                                val >>= 1
                                if c > 1:
                                    log.error('single-bit-only: Entry %s has too many bits!', k)
                                    break
        definition['default'] = default
        os_utils.ensureDirExists(os.path.dirname(self.target), noisy=True)
        with open(self.target, 'w') as w:
            self.writer.write(w, definition)
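
A worked illustration of the auto-value rule: when flags is true each auto entry receives its own bit (1 << i); otherwise values simply count up from the configured start (0 by default). The entry names below are made up for the demonstration.

values = ['READ', 'WRITE', 'EXECUTE']

for flags in (False, True):
    assigned = {name: (1 << i) if flags else i for i, name in enumerate(values)}
    print(flags, assigned)
# False {'READ': 0, 'WRITE': 1, 'EXECUTE': 2}
# True  {'READ': 1, 'WRITE': 2, 'EXECUTE': 4}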
Example #5
def GenerateForWord(phrase: Phrase, voice: Voice, writtenfiles: set, args: Optional[argparse.Namespace] = None):
    global PHRASELENGTHS, OLD_SFX, KNOWN_PHONEMES, OTHERSOUNDS
    my_phonemes = {}
    if phrase.hasFlag(EPhraseFlags.OLD_VOX):
        log.info('Skipping %s.ogg (Marked as OLD_VOX)', phrase.id)
        return
    if phrase.hasFlag(EPhraseFlags.NOT_VOX):
        OTHERSOUNDS += [phrase.id]

    if phrase.parsed_phrase is not None:
        for _word in phrase.parsed_phrase:
            _word = _word.lower()
            if _word in KNOWN_PHONEMES:
                my_phonemes[_word] = KNOWN_PHONEMES[_word].toLisp().replace('\n', '')


    filename = phrase.filename.format(ID=phrase.id, SEX=voice.assigned_sex)
    sox_args = voice.genSoxArgs(args)

    md5 = json.dumps(phrase.serialize())
    md5 += '\n'.join(my_phonemes.values())
    md5 += ''.join(sox_args) + PRE_SOX_ARGS + ''.join(RECOMPRESS_ARGS)
    md5 += voice.ID
    md5 += filename

    #filename = os.path.join('sound', 'vox_fem', phrase.id + '.ogg')
    #if '/' in phrase.id:
    #    filename = os.path.join(phrase.id + '.ogg')
    oggfile = os.path.abspath(os.path.join('dist', filename))
    cachebase = os.path.abspath(os.path.join('cache', phrase.id.replace(os.sep, '_').replace('.', '')))
    checkfile = cachebase + voice.ID + '.dat'
    cachefile = cachebase + voice.ID + '.json'

    fdata = FileData()
    fdata.voice = voice.ID
    fdata.filename = os.path.relpath(oggfile, 'dist')

    def commitWritten():
        nonlocal phrase, voice, oggfile, writtenfiles, fdata
        if voice.ID == SFXVoice.ID:
            # Both masculine and feminine voicepacks link to SFX.
            for sex in ['fem', 'mas']:
                phrase.files[sex] = fdata
        else:
            phrase.files[voice.assigned_sex] = fdata
        writtenfiles.add(os.path.abspath(oggfile))

    parent = os.path.dirname(oggfile)
    if not os.path.isdir(parent):
        os.makedirs(parent)

    parent = os.path.dirname(cachefile)
    if not os.path.isdir(parent):
        os.makedirs(parent)

    if os.path.isfile(oggfile) and os.path.isfile(cachefile):
        old_md5 = ''
        if os.path.isfile(checkfile):
            with open(checkfile, 'r') as md5f:
                old_md5 = md5f.read()
        if old_md5 == md5:
            cachedata = {}
            with open(cachefile, 'r') as cachef:
                cachedata = json.load(cachef)
            fdata.deserialize(cachedata)

            log.info('Skipping {0} for {1} (exists)'.format(filename, voice.ID))
            commitWritten()
            return
    log.info('Generating {0} for {1} ({2!r})'.format(filename, voice.ID, phrase.phrase))
    text2wave = None
    if phrase.hasFlag(EPhraseFlags.SFX):
        text2wave = 'ffmpeg -i '+phrase.phrase+' tmp/VOX-word.wav'
    else:
        with open('tmp/VOX-word.txt', 'w') as wf:
            wf.write(phrase.phrase)

        text2wave = 'text2wave tmp/VOX-word.txt -o tmp/VOX-word.wav'
        if os.path.isfile('tmp/VOXdict.lisp'):
            text2wave = 'text2wave -eval tmp/VOXdict.lisp tmp/VOX-word.txt -o tmp/VOX-word.wav'
    with open(checkfile, 'w') as wf:
        wf.write(md5)
    for fn in ('tmp/VOX-word.wav', 'tmp/VOX-soxpre-word.wav', 'tmp/VOX-sox-word.wav', 'tmp/VOX-encoded.ogg'):
        if os.path.isfile(fn):
            os.remove(fn)

    cmds = []
    cmds += [(text2wave.split(' '), 'tmp/VOX-word.wav')]
    if not phrase.hasFlag(EPhraseFlags.NO_PROCESS) or not phrase.hasFlag(EPhraseFlags.NO_TRIM):
        cmds += [(['sox', 'tmp/VOX-word.wav', 'tmp/VOX-soxpre-word.wav'] + PRE_SOX_ARGS.split(' '), 'tmp/VOX-soxpre-word.wav')]
    if not phrase.hasFlag(EPhraseFlags.NO_PROCESS):
        cmds += [(['sox', cmds[-1][1], 'tmp/VOX-sox-word.wav'] + sox_args, 'tmp/VOX-sox-word.wav')]
    cmds += [(['oggenc', cmds[-1][1], '-o', 'tmp/VOX-encoded.ogg'], 'tmp/VOX-encoded.ogg')]
    cmds += [(['ffmpeg', '-i', 'tmp/VOX-encoded.ogg'] + RECOMPRESS_ARGS + ['-threads', str(args.threads)] + [oggfile], oggfile)]
    for command_spec in cmds:
        (command, cfn) = command_spec
        with os_utils.TimeExecution(command[0]):
            os_utils.cmd(command, echo=True, critical=True, show_output=command[0] in ('text2wave',))

    command = ['ffprobe', '-v', 'quiet', '-print_format', 'json', '-show_format', '-show_streams', oggfile]
    with os_utils.TimeExecution(command[0]):
        captured = os_utils.cmd_out(command, echo=False, critical=True)
        fdata.fromJSON(json.loads(captured))
        fdata.checksum = md5sum(oggfile)

    for command_spec in cmds:
        (command, cfn) = command_spec
        if not os.path.isfile(cfn):
            log.error("File '{0}' doesn't exist, command '{1}' probably failed!".format(cfn, command))
            sys.exit(1)

    with open(cachefile, 'w') as f:
        json.dump(fdata.serialize(), f)

    commitWritten()
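
Despite the name, md5 above is a plain concatenation of everything that influences the output (the serialized phrase, phoneme definitions, sox and recompress arguments, voice ID, filename); it is compared against the stored .dat file to decide whether the existing ogg can be reused. A reduced sketch of that cache check, with placeholder names and paths:

import os

def is_up_to_date(checkfile, key, output):
    # Reuse the cached output only when it exists and the stored key matches.
    if os.path.isfile(output) and os.path.isfile(checkfile):
        with open(checkfile, 'r') as f:
            return f.read() == key
    return False

def store_key(checkfile, key):
    with open(checkfile, 'w') as f:
        f.write(key)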
Example #6
        tree = ET.parse(os.path.join(qtappdata_root, "qtcreator", "profiles.xml"))
        root = tree.getroot()

        profiles = []

        for profile in root.findall("data/valuemap"):
            profiles.append(
                (profile.find("value[@key='PE.Profile.Id']").text, profile.find("value[@key='PE.Profile.Name']").text)
            )

        arch = nbits + "bit"
        # filter() returns an iterator in Python 3; materialize it before indexing.
        profiles = list(filter(lambda x: arch in x[1], sorted(profiles, reverse=True)))[0]
        config["qt.profile.id"] = profiles[0]
        config["qt.profile.name"] = profiles[1].replace("%{Qt:Version}", "5.5.1")
except Exception as e:
    log.error(e)
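# Aside (not part of the original script): a self-contained illustration of
# the ElementTree lookups above, run against a tiny inline document shaped
# like Qt Creator's profiles.xml. The profile id and name values are invented.
import xml.etree.ElementTree as ET

_sample = """
<qtcreator>
  <data>
    <valuemap>
      <value key="PE.Profile.Id">{1234-abcd}</value>
      <value key="PE.Profile.Name">Desktop Qt %{Qt:Version} MSVC 64bit</value>
    </valuemap>
  </data>
</qtcreator>
"""
_root = ET.fromstring(_sample)
_profiles = [
    (vm.find("value[@key='PE.Profile.Id']").text,
     vm.find("value[@key='PE.Profile.Name']").text)
    for vm in _root.findall("data/valuemap")
]
# _profiles == [('{1234-abcd}', 'Desktop Qt %{Qt:Version} MSVC 64bit')]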

#####################################
# PREREQUISITES
#####################################
# This should probably be dumped into separate modules or something, but this'll do for now.
CMakeBuilder(
    "zlib", prerequisites["zlib"], firstDirIn(os.path.join(script_dir, "build", "zlib"), startswith="zlib-")
).TryBuild()

"""
zlib_dir = firstDirIn(os.path.join(script_dir, 'build', 'zlib'), startswith='zlib-')
with log.info('Building zlib...'):
    with os_utils.Chdir(zlib_dir):
        cmake = CMake()
        cmake.setFlag('CMAKE_BUILD_TYPE', config.get('cmake.build-type'))