示例#1
0
def run(destiny):
    """Dispatch a `send` job for every entry of the current working directory.

    Each name returned by ``os.listdir`` is submitted as ``send(entry, destiny)``
    to a two-worker executor; leaving the ``with`` block waits for all jobs.
    """
    entries = os.listdir(os.getcwd())
    with PE(max_workers=2) as pool:
        for entry in entries:
            pool.submit(send, entry, destiny)
def Pattern_Generate_VCTK(wav_Path):
    """Collect VCTK wav files under *wav_Path* and submit pickle-generation jobs.

    The speaker label is derived from each file's parent directory name
    (prefixed with ``VCTK.``); only files whose upper-cased extension is in
    ``using_Extension`` are kept.  One ``Pickle_Generate`` job is submitted per
    collected file on a ``max_Worker``-wide pool.
    """
    os.makedirs(hp.Speaker_Embedding.Train.Pattern_Path, exist_ok=True)

    print('VCTK raw file list generating...')
    collected = []
    for root, _, names in os.walk(wav_Path):
        # Parent directory (with separators normalized) names the speaker.
        speaker = 'VCTK.{}'.format(root.replace('\\', '/').split('/')[-1])
        for name in names:
            path = os.path.join(root, name).replace('\\', '/')
            if os.path.splitext(path)[1].upper() in using_Extension:
                collected.append((speaker, path))
    print('VCTK raw file list generating...Done')

    with PE(max_workers=max_Worker) as pe:
        for index, (speaker, path) in enumerate(collected):
            pe.submit(Pickle_Generate, 'VCTK', index, speaker, path)
def Pattern_Generate_VCTK(wav_Path="D:/Simulation_Raw_Data/VCTK/wav48"):
    """Scan *wav_Path* for VCTK audio files and submit pickle-generation jobs.

    Speaker labels come from each file's parent directory (``VCTK.<dir>``);
    files are filtered by ``using_Extension`` and fanned out to
    ``Pickle_Generate`` on a ``max_Worker``-wide pool.
    """
    if not os.path.exists(speaker_Embedding_Parameters.pattern_Path):
        os.makedirs(speaker_Embedding_Parameters.pattern_Path)

    print("VCTK raw file list generating...")
    file_List = []
    for root, _, names in os.walk(wav_Path):
        speaker = "VCTK." + root.replace("\\", "/").split("/")[-1]
        file_List.extend(
            (speaker, os.path.join(root, name).replace("\\", "/"))
            for name in names
            if os.path.splitext(name)[1].upper() in using_Extension)
    print("VCTK raw file list generating...Done")

    with PE(max_workers=max_Worker) as pe:
        for index, (speaker, path) in enumerate(file_List):
            pe.submit(Pickle_Generate, "VCTK", index, speaker, path)
        # NOTE(review): truncated fragment — the enclosing function header and
        # the earlier corpus-loading branches it parallels are outside this view.
        bc2013_File_Path_List, bc2013_Text_List_Dict = BC2013_Info_Load(
            bc2013_Path=argument_Dict['bc2013_path'],
            max_Count=argument_Dict['max_count'])
        total_Pattern_Count += len(bc2013_File_Path_List)
    # Optional 'fv' corpus, loaded only when a path was supplied on the CLI.
    if not argument_Dict['fv_path'] is None:
        fv_File_Path_List, fv_Text_List_Dict, fv_Speaker_Dict = FV_Info_Load(
            fv_Path=argument_Dict['fv_path'],
            max_Count=argument_Dict['max_count'])
        total_Pattern_Count += len(fv_File_Path_List)

    # At least one corpus must have produced files.
    if total_Pattern_Count == 0:
        raise ValueError('Total pattern count is zero.')

    os.makedirs(hp_Dict['Train']['Pattern_Path'], exist_ok=True)
    total_Generated_Pattern_Count = 0
    # Fan per-file pattern generation out over a worker pool.
    with PE(max_workers=int(argument_Dict['max_worker'])) as pe:
        if not argument_Dict['lj_path'] is None:
            for index, file_Path in enumerate(lj_File_Path_List):
                pe.submit(
                    Pattern_File_Generate, file_Path, lj_Text_Dict[file_Path],
                    token_Index_Dict, 'LJ', '',
                    'LJ {:05d}/{:05d}    Total {:05d}/{:05d}'.format(
                        index, len(lj_File_Path_List),
                        total_Generated_Pattern_Count,
                        total_Pattern_Count), 60, argument_Dict['all_save'])
                # NOTE(review): incremented at submit time, not completion time,
                # so the progress label lags actual completion.
                total_Generated_Pattern_Count += 1

        if not argument_Dict['vctk_path'] is None:
            for index, file_Path in enumerate(vctk_File_Path_List):
                # NOTE(review): this call is cut off at the end of the chunk.
                pe.submit(
                    Pattern_File_Generate, file_Path,
示例#5
0
            # NOTE(review): tail of an unseen file-collection loop.  Records
            # (word, pronunciation, talker, path); backslashes are flattened so
            # paths are uniform across platforms.
            file_List.append((word, pronunciation_Dict[word], talker,
                              os.path.join(root, file).replace('\\', '/')))

    return file_List


if __name__ == '__main__':
    # Script entry point: gather the raw-file list, fan pattern generation out
    # over a worker pool, then write metadata.
    os.makedirs(hp_Dict['Pattern']['Pattern_Path'], exist_ok=True)
    max_Worker = 10

    file_List = Get_File_List()

    print(semantic_Dict)

    with PE(max_workers=max_Worker) as pe:
        for word, pronunciation, talker, voice_File_Path in file_List:
            pe.submit(
                Pattern_File_Geneate,  # NOTE(review): spelling presumably matches the definition site — confirm before renaming.
                word,
                pronunciation,
                talker,  # In paper, identifier is 'talker'.
                voice_File_Path)

    Metadata_Generate()

    # Example
    #Metadata_Subset_Generate(
    #    identifier_List= ['Agnes', 'Alex', 'Allison', 'Ava',  'Bruce', 'Fred', 'Junior', 'Kathy', 'Princess', 'Ralph', 'Samantha', 'Susan', 'Tom', 'Vicki', 'Victoria'],
    #    metadata_File_Name = 'METADATA.1KW.15T.PICKLE'
    #    )
示例#6
0
    # NOTE(review): fragment — the enclosing scope header is outside this view;
    # `args`, `path_List`, `speaker_Dict`, `dataset_Dict`, `tag_Dict` are
    # defined above it.
    if not args.vctk_path is None:
        vctk_Paths, vctk_Speaker_Dict = VCTK_Info_Load(path=args.vctk_path)
        path_List.extend(vctk_Paths)
        speaker_Dict.update(vctk_Speaker_Dict)
        dataset_Dict.update({path: 'VCTK' for path in vctk_Paths})
        tag_Dict.update({path: '' for path in vctk_Paths})
    if not args.libri_path is None:
        libri_Paths, libri_Speaker_Dict = Libri_Info_Load(path=args.libri_path)
        path_List.extend(libri_Paths)
        speaker_Dict.update(libri_Speaker_Dict)
        dataset_Dict.update({path: 'Libri' for path in libri_Paths})
        tag_Dict.update({path: '' for path in libri_Paths})

    if len(path_List) == 0:
        raise ValueError('Total info count must be bigger than 0.')

    speaker_Index_Dict = Speaker_Index_Dict_Generate(speaker_Dict)

    # One pattern file per path, generated on a worker pool; tqdm shows
    # progress.  NOTE(review): the lambda requires PE to be a thread pool —
    # lambdas do not pickle for process pools; confirm PE's binding.
    with PE(max_workers=args.max_worker) as pe:
        for _ in tqdm(pe.map(
                lambda params: Pattern_File_Generate(*params),
            [(path, speaker_Index_Dict[speaker_Dict[path]], speaker_Dict[path],
              dataset_Dict[path], tag_Dict[path]) for path in path_List]),
                      total=len(path_List)):
            pass

    Metadata_Generate()

# Usage examples:
# python Pattern_Generator.py -lj "D:\Pattern\ENG\LJSpeech" -bc2013 "D:\Pattern\ENG\BC2013" -cmua "D:\Pattern\ENG\CMUA" -vctk "D:\Pattern\ENG\VCTK" -libri "D:\Pattern\ENG\LibriTTS"
# python Pattern_Generator.py -vctk "D:\Pattern\ENG\VCTK" -libri "D:\Pattern\ENG\LibriTTS"
    def handle_multi(self, queue):
        """Download queued items in bulk batches using a thread pool.

        Items are buffered until ``self.bulksize`` are pending, then the batch
        is downloaded concurrently via ``self.download``.  For each finished
        request this method:

        * records failures in ``self.stats['downloads']['errors']``,
        * skips files whose content checksum matches the previous run
          (``self.old_states``), recording them under ``self.stats['ignored']``,
        * queues changed files for upload (``adapter_queue`` ->
          ``self.__process_callback``) and, when a callback is registered,
          collects content-stripped responses for ``self.__callback``.

        A final, smaller batch is flushed after the main loop.

        BUGFIX: the first batch loop contained a leftover debug
        ``print(response); exit()`` on HTTP failure, which aborted the entire
        program on the first failed download and made the error accounting
        below it unreachable — removed so failures are handled the same way as
        in the final-batch loop.
        """
        download_queue = []
        adapter_queue = []
        callback_queue = []

        it = 0  # running progress counter, printed as '<it>/<count>'
        count = self.stats['total_images']
        for item in queue:
            download_queue.append(item)

            # Flush a full batch.
            if (len(download_queue) % self.bulksize) == 0:
                with PE(max_workers=self.bulksize) as executor:
                    for request in executor.map(self.download, download_queue):
                        item = request['item']
                        response = request['response']
                        response['identical'] = False

                        # HTTP failure: record it and keep going (debug
                        # print/exit leftover removed here — see docstring).
                        if not response['status']:
                            it = self.stats['downloads']['total_errors']
                            self.stats['downloads']['errors'][it] = response
                            self.stats['downloads']['total_errors'] += 1

                            print(
                                '[error] Could not download file: "{}"'.format(
                                    item['url']))
                            it += 1

                            continue

                        # Unchanged file: checksum matches the previous run.
                        response_hash = hashit(response['content'])
                        if item['destination'] in self.old_states:
                            if self.old_states[
                                    item['destination']] == response_hash:
                                text = '[info] Identical file: "{}" found.'.format(
                                    item['url'])
                                if self.verbose:
                                    print(text)

                                item['message'] = text
                                # Consistency with the final-batch loop below:
                                # record the content type on the ignored entry.
                                item['content-type'] = response['content-type']

                                index = len(self.stats['ignored']['files'])
                                self.stats['ignored']['files'][index] = item
                                self.stats['ignored']['total'] += 1

                                self.identicals[item['destination']] = True
                                response['identical'] = True

                                it += 1

                                # Hand a content-stripped response to the
                                # registered callback, if any.
                                if self.__callback is not None:
                                    del response['content']
                                    response['destination'] = item[
                                        'destination']

                                    callback_queue.append(response)

                                continue

                        # Changed (or new) file: remember its checksum and
                        # queue the body for the adapter/upload step.
                        self.states[item['destination']] = response_hash

                        adapter_queue.append({
                            'destination':
                            item['destination'],
                            'body':
                            response['content'],
                            'content-type':
                            response['content-type']
                        })

                        del response['content']
                        response['destination'] = item['destination']

                        if self.__callback is not None:
                            callback_queue.append(response)

                        # NOTE(review): `it` doubles as success-stats index and
                        # progress counter — preserved as originally written.
                        it = self.stats['downloads']['total_successes']
                        self.stats['downloads']['successes'][it] = response
                        self.stats['downloads']['total_successes'] += 1
                        it += 1

                stats = self.__process_callback(adapter_queue)
                self.handle_upload_stats(stats)
                adapter_queue.clear()
                download_queue.clear()
                print('{}/{}'.format(it, count))

                # Callback
                if self.__callback is not None:
                    self.__callback(callback_queue)

                    callback_queue.clear()

        # Last download trial if the queue is not empty
        if len(download_queue) > 0:
            with PE(max_workers=self.bulksize) as executor:
                for request in executor.map(self.download, download_queue):
                    item = request['item']
                    response = request['response']
                    response['identical'] = False

                    # HTTP failure: record and continue.
                    if not response['status']:
                        it = self.stats['downloads']['total_errors']
                        self.stats['downloads']['errors'][it] = response
                        self.stats['downloads']['total_errors'] += 1

                        print('[error] Could not download file: "{}"'.format(
                            item['url']))

                        continue

                    response_hash = hashit(response['content'])
                    if item['destination'] in self.old_states:
                        if self.old_states[
                                item['destination']] == response_hash:
                            text = '[info] Identical file: "{}" found.'.format(
                                item['url'])
                            if self.verbose:
                                print(text)

                            item['message'] = text
                            item['content-type'] = response['content-type']

                            index = len(self.stats['ignored']['files'])
                            self.stats['ignored']['files'][index] = item
                            self.stats['ignored']['total'] += 1

                            self.identicals[item['destination']] = True
                            response['identical'] = True

                            it += 1

                            # If callback is set
                            if self.__callback is not None:
                                del response['content']
                                response['destination'] = item['destination']

                                callback_queue.append(response)

                            continue

                    self.states[item['destination']] = response_hash

                    adapter_queue.append({
                        'destination':
                        item['destination'],
                        'body':
                        response['content'],
                        'content-type':
                        response['content-type']
                    })

                    # Trim the response to a stable, content-free summary
                    # before recording/forwarding it.
                    response = {
                        'destination': item['destination'],
                        'url': response['url'],
                        'httpcode': response['httpcode'],
                        'status': response['status'],
                        'content-type': response['content-type'],
                        'identical': response['identical']
                    }

                    if self.__callback is not None:
                        callback_queue.append(response)

                    it = self.stats['downloads']['total_successes']
                    self.stats['downloads']['successes'][it] = response
                    self.stats['downloads']['total_successes'] += 1

            stats = self.__process_callback(adapter_queue)
            self.handle_upload_stats(stats)
            adapter_queue.clear()
            download_queue.clear()

            # Callback
            if self.__callback is not None:
                self.__callback(callback_queue)

                callback_queue.clear()