Code Example #1
import fire
from utils import remove_all_files
from tensorpack.callbacks.saver import ModelSaver

# hp (hyper-parameters), IAFVocoder, Dataset, logger and TrainConfig are
# assumed to come from the project's own modules and from tensorpack;
# those imports are not part of this excerpt.


def train(case='default', ckpt=None, gpu=None, r=False):
    '''
    :param case: experiment case name
    :param ckpt: checkpoint to load the model from
    :param gpu: comma separated list of GPU(s) to use
    :param r: remove existing logs and start training from the beginning
    '''

    # load hyper-parameters for the given experiment case
    hp.set_hparam_yaml(case)
    # r=True: wipe the previous log directory and start from scratch
    if r:
        remove_all_files(hp.logdir)

    # model
    model = IAFVocoder(batch_size=hp.train.batch_size, length=hp.signal.length)

    # dataset
    dataset = Dataset(hp.data_path,
                      hp.train.batch_size,
                      length=hp.signal.length)
    print('dataset size is {}'.format(len(dataset.wav_files)))

    # set the log directory for training events and the model saver
    logger.set_logger_dir(hp.logdir)

    train_conf = TrainConfig(
        model=model,
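
Code Example #1 stops in the middle of the TrainConfig(...) call, so the imported fire module never appears in the excerpt. Below is a minimal sketch of how a fire-based entry point for train() typically looks; the script name in the comment is hypothetical and this wiring is an assumption, not part of the original snippet.

if __name__ == '__main__':
    # expose train() as a command-line interface, e.g.:
    #   python train.py --case=my_case --gpu=0 --r=True
    # (hypothetical invocation; the actual script name is not shown above)
    fire.Fire(train)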
Code Example #2
                        help='experiment case name of train1')
    parser.add_argument('case2',
                        type=str,
                        help='experiment case name of train2')
    parser.add_argument('-ckpt', help='checkpoint to load model.')
    parser.add_argument('-gpu', help='comma separated list of GPU(s) to use.')
    parser.add_argument('-r',
                        action='store_true',
                        help='start training from the beginning.')
    arguments = parser.parse_args()
    return arguments


if __name__ == '__main__':
    args = get_arguments()
    print(args.case2)
    # load hyper-parameters for the train2 case
    hp.set_hparam_yaml(args.case2,
                       default_file='hparams/{}.yaml'.format(args.case2))
    logdir_train1 = '{}/{}/train1'.format(hp.logdir_path, args.case1)
    logdir_train2 = '{}/{}/train2'.format(hp.logdir_path, args.case2)

    # -r: wipe the train2 log directory and start training from scratch
    if args.r:
        remove_all_files(logdir_train2)

    print('case1: {}, case2: {}, logdir1: {}, logdir2: {}'.format(
        args.case1, args.case2, logdir_train1, logdir_train2))

    train(args, logdir1=logdir_train1, logdir2=logdir_train2)

    print("Done")
Code Example #3
        work_sheets = utils.get_all_worksheets(xlsx_path)
        # wrap every worksheet in a sheet object and keep it by name
        for sheet_name, work_sheet in work_sheets.items():
            sheet_item = sheet(work_sheet)
            sheet_item.debug_print()
            all_sheet[sheet_name] = sheet_item

    # command-line options: argv[1] toggles C# code generation (1 = on),
    # argv[2] optionally restricts the export to a single .xlsx file
    generate_code = 1
    xlsx_file = ''

    if len(sys.argv) >= 2:
        generate_code = int(sys.argv[1])
    if len(sys.argv) >= 3:
        xlsx_file = os.path.basename(sys.argv[2])

    if xlsx_file == '':
        # no workbook given: clear the output directories and export every sheet
        utils.remove_all_files(JSON_DIR)
        utils.remove_all_files(CSHARP_DIR)
        for sheet_name, sheet_item in all_sheet.items():
            if generate_code == 1:
                export_csharp(sheet_item)
            export_json(sheet_item)
    else:
        # a single workbook was requested: export only its sheets
        if xlsx_file in all_xlsx:
            work_sheets = utils.get_all_worksheets(all_xlsx[xlsx_file])
            for sheet_name, work_sheet in work_sheets.items():
                sheet_item = all_sheet[sheet_name]
                if generate_code == 1:
                    export_csharp(sheet_item)
                export_json(sheet_item)

    export_hash_helper()
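
All three examples import remove_all_files from a project-local utils module whose body is not shown. A plausible sketch of such a helper, assuming it just deletes every regular file directly inside a directory (this is a guess at the utility's behaviour, not the original utils code), is:

import glob
import os


def remove_all_files(path):
    # delete every regular file directly under `path`;
    # subdirectories are kept, and a missing directory is simply a no-op
    for f in glob.glob(os.path.join(path, '*')):
        if os.path.isfile(f):
            os.remove(f)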