def get_data(path, isTrain, stat_file):
    """Build the TIMIT input pipeline (legacy tensorpack dataflow API).

    Args:
        path: path to the LMDB database of datapoints.
        isTrain: if True, shuffle the data and prefetch in a subprocess.
        stat_file: path to a serialized ``(mean, std)`` pair used for
            feature normalization.

    Returns:
        A dataflow yielding normalized, batched TIMIT samples.
    """
    ds = LMDBDataPoint(path, shuffle=isTrain)
    # Open in binary mode — the stats file holds a serialized blob, and
    # text-mode reading would fail to decode it on Python 3. Use a context
    # manager so the file handle is closed instead of leaked.
    with open(stat_file, 'rb') as f:
        mean, std = serialize.loads(f.read())
    ds = MapDataComponent(ds, lambda x: (x - mean) / std)
    ds = TIMITBatch(ds, BATCH)
    if isTrain:
        # Prefetch in one background process to overlap I/O with training.
        ds = PrefetchDataZMQ(ds, 1)
    return ds
def get_data(path, isTrain, stat_file):
    """Build the TIMIT input pipeline (current tensorpack dataflow API).

    Args:
        path: path to the LMDB database of datapoints.
        isTrain: if True, shuffle the data and prefetch in a subprocess.
        stat_file: path to a serialized ``(mean, std)`` pair used for
            feature normalization.

    Returns:
        A dataflow yielding normalized, batched TIMIT samples.
    """
    ds = LMDBSerializer.load(path, shuffle=isTrain)
    # Use a context manager so the stats file handle is closed rather
    # than leaked after reading the serialized (mean, std) blob.
    with open(stat_file, 'rb') as f:
        mean, std = serialize.loads(f.read())
    ds = MapDataComponent(ds, lambda x: (x - mean) / std)
    ds = TIMITBatch(ds, BATCH)
    if isTrain:
        # Run the dataflow in one background process to overlap I/O
        # with training.
        ds = MultiProcessRunnerZMQ(ds, 1)
    return ds
def get_config():
    """Assemble the training configuration for the TIMIT model.

    Sets up the log directory, builds the batched + prefetched dataflow,
    creates the learning-rate variable, and returns a ``TrainConfig``
    training for 100 epochs with momentum SGD.
    """
    logger.auto_set_dir()

    # Build the input pipeline: raw TIMIT data -> batches -> prefetch.
    dataflow = TIMITData()
    dataflow = TIMITBatch(dataflow, BATCH)
    dataflow = PrefetchDataZMQ(dataflow, 1)
    steps = dataflow.size()

    lr = symbolic_functions.get_scalar_var('learning_rate', 1e-3, summary=True)
    # Alternative kept from the original: tf.train.AdamOptimizer(lr)
    opt = tf.train.MomentumOptimizer(lr, 0.9)

    return TrainConfig(
        dataset=dataflow,
        optimizer=opt,
        callbacks=Callbacks([
            StatPrinter(),
            ModelSaver(),
        ]),
        model=Model(),
        step_per_epoch=steps,
        max_epoch=100,
    )