Example #1
import tensorflow as tf

# CTC beam search decoder: decode the predicted label sequence from the
# per-frame probability map. `decoded` is a list of SparseTensors (top paths)
# and `log_prob` holds their log-probabilities. `logits`, `SeqLens`, `cfg` and
# the helpers below come from the surrounding project code.
decoded, log_prob = tf.nn.ctc_beam_search_decoder(logits, SeqLens)

# Reading test data...
InputListTest, SeqLensTest, _ = ReadData(cfg.TEST_LOCATION, cfg.TEST_LIST,
                                         cfg.TEST_NB, WND_HEIGHT, WND_WIDTH,
                                         WND_SHIFT, VEC_PER_WND, '')

print('Initializing...')

session = tf.Session()

session.run(tf.global_variables_initializer())

LoadModel(session, cfg.SaveDir + '/')

try:
    # Switch the graph to inference mode (phase_train presumably gates
    # train-time behaviour such as batch normalisation or dropout).
    session.run(tf.assign(phase_train, False))

    # Decode the test set in order, one batch at a time.
    randIxs = range(len(InputListTest))

    start, end = (0, cfg.BatchSize)

    batch = 0
    while end <= len(InputListTest):
        # Assemble one batch of window features and the matching sequence lengths.
        batchInputs = []
        batchSeqLengths = []
        for batchI, origI in enumerate(randIxs[start:end]):
            batchInputs.extend(InputListTest[origI])
            batchSeqLengths.append(SeqLensTest[origI])
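        # Hedged sketch (not in the original excerpt): feed the assembled batch
        # to the graph and run the beam-search decoder. `inputs` and `seqLengths`
        # stand in for the model's input placeholders, which this snippet does
        # not show; substitute the project's actual tensor names.
        feedDict = {inputs: batchInputs, seqLengths: batchSeqLengths}
        predictions = session.run(decoded[0], feed_dict=feedDict)
        # `decoded[0]` evaluates to a SparseTensorValue of label indices;
        # mapping it back to characters requires the project's label table.

        # Advance to the next batch window.
        start += cfg.BatchSize
        end += cfg.BatchSize
        batch += 1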
Example #2
import numpy as np
import tensorflow as tf

# `LogFile`, `cfg`, `inputList` and the summary tensors used below are defined
# elsewhere in the project.
LogFile.flush()

session = tf.Session()

session.run(tf.global_variables_initializer())

# Per-batch training summaries and per-epoch overall summaries for TensorBoard.
LocalTrainSummary = tf.summary.merge([TrainLoss_s, TrainError_s])

OverallSummary = tf.summary.merge([
    OverallTrainingLoss_s, OverallTrainingError_s, OverallValidationLoss_s,
    OverallValidationError_s
])

SummaryWriter = tf.summary.FileWriter(cfg.LogDir, session.graph)

# Resume from a saved checkpoint unless training starts from epoch 0.
if cfg.StartingEpoch != 0:
    LoadModel(session, cfg.SaveDir + '/')

try:
    for epoch in range(cfg.StartingEpoch, cfg.NEpochs):

        LogFile.write(
            "######################################################\n")
        LogFile.write("Training Data\n")
        LogFile.flush()

        TrainingLoss = []
        TrainingError = []

        # Shuffle the sample order each epoch when random batching is enabled.
        if cfg.RandomBatches:
            randIxs = np.random.permutation(len(inputList))
        else:
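            # Hedged sketch (not in the original excerpt): without shuffling,
            # visit the training samples in their original order.
            randIxs = range(len(inputList))

        # The epoch would then proceed batch by batch much as in Example #1:
        # slice randIxs, build a feed dict, and run the training op together
        # with LocalTrainSummary; those ops are defined elsewhere in the
        # project and are not shown in this excerpt.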