Example #1
print('\nImage Height: {}'
      '\nNum Classes: {}  Num Samples: {}  Num Epochs: {}'
      '\nFloatX: {}'
      '\n'.format(img_ht, num_classes, num_samples, num_epochs, th.config.floatX))

################################
print('Preparing the Data')
try:
    conv_sz = nnet_args['midlayerargs']['conv_sz']
except KeyError:
    conv_sz = 1

data_x, data_y = [], []
bad_data = False

for x, y in zip(data['x'], data['y']):
    # Insert blanks at alternate locations in the labelling (blank is num_classes)
    y1 = utils.insert_blanks(y, num_classes)
    data_y.append(np.asarray(y1, dtype=np.int32))
    data_x.append(np.asarray(x, dtype=th.config.floatX))

    # A blanked labelling longer than the down-sampled width of the image
    # cannot be aligned by CTC, so flag such samples as bad data.
    if printer.ylen(y1) > (1 + len(x[0])) // conv_sz:
        bad_data = True
        printer.show_all(y1, x, None, (x[:, ::conv_sz], 'Squissed'))
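
# A minimal sketch (an assumption, not the actual utils.insert_blanks) of the
# standard CTC blank interleaving referred to above: put the blank symbol
# (index num_classes) between consecutive labels and at both ends,
# e.g. [3, 7] -> [blank, 3, blank, 7, blank].
def insert_blanks_sketch(y, blank):
    blanked = [blank]
    for label in y:
        blanked += [label, blank]
    return blanked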


################################
print('Building the Network')
ntwk = NeuralNet(img_ht, num_classes, **nnet_args)
print(ntwk)

print('Training the Network')
for epoch in range(num_epochs):
Example #2
################################ Train
print('Training the Network')
for epoch in range(num_epochs):
    ntwk.update_learning_rate(epoch)
    edit_dist, tot_len = 0, 0

    print('Epoch: {} '.format(epoch))
    # Keep one backup copy, since the checkpoint could be lost if the script is
    # stopped while pickling. (Note: os.rename raises FileNotFoundError if
    # network_fname does not exist yet, e.g. on a fresh run's first save.)
    os.rename(network_fname, 'ntwk.bkp.pkl')
    with open(network_fname, 'wb') as fh:
        pickle.dump(ntwk, fh)
    print('Network saved to {}'.format(network_fname))

    for samp in range(num_samples):
        x, _, y = scriber.get_text_image()
        y_blanked = utils.insert_blanks(y, alphabet_size, num_blanks_at_start=2)
        # if len(y_blanked) < 2:
        #     print(y_blanked, end=' ')
        #     continue
        cst, pred, forward_probs = ntwk.trainer(x, y_blanked)

        if np.isinf(cst):
            printer.show_all(y, x, pred,
                             (forward_probs > 1e-20, 'Forward probabilities:', y_blanked))
            print('Exiting on account of Inf Cost...')
            break

        if samp == 0 and epoch == num_epochs - 1:   # or len(y) == 0:
            pred, hidden = ntwk.tester(x)

            print('Epoch:{:6d} Cost:{:.3f}'.format(epoch, float(cst)))
Example #3
# Print the run configuration
print('\nArguments:')
utils.write_dict(args)
print('FloatX: {}'.format(th.config.floatX))
print('Alphabet Size: {}'.format(alphabet_size))

################################ Train
print('Training the Network')
for epoch in range(num_epochs):
    ntwk.update_learning_rate(epoch)
    edit_dist, tot_len = 0, 0

    for samp in range(num_samples):
        x, _, y = scriber.get_text_image()
        y_blanked = utils.insert_blanks(y, alphabet_size, num_blanks_at_start=2)
        # if len(y_blanked) < 2:
        #     print(y_blanked, end=' ')
        #     continue
        cst, pred, forward_probs = ntwk.trainer(x, y_blanked)

        if np.isinf(cst):
            printer.show_all(y, x, pred,
                             (forward_probs > 1e-20, 'Forward probabilities:', y_blanked))
            print('Exiting on account of Inf Cost...')
            break

        if samp == 0 and epoch == num_epochs - 1:   # or len(y) == 0:
            pred, hidden = ntwk.tester(x)

            print('Epoch:{:6d} Cost:{:.3f}'.format(epoch, float(cst)))
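
# The edit_dist / tot_len accumulators above are never updated in this excerpt.
# A hypothetical sketch of how they could track a character error rate per
# epoch, using a plain Levenshtein distance (levenshtein below is not part of
# the original utils module):
def levenshtein(a, b):
    # Classic dynamic-programming edit distance between two label sequences.
    prev = list(range(len(b) + 1))
    for i, ca in enumerate(a, 1):
        curr = [i]
        for j, cb in enumerate(b, 1):
            curr.append(min(prev[j] + 1,                  # deletion
                            curr[j - 1] + 1,              # insertion
                            prev[j - 1] + (ca != cb)))    # substitution
        prev = curr
    return prev[-1]

# e.g. inside the sample loop, after decoding pred into a label sequence y_hat:
#     edit_dist += levenshtein(y_hat, y)
#     tot_len += len(y)
# and at the end of the epoch:
#     print('CER: {:.2%}'.format(edit_dist / tot_len))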
Example #4
print('\nImage Height: {}'
      '\nNum Classes: {}  Num Samples: {}  Num Epochs: {}  FloatX: {}'
      '\n'.format(img_ht, num_classes, num_samples, num_epochs,
                  th.config.floatX))

################################
print('Preparing the Data')
try:
    conv_sz = nnet_args['midlayerargs']['conv_sz']
except KeyError:
    conv_sz = 1

data_x, data_y = [], []
bad_data = False

for x, y in zip(data['x'], data['y']):
    # Insert blanks at alternate locations in the labelling (blank is num_classes)
    y1 = utils.insert_blanks(y, num_classes)
    data_y.append(np.asarray(y1, dtype=np.int32))
    data_x.append(np.asarray(x, dtype=th.config.floatX))

    # A blanked labelling longer than the down-sampled width of the image
    # cannot be aligned by CTC, so flag such samples as bad data.
    if printer.ylen(y1) > (1 + len(x[0])) // conv_sz:
        bad_data = True
        printer.show_all(y1, x, None, (x[:, ::conv_sz], 'Squissed'))

################################
print('Building the Network')
ntwk = NeuralNet(img_ht, num_classes, **nnet_args)
print(ntwk)

print('Training the Network')
for epoch in range(num_epochs):
    print('Epoch : ', epoch)
Example #5
print(scriber)

print('Building the Network')
ntwk = nn.NeuralNet(scriber.nDims, scriber.nClasses, **nnet_args)
print(ntwk)

################################
print('Training the Network')

for epoch in range(num_epochs):
    ntwk.update_learning_rate(epoch)

    for samp in range(num_samples):
        x, y1 = scriber.get_sample()
        if len(y1) < 2:
            continue
        y = utils.insert_blanks(y1, scriber.nClasses)
        cst, pred, forward_probs = ntwk.trainer(x, y)

        if np.isinf(cst):
            printer.show_all(y, x, pred, (forward_probs > 1e-20, 'Forward probabilities:'))
            print('Exiting on account of Inf Cost...')
            break

        if samp == 0:
            pred, hidden = ntwk.tester(x)

            print('Epoch:{:6d} Cost:{:.3f}'.format(epoch, float(cst)))
            printer.show_all(y, x, pred,
                             (forward_probs > 1e-20, 'Forward probabilities:'),
                             ((hidden + 1)/2, 'Hidden Layer:'))
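
# A minimal sketch (assumed, not taken from the repository) of the standard
# greedy CTC decoding that a softmax output like `pred` is typically collapsed
# with: take the argmax class per time-step, merge repeated labels, drop blanks.
import numpy as np

def greedy_ctc_decode(probs, blank):
    # probs: (time_steps, num_classes + 1) array of per-frame class probabilities.
    best_path = np.argmax(probs, axis=-1)
    decoded, prev = [], None
    for label in best_path:
        if label != prev and label != blank:
            decoded.append(int(label))
        prev = label
    return decoded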