def generate(self, text, timesteps=1000, seed=None, filepath='samples/conditional.png'):
        """Generate a handwritten stroke sequence conditioned on `text` and save a plot of it."""
        self.build_model(seq_length=1, max_sentence_length=len(text))

        # timestep 0 is a zero priming point; generated points fill indices 1..timesteps
        sample = np.zeros((1, timesteps + 1, 3), dtype='float32')
        char_index, _ = char_to_index()

        # one-hot encode the conditioning text and add a batch dimension
        one_hot_text = tf.expand_dims(one_hot_encode(text, self.num_characters, char_index), 0)
        text_length = tf.expand_dims(tf.constant(len(text)), 0)

        input_states = self.initial_states(1)
        for i in range(timesteps):
            # run a single timestep: feed the previously generated point together with
            # the recurrent and attention-window states
            outputs, input_states, phi = self.model([sample[:, i:i + 1, :], input_states, one_hot_text, text_length])
            # the last state element is the window vector over characters; keep it shaped (batch, num_characters)
            input_states[-1] = tf.reshape(input_states[-1], (1, self.num_characters))
            sample[0, i + 1] = self.sample(outputs, seed)

            # stopping heuristic (Graves, 2013): the sample is finished once the attention
            # weight phi on the position one past the end of the text is at least as large
            # as the weight on every real character
            phi_step = phi[0, 0].numpy()
            finished = bool(np.all(phi_step[:-1] <= phi_step[-1]))

            # prevent early stopping on very short samples
            if i < 100:
                finished = False

            if finished:
                break

        # drop the zero priming point at index 0 and any unused timesteps after the stop
        sample = sample[0, 1:i + 2]
        plot_stroke(sample, save_name=filepath)
        return sample
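# Example invocation of the `generate` method above (a sketch only): the class name
# `HandwritingSynthesis`, its argument-free constructor, and the weight-loading call
# are assumptions made for illustration; they are not defined in this file.
def _generate_demo():
    model = HandwritingSynthesis()                        # assumed model class
    model.load_weights('checkpoints/conditional')         # assumed checkpoint location
    strokes = model.generate('hello world', timesteps=800, seed=42,
                             filepath='samples/hello.png')
    # `strokes` has shape (num_points, 3): pen offsets plus an end-of-stroke flag
    print(strokes.shape)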
def main(arguments):

    model_choice = arguments.model
    model_path = arguments.model_path
    dataset_path = arguments.dataset

    assert model_choice in (1, 2, 3), \
        "Invalid choice: Choose among 1, 2, and 3 only."
    assert os.path.exists(model_path), "{} does not exist!".format(model_path)
    assert os.path.exists(dataset_path), "{} does not exist!".format(dataset_path)

    dataset = np.load(dataset_path)

    features, labels = load_data(dataset=dataset)

    labels = one_hot_encode(labels=labels)

    dataset_size = features.shape[0]
    print(features.shape)

    if model_choice == 2:
        # the GRU model consumes 2-D sequences, so reshape each flat feature
        # vector into a square [sqrt(n) x sqrt(n)] matrix
        features = np.reshape(
            features,
            (
                features.shape[0],
                int(np.sqrt(features.shape[1])),
                int(np.sqrt(features.shape[1])),
            ),
        )
        predictions, accuracies = predict(
            dataset=[features, labels],
            model=model_choice,
            model_path=model_path,
            size=dataset_size,
            batch_size=256,
            cell_size=256,
        )
    else:
        predictions, accuracies = predict(
            dataset=[features, labels],
            model=model_choice,
            model_path=model_path,
            size=dataset_size,
            batch_size=256,
        )

    print("Predictions : {}".format(predictions))
    print("Accuracies : {}".format(accuracies))
    print("Average accuracy : {}".format(np.mean(accuracies)))
def main(arguments):
    model_choice = arguments.model
    model_path = arguments.model_path
    dataset_path = arguments.dataset

    assert os.path.exists(model_path), '{} does not exist!'.format(model_path)
    assert os.path.exists(dataset_path), '{} does not exist!'.format(dataset_path)

    dataset = np.load(dataset_path)
    features, labels = load_data(dataset=dataset)
    labels = one_hot_encode(labels=labels)

    dataset_size = features.shape[0]
    print(features.shape)

    if model_choice == 2:
        features = np.reshape(
            features,
            (features.shape[0], int(np.sqrt(features.shape[1])),
             int(np.sqrt(features.shape[1]))))
        predictions, accuracies = predict(dataset=[features, labels],
                                          model=model_choice,
                                          model_path=model_path,
                                          size=dataset_size,
                                          batch_size=256,
                                          cell_size=256)
    else:
        predictions, accuracies = predict(dataset=[features, labels],
                                          model=model_choice,
                                          model_path=model_path,
                                          size=dataset_size,
                                          batch_size=256)

    print('Predictions: {}'.format(predictions))
    print('Accuracies: {}'.format(accuracies))
    print('Average accuracy: {}'.format(np.mean(accuracies)))
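# For reference, a minimal sketch of what the `one_hot_encode` helper used above is
# assumed to do for integer class labels; the actual implementation lives elsewhere
# in the repository and may differ.
def one_hot_encode_sketch(labels):
    import numpy as np
    labels = np.asarray(labels, dtype=int).ravel()
    encoded = np.zeros((labels.shape[0], labels.max() + 1), dtype=np.float32)
    encoded[np.arange(labels.shape[0]), labels] = 1.0
    return encoded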
def main(arguments):

    model_choice = arguments.model
    assert model_choice in (1, 2, 3), \
        'Invalid choice: Choose among 1, 2, and 3 only.'

    dataset = np.load(arguments.dataset)

    features, labels = load_data(dataset=dataset)

    labels = one_hot_encode(labels=labels)

    # get the number of features
    num_features = features.shape[1]

    # get the number of classes
    num_classes = labels.shape[1]

    # split the dataset 70/30, stratified to preserve class proportions
    train_features, test_features, train_labels, test_labels = train_test_split(
        features, labels, test_size=0.30, stratify=labels)

    # truncate each split to a multiple of BATCH_SIZE so every batch is full
    train_size = int(train_features.shape[0])
    train_features = train_features[:train_size - (train_size % BATCH_SIZE)]
    train_labels = train_labels[:train_size - (train_size % BATCH_SIZE)]

    test_size = int(test_features.shape[0])
    test_features = test_features[:test_size - (test_size % BATCH_SIZE)]
    test_labels = test_labels[:test_size - (test_size % BATCH_SIZE)]

    if model_choice == 1:
        model = CNN(alpha=LEARNING_RATE,
                    batch_size=BATCH_SIZE,
                    num_classes=num_classes,
                    penalty_parameter=arguments.penalty_parameter,
                    sequence_length=num_features)
        model.train(checkpoint_path=arguments.checkpoint_path,
                    log_path=arguments.log_path,
                    result_path=arguments.result_path,
                    epochs=arguments.num_epochs,
                    train_data=[train_features, train_labels],
                    train_size=int(train_features.shape[0]),
                    test_data=[test_features, test_labels],
                    test_size=int(test_features.shape[0]))
    elif model_choice == 2:
        # reshape flat feature vectors into square [sqrt(n) x sqrt(n)] matrices for the GRU
        train_features = np.reshape(
            train_features,
            (train_features.shape[0], int(np.sqrt(train_features.shape[1])),
             int(np.sqrt(train_features.shape[1]))))
        test_features = np.reshape(
            test_features,
            (test_features.shape[0], int(np.sqrt(test_features.shape[1])),
             int(np.sqrt(test_features.shape[1]))))
        model = GruSvm(alpha=LEARNING_RATE,
                       batch_size=BATCH_SIZE,
                       cell_size=CELL_SIZE,
                       dropout_rate=DROPOUT_RATE,
                       num_classes=num_classes,
                       num_layers=NUM_LAYERS,
                       sequence_height=train_features.shape[2],
                       sequence_width=train_features.shape[1],
                       svm_c=arguments.penalty_parameter)
        model.train(checkpoint_path=arguments.checkpoint_path,
                    log_path=arguments.log_path,
                    epochs=arguments.num_epochs,
                    train_data=[train_features, train_labels],
                    train_size=int(train_features.shape[0]),
                    test_data=[test_features, test_labels],
                    test_size=int(test_features.shape[0]),
                    result_path=arguments.result_path)
    elif model_choice == 3:
        model = MLP(alpha=LEARNING_RATE,
                    batch_size=BATCH_SIZE,
                    node_size=NODE_SIZE,
                    num_classes=num_classes,
                    num_features=num_features,
                    penalty_parameter=arguments.penalty_parameter)
        model.train(checkpoint_path=arguments.checkpoint_path,
                    num_epochs=arguments.num_epochs,
                    log_path=arguments.log_path,
                    train_data=[train_features, train_labels],
                    train_size=int(train_features.shape[0]),
                    test_data=[test_features, test_labels],
                    test_size=int(test_features.shape[0]),
                    result_path=arguments.result_path)
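# A sketch of a command-line entry point for the training `main` above. The flag names
# mirror the attributes that `main` reads (model, dataset, penalty_parameter,
# checkpoint_path, log_path, result_path, num_epochs); the default values shown are
# assumptions, not taken from this file.
if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser(description='Train a CNN, GRU-SVM, or MLP classifier.')
    parser.add_argument('--model', type=int, required=True,
                        help='1 = CNN, 2 = GRU-SVM, 3 = MLP')
    parser.add_argument('--dataset', type=str, required=True,
                        help='path to the .npy dataset file')
    parser.add_argument('--penalty_parameter', type=float, default=1.0,
                        help='SVM penalty parameter C (assumed default)')
    parser.add_argument('--checkpoint_path', type=str, required=True,
                        help='directory for saving model checkpoints')
    parser.add_argument('--log_path', type=str, required=True,
                        help='directory for training/summary logs')
    parser.add_argument('--result_path', type=str, required=True,
                        help='directory for prediction results')
    parser.add_argument('--num_epochs', type=int, default=1,
                        help='number of training epochs (assumed default)')
    main(parser.parse_args())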