Example #1
0
def create_esn_models(is_cuda):
    """
    Build the two Li-ESN models used by the experiment: one for
    300-dimensional inputs (``esn_wv``) and one for 60-dimensional
    inputs (``esn_c3``).

    :param is_cuda: move both models to the GPU when True.
    :return: tuple ``(esn_wv, esn_c3)``
    """
    # Settings shared by both reservoirs; values come from module-level
    # experiment configuration.
    shared_kwargs = dict(
        hidden_dim=reservoir_size,
        output_dim=1,
        spectral_radius=spectral_radius,
        sparsity=input_sparsity,
        input_scaling=input_scaling,
        w_sparsity=w_sparsity,
        learning_algo='inv',
        feedbacks=args.feedbacks,
        seed=1 if args.keep_w else None,
    )

    # ESN over 300-dim inputs (word-vector features), leak rate 0.01
    esn_wv = etnn.LiESN(input_dim=300, leaky_rate=0.01, **shared_kwargs)

    # ESN over 60-dim inputs, slower leak rate 0.001
    esn_c3 = etnn.LiESN(input_dim=60, leaky_rate=0.001, **shared_kwargs)

    # Optionally move both models to GPU
    if is_cuda:
        esn_wv.cuda()
        esn_c3.cuda()
    # end if

    return esn_wv, esn_c3
# Data loaders
# Batched iterators over the train/test switch datasets.  shuffle=False
# keeps the original sample order; two worker processes prefetch batches.
trainloader = DataLoader(switch_train_dataset,
                         batch_size=batch_size,
                         shuffle=False,
                         num_workers=2)
testloader = DataLoader(switch_test_dataset,
                        batch_size=batch_size,
                        shuffle=False,
                        num_workers=2)

# ESN cell
# Leaky-integrator ESN with a single output unit, trained by matrix
# inversion ('inv') and with output feedbacks enabled.
esn = etnn.LiESN(input_dim=input_dim,
                 hidden_dim=n_hidden,
                 output_dim=1,
                 spectral_radius=spectral_radius,
                 learning_algo='inv',
                 leaky_rate=leaky_rate,
                 feedbacks=True)
# Move the model to GPU when requested
if use_cuda:
    esn.cuda()
# end if

# For each batch
# NOTE(review): this loop appears to be several unrelated snippets fused
# together — it iterates `trainloader` but then consumes `reutersloader`
# and configures `sfgram_loader_*`; verify against the original sources.
for data in trainloader:
    # Inputs and outputs
    inputs, targets = data

    # Wrap in autograd Variables (legacy PyTorch API)
    inputs, targets = Variable(inputs), Variable(targets)
    if use_cuda: inputs, targets = inputs.cuda(), targets.cuda()
    # Record the hyper-parameter point in the experiment tracker
    xp.set_state(space)

    # Accumulator for per-sample results
    average_sample = np.array([])

    # For each sample (repetition) of the experiment
    for n in range(args.n_samples):
        # Tell the experiment tracker which sample this is
        xp.set_sample_state(n)

        # ESN cell: input dim taken from the last transform in the
        # pipeline, one output unit per author.
        esn = etnn.LiESN(input_dim=transformer.transforms[-1].input_dim,
                         hidden_dim=reservoir_size,
                         output_dim=reutersloader.dataset.n_authors,
                         spectral_radius=spectral_radius,
                         sparsity=input_sparsity,
                         input_scaling=input_scaling,
                         w_sparsity=w_sparsity,
                         w=w if args.keep_w else None,
                         learning_algo='inv',
                         leaky_rate=leak_rate)
        if use_cuda:
            esn.cuda()
        # end if

        # Get training data for this fold
        for i, data in enumerate(reutersloader):
            # Inputs and labels
            inputs, labels, time_labels = data
            # Debug visualisation of the first input sequence
            plt.imshow(inputs[0, 0].t().numpy(), cmap='Greys')
            plt.show()
        # end for
    # Accumulator reset (duplicate of the one above — see NOTE at top)
    average_sample = np.array([])

    # For each sample
    for n in range(args.n_samples):
        # Set sample
        xp.set_sample_state(n)

        # ESN cell: scalar output over SFGram features
        esn = etnn.LiESN(input_dim=sfgram_dataset.transform.input_dim,
                         hidden_dim=reservoir_size,
                         output_dim=1,
                         spectral_radius=spectral_radius,
                         sparsity=input_sparsity,
                         input_scaling=input_scaling,
                         w_sparsity=w_sparsity,
                         learning_algo='inv',
                         leaky_rate=leak_rate,
                         feedbacks=args.feedbacks,
                         seed=1 if args.keep_w else None)
        if use_cuda:
            esn.cuda()
        # end if

        # For each of the 10 cross-validation folds
        for k in range(10):
            # Choose fold on the tracker and both dataset splits
            xp.set_fold_state(k)
            sfgram_loader_train.dataset.set_fold(k)
            sfgram_loader_test.dataset.set_fold(k)
Example #5
0
    # Certainty data
    # Two rows × (n_samples * 1500) slots; presumably prediction
    # certainty vs. ground truth — TODO confirm against the caller.
    certainty_data = np.zeros((2, args.n_samples * 1500))
    certainty_index = 0

    # For each sample (repetition) of the experiment
    for n in range(args.n_samples):
        # Tell the experiment tracker which sample this is
        xp.set_sample_state(n)

        # ESN cell: one output unit per author, optional reuse of the
        # reservoir matrix `w` across samples when --keep-w is set.
        # NOTE(review): kwarg 'wfdb_sparsity' — verify spelling against
        # the etnn.LiESN signature (other snippets use w_sparsity/feedbacks).
        esn = etnn.LiESN(input_dim=reutersc50_dataset.transform.input_dim,
                         hidden_dim=reservoir_size,
                         output_dim=reutersc50_dataset.n_authors,
                         spectral_radius=spectral_radius,
                         sparsity=input_sparsity,
                         input_scaling=input_scaling,
                         w_sparsity=w_sparsity,
                         w=w if args.keep_w else None,
                         learning_algo='inv',
                         leaky_rate=leak_rate,
                         feedbacks=args.feedbacks,
                         wfdb_sparsity=feedbacks_sparsity)
        if use_cuda:
            esn.cuda()
        # end if

        # Accumulator for k-fold results
        average_k_fold = np.array([])

        # Out-of-vocabulary statistics accumulator
        oov = np.array([])
Example #6
0
        # Out-of-vocabulary statistics accumulator
        oov = np.array([])

        # Models
        models = list()

        # Create models
        # One ESN per feature configuration: 300-dim inputs for word
        # vectors ('wv'), 60-dim otherwise; each model has its own leak rate.
        for m in range(n_models):
            # ESN cell
            # NOTE(review): kwarg 'wfdb_sparsity' — verify spelling
            # against the etnn.LiESN signature.
            esn = etnn.LiESN(
                input_dim=300 if feature[m][0] == 'wv' else 60,
                hidden_dim=reservoir_size,
                output_dim=reutersc50_dataset.n_authors,
                spectral_radius=spectral_radius,
                sparsity=input_sparsity,
                input_scaling=input_scaling,
                w_sparsity=w_sparsity,
                w=w if args.keep_w else None,
                learning_algo='inv',
                leaky_rate=float(leak_rate[m][0]),
                feedbacks=args.feedbacks,
                wfdb_sparsity=feedbacks_sparsity
            )
            if use_cuda:
                esn.cuda()
            # end if
            models.append(esn)
        # end for
        # For each batch
        # NOTE(review): the loop below has no executable body (only a
        # comment) — this fragment is truncated and will raise an
        # IndentationError as written; the original fold-selection code
        # is missing here.
        for k in range(10):
            # Choose fold
        # Input weights
        # Normal-distributed input matrix; connectivity is the complement
        # of the input-sparsity setting, no spectral-radius rescaling.
        win_generator = echotorch.utils.matrix_generation.NormalMatrixGenerator(
            connectivity=1.0 - input_sparsity,
            scale=input_scaling,
            apply_spectral_radius=False,
        )

        # Bias vector
        # scale=0 produces an all-zero bias.
        wbias_generator = echotorch.utils.matrix_generation.NormalMatrixGenerator(
            connectivity=1.0, scale=0, apply_spectral_radius=False)

        # ESN cell (matrix-generator API with ridge-regression training)
        esn = etnn.LiESN(input_dim=sfgram_dataset.transform.input_dim,
                         output_dim=1,
                         hidden_dim=reservoir_size,
                         leaky_rate=leak_rate,
                         ridge_param=ridge_param,
                         w_generator=w_generator,
                         win_generator=win_generator,
                         wbias_generator=wbias_generator)
        if use_cuda:
            esn.cuda()
        # end if

        # For each of the 5 cross-validation folds
        for k in range(5):
            # Choose fold on the tracker and all three dataset splits
            xp.set_fold_state(k)
            sfgram_loader_train.dataset.set_fold(k)
            sfgram_loader_dev.dataset.set_fold(k)
            sfgram_loader_test.dataset.set_fold(k)
Example #8
0
    # Authors
    # Map each author name to a contiguous integer index.
    author_to_idx = dict()
    for idx, author in enumerate(pan18loader_training.dataset.authors):
        author_to_idx[author] = idx
    # end for

    # Number of authors
    n_authors = len(author_to_idx)

    # ESN cell
    # One output unit per author; input dim taken from the second
    # transform in the pipeline; trained by matrix inversion ('inv').
    esn = etnn.LiESN(input_dim=transformer.transforms[1].input_dim,
                     hidden_dim=reservoir_size,
                     output_dim=n_authors,
                     spectral_radius=spectral_radius,
                     sparsity=input_sparsity,
                     input_scaling=input_scaling,
                     w_sparsity=w_sparsity,
                     learning_algo='inv',
                     leaky_rate=leak_rate)
    # Get training data for this fold
    for i, data in enumerate(pan18loader_training):
        # Inputs and labels
        inputs, labels = data

        # Create time labels
        author_id = author_to_idx[labels[0]]
        tag_vector = torch.zeros(1, inputs.size(1), n_authors)
        tag_vector[0, :, author_id] = 1.0