Example #1
from typing import Dict, List

import numpy as np

# CustomMinMaxScaler, AETCN, random_search and generate_layer_settings are
# project-specific helpers; their imports are omitted in this excerpt.


def train_aetcnn(x_train: List, x_test: List, y_train: np.ndarray,
                 y_test: np.ndarray) -> Dict:
    # Fit the scaler on the training data, then reuse it for the test data.
    sc = CustomMinMaxScaler()
    x_train = sc.fit_transform(x_train)
    x_test = sc.transform(x_test)

    model = AETCN()
    n_experiments = 100
    embeddings_dim = x_train[0].shape[1]

    # Every entry holds n_experiments sampled values, one per trial.
    params = {
        'epochs': np.random.choice(
            np.arange(1, 10), size=n_experiments).tolist(),
        'learning_rate': np.random.choice(
            10**np.linspace(-4, -0.5), size=n_experiments).tolist(),
        'batch_size': np.random.choice(
            [2**i for i in range(3, 8)], size=n_experiments).tolist(),
        'input_shape': [embeddings_dim] * n_experiments,
        'layers': generate_layer_settings(embeddings_dim, n_experiments),
        'kernel_size': np.random.choice(
            [2 * i + 1 for i in range(1, 6)], size=n_experiments).tolist(),
        'window': np.random.randint(10, 100, size=n_experiments).tolist(),
        'dropout': np.random.uniform(0, 0.5, size=n_experiments).tolist()
    }
    # Train only on samples labelled 0 and evaluate on the full test set.
    evaluated_hyperparams = random_search(
        (x_train[y_train == 0], x_test, None, y_test), model, params)
    return evaluated_hyperparams
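
The params dictionary above holds one sampled value per trial for every hyperparameter, so each list has length n_experiments. The random_search implementation is not shown in this excerpt; assuming it pairs the i-th element of every list into the configuration of trial i, a single trial's settings could be assembled as in the sketch below (trial_config is an illustrative helper, not part of the project).

def trial_config(params: Dict, i: int) -> Dict:
    # Pick the i-th sampled value from every hyperparameter list; constant
    # entries such as 'input_shape' are already repeated once per trial.
    return {name: values[i] for name, values in params.items()}

# e.g. trial_config(params, 0) might yield something like
# {'epochs': 7, 'learning_rate': 0.0012, 'batch_size': 32, ...}
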
Example #2
def train_sa_cnn1d(x_train: List, x_test: List, y_train: np.ndarray,
                   y_test: np.ndarray) -> Dict:
    # SACNN1D and the get_* helpers below are project-specific; their imports
    # are omitted in this excerpt.
    sc = CustomMinMaxScaler()
    x_train = sc.fit_transform(x_train)
    x_test = sc.transform(x_test)

    model = SACNN1D()
    n_experiments = 100
    embeddings_dim = x_train[0].shape[1]

    # Odd encoder kernels (3, 5, 7); layer settings are sampled once and
    # reused below when deriving head counts and window sizes.
    encoder_kernel_sizes = np.random.choice(
        [2 * i + 1 for i in range(1, 4)], size=n_experiments).tolist()
    layers = generate_layer_settings(embeddings_dim, n_experiments)

    # Every entry holds n_experiments sampled values, one per trial.
    params = {
        'epochs': np.random.choice(
            np.arange(1, 10), size=n_experiments).tolist(),
        'learning_rate': np.random.choice(
            10**np.linspace(-4, -0.5), size=n_experiments).tolist(),
        'batch_size': np.random.choice(
            [2**i for i in range(3, 8)], size=n_experiments).tolist(),
        'input_shape': [embeddings_dim] * n_experiments,
        'layers': layers,
        'encoder_kernel_size': encoder_kernel_sizes,
        'decoder_kernel_size': np.random.choice(
            [2 * i + 1 for i in range(2, 7)], size=n_experiments).tolist(),
        'encoder_heads': get_encoder_heads(layers),
        'decoder_heads': get_decoder_heads(layers),
        'window': get_1d_window_size(
            encoder_kernel_sizes, layers, get_encoder_size),
        'dropout': np.random.uniform(0, 0.3, size=n_experiments).tolist()
    }
    # Train only on samples labelled 0 and evaluate on the full test set.
    evaluated_hyperparams = random_search(
        (x_train[y_train == 0], x_test, None, y_test), model, params)
    return evaluated_hyperparams
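
For reference, the search spaces shared by the two examples can be inspected with plain NumPy; the snippet below only prints the ranges that the calls above sample from and does not depend on any project helpers.

import numpy as np

lr_grid = 10**np.linspace(-4, -0.5)        # 50 log-spaced learning rates
print(lr_grid.min(), lr_grid.max())        # ~1e-04 ... ~0.316
print([2**i for i in range(3, 8)])         # batch sizes: [8, 16, 32, 64, 128]
print([2 * i + 1 for i in range(1, 4)])    # encoder kernels: [3, 5, 7]
print([2 * i + 1 for i in range(2, 7)])    # decoder kernels: [5, 7, 9, 11, 13]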