Example No. 1
def main(*args):
    file_name = os.path.basename(os.path.splitext(__file__)[0])
    time = datetime.datetime.now().isoformat()
    description = 'redwine'
    # description = 'TEST'
    logger = Logger(log_dir=os.path.join(file_name, description, time))
    logger.log_file(__file__)
    print('log_dir', logger.log_dir)

    # Red wine data
    data = pd.read_csv('../data/winequality-red.csv')
    X = np.asarray(data.drop(['quality'],
                             axis=1))  # Drop the labels for the X-data
    # Shift labels so the lowest quality score maps to 0
    y = np.asarray(data['quality'] - min(data['quality']), int)

    # --- Model and loss ---
    model = Model()

    optimizer = tf.optimizers.Adam(0.001)
    loss = tf.keras.losses.SparseCategoricalCrossentropy()
    model.compile(optimizer=optimizer, loss=loss, metrics=['accuracy'])
    # model.build((1, 2))
    # print(model.summary())
    print(*model.trainable_variables, sep='\n')

    keep_best_callback = KeepBestCallback()
    history: tf.keras.callbacks.History = model.fit(
        X,
        y,
        validation_split=0.2,
        epochs=1000,
        callbacks=[keep_best_callback],
        verbose=2)
    print('Best', 'val_accuracy',
          history.history['val_accuracy'][keep_best_callback.best_epoch])
    print('Best', 'val_loss',
          history.history['val_loss'][keep_best_callback.best_epoch])
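
Every example on this page uses a project-local KeepBestCallback that is not shown here. A minimal sketch of what it plausibly does, inferred only from the attributes the examples read (best_epoch, best_val_loss, best_weights); the actual implementation may differ:

import tensorflow as tf


class KeepBestCallback(tf.keras.callbacks.Callback):
    """Remember the weights of the epoch with the lowest validation loss."""

    def on_train_begin(self, logs=None):
        self.best_epoch = 0
        self.best_val_loss = float('inf')
        self.best_weights = None

    def on_epoch_end(self, epoch, logs=None):
        val_loss = logs.get('val_loss')
        if val_loss is not None and val_loss < self.best_val_loss:
            self.best_epoch = epoch
            self.best_val_loss = val_loss
            # Copy so that later epochs don't mutate the saved weights
            self.best_weights = [w.copy() for w in self.model.get_weights()]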
Example No. 2
def main(*args):
    file_name = os.path.basename(os.path.splitext(__file__)[0])
    time = datetime.datetime.now().isoformat()
    data_idx = int(args[0])
    # data_idx = 6
    description = f'no_qc_16neurons_relu_data{data_idx}'
    # description = 'TEST'
    logger = Logger(log_dir=os.path.join(file_name, description, time))
    logger.log_file(__file__)
    print('log_dir', logger.log_dir)

    with open(__file__) as this_file:
        this_file_contents = this_file.read()  # Read own source; the file is also logged above
    # This data goes into C1 and T1
    plain_x, labels = my_datasets.article_2003_09887_data(data_idx)
    colors = ['blue' if label == 1 else 'red' for label in labels]
    plain_x = normalize_data(plain_x)
    plain_x = tf.convert_to_tensor(plain_x, dtype=float_type)
    labels = tf.convert_to_tensor(labels, dtype=float_type)

    # --- Feature extraction ---
    features = tf.convert_to_tensor([
        plain_x[..., 0],
        plain_x[..., 1],
    ])
    features = tf.transpose(features)

    # --- Model and loss ---
    model = U3_U()

    optimizer = tf.optimizers.Adam(0.01)
    # loss = P00MaximisationLoss()
    model.compile(optimizer=optimizer, loss='binary_crossentropy', metrics=['accuracy'])
    # model.build((1, 2))
    # print(model.summary())
    print(*model.trainable_variables, sep='\n')

    keep_best_callback = KeepBestCallback()

    history = model.fit(features, labels,
                        validation_split=0.2,
                        epochs=100,
                        callbacks=[keep_best_callback],
                        use_multiprocessing=True)

    # --- Test result ---
    model_fit = model
    model_fit(features[:2, ...])  # "Phony init": dummy forward pass to build the variables
    model_fit.set_weights(keep_best_callback.best_weights)  # Restore the best epoch's weights
    # out = model_fit(features)
    # x = plain_x.numpy()
    # c_out = out.numpy().flatten()
    # # labels = labels.numpy()
    # fig = plt.figure()
    # plt.title(f'{description}')
    # plt.scatter(x[:, 0], x[:, 1], c=c_out, cmap=plt.get_cmap('bwr'))
    # plt.show()

    X = plain_x.numpy()
    lab = labels.numpy()
    int_labels = np.array(lab > .5, int)
    fig = plt.figure(figsize=(5, 5))
    plot_decision_regions(X=X, y=int_labels, clf=model_fit, legend=2)
    train_labels = model_fit.predict(X, use_multiprocessing=True)
    plt.title(description)
    plt.show()

    logger.log_variables(fig, 'fig',
                         X, 'X',
                         int_labels, 'y',
                         train_labels, 'train_labels',
                         keep_best_callback.best_weights, 'weights',
                         history.history, 'history')
    logger.log_figure(fig, 'fig.pdf')
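
The model_fit(features[:2, ...]) call above (the "phony init") runs a dummy forward pass so that Keras creates the subclassed model's variables; set_weights raises an error on a model that has not been built. An equivalent, more explicit alternative, assuming the model supports build() and that the per-sample feature shape is (2,) as in this example:

model.build(input_shape=(None, 2))  # Create the variables without a forward pass
model.set_weights(keep_best_callback.best_weights)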
Example No. 3
import datetime
import os

import numpy as np
import tensorflow as tf

from logtools import Logger

π = np.pi

file_name = os.path.basename(os.path.splitext(__file__)[0])
time = datetime.datetime.now().isoformat()
description = 'MNIST_intermediate_layer'
# description = 'TEST'
logger = Logger(log_dir=os.path.join(file_name, description, time))
logger.log_file(__file__)
print('log_dir', logger.log_dir)

# sHHHH = tensor(4*[H]) @ s0000

class NN(tf.keras.Sequential):
    """Flatten followed by a 10-unit linear layer; the QC() activation stage is commented out."""

    def __init__(self):
        input_dense = tf.keras.layers.Dense(10, name='input_linear_combi')
        # output_dense = tf.keras.layers.Dense(10, name='mnist')

        super(NN, self).__init__(layers=[
            tf.keras.layers.Flatten(),
            input_dense,
            # QC(),  # This works as an activation function
            # output_dense
        ])
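
NN itself only flattens the input and applies a 10-unit linear layer; the quantum-circuit stage (QC()) is commented out. A hypothetical usage sketch on MNIST, not part of the original snippet, using the standard Keras dataset loader:

# Hypothetical usage; assumes only the NN class defined above.
(x_train, y_train), _ = tf.keras.datasets.mnist.load_data()
x_train = x_train / 255.0  # Scale pixel values to [0, 1]

model = NN()
model.compile(optimizer='adam',
              loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
              metrics=['accuracy'])  # Dense(10) has no activation, so the outputs are logits
model.fit(x_train, y_train, validation_split=0.2, epochs=5)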
Example No. 4
def main(data_idx):
    # Data from -1 to 1
    data_generator = my_datasets.complex_1D_functions
    data_index = data_idx
    x, y = data_generator(data_index)

    file_name = os.path.basename(os.path.splitext(__file__)[0])
    time = datetime.datetime.now().isoformat()
    description = f'{data_generator.__name__}({data_index})_smart_iSWAP_rxzx_6'
    # description = 'TEST_U_bias'
    logger = Logger(log_dir=os.path.join(file_name, description, time))
    logger.log_file(__file__)
    print('log_dir', logger.log_dir)


    # --- Feature extraction ---
    def calc_features(x):
        features = tf.convert_to_tensor([  # Features as described in the article
            np.arccos(x**2),
            np.arcsin(x),
        ])
        return tf.transpose(features)  # (batch, feature)

    features = calc_features(x)

    # --- Model and loss ---
    model = Model()

    optimizer = tf.optimizers.Adam(0.001)
    model.compile(optimizer=optimizer, loss='mean_squared_error')
    print(*model.trainable_variables, sep='\n')

    keep_best_callback = KeepBestCallback()
    # Created but not passed to fit() below, so early stopping is effectively disabled
    early_stopping_callback = tf.keras.callbacks.EarlyStopping(min_delta=0.00001, patience=100)

    history = model.fit(features, y,
                        validation_split=0.2,
                        epochs=10000,
                        callbacks=[keep_best_callback],
                        use_multiprocessing=True,
                        verbose=2)

    # --- Test result ---
    model_fit = model
    model_fit(features[:10, ...])  # "Phony init": dummy forward pass to build the variables
    model_fit.set_weights(keep_best_callback.best_weights)  # Restore the best epoch's weights

    fig = plt.figure()
    x_fit = np.linspace(-1, 1, 1000)
    features_fit = calc_features(x_fit)
    y_fit = model_fit.predict(features_fit)
    plt.plot(x, y, '.b', label='Data')
    plt.plot(x_fit, y_fit, '-r', label='Fit')
    plt.title(description)
    plt.show()

    # X = f.numpy()
    # lab = labels.numpy()
    # int_labels = np.array(lab>.5, int)
    # fig = plt.figure(figsize=(5, 5))
    # plot_decision_regions(X=X, y=int_labels, clf=model_fit, legend=2)
    # train_labels = model_fit.predict(X, use_multiprocessing=True)
    # plt.title(description)
    # plt.show()

    logger.log_variables(fig, 'fig',
                         x, 'X',
                         y, 'y',
                         keep_best_callback.best_weights, 'weights',
                         history.history, 'history')
    logger.log_text(f'best_val_loss = {keep_best_callback.best_val_loss}', 'best_val_loss.txt')
    logger.log_text(f'{keep_best_callback.best_weights}', 'weights.txt')
    logger.log_figure(fig, 'fig.pdf')
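
Since early_stopping_callback above is never handed to model.fit, training always runs the full 10000 epochs. To actually enable early stopping, it only needs to be added to the callbacks list; a sketch changing nothing else:

history = model.fit(features, y,
                    validation_split=0.2,
                    epochs=10000,
                    callbacks=[keep_best_callback, early_stopping_callback],
                    use_multiprocessing=True,
                    verbose=2)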