Example #1
from pathlib import Path

from aim import Session
from hydra.utils import call, instantiate
from omegaconf import DictConfig, OmegaConf

# local_logger, optimal_action, write_scalars, and steps_violin_plotter are
# project-local helpers not shown in this excerpt; main is expected to be
# wrapped by a @hydra.main decorator that supplies cfg.
def main(cfg: DictConfig):
    session = Session(
        repo=(Path.home() / "projects/rlbook/experiments/outputs/bandit").as_posix(),
        experiment=cfg.bandit["_target_"].split(".")[-1],
    )

    testbed = instantiate(cfg.testbed)
    bandit = instantiate(cfg.bandit, Q_init=call(cfg.Q_init, testbed))

    local_logger.info(f"Running bandit: {cfg.run}")
    local_logger.debug(f"Testbed expected values: {testbed.expected_values}")
    local_logger.debug(f"bandit config: {cfg['bandit']}")
    local_logger.debug(f"run config: {cfg['run']}")
    session.set_params(OmegaConf.to_container(cfg.run), "experiment")
    bandit.run(testbed, **OmegaConf.to_container(cfg.run))

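    # Collect per-step results as a DataFrame and flag the optimal action per step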
    df_ar = bandit.output_df()
    df_ar = optimal_action(df_ar)
    local_logger.debug(f"\n{df_ar[['run', 'step', 'action', 'optimal_action', 'reward']].head(15)}")

    bandit_type = cfg.bandit._target_.split(".")[-1]
    Q_init = cfg.Q_init._target_.split(".")[-1]

    task_name = f"{bandit_type} - " + ", ".join(
        [
            f"{k}: {OmegaConf.select(cfg, v).split('.')[-1]}"
            if isinstance(OmegaConf.select(cfg, v), str)
            else f"{k}: {OmegaConf.select(cfg, v)}"
            for k, v in cfg.task_labels.items()
        ]
    )
    local_logger.debug(f"{task_name}")

    hp_testbed = OmegaConf.to_container(cfg.testbed)
    hp = OmegaConf.to_container(cfg.bandit)
    hp["Q_init"] = cfg.Q_init._target_
    hp["p_drift"] = hp_testbed["p_drift"]
    session.set_params(hp, "hyperparameters")

    # for i in range(min(3, cfg.run.n_runs)):
    #     fig = steps_violin_plotter(df_ar, testbed, run=i)
    #     writer.add_figure(f"run{i}", fig, global_step=cfg.run.steps)

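    # write_scalars (project helper) logs the per-step aggregates of the named
    # column to the session and returns the final metric value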
    final_avg_reward = write_scalars(df_ar, session, "reward", "average_reward", hp)

    final_optimal_action = write_scalars(
        df_ar, session, "optimal_action_percent", "optimal_action_percent", hp
    )
    final_metrics = {
        "average_reward": final_avg_reward,
        "optimal_action_percent": final_optimal_action,
    }
    session.set_params(final_metrics, "final_metrics")
    local_logger.debug(f"final_metrics: {final_metrics}")
Example #2
from aim import Session

import torch
import torch.nn as nn
import torchvision
import torchvision.transforms as transforms

# Create Aim Session
aim_sess = Session()

# Device configuration
device = torch.device('cpu')

# Hyperparameters
num_epochs = 5
num_classes = 10
batch_size = 50
learning_rate = 0.01

# aim - Track hyperparameters
aim_sess.set_params(
    {
        'num_epochs': num_epochs,
        'num_classes': num_classes,
        'batch_size': batch_size,
        'learning_rate': learning_rate,
    },
    name='hparams')

# MNIST dataset
train_dataset = torchvision.datasets.MNIST(root='./data/',
                                           train=True,
                                           transform=transforms.ToTensor(),
                                           download=True)
Example #3
import keras
from keras.layers import Conv2D, Dense, Dropout, Flatten, MaxPooling2D
from keras.models import Sequential

from aim import Session
from aim.keras import AimCallback  # assumed import path for aim's Keras callback

# input_shape, num_classes, batch_size, epochs, and the (x_train, y_train),
# (x_test, y_test) arrays come from data-preparation code not shown here.
model = Sequential()

# Conv block
model.add(
    Conv2D(32, kernel_size=(3, 3), activation='relu', input_shape=input_shape))
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))

# Dense block
model.add(Flatten())
model.add(Dense(128, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(num_classes, activation='softmax'))

model.compile(loss=keras.losses.categorical_crossentropy,
              optimizer=keras.optimizers.Adadelta(),
              metrics=['accuracy'])

model.fit(x_train,
          y_train,
          batch_size=batch_size,
          epochs=epochs,
          verbose=1,
          validation_data=(x_test, y_test),
          callbacks=[
              AimCallback(session=Session(experiment='test_keras_cb')),
          ])

# score = model.evaluate(x_test, y_test, verbose=0)
Example #4
from aim.tensorflow import AimCallback
from aim import Session

import tensorflow as tf

mnist = tf.keras.datasets.mnist

(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train, x_test = x_train / 255.0, x_test / 255.0

model = tf.keras.models.Sequential([
    tf.keras.layers.Flatten(input_shape=(28, 28)),
    tf.keras.layers.Dense(128, activation='relu'),
    tf.keras.layers.Dropout(0.2),
    tf.keras.layers.Dense(10)
])

loss_fn = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)

model.compile(optimizer='adam', loss=loss_fn, metrics=['accuracy'])

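# AimCallback forwards the metrics reported during model.fit (loss, accuracy)
# to the attached Aim session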
model.fit(
    x_train,
    y_train,
    epochs=5,
    callbacks=[AimCallback(session=Session(experiment='test_tf_1_keras_cb'))])
Example #5
from aim import Session

sess = Session()

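# Register run parameters with the session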
sess.set_params({
    'foo': 'bar',
})

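# Track a scalar metric named 'val' once per iteration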
for i in range(10):
    sess.track(i, name='val')
Example #6
from aim import Session

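# Two named experiments let the runs be compared side by side in the Aim UI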
sess1 = Session(experiment='line')
sess2 = Session(experiment='linex2')

sess1.set_params({
    'k': '1',
})
sess2.set_params({
    'k': '2',
})

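# Track the same metric in both experiments; 'linex2' rises twice as fast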
for i in range(10):
    sess1.track(i, name='val')
    sess2.track(i * 2, name='val')