Example #1
def test_test():
    """ """
    import pinot
    import copy

    layer = pinot.representation.dgl_legacy.gn()
    net_representation = pinot.representation.Sequential(
        layer, [32, "tanh", 32, "tanh", 32, "tanh"])
    net = pinot.Net(net_representation)

    train = pinot.Train(
        net=net,
        data=pinot.data.utils.batch(pinot.data.esol()[:10], 5),
        n_epochs=1,
        optimizer=torch.optim.Adam(net.parameters()),
    )

    train.train()

    test = pinot.Test(
        net=net,
        data=pinot.data.utils.batch(pinot.data.esol()[:10], 5),
        metrics=[pinot.rmse, pinot.r2],
        states=train.states,
    )

    test.test()
Example #2
def setup():

    import torch
    import pinot

    def f(x):
        """Example from
        https://pyro.ai/examples/bo.html

        Parameters
        ----------
        x :
            

        Returns
        -------

        """
        return (6 * x - 2)**2 * torch.sin(12 * x - 4)

    x = torch.linspace(0, 1, 100)[:, None]
    y = f(x)

    net = pinot.Net(representation=torch.nn.Sequential(torch.nn.Linear(1, 50),
                                                       torch.nn.Tanh()))

    unseen_data = x

    return net, unseen_data
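Example #3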
def bo():
    """ """
    import pinot

    def f(x):
        """Example from
        https://pyro.ai/examples/bo.html

        Parameters
        ----------
        x :
            

        Returns
        -------

        """
        return (6 * x - 2)**2 * torch.sin(12 * x - 4)

    x = torch.linspace(0, 1, 100)[:, None]
    y = f(x)

    net = pinot.Net(representation=torch.nn.Sequential(torch.nn.Linear(1, 50),
                                                       torch.nn.Tanh()))

    return pinot.active.experiment.BayesOptExperiment(
        net=net,
        data=torch.cat([x, y], dim=1),
        optimizer=torch.optim.Adam(net.parameters(), 1e-3),
        acquisition=pinot.active.acquisition.probability_of_improvement,
        num_epochs=10,
    )
Example #4
def test_train():
    """ """
    import pinot

    layer = pinot.representation.dgl_legacy.GN
    net_representation = pinot.representation.Sequential(
        layer, [32, "tanh", 32, "tanh", 32, "tanh"])

    net = pinot.Net(net_representation)

    train = pinot.Train(
        net=net,
        data=pinot.data.esol()[:10],
        n_epochs=1,
        optimizer=torch.optim.Adam(net.parameters()),
    )

    train.train()
Example #5
def test_train_and_test():
    """ """
    import pinot

    layer = pinot.representation.dgl_legacy.gn()
    net_representation = pinot.representation.Sequential(
        layer, [32, "tanh", 32, "tanh", 32, "tanh"])
    net = pinot.Net(net_representation)

    train_and_test = pinot.TrainAndTest(
        net=net,
        optimizer=torch.optim.Adam(net.parameters(), 1e-3),
        n_epochs=1,
        data_tr=pinot.data.utils.batch(pinot.data.esol()[:10], 5),
        data_te=pinot.data.utils.batch(pinot.data.esol()[:10], 5),
    )

    print(train_and_test)
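Example #5 only constructs the experiment and prints its description. A minimal sketch of actually executing it and rendering a report, following the run()/markdown pattern used in Examples #7 and #8 below (this assumes pinot.TrainAndTest is the same class exposed there as pinot.app.experiment.TrainAndTest):

    # run the experiment and write the result table to disk
    result = train_and_test.run()

    with open("result_table.md", "w") as f_handle:
        f_handle.write(pinot.app.report.markdown(result))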
Example #6
def run(args):
    # get single layer
    layer = getattr(
        pinot.representation,
        args.model).GN

    # nest layers together to form the representation learning
    net_representation = pinot.representation.Sequential(
        layer,
        eval(args.config)) # output mu and sigma

    # use the last integer in the config as the input dimension of the prediction layer
    param_in_units = [x for x in eval(args.config) if isinstance(x, int)][-1]

    # construct a separated prediction net
    net_regression = pinot.regression.Linear(
        param_in_units,
        int(args.n_params))

    # get the distribution class
    distribution_class = getattr(
        getattr(
            torch.distributions,
            args.distribution.lower()),
        args.distribution.capitalize())

    if args.representation_parameter != '':
        net_representation.load_state_dict(
            torch.load(args.representation_parameter))

    if args.regression_parameter != '':
        net_regression.load_state_dict(
            torch.load(args.regression_parameter))

    net = pinot.Net(
        net_representation, 
        net_regression,
        distribution_class)

    # get the entire dataset
    ds = getattr(
        pinot.data,
        args.data)()

    # not normalizing for now
    # y_mean, y_std, norm, unnorm = pinot.data.utils.normalize(ds) 

    # get data specs
    batch_size = int(args.batch_size)
    partition = [int(x) for x in args.partition.split(':')]
    assert len(partition) == 2, 'only training and test here.'

    # batch
    ds = pinot.data.utils.batch(ds, batch_size)
    ds_tr, ds_te = pinot.data.utils.split(ds, partition)
    
    # get the training specs
    lr = float(args.lr)
    opt = getattr(torch.optim, args.opt)(
        net.parameters(),
        lr)
    n_epochs = int(args.n_epochs)

    # define reporters
    now = datetime.now() 
    time_str = now.strftime("%Y-%m-%d-%H%M%S%f")
    os.mkdir(time_str)


    markdown_reporter = pinot.app.reporters.MarkdownReporter(
        time_str, ds_tr, ds_te, args=args, net=net)
    visual_reporter = pinot.app.reporters.VisualReporter(
        time_str, ds_tr, ds_te)
    weight_reporter = pinot.app.reporters.WeightReporter(
        time_str)

    reporters = [
        markdown_reporter,
        visual_reporter,
        weight_reporter]

    pinot.app.utils.train(net, ds_tr, ds_te, opt, reporters, n_epochs)
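For context, this run(args) (like the ones in Examples #7 and #8) relies on module-level imports of pinot, torch, os, and datetime that lie outside the excerpt, and it expects an argparse-style namespace. A minimal sketch of building such a namespace; the attribute names come from the code above, while the default values are illustrative assumptions, not the original script's defaults:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--model', default='dgl_legacy')                              # assumed module name
parser.add_argument('--config', default="[32, 'tanh', 32, 'tanh', 32, 'tanh']")   # eval'd into a list
parser.add_argument('--n_params', default='2')
parser.add_argument('--distribution', default='normal')
parser.add_argument('--representation_parameter', default='')
parser.add_argument('--regression_parameter', default='')
parser.add_argument('--data', default='esol')
parser.add_argument('--batch_size', default='32')
parser.add_argument('--partition', default='4:1')
parser.add_argument('--lr', default='1e-3')
parser.add_argument('--opt', default='Adam')
parser.add_argument('--n_epochs', default='10')

run(parser.parse_args())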
Example #7
def run(args):
    """

    Parameters
    ----------
    args :


    Returns
    -------

    """
    if args.info:
        logging.basicConfig(level=logging.INFO)

    logs = logging.getLogger("pinot")
    net_representation = None
    # If no pretrained generative model is specified
    if args.pretrained_gen_model is None:
        logs.info(
            "No pretrained model is specified, training generative model "
            "using background data ...")
        # Load the background training data
        logs.info("Loading dataset: " + args.background_data)
        background_data = getattr(pinot.data, args.background_data)()
        # Get the number of node features and initialize representation
        # layer as a variational auto-encoder
        input_feat_dim = background_data[0][0].ndata["h"].shape[1]

        batched_background_data = pinot.data.utils.batch(
            background_data, args.batch_size_gen)

        net_representation = GCNModelVAE(
            input_feat_dim,
            gcn_type=args.layer,
            gcn_hidden_dims=args.hidden_dims_gvae,
            embedding_dim=args.embedding_dim,
        )

        # And then train this model
        gen_optimizer = pinot.app.utils.optimizer_translation(
            args.optimizer_generative,
            lr=args.lr_generative)(net_representation)
        logs.info("Training generative model ...")
        generative_train = pinot.app.experiment.Train(
            net_representation,
            batched_background_data,
            gen_optimizer,
            args.n_epochs_generative,
        )

        generative_train.train()
        # When done, save the generative model
        torch.save(net_representation, args.save_model)
        logs.info(
            "Finished training generative model and saving trained model")

    else:
        # Load the pretrained generative model
        logs.info("Loading pretrained generative model")
        net_representation = torch.load(args.pretrained_gen_model)
        logs.info("Finished loading!")

    # Freeze the gradient if the user does not specify --free_gradient
    if not args.free_gradient:
        for param in net_representation.parameters():
            param.requires_grad = False

    # Initialize the Net with the generative model as its representation
    net = pinot.Net(net_representation, noise_model=args.noise_model)

    # get the entire dataset
    ds = getattr(pinot.data, args.data)()

    # not normalizing for now
    # y_mean, y_std, norm, unnorm = pinot.data.utils.normalize(ds)

    # get data specs
    batch_size = int(args.batch_size)
    partition = [int(x) for x in args.partition.split(":")]
    assert len(partition) == 2, "only training and test here."

    # batch
    ds = pinot.data.utils.batch(ds, batch_size)
    ds_tr, ds_te = pinot.data.utils.split(ds, partition)

    if torch.cuda.is_available():
        ds_tr = [(g.to(torch.device("cuda:0")), y.to(torch.device("cuda:0")))
                 for g, y in ds_tr]
        ds_te = [(g.to(torch.device("cuda:0")), y.to(torch.device("cuda:0")))
                 for g, y in ds_te]

        net = net.to(torch.device("cuda:0"))

    optimizer = pinot.app.utils.optimizer_translation(
        args.optimizer,
        lr=args.lr,
        kl_loss_scaling=1.0 / float(len(ds_tr)),
    )(net)

    train_and_test = pinot.app.experiment.TrainAndTest(
        net=net,
        data_tr=ds_tr,
        data_te=ds_te,
        optimizer=optimizer,
        n_epochs=args.n_epochs,
    )

    result = train_and_test.run()

    os.makedirs(args.out, exist_ok=True)

    torch.save(net.state_dict(), args.out + "/model_state_dict.th")

    with open(args.out + "/architecture.txt", "w") as f_handle:
        f_handle.write(str(train_and_test))

    with open(args.out + "/result_table.md", "w") as f_handle:
        f_handle.write(pinot.app.report.markdown(result))

    curves = pinot.app.report.curve(result)

    for spec, curve in curves.items():
        np.save(args.out + "/" + "_".join(spec) + ".npy", curve)

    with open(args.out + "/result.html", "w") as f_handle:
        f_handle.write(pinot.app.report.html(result))
Example #8
def run(args):
    """

    Parameters
    ----------
    args :
        

    Returns
    -------

    """
    layer = pinot.representation.dgl_legacy.gn(model_name=args.layer)

    net_representation = pinot.representation.Sequential(layer=layer,
                                                         config=args.config)

    net = pinot.Net(
        net_representation,
        output_regressor_class=getattr(pinot.regressors,
                                       args.output_regressor),
    )

    # get the entire dataset
    ds = getattr(pinot.data, args.data)()

    # not normalizing for now
    # y_mean, y_std, norm, unnorm = pinot.data.utils.normalize(ds)

    # get data specs
    batch_size = int(args.batch_size)
    partition = [int(x) for x in args.partition.split(":")]
    assert len(partition) == 2, "only training and test here."

    # batch

    if "Exact" in args.output_regressor:
        ds_tr, ds_te = pinot.data.utils.split(ds, partition)

        ds_tr = pinot.data.utils.batch(ds_tr, len(ds_tr))
        ds_te = pinot.data.utils.batch(ds_te, len(ds_te))

    elif 'mixed' in args.data:
        ds = ds.to(torch.device('cuda:0'))
        ds_tr, ds_te = ds.split(partition)
        ds_tr = ds_tr.view('fixed_size_batch', batch_size=args.batch_size)
        ds_te = ds_te.view('fixed_size_batch', batch_size=len(ds_te))

    else:
        ds = pinot.data.utils.batch(ds, batch_size)
        ds_tr, ds_te = pinot.data.utils.split(ds, partition)

    if torch.cuda.is_available() and 'mixed' not in args.data:
        ds_tr = [(g.to(torch.device("cuda:0")), y.to(torch.device("cuda:0")))
                 for g, y in ds_tr]
        ds_te = [(g.to(torch.device("cuda:0")), y.to(torch.device("cuda:0")))
                 for g, y in ds_te]

        net = net.to(torch.device("cuda:0"))

    optimizer = pinot.app.utils.optimizer_translation(
        args.optimizer,
        weight_decay=0.01,
        lr=args.lr,
    )(net)

    train_and_test = pinot.app.experiment.TrainAndTest(
        net=net,
        data_tr=ds_tr,
        data_te=ds_te,
        optimizer=optimizer,
        n_epochs=args.n_epochs,
    )

    result = train_and_test.run()

    os.mkdir(args.out)

    torch.save(net.state_dict(), args.out + "/model_state_dict.th")

    with open(args.out + "/architecture.txt", "w") as f_handle:
        f_handle.write(str(train_and_test))

    with open(args.out + "/result_table.md", "w") as f_handle:
        f_handle.write(pinot.app.report.markdown(result))

    curves = pinot.app.report.curve(result)

    for spec, curve in curves.items():
        np.save(args.out + "/" + "_".join(spec) + ".npy", curve)

    with open(args.out + "/result.html", "w") as f_handle:
        f_handle.write(pinot.app.report.html(result))
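Both of the last two examples persist the trained weights with torch.save(net.state_dict(), ...). A minimal sketch of restoring them later, assuming net is rebuilt with exactly the same constructor arguments as in Example #8 above (load_state_dict requires a matching architecture):

# rebuild the net as in Example #8, then load the saved weights
net = pinot.Net(
    net_representation,
    output_regressor_class=getattr(pinot.regressors, args.output_regressor),
)
net.load_state_dict(torch.load(args.out + "/model_state_dict.th"))
net.eval()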