import json
import os
import pickle
import shutil
from copy import copy
from datetime import datetime
from os.path import join

# get_name, get_model_base, read_or_create_gnn_dataset, detect_checkpoint_files,
# TestLinkPredGraph and TestTrainer are assumed to be imported elsewhere in this module.


def main_link_pred(models, args, data_loader):
    for model, param_grid in models.items():
        for params in param_grid:

            if args.h_dim is None:
                params["h_dim"] = args.node_emb_size
            else:
                params["h_dim"] = args.h_dim

            params["num_steps"] = args.n_layers

            date_time = str(datetime.now())
            print("\n\n")
            print(date_time)
            print(f"Model: {model.__name__}, Params: {params}")

            model_attempt = get_name(model, date_time)

            model_base = get_model_base(args, model_attempt)

            dataset = TestLinkPredGraph(data_loader=data_loader)

            # force the link prediction objective for this run
            args.objectives = "link_pred"

            from SourceCodeTools.models.graph.train.sampling_multitask2 import training_procedure

            trainer, scores = training_procedure(
                dataset, model, copy(params), args, model_base, trainer=TestTrainer
            )

            # Test harness: only the first model/parameter combination is
            # exercised; its scores are returned immediately.
            return scores
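

# A minimal sketch of how `main_link_pred` is expected to be invoked, assuming
# `models` maps a model class to an iterable of hyperparameter dicts; the
# class and parameter names here are hypothetical placeholders, not part of
# SourceCodeTools:
#
#     models = {SomeGNNModel: [{"dropout": 0.2}, {"dropout": 0.0}]}
#     scores = main_link_pred(models, args, data_loader)
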

def main_evaluation(models, args):
    # Evaluate every saved checkpoint for each model/parameter combination.
    for model, param_grid in models.items():
        for params in param_grid:

            if args.h_dim is None:
                params["h_dim"] = args.node_emb_size
            else:
                params["h_dim"] = args.h_dim

            date_time = str(datetime.now())
            print("\n\n")
            print(date_time)
            print(f"Model: {model.__name__}, Params: {params}")

            model_attempt = get_name(model, date_time)

            model_base = get_model_base(args, model_attempt)

            dataset = read_or_create_gnn_dataset(args=args, model_base=model_base)

            from SourceCodeTools.models.graph.train.sampling_multitask2 import evaluation_procedure

            checkpoints = detect_checkpoint_files(model_base)

            for epoch, ckpt_path in checkpoints:
                # Stage the checkpoint under the name evaluation_procedure
                # restores from, then evaluate it.
                print(f"Evaluating checkpoint from epoch {epoch}")
                shutil.copy(ckpt_path, os.path.join(model_base, "saved_state.pt"))

                evaluation_procedure(dataset, model, params, args, model_base)
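

# Hedged note on assumptions in `main_evaluation`: `detect_checkpoint_files`
# is expected to return (epoch, path) pairs, e.g.
#
#     [(0, "<model_base>/checkpoint_0.pt"), (1, "<model_base>/checkpoint_1.pt")]
#
# and each checkpoint is staged as saved_state.pt before being evaluated.
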

def main(models, args):
    # Train each model/parameter combination, then persist the embeddings,
    # hyperparameters and run metadata.
    for model, param_grid in models.items():
        for params in param_grid:

            if args.h_dim is None:
                params["h_dim"] = args.node_emb_size
            else:
                params["h_dim"] = args.h_dim

            params["num_steps"] = args.n_layers

            date_time = str(datetime.now())
            print("\n\n")
            print(date_time)
            print(f"Model: {model.__name__}, Params: {params}")

            model_attempt = get_name(model, date_time)

            model_base = get_model_base(args, model_attempt)

            dataset = read_or_create_gnn_dataset(args=args,
                                                 model_base=model_base)

            def write_params(args, params):
                # Merge CLI arguments with the current hyperparameters and
                # persist them; the activation callable is stored by name so
                # the dict is JSON-serializable. A local name is used instead
                # of rebinding `args`, which shadowed the outer Namespace.
                params_dump = copy(args.__dict__)
                params_dump.update(params)
                params_dump['activation'] = params_dump['activation'].__name__
                with open(join(model_base, "params.json"), "w") as mdata:
                    mdata.write(json.dumps(params_dump, indent=4))

            if not args.restore_state:
                write_params(args, params)

            from SourceCodeTools.models.graph.train.sampling_multitask2 import training_procedure

            trainer, scores = training_procedure(
                dataset, model, copy(params), args, model_base
            )

            trainer.save_checkpoint(model_base)

            print("Saving...")

            # store the activation by name so params can be JSON-serialized
            params['activation'] = params['activation'].__name__

            metadata = {
                "base": model_base,
                "name": model_attempt,
                "parameters": params,
                "layers": "embeddings.pkl",
                "mappings": "nodes.csv",
                "state": "state_dict.pt",
                "scores": scores,
                "time": date_time,
            }

            # args still holds the activation callable; swap it for its name
            # before merging so metadata stays JSON-serializable.
            args_dict = copy(args.__dict__)
            if callable(args_dict.get('activation')):
                args_dict['activation'] = args_dict['activation'].__name__
            metadata.update(args_dict)

            # pickle.dump(dataset, open(join(model_base, "dataset.pkl"), "wb"))
            with open(join(model_base, metadata['layers']), "wb") as emb_sink:
                pickle.dump(trainer.get_embeddings(), emb_sink)

            with open(join(model_base, "metadata.json"), "w") as mdata:
                mdata.write(json.dumps(metadata, indent=4))

            print("Done saving")