Example #1
0
                                                                  labels=labels,
                                                                  predict_category=args['predict_category'],
                                                                  device=args['device'],
                                                                  mode='test')

        # Test accuracy via the OGB evaluator; `test_y_predicts` holds class
        # scores, so argmax over dim=1 yields the predicted label per node.
        test_accuracy = get_ogb_evaluator(
            predicts=test_y_predicts.argmax(dim=1),
            labels=test_y_trues)

        # Per-epoch progress report: learning rate plus loss/accuracy for the
        # train, valid and test splits.
        print(
            f'Epoch: {epoch + 1}, learning rate: {optimizer.param_groups[0]["lr"]}, '
            f'train loss: {train_total_loss:.4f}, accuracy {train_accuracy:.4f}, \n'
            f'valid loss: {val_total_loss:.4f}, accuracy {val_accuracy:.4f}, \n'
            f'test loss: {test_total_loss:.4f}, accuracy {test_accuracy:.4f}')

        # Early stopping is driven by validation accuracy; the True flag
        # presumably marks the metric as higher-is-better — confirm against
        # early_stopping.step's signature.
        early_stop = early_stopping.step([('accuracy', val_accuracy, True)], model)

        if early_stop:
            break

    # load best model
    early_stopping.load_checkpoint(model)

    print('performing model inference...')
    # evaluate the best model
    model.eval()
    # Full-graph inference with the restored best checkpoint. The per-node-type
    # 'feat' dict is deep-copied, presumably so inference cannot mutate the
    # feature tensors stored on the graph — verify against model[0].inference.
    nodes_representation = model[0].inference(graph, copy.deepcopy(
        {ntype: graph.nodes[ntype].data['feat'] for ntype in graph.ntypes}), device=args['device'])

    # Classify only nodes of the target category, then slice out the train split.
    train_y_predicts = model[1](convert_to_gpu(nodes_representation[args['predict_category']], device=args['device']))[train_idx]
    train_y_trues = convert_to_gpu(labels[train_idx], device=args['device'])
Example #2
0
            device=args['device'],
            mode='test')

        # Test-set metrics: accuracy and macro-averaged F1 computed from the
        # argmax class predictions.
        test_accuracy, test_macro_f1 = evaluate_node_classification(
            predicts=test_y_predicts.argmax(dim=1), labels=test_y_trues)

        # Per-epoch progress report: learning rate plus loss/accuracy/macro-F1
        # for the train, valid and test splits.
        print(
            f'Epoch: {epoch + 1}, learning rate: {optimizer.param_groups[0]["lr"]}, train loss: {train_total_loss:.4f}, '
            f'accuracy {train_accuracy:.4f}, macro f1 {train_macro_f1:.4f}, \n'
            f'valid loss: {val_total_loss:.4f}, '
            f'accuracy {val_accuracy:.4f}, macro f1 {val_macro_f1:.4f} \n'
            f'test loss: {test_total_loss:.4f}, '
            f'accuracy {test_accuracy:.4f}, macro f1 {test_macro_f1:.4f}')

        # Early stopping tracks both validation metrics; the True flag
        # presumably marks higher-is-better — confirm against
        # early_stopping.step's signature.
        early_stop = early_stopping.step([('accuracy', val_accuracy, True),
                                          ('macro_f1', val_macro_f1, True)],
                                         model)

        if early_stop:
            break

    # load best model
    early_stopping.load_checkpoint(model)

    # evaluate the best model
    model.eval()

    nodes_representation, _ = model[0].inference(
        graph,
        copy.deepcopy({(stype, etype, dtype): graph.nodes[dtype].data['feat']
                       for stype, etype, dtype in graph.canonical_etypes}),
        # Keep the test scores from the epoch with the best (lowest) validation
        # RMSE seen so far; `final_result` holds the serialized scores that are
        # written out after training.
        if best_validate_RMSE is None or val_RMSE < best_validate_RMSE:
            best_validate_RMSE = val_RMSE
            # Round to 4 decimal places via string formatting before
            # converting back to float for JSON serialization.
            scores = {
                "RMSE": float(f"{test_RMSE:.4f}"),
                "MAE": float(f"{test_MAE:.4f}")
            }
            final_result = json.dumps(scores, indent=4)

        # Per-epoch progress report: learning rate plus loss/RMSE/MAE for the
        # train, validate and test splits.
        print(
            f'Epoch: {epoch}, learning rate: {optimizer.param_groups[0]["lr"]}, train loss: {train_total_loss:.4f}, RMSE {train_RMSE:.4f}, MAE {train_MAE:.4f}, \n'
            f'validate loss: {val_total_loss:.4f}, RMSE {val_RMSE:.4f}, MAE {val_MAE:.4f}, \n'
            f'test loss: {test_total_loss:.4f}, RMSE {test_RMSE:.4f}, MAE {test_MAE:.4f}'
        )

        # Early stopping tracks validation RMSE and MAE; the False flag
        # presumably marks lower-is-better — confirm against
        # early_stopping.step's signature.
        early_stop = early_stopping.step([('RMSE', val_RMSE, False),
                                          ('MAE', val_MAE, False)], model)

        if early_stop:
            break

    # Save the best test scores (the JSON string built in `final_result`)
    # to ../results/<dataset>/<model_name>.json.
    save_result_folder = f"../results/{args['dataset']}"
    # exist_ok=True already makes a pre-existing directory a no-op, so the
    # former race-prone `os.path.exists` pre-check is unnecessary.
    os.makedirs(save_result_folder, exist_ok=True)
    save_result_path = os.path.join(save_result_folder,
                                    f"{args['model_name']}.json")

    # The with-statement closes the file on exit; an explicit close() inside
    # the block (as before) was redundant.
    with open(save_result_path, 'w') as file:
        file.write(final_result)