Example #1
0
            # NOTE(review): this fragment begins mid if/elif chain — the earlier
            # optimizer branches (e.g. SGD) are outside the visible chunk.
            elif args.optimizer =='Adam':
                # Cox proportional-hazards model wrapping `net`, optimized with Adam
                # at the CLI-supplied learning rate and weight decay.
                model = CoxPH(net, optimizer=tt.optim.Adam(lr=args.lr, weight_decay=args.weight_decay),device=device)
    

            # One wandb run per (fold, loss, optimizer) group; the run name encodes
            # architecture / regularisation hyper-parameters for easy comparison.
            wandb.init(project='icml_new_'+args.dataset, 
                    group=f'fold{fold}_'+args.loss+args.optimizer,
                    name=f'L{args.num_layers}N{args.num_nodes}D{args.dropout}W{args.weight_decay}B{args.batch_size}',
                    config=args)

            # Log the network's gradients/parameters to wandb.
            wandb.watch(net)

            # Loss configuration ============================================================

            # Early-stopping patience (epochs); presumably consumed by a callback
            # defined outside this fragment — TODO confirm.
            patience=5
            # Pick the DSAFT loss variant by name; an unrecognised value silently
            # keeps the model's default loss.
            if args.loss =='rank':
                model.loss = DSAFTRankLoss(alpha=args.alpha, beta=args.beta)
            elif args.loss == 'mae':
                model.loss = DSAFTMAELoss()
            elif args.loss == 'rmse':
                model.loss = DSAFTRMSELoss()
            elif args.loss =='kspl':
                model.loss = DSAFTNKSPLLoss(args.an, args.sigma)
            elif args.loss =='kspl_new':
                model.loss = DSAFTNKSPLLossNew(args.an, args.sigma)

            # Training ======================================================================
            batch_size = args.batch_size
            # LR-range test: probe learning rates on the training data and adopt
            # the suggested one, overriding the args.lr used at construction.
            lrfinder = model.lr_finder(x_train, y_train, batch_size, tolerance=10)
            best = lrfinder.get_best_lr()

            model.optimizer.set_lr(best)
Example #2
0
    # NOTE(review): this fragment begins mid expression — the constructor call
    # that `device=device)` closes is outside the visible chunk.
                      device=device)

    # One wandb run per (loss, optimizer) pairing; the run name encodes
    # architecture / regularisation hyper-parameters for comparison in the UI.
    wandb.init(
        project=args.dataset,
        group=args.loss + '_' + args.optimizer,
        name=
        f'L{args.num_layers}N{args.num_nodes}D{args.dropout}W{args.weight_decay}B{args.batch_size}',
        config=args)

    # Log the network's gradients/parameters to wandb.
    wandb.watch(net)

    # Loss configuration ============================================================

    # Early-stopping patience (epochs); presumably consumed by a callback
    # defined outside this fragment — TODO confirm.
    patience = 10
    # Pick the DSAFT loss variant by name; an unrecognised value silently
    # keeps the model's default loss.
    if args.loss == 'rank':
        model.loss = DSAFTRankLoss()
    elif args.loss == 'mae':
        model.loss = DSAFTMAELoss()
    elif args.loss == 'rmse':
        model.loss = DSAFTRMSELoss()
    elif args.loss == 'kspl':
        model.loss = DSAFTNKSPLLoss(args.an, args.sigma)
    elif args.loss == 'kspl_new':
        model.loss = DSAFTNKSPLLossNew(args.an, args.sigma)

    # Training ======================================================================
    batch_size = args.batch_size
    # LR-range test is run and `best` is computed, but — unlike the first
    # example in this file — the suggested rate is never applied (the set_lr
    # call below is commented out), so the optimizer keeps its constructed lr.
    # NOTE(review): verify this is intentional.
    lrfinder = model.lr_finder(x_train, y_train, batch_size, tolerance=10)
    best = lrfinder.get_best_lr()

    # model.optimizer.set_lr(args.lr)