def __init__(self, args):
    """Store the CLI arguments and build the three models on CPU.

    Args:
        args: parsed argument namespace; only ``args.nClass`` is read here,
            the full namespace is kept on ``self.args``.
    """
    self.args = args
    self.nClass = args.nClass
    # NOTE(review): this variant is pinned to CPU, unlike the training
    # __init__ which selects CUDA when available — confirm this is intended.
    self.device = torch.device('cpu')
    # Sub-models: denoising autoencoder, ordinal regressor, rain/no-rain classifier.
    self.autoencoder = AutoencoderBN().to(self.device)
    self.regressor = OrdinalRegressionModel(self.nClass).to(self.device)
    self.classification = rainFallClassification().to(self.device)
def __init__(self, args, trainRegressionDataLoader, trainRegressionClassificationLoader,
             testDataLoader, trainRainFallLoader, means, std):
    """Build the full training pipeline: data loaders, models, losses,
    optimizers, LR scheduler, output directory, logger and bookkeeping counters.

    Args:
        args: parsed CLI namespace (out, sigma, beta, earlyStop, nClass,
            batch_size, featureNums, lr, weight_decay, epochs, interval, ...).
        trainRegressionDataLoader: loader for the regression training set.
        trainRegressionClassificationLoader: loader for the regression-as-
            classification training set.
        testDataLoader: loader for evaluation.
        trainRainFallLoader: loader for the rain/no-rain classification set.
        means, std: per-target statistics used for normalization.
    """
    # Prefer GPU when one is present.
    self.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

    # --- data loaders -----------------------------------------------------
    self.trainRegressionDataLoader = trainRegressionDataLoader
    self.trainRegressionClassificationLoader = trainRegressionClassificationLoader
    self.testDataLoader = testDataLoader
    self.classificationLoader = trainRainFallLoader

    self.run_datetime = datetime.datetime.now()

    # --- hyper-parameters from the CLI ------------------------------------
    self.out_path = args.out
    self.sigma = args.sigma
    self.beta = args.beta
    self.earlyStop = args.earlyStop
    self.nClass = args.nClass

    # Input-noise parameters; presumably injected by AutoencoderBN as
    # denoising noise over (batch, channels, 17, 17) features — TODO confirm.
    self.noiseMean = torch.zeros(args.batch_size, args.featureNums, 17, 17)
    self.noiseStd = 1e-3

    # --- models -----------------------------------------------------------
    self.model = AutoencoderBN(self.noiseMean, self.noiseStd).to(self.device)
    self.regressionModel = Regression(self.nClass).to(self.device)
    self.classificationModel = regressionClassification(self.nClass).to(self.device)
    self.rainFallClassifierModel = rainFallClassification().to(self.device)

    # --- normalizers and losses -------------------------------------------
    self.meanStdNormalizer = MeanVarianceNormalizer(means, std).to(self.device)
    self.meanvarLoss = MeanVarLoss(self.nClass).to(self.device)
    self.normaliedLoss = NormalizerLoss(std).to(self.device)
    self.focalLoss = FocalLoss(self.nClass, alpha=0.25, gamma=2).to(self.device)
    self.rainFocalLoss = FocalLoss(2, alpha=0.25, gamma=2).to(self.device)

    # --- optimizers and scheduler -----------------------------------------
    # Regressor and autoencoder share one optimizer; each param group uses
    # the base lr while the optimizer defaults are 10x (applies only to
    # groups without explicit values).
    self.regressionOptim = torch.optim.Adam(
        [
            {'params': self.regressionModel.parameters(),
             'lr': args.lr, 'weight_decay': args.weight_decay},
            {'params': self.model.parameters(),
             'lr': args.lr, 'weight_decay': args.weight_decay},
        ],
        lr=args.lr * 10,
        weight_decay=args.weight_decay * 10,
    )
    self.classificationOptim = torch.optim.Adam(
        self.classificationModel.parameters(), lr=args.lr * 100)
    self.rainFallOptim = torch.optim.Adam(
        self.rainFallClassifierModel.parameters(), lr=args.lr * 10)
    self.scheduler = torch.optim.lr_scheduler.StepLR(
        self.regressionOptim, step_size=750 * 2)

    self.criterion = nn.MSELoss()
    self.classificationCriterion = nn.CrossEntropyLoss()

    # --- output directory, logger, run parameters -------------------------
    if not os.path.exists(self.out_path):
        os.makedirs(self.out_path)
    self.logger = Logger(self.out_path)
    # Persist the run configuration next to the outputs for reproducibility.
    with open(os.path.join(self.out_path, "para.json"), "w") as f:
        json.dump(args.__dict__, f)

    # --- bookkeeping counters and best-so-far trackers --------------------
    self.epoch = 0
    self.iteration = 0
    self.classificationIteration = 0
    self.rainfallclassificationIteration = 0
    self.test_step = 0
    self.max_epoch = args.epochs
    self.val_interval = args.interval
    self.res = 0
    self.bestConstructLoss = 1e7
    self.bestConstructEpoch = 0
    self.best_error = 1e7
    self.best_res_epoch = 0