        self.record_config()

    def record_config(self):
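        # Persist the training arguments as JSON alongside the log files.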
        with open(os.path.join(self.log_dir, 'TrainConfig.json'), 'w') as f:
            f.write(json.dumps(self.args))

    def get_log_dir(self):
        return getattr(self, 'log_dir', None)

    def get_model_dir(self):
        return getattr(self, 'model_dir', None)

    def get_result_dir(self):
        return getattr(self, 'result_dir', None)


if __name__ == "__main__":
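    # Smoke test: build the default training config, create the logger, and print the model directory.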
    args = Config().get_config()
    logger = TrainLogger(args)
    logger.record_config()
    model_path = logger.get_model_dir()
    print(model_path)
Example #2
            running_loss.update(loss.item(), label.size(0))
            running_cindex.update(cindex, pair)

    # Average the accumulated metrics over the epoch, then reset the meters.
    epoch_loss = running_loss.get_average()
    epoch_cindex = running_cindex.get_average()
    running_loss.reset()
    running_cindex.reset()

    model.train()  # switch the model back to training mode after evaluation

    return epoch_loss, epoch_cindex

# %%
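# 5-fold cross-validation: each fold gets its own config, logger, and data split.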
for fold in range(5):
    config = Config()
    args = config.get_config()
    args['fold'] = fold
    logger = TrainLogger(args)
    logger.info(__file__)

    data_root = args.get("data_root")
    DATASET = args.get("dataset")
    split_type = args.get("split_type")
    save_model = args.get("save_model")
    fold = args.get("fold")

    fpath = os.path.join(data_root, DATASET)
    dp = DataPrepared(fpath)
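    # read_sets returns the row indices of this fold's train/val/test split.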
    train_index, val_index, test_index = dp.read_sets(fold, split_type=split_type)
    df = dp.get_data()
Example #3
        self.result_dir = os.path.join(load_dir, 'result')
        create_dir([self.result_dir])

        log_path = os.path.join(self.log_dir, 'Test.log')
        super().__init__(log_path)

        self.record_config()

    def record_config(self):
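        # Persist the test arguments as JSON alongside the log files.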
        with open(os.path.join(self.log_dir, 'TestConfig.json'), 'w') as f:
            f.write(json.dumps(self.args))

    def get_model_path(self):
        return getattr(self, 'model_path', None)

    def get_result_dir(self):
        return getattr(self, 'result_dir', None)


if __name__ == "__main__":
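    # Smoke test: build the test config, create the logger, and print the resolved model path.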
    args = Config(train=False).get_config()
    logger = TestLogger(args)
    logger.record_config()
    model_path = logger.get_model_path()
    print(model_path)
Example #4
            label_list.append(label.detach().cpu().numpy())
            running_loss.update(loss.item(), label.size(0))

    # Stack the per-batch outputs into full arrays and compute the epoch-level metrics.
    pred = np.concatenate(pred_list, axis=0)
    label = np.concatenate(label_list, axis=0)

    epoch_cindex = get_cindex(label, pred)

    epoch_loss = running_loss.get_average()
    running_loss.reset()

    return epoch_loss, epoch_cindex


# %%
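# Build the test-time configuration and logger, then pull dataset settings from args.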
config = Config(train=False)
args = config.get_config()
logger = TestLogger(args)
logger.info(__file__)

data_root = args.get("data_root")
DATASET = args.get("dataset")
split_type = args.get("split_type")
save_model = args.get("save_model")
fold = args.get("fold")

# %%
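# Locate the prepared CNN-CNN data for this dataset and load the configured fold's split.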

fpath = os.path.join(data_root, DATASET, 'CNN-CNN')
dp = DataPrepared(fpath)
train_index, val_index, test_index = dp.read_sets(fold, split_type=split_type)