Example 1
    if args.config_file != "":
        cfg.merge_from_file(args.config_file)
    cfg.merge_from_list(args.opts)
    # cfg.freeze()  # skipped so cfg can still be modified later

    gl._init()
    gl.set_value('cfg', cfg)

    set_seed(cfg.MISC.SEED)
    #writer = SummaryWriter()
    #gl.set_value('writer', writer)

    output_dir = cfg.MISC.OUT_DIR
    if output_dir and not os.path.exists(output_dir):
        os.makedirs(output_dir)
    logger = call_logger(osp.join(output_dir, cfg.MISC.LOGFILE))
    gl.set_value('logger', logger)
    logger.info("Running with config:\n{}".format(cfg))

    # make dataloader
    train_loader_all, valid_loader_all = make_data_loader(cfg)

    # prepare model
    model = build_model(cfg)

    torch.save(model.state_dict(),
               osp.join(output_dir, f"{cfg.MODEL.NAME}-init.pth"))

    # make loss
    criterion = make_loss(cfg)
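
The `set_seed` helper used in this and the later examples is never shown. A minimal sketch, assuming it only needs to seed Python's `random`, NumPy, and PyTorch (CPU and GPU), could be:

import random
import numpy as np
import torch

def set_seed(seed):
    # Assumed implementation: seed every RNG the training code may rely on.
    random.seed(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)
    torch.cuda.manual_seed_all(seed)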
Example 2
    parser.add_argument('--tta', default=0, type=int,
                        help='enable test-time augmentation (TTA)')

    args = parser.parse_args()

    if not osp.exists(args.datasets):
        raise ValueError(f"Dataset folder {args.datasets} does not exist")

    # logger file
    os.makedirs(args.out_dir, exist_ok=True)
    logger = call_logger(osp.join(args.out_dir, args.log_file))

    set_seed(args.seed)

    # %% model

    if 'metatwoview' in args.net:
        model = ISICModel_meta(n_class=args.n_class, arch=args.net)

    elif 'metasingleview' in args.net:
        model = ISICModel_singleview_meta(n_class=args.n_class, arch=args.net)
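    # A defensive fallback (not part of the original snippet): without it, an
    # unrecognized --net value would leave `model` undefined.
    else:
        raise ValueError(f'Unsupported --net: {args.net}')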
        
    parser.add_argument(
        '--loss_type', default='ce', type=str, help='loss_func'
    )  # ce / ce_smooth / bce / bce_smooth / focal_loss_bce / focal_loss

    parser.add_argument('--fn_csv',
                        default='out.csv',
                        type=str,
                        help='out csv file')

    args = parser.parse_args()

    if args.is_valid and not osp.exists(args.datasets):
        raise ValueError(f"Dataset folder {args.datasets} does not exist")

    # logger file
    logger = call_logger(args.log_file)

    # %% model

    model = models.ISICModel(n_class=args.n_class, arch=args.net)
    model.init()
    model.load_state_dict(torch.load(args.model_file))
    configs = model.configs

    #%% augmentation dataset and dataloader

    if args.is_tta:
        test_transform = TrainAugmentation_albu(size=configs.image_size,
                                                mean=configs.image_mean,
                                                std=configs.image_std)
    else:
Example 4
#    return str(Path(out_path)/(Path(dataset.file_path).stem + suffix + '.h5'))
#    #return f'{os.path.splitext(dataset.file_path)[0]}{suffix}.h5'



if __name__ == '__main__':
    # Load configuration
    config = load_config()

    # Load model state
    model_path = config['model_path']
    model_fd = Path(model_path).parent

    logger = call_logger(log_file=str(model_fd / 'test_log.txt'),
                         log_name='UNetPredict')

    # Create the model
    model = get_model(config)

    if 'output_path' in config:
        out_path = config['output_path']
    else:
        out_path = str(model_fd / 'h5_pred')
    os.makedirs(out_path, exist_ok=True)

    logger.info(f'Loading model from {model_path}...')
    utils.load_checkpoint(model_path, model)
    logger.info(f"Sending the model to '{config['device']}'")
    model = model.to(config['device'])
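
`call_logger` (from `tools.loggers`) is used throughout these examples with a log file path and, in some of them, a `log_name`, but its implementation is not included. A minimal sketch under that assumption, logging to the console and optionally to a file, might be:

import logging

def call_logger(log_file=None, log_name='logger'):
    # Assumed implementation: always log to the console; add a file handler
    # only when a path is given (matching the call_logger(log_file=None, ...) calls).
    logger = logging.getLogger(log_name)
    logger.setLevel(logging.INFO)
    if not logger.handlers:
        fmt = logging.Formatter('%(asctime)s %(name)s %(levelname)s: %(message)s')
        console = logging.StreamHandler()
        console.setFormatter(fmt)
        logger.addHandler(console)
        if log_file is not None:
            fh = logging.FileHandler(log_file)
            fh.setFormatter(fmt)
            logger.addHandler(fh)
    return logger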
from transforms.data_preprocessing import TrainAugmentation_bbox_albu, TestAugmentation_bbox_albu

from dataset.custom_dataset import CustomDataset_bbox, CustomDataset_bbox_centernet
from torch.utils.data import DataLoader
import torch.nn.functional as F

from torch.optim import SGD
from optim import build_optimizer
from torch.optim.lr_scheduler import MultiStepLR, CyclicLR
from torch.nn.utils import clip_grad_norm_

from tools.loggers import call_logger
from utils.utils import AvgerageMeter

log_file = 'log_locate1.txt'
logger = call_logger(log_file)


def train(loader,
          net,
          criterion,
          optimizer,
          device,
          epoch,
          scheduler=None,
          net_type='resnet50_c3_locate'):
    net.train(True)
    optimizer.zero_grad()

    if net_type == 'resnet34_c3_locate' or net_type == 'resnet50_c3_locate':
from pathlib import Path
import os
import cv2
import pydicom
import h5py
from scipy.ndimage import binary_dilation
import os.path as osp
import pandas as pd

from skimage.morphology import skeletonize_3d
from scipy import signal, ndimage
from skimage import measure
from tools.loggers import call_logger

logger = call_logger('CTraw/gen.log', 'GenData')
fd_all = 'CTraw/19_ct'
mask_h5 = 'CTraw/h5/h5_19'
h5_fd = './data/h5'

debugim_fd = './data/debug_img'
name_chs = ['V', 'A', 'PV']

n_class = 2
w_class = [1.0, 0.2]  #second term not used
w_class_in_roi = [2.0, 2.0]

df_pid = pd.read_csv('./CTraw/19_ct.csv',
                     dtype={
                         'patient': str,
                         'value': int,
Example 7
import SimpleITK as sitk
import numpy as np
from pathlib import Path
import os
import cv2
import pydicom
import h5py
from scipy.ndimage import binary_dilation
from tools.loggers import call_logger

fn1 = 'resources/CT/01/data/A_1.mha'
fn2 = 'resources/CT/01/data/PV_1.mha'
dicom_fd = './resources/CT/01/data/'
name_chs = ['V', 'A', 'PV']
out_h5py = './data/h5_rs_one_case/ct01m_c1.h5'
logger = call_logger(log_file=None, log_name='GenData')

n_class = 2
w_class = [1.0, 0.2]

w_class_in_roi = [2.0, 2.0]

itkimage1 = sitk.ReadImage(fn1)
labels1 = (sitk.GetArrayFromImage(itkimage1)[::-1, :, :] == 1).astype(
    'int')  # artery

#labels0 = (1-labels1).astype('int')

labels1_dilate = binary_dilation(labels1, structure=np.ones(
    (5, 5, 5))).astype(labels1.dtype)
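
The snippet stops right after dilating the artery label. One plausible continuation (an assumption, not shown in the source) is to turn `labels1_dilate`, `w_class`, and `w_class_in_roi` into a per-voxel loss-weight map, giving voxels inside the dilated band a higher weight:

# Hypothetical sketch: default class weight everywhere, larger weight inside
# the dilated region of interest around the vessel.
weights = np.full(labels1.shape, w_class[0], dtype=np.float32)
weights[labels1_dilate == 1] = w_class_in_roi[0]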
Example 8
    # Create main logger
    #logger = get_logger('UNet3DTrainer')
    # Load and log experiment configuration
    config = load_config()

    chkpt_sav = config['trainer']['checkpoint_dir']
    path_log = Path('../checkpoints') / chkpt_sav
    config['trainer']['checkpoint_dir'] = str(path_log)
    os.makedirs(path_log, exist_ok=True)

    # set seed
    manual_seed = config.get('manual_seed', 0)
    set_seed(manual_seed)

    if config['findlr']:
        logger = call_logger(log_file=None, log_name='UNet3DTrainer')
    else:
        logger = call_logger(log_file=str(path_log / 'log.txt'),
                             log_name='UNet3DTrainer')

    logger.info(config)

    manual_seed = config.get('manual_seed', None)
    if manual_seed is not None:
        logger.info(f'Seed the RNG for all devices with {manual_seed}')
        torch.manual_seed(manual_seed)
        # see https://pytorch.org/docs/stable/notes/randomness.html
        torch.backends.cudnn.deterministic = True
        torch.backends.cudnn.benchmark = False