Code example #1 (score: 0) — File: fbb_test.py, Project: cyhu1014/tumor-segment
# Evaluation-side setup for the bounding-box model: restore the best
# checkpoint and build train/valid loaders without segmentation labels.
train_index = np.load('train.npy')
valid_index = np.load('valid.npy')[:24]  # cap validation at 24 samples

b_size, workers = 2, 0

model = model_bbox(b_size, 4)
model.cuda()
optimizer = torch.optim.Adam(model.parameters(), lr=0.001, betas=(0.5, 0.999))
criterion = nn.MSELoss()

# Load weights saved by the training script before evaluating.
load_checkpoint('best_fbb.pth', model, optimizer)

# Both splits share every dataset argument except the index list.
_dataset_kwargs = dict(path=train_path, csv=csv, read_label=False)
train_set = tumor_dataset(out_index=train_index, **_dataset_kwargs)
valid_set = tumor_dataset(out_index=valid_index, **_dataset_kwargs)
train_loader = DataLoader(train_set, batch_size=b_size, shuffle=True,
                          num_workers=workers)
valid_loader = DataLoader(valid_set, batch_size=b_size, shuffle=False,
                          num_workers=workers)

model.eval()
Code example #2 (score: 0)
from models import UNet3d_vae
from loss import loss_3d_crossentropy, F1_Loss
import os

# Pin all work to the second GPU before any CUDA context is created.
os.environ["CUDA_VISIBLE_DEVICES"] = "1"

train_path = '../vae/brats18_data/train_2/'
type1 = ['flair', 't1', 't1ce', 't2']  # BraTS input modalities

batch_size = 1
workers = 2
classes = 5
x = y = z = 64  # cubic patch size fed to the 3-D network

# NOTE(review): both loaders iterate the same unfiltered dataset —
# presumably intentional here; confirm against the full script.
train_set = tumor_dataset(path=train_path)
valid_set = tumor_dataset(path=train_path)
train_loader = DataLoader(train_set, batch_size=batch_size,
                          shuffle=False, num_workers=workers)
valid_loader = DataLoader(valid_set, batch_size=batch_size,
                          shuffle=False, num_workers=workers)

model = UNet3d_vae(4, classes, x, y, z)
print(model)
model.cuda()
optimizer = torch.optim.Adam(model.parameters(),
Code example #3 (score: 0)
            return gt, self.list[self.out_index[index]]
        elif (self.read_label == False):
            return feat, self.list[self.out_index[index]]
        else:
            return feat, gt, self.list[self.out_index[index]]

    def __len__(self):
        """Return the total number of samples in the dataset."""
        # self.len is set elsewhere in the class (constructor not visible
        # in this chunk) — presumably the size of the index list.
        return self.len


b_size = 2
workers = 0

# Unshuffled training loader with segmentation labels disabled.
train_set = tumor_dataset(path=train_path,
                          out_index=train_index,
                          read_label=False)
train_loader = DataLoader(train_set,
                          batch_size=b_size,
                          shuffle=False,
                          num_workers=workers)
dataloader = train_loader  # alias used by the loop below

m = model_bbox(b_size, 4)
m.cuda()

# Smoke-test: push every batch through the model once.
for i, data in enumerate(dataloader):
    img = data[0]
    print(img.shape)
    m(img.cuda())
Code example #4 (score: 0) — File: yolo.py, Project: cyhu1014/tumor-segment
        return self.len


box_length = 30  # anchor-box side length in pixels (not used yet below)


def get_yololabel(csv, filename, axis):
    """Look up one slice's tumor bounding box and allocate a YOLO target grid.

    Parameters
    ----------
    csv : pandas.DataFrame indexed by (file_name, z) with columns
        min_x / max_x / min_y / max_y / x_size / y_size.
    filename : str — first index level (volume name).
    axis : int — second index level (slice position along z).

    Returns
    -------
    None — the function looks unfinished upstream: it allocates the
    8x8x4 target tensor and prints the box, but never fills or returns it.
    """
    # Hoist the repeated MultiIndex lookup — one .loc call instead of six.
    row = csv.loc[(filename, axis)]
    min_x = row['min_x']
    max_x = row['max_x']
    min_y = row['min_y']
    max_y = row['max_y']
    x_size = row['x_size']
    y_size = row['y_size']
    # BUG FIX: np.zeros(8, 8, 4) raised TypeError — the shape must be
    # passed as a single tuple, not separate positional arguments.
    new_bbox = np.zeros((8, 8, 4))

    print(min_x, min_y, max_x, max_y)
train_set = tumor_dataset(path=train_path, out_index=train_index)
valid_set = tumor_dataset(path=train_path, out_index=valid_index)
train_loader = DataLoader(train_set, batch_size=batch_size,
                          shuffle=True, num_workers=workers)
valid_loader = DataLoader(valid_set, batch_size=1,
                          shuffle=False, num_workers=workers)

# Per-slice bounding-box table, keyed by (volume name, z-slice index).
csv = pd.read_csv('tumor_analysis_2d_xy_plane_z_axis.csv')
csv.set_index(keys=['file_name', 'z'], inplace=True)

# Sanity-check the first training batch only, then stop.
for i, (img, filename) in enumerate(train_loader):
    print(img.shape, filename)
    break





Code example #5 (score: 0) — File: fbb_main.py, Project: cyhu1014/tumor-segment
def main():
    """Train the bounding-box regressor, checkpointing the best epoch.

    Relies on module-level globals: train_path, train_index, valid_index,
    csv, plus the helpers test() and save_checkpoint().
    """
    b_size = 2
    workers = 0
    n_epochs = 100

    # Targets come from the csv bounding-box table, not the masks.
    train_set = tumor_dataset(path=train_path, out_index=train_index,
                              csv=csv, read_label=False)
    valid_set = tumor_dataset(path=train_path, out_index=valid_index,
                              csv=csv, read_label=False)
    dataloader = DataLoader(train_set, batch_size=b_size, shuffle=True,
                            num_workers=workers)
    valid_loader = DataLoader(valid_set, batch_size=b_size, shuffle=False,
                              num_workers=workers)

    model = model_bbox(b_size, 4)
    model.cuda()
    optimizer = torch.optim.Adam(model.parameters(), lr=0.001,
                                 betas=(0.5, 0.999))
    criterion = nn.MSELoss()

    total_loss = []
    best_loss = np.inf
    best_epoch = 0
    for epoch in range(n_epochs):
        model.train()
        epoch_loss = 0
        for i, (img, label, _) in enumerate(dataloader):
            model.zero_grad()
            pred = model(img.cuda())
            loss = criterion(pred, label.float().cuda())
            loss.backward()
            optimizer.step()
            batch_loss = loss.detach().item()
            epoch_loss += batch_loss
            print('[%d/%d],[%d/%d],loss : %.4f' %
                  (epoch, n_epochs, i, len(dataloader), batch_loss),
                  end='\r')
        print('\n')
        epoch_loss /= len(dataloader)
        total_loss.append(epoch_loss)
        # Validation loss decides which checkpoint is "best".
        valid_loss = test(valid_loader)
        if valid_loss <= best_loss:
            best_loss = valid_loss
            best_epoch = epoch
            save_checkpoint('best_fbb.pth', model, optimizer)
            print('save best')
        # The most recent weights are always kept as well.
        save_checkpoint('final_fbb.pth', model, optimizer)
        print(
            '-----------------------Epoch : %d ,train loss : %.4f ,valid_loss %.4f -------------Best : %d , loss %.4f----------------------'
            % (epoch, epoch_loss, valid_loss, best_epoch, best_loss))