import time
import copy
import pandas as pd
import torch
from torch.autograd import Variable
from densenet import densenet169
from utils import plot_training, n_p, get_count
from train import train_model, get_metrics
from pipeline import get_study_level_data, get_dataloaders

# #### load study level dict data
study_data = get_study_level_data(study_type='XR_WRIST')

# #### Create dataloaders pipeline
data_cat = ['train', 'valid']  # data categories
dataloaders = get_dataloaders(study_data, batch_size=1)
dataset_sizes = {x: len(study_data[x]) for x in data_cat}

# #### Build model
# tai = total abnormal images, tni = total normal images
tai = {x: get_count(study_data[x], 'positive') for x in data_cat}
tni = {x: get_count(study_data[x], 'negative') for x in data_cat}
Wt1 = {x: n_p(tni[x] / (tni[x] + tai[x])) for x in data_cat}
Wt0 = {x: n_p(tai[x] / (tni[x] + tai[x])) for x in data_cat}

print('tai:', tai)
print('tni:', tni, '\n')
print('Wt0 train:', Wt0['train'])
print('Wt0 valid:', Wt0['valid'])
print('Wt1 train:', Wt1['train'])
print('Wt1 valid:', Wt1['valid'])
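# A quick numeric sanity check of the weighting scheme above (the counts 300/100 are
# hypothetical, not real MURA numbers): the weight applied to abnormal (positive)
# targets is the normal fraction, so the rarer class is up-weighted and the two
# weights always sum to 1.
tni_demo, tai_demo = 300, 100
Wt1_demo = tni_demo / (tni_demo + tai_demo)   # 0.75 -> applied to abnormal (positive) targets
Wt0_demo = tai_demo / (tni_demo + tai_demo)   # 0.25 -> applied to normal (negative) targets
assert abs(Wt1_demo + Wt0_demo - 1.0) < 1e-9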
import torch
from utils import n_p, get_count
from pipeline import get_study_level_data, get_dataloaders


class Loss(torch.nn.Module):
    """Weighted binary cross-entropy with per-phase (train/valid) class weights."""

    def __init__(self, Wt1, Wt0):
        super(Loss, self).__init__()
        self.Wt1 = Wt1
        self.Wt0 = Wt0

    def forward(self, inputs, targets, phase):
        loss = -(self.Wt1[phase] * targets * inputs.log() +
                 self.Wt0[phase] * (1 - targets) * (1 - inputs).log())
        return loss


if __name__ == '__main__':
    # #### load study level dict data
    # Select images of one body part; returns a dict with 'train' and 'valid' keys
    # whose values are DataFrames.
    study_data = get_study_level_data(study_type='XR_WRIST')

    # #### Create dataloaders pipeline
    data_cat = ['train', 'valid']  # data categories
    dataloaders = get_dataloaders(study_data, batch_size=1)
    dataset_sizes = {x: len(study_data[x]) for x in data_cat}  # number of studies per split

    # #### Build model
    # tai = total abnormal images, tni = total normal images
    tai = {x: get_count(study_data[x], 'positive') for x in data_cat}
    tni = {x: get_count(study_data[x], 'negative') for x in data_cat}
    Wt1 = {x: n_p(tni[x] / (tni[x] + tai[x])) for x in data_cat}
    Wt0 = {x: n_p(tai[x] / (tni[x] + tai[x])) for x in data_cat}

    print('tai:', tai)
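# A minimal usage sketch of the weighted Loss module defined above (illustration only,
# not part of the original scripts). The weight values and the dummy prediction/label
# are assumptions; in the real pipeline Wt1/Wt0 come from the class counts and the
# prediction from the DenseNet output.
example_Wt1 = {'train': torch.tensor(0.6), 'valid': torch.tensor(0.55)}  # hypothetical per-phase weights
example_Wt0 = {'train': torch.tensor(0.4), 'valid': torch.tensor(0.45)}
criterion = Loss(example_Wt1, example_Wt0)
prob = torch.tensor([[0.8]])    # stand-in for a sigmoid probability from the model
label = torch.tensor([[1.0]])   # stand-in for a ground-truth label (1 = abnormal)
print('example weighted BCE loss:', criterion(prob, label, 'train').item())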
import time
import copy
import pandas as pd
import torch
from torch.autograd import Variable
from mvdensenet import densenet169
from utils import plot_training, n_p, get_count
from train import train_model, get_metrics
from pipeline import get_study_level_data, get_dataloaders

# #### load study level dict data
study_data = get_study_level_data(study_type='XR_ELBOW')

# #### Create dataloaders pipeline
data_cat = ['train', 'valid']  # data categories
dataloaders = get_dataloaders(study_data, batch_size=1)
dataset_sizes = {x: len(study_data[x]) for x in data_cat}

# #### Build model
# tai = total abnormal images, tni = total normal images
tai = {x: get_count(study_data[x], 'positive') for x in data_cat}
tni = {x: get_count(study_data[x], 'negative') for x in data_cat}
Wt1 = {x: n_p(tni[x] / (tni[x] + tai[x])) for x in data_cat}
Wt0 = {x: n_p(tai[x] / (tni[x] + tai[x])) for x in data_cat}

print('tai:', tai)
print('tni:', tni, '\n')
print('Wt0 train:', Wt0['train'])
print('Wt0 valid:', Wt0['valid'])
print('Wt1 train:', Wt1['train'])
print('Wt1 valid:', Wt1['valid'])
import sys
import copy
import pandas as pd
import torch
from torch.autograd import Variable
from densenet import densenet169
from utils import plot_training, n_p, get_count
from train import train_model, get_metrics
from pipeline import get_study_level_data, get_dataloaders

# #### load study level dict data
assert len(sys.argv) == 2  # expects the study type as the single command-line argument
study_type = sys.argv[1]
study_data = get_study_level_data(study_type=study_type)

# #### Create dataloaders pipeline
data_cat = ['train', 'valid']  # data categories
dataloaders = get_dataloaders(study_data, batch_size=1)
dataset_sizes = {x: len(study_data[x]) for x in data_cat}

# #### Build model
# tai = total abnormal images, tni = total normal images
tai = {x: get_count(study_data[x], 'positive') for x in data_cat}
tni = {x: get_count(study_data[x], 'negative') for x in data_cat}
Wt1 = {x: n_p(tni[x] / (tni[x] + tai[x])) for x in data_cat}
Wt0 = {x: n_p(tai[x] / (tni[x] + tai[x])) for x in data_cat}

print('tai:', tai)
print('tni:', tni, '\n')
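# Hypothetical invocation of the script above (the filename `main.py` is an assumption;
# any of the MURA study types, e.g. XR_WRIST, XR_ELBOW, XR_SHOULDER, should work as the
# single argument):
#
#     python main.py XR_SHOULDER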