def Transform(self, dc: DataContainer, store_folder=None, store_key=None):
    """Apply the fitted linear normalization to a container.

    Works on a deep copy: each feature value has self._interception
    subtracted and is then divided by self._slop; the input container is
    left untouched. An empty container is returned as-is. When both
    store_folder and store_key are provided, the normalized container is
    also written to disk via SaveNormalDataContainer.
    """
    if dc.IsEmpty():
        return dc

    normalized = deepcopy(dc)
    data = normalized.array
    data -= self._interception
    data /= self._slop
    # Scrub non-finite values (e.g. from a zero scale factor) so the
    # resulting matrix stays numeric.
    data = np.nan_to_num(data)
    normalized.array = data
    normalized.UpdateFrame()

    if store_folder is not None and store_key is not None:
        assert(len(store_key) > 0)
        self.SaveNormalDataContainer(normalized, store_folder, store_key)
    return normalized
def __init__(self): super(AalenAdditive, self).__init__(AalenAdditiveFitter(), self.__class__.__name__) # # class Weibull(BaseFitter): # def __init__(self): # super(Weibull, self).__init__(WeibullAFTFitter(), self.__class__.__name__) if __name__ == '__main__': import numpy as np model = CoxPH() print(model.name) # model = AalenAdditive() # print(model.name) train_dc = DataContainer() train_dc.Load(r'..\..\Demo\train.csv', event_name='status', duration_name='time') model.Fit(train_dc) result = model.Summary() print(result) # model.Save(r'..\..\Demo') # # model_new = AalenAdditive() # model_new.Load(r'..\..\Demo') # model_new.Summary()
        # Label each row with a fold id 1..k in round-robin order, then
        # truncate to the actual number of rows.
        assignments = np.array(
            (dc.df.shape[0] // self.k + 1) * list(range(1, self.k + 1)))
        assignments = assignments[:dc.df.shape[0]]
        for i in range(1, self.k + 1):
            # Boolean mask selecting the rows of fold i (the validation fold).
            ix = assignments == i
            train_dc = deepcopy(dc)
            # NOTE(review): `df` is not defined anywhere in this fragment —
            # this looks like it should be `dc.df.loc[~ix]` (and `dc.df.loc[ix]`
            # below); confirm against the full method before relying on it.
            train_dc.df = df.loc[~ix]
            train_dc.UpdateData()
            val_dc = deepcopy(dc)
            val_dc.df = df.loc[ix]
            val_dc.UpdateData()
            # Generator: one (train, validation) container pair per fold.
            yield train_dc, val_dc

if __name__ == '__main__':
    from SA.Fitter import MyCox

    # Demo: load a survival dataset and generate cross-validation splits.
    dc = DataContainer()
    dc.Load(r'C:\Users\yangs\Desktop\Radiomics_pvp_hcc_os_top20_train.csv',
            event_name='status', duration_name='time')
    print(dc)
    fitter = MyCox()
    cv = CrossValidation()
    cv.Generate(dc)
def Transform(self, dc: DataContainer, store_folder=None, store_key=None):
    """Pass a container through the fitted feature selection.

    Validates that dc already holds exactly the selected features
    (self.sub_features), optionally saves it as a CSV, and returns it
    unchanged.

    Parameters:
        dc: container whose feature names must equal self.sub_features.
        store_folder: optional directory to save the container into.
        store_key: optional key used to build the saved file name; the
            container is saved only when both store_folder and store_key
            are given.

    Raises:
        ValueError: if dc's feature names differ from the selected ones.
    """
    # `assert` is stripped under `python -O`, so validate explicitly
    # instead of relying on it for input checking.
    if dc.GetFeatureName() != self.sub_features:
        raise ValueError('DataContainer feature names do not match the selected sub-features.')
    if store_folder is not None and store_key is not None:
        dc.Save(os.path.join(store_folder, '{}_reduced_features.csv'.format(store_key)))
    return dc
def SaveNormalDataContainer(self, dc: DataContainer, store_folder, store_key):
    """Save the normalized container as a CSV inside store_folder.

    The file name combines this normalizer's name and the given store key.
    """
    file_name = '{}_normalized_{}_feature.csv'.format(self._name, store_key)
    dc.Save(os.path.join(store_folder, file_name))
    "and divided by it. Then the feature vector was mapped to an unit vector. "
# NOTE(review): the string above is the tail of a description literal whose
# beginning lies before this fragment.
NormalizerMinMax = Normalizer('MinMax', unit_description, MinMaxNormFunc)


def ZNormalizeFunc(array):
    # Per-column (std, mean); presumably consumed by the Normalizer as its
    # (scale, offset) pair — TODO confirm against Normalizer.Fit.
    return np.std(array, axis=0), np.mean(array, axis=0)


z_description = "We applied the normalization on the feature matrix. For each feature vector, we calculated the mean " \
                "value and the standard deviation. Each feature vector was subtracted by the mean value and was divided " \
                "by the standard deviation. After normalization process, each vector has zero center and unit standard " \
                "deviation. "
NormalizerZscore = Normalizer('Zscore', z_description, ZNormalizeFunc)


def MeanNormFunc(array):
    # Per-column (range, mean) where range = max - min.
    return np.max(array, axis=0) - np.min(array, axis=0), np.mean(array, axis=0)


# NOTE(review): MeanNormFunc divides by the range (max - min), but this
# description says "divided by the length of it" — confirm the wording.
z_0_description = "We applied the normalization on the feature matrix. Each feature vector was subtracted by the mean " \
                  "value of the vector and was divided by the length of it. "
NormalizerMean = Normalizer('Mean', z_0_description, MeanNormFunc)


if __name__ == '__main__':
    # Demo: load a dataset and run the pass-through normalizer on it.
    file_path = r'C:\Users\yangs\Desktop\Radiomics_pvp_hcc_os_top20_train.csv'
    dc = DataContainer()
    dc.Load(file_path, event_name='status', duration_name='time')
    normal = NormalizerNone
    new_dc = normal.Fit(dc)
    print(new_dc)
    new_dc = normal.Transform(dc)
    print(new_dc)