def Save(self, p_sFileName):
    """Serialize the full training-metrics history to *p_sFileName*.

    Writes a TALOS008-format dictionary containing the per-epoch metric
    series, the best-so-far values, and the epoch indices at which each
    best was reached. Any existing file is overwritten.
    """
    # Per-epoch metric series collected during training.
    oData = {
        "FileFormat": "TALOS008",
        "IsBinary": self.IsBinary,
        "EpochNumber": self.EpochNumber,
        "FileNames": self.FileNames,
        "Accuracy": self.Accuracy,
        "Recall": self.Recall,
        "Precision": self.Precision,
        "F1Score": self.F1Score,
        "CrossF1Score": self.CrossF1Score,
        "ObjectiveF1Score": self.ObjectiveF1Score,
        "PositiveF1Score": self.PositiveF1Score,
        # Best values observed so far, plus bookkeeping of when they occurred.
        "BestEpochs": self.BestEpochs,
        "BestPoints": self.BestPoints,
        "BestRecall": self.BestRecall,
        "BestPrecision": self.BestPrecision,
        "BestF1Score": self.BestF1Score,
        "BestCrossF1Score": self.BestCrossF1Score,
        "BestObjectiveF1Score": self.BestObjectiveF1Score,
        "BestPositiveF1Score": self.BestPositiveF1Score,
        "DiscardedEpochs": self.DiscardedEpochs,
        "BestRecallEpochs": self.BestRecallEpochs,
        "BestPrecisionEpochs": self.BestPrecisionEpochs,
        "BestF1ScoreEpochs": self.BestF1ScoreEpochs,
        "BestCrossF1ScoreEpochs": self.BestCrossF1ScoreEpochs,
        "BestObjectiveF1ScoreEpochs": self.BestObjectiveF1ScoreEpochs,
        "BestPositiveScoreEpochs": self.BestPositiveScoreEpochs
    }
    Storage.SerializeObjectToFile(p_sFileName, oData, p_bIsOverwritting=True)
def Save(self, p_sFileName):
    """Serialize the evaluation results of this object to *p_sFileName*.

    Persists a TALOS008-format dictionary holding the sample-level
    predictions, the aggregate metrics, the per-class metrics, and the
    confusion matrix.
    """
    # NOTE(review): the original reserved (unused) slots for "Top1Error"
    # and "Top5Error"; they were never written and are omitted here.
    oData = {
        "FileFormat": "TALOS008",
        "Kind": self.Kind,
        # Sample-level results.
        "IDs": self.IDs,
        "Actual": self.ActualClasses,
        "Predicted": self.PredictedClasses,
        "PredictedProbsTop": self.PredictedProbsTop,
        # Aggregate metrics over all classes.
        "TopKappa": self.TopKappa,
        "Accuracy": self.Accuracy,
        "TopKAccuracy": self.TopKAccuracy,
        "AveragePrecision": self.AveragePrecision,
        "AverageRecall": self.AverageRecall,
        "AverageF1Score": self.AverageF1Score,
        "AverageSupport": self.AverageSupport,
        # Per-class metric vectors.
        "ClassPrecision": self.Precision,
        "ClassRecall": self.Recall,
        "ClassF1Score": self.F1Score,
        "ClassSupport": self.Support,
        "ConfusionMatrix": self.ConfusionMatrix
    }
    Storage.SerializeObjectToFile(p_sFileName, oData)
def __saveClassesToDisk(self):
    """Persist the dataset's class catalog to the dataset folder's classes file.

    Stores (TALOS10 format) the class codes/descriptions, the per-subset
    folder lists and available-sample counts, the subset activity flags,
    the Caltech/ImageNet cross-reference data, and the paging settings.
    Overwrites any existing classes file.
    """
    oData = {
        "FormatVersion": "TALOS10",
        # Core class catalog.
        "ClassCodes": self.ClassCodes,
        "ClassDescr": self.ClassDescr,
        # Per-subset folder layout and sample availability.
        "ClassFoldersTrain": self.Train.ClassFolders,
        "ClassFoldersVal": self.Validation.ClassFolders,
        "ClassFoldersTest": self.Testing.ClassFolders,
        "ClassSamplesAvailableTrain": self.Train.ClassSamplesAvailable,
        "ClassSamplesAvailableVal": self.Validation.ClassSamplesAvailable,
        "ClassSamplesAvailableTest": self.Testing.ClassSamplesAvailable,
        # Which subsets are active for this dataset.
        "HasTrain": self.Train.IsActive,
        "HasVal": self.Validation.IsActive,
        "HasTest": self.Testing.IsActive,
        # Cross-reference metadata for external datasets.
        "CaltechClassDescr": self.CaltechClassDescr,
        "ImageNetClassID": self.ImageNetClassID,
        "ImageNetClassCodes": self.ImageNetClassCodes,
        "ImageNetClassDescr": self.ImageNetClassDescr,
        # Sampling / paging configuration.
        "TrainSamplesPerClass": self.TrainSamplesPerClass,
        "PageSize": self.PageSize
    }
    Storage.SerializeObjectToFile(self.DataSetFolder.ClassesFile, oData, p_bIsOverwritting=True)
def __writeCounter(self, p_nNumber):
    """Update the run counter and persist the counters dict to disk.

    Sets ``self.Counter["RunCounter"]`` to *p_nNumber* and overwrites
    the counters file with the updated dictionary.
    """
    self.Counter["RunCounter"] = p_nNumber
    # Pass the overwrite flag by keyword for consistency with the other
    # SerializeObjectToFile call sites in this file (which use
    # p_bIsOverwritting=True) — a bare positional True is opaque here.
    Storage.SerializeObjectToFile(self.CountersFileName, self.Counter, p_bIsOverwritting=True)
def Save(self, p_nImageDimensions):
    """Render and cache the dataset's sample pages for all three subsets.

    For each of the Train, Validation and Testing subsets, iterates the
    subset's pages and writes one serialized file per page containing the
    sample IDs, the loaded image tensor, and the targets. Pages whose
    file already exists on disk are skipped.

    p_nImageDimensions: indexable of at least (height, width) used to
        size the sample tensor and to resize the loaded images.
    """
    # The original code repeated the identical page loop three times
    # (and reused the name oTrain for the Testing iterator); the loop now
    # lives in a single helper invoked once per subset.
    self.__saveSubsetPages(self.Train, p_nImageDimensions)
    self.__saveSubsetPages(self.Validation, p_nImageDimensions)
    self.__saveSubsetPages(self.Testing, p_nImageDimensions)

def __saveSubsetPages(self, p_oSubset, p_nImageDimensions):
    """Write each page of *p_oSubset* as a serialized sample-tensor file.

    Skips pages whose file already exists. The tensor is NHWC uint8 with
    3 channels so it is directly usable by Tensorflow-style consumers.
    """
    oIterator = p_oSubset.PageIterator(self.PageSize)
    for nPageIndex, oPage in enumerate(oIterator):
        # oPage layout: [sPageFileName, nIDs, sSampleFiles, nTargets]
        sPageFileName = oPage[0]
        nIDs = oPage[1]
        sSampleFiles = oPage[2]
        nTargets = oPage[3]

        nSamples = np.zeros((len(sSampleFiles), p_nImageDimensions[0],
                             p_nImageDimensions[1], 3), dtype=np.uint8)
        print("%d/%d samples:" % (nPageIndex + 1, oIterator.EstimatedPages), nSamples.shape)

        if not Storage.IsExistingFile(sPageFileName):
            for nIndex, sFileName in enumerate(sSampleFiles):
                img = timg.LoadImageAndMakeAugmentedSquare(
                    sFileName, p_tSize=p_nImageDimensions)
                # Place the RGB channels in the 4th dimension of the
                # tensor so it is Tensorflow-ready (NHWC layout).
                nSamples[nIndex, :, :, :] = img[0][:, :, :]
            oData = {"IDs": nIDs, "Samples": nSamples, "Targets": nTargets}
            Storage.SerializeObjectToFile(
                sPageFileName, oData,
                p_nExtraLabel="%d/%d" % (nPageIndex + 1, oIterator.EstimatedPages))
        else:
            print(" {%d/%d} Exists %s" %
                  (nPageIndex + 1, oIterator.EstimatedPages, sPageFileName))