def CompressModels(self, p_nEpochNumbers):
    # Archives the saved-model folder of each given epoch into a zip file and
    # deletes the original folder once the archive is confirmed on disk.
    sUID = self.ParentExperiment.MinuteUID.UID
    for nEpoch in p_nEpochNumbers:
        sFolderToCompress = self.ModelFolderTemplate % nEpoch
        sZipFileName = "model_%s_epoch_%.3d.zip" % (sUID, nEpoch)
        bOkToDelete, sArchiveName = Storage.CompressFolder(sFolderToCompress, sZipFileName)
        # Only remove the saved model after double-checking the archive exists.
        if bOkToDelete and Storage.IsExistingFile(sArchiveName):
            self.DeleteSavedModel(nEpoch)
def __loadClassesFromDisk(self):
    """
    Loads the dataset class metadata from the serialized classes file.

    Populates the class codes/descriptions, the per-subset folders, sample
    availability and activity flags, plus auxiliary Caltech/ImageNet lookups.

    Returns: True when the metadata file was found and loaded.
    Raises:  Exception when no classes file exists under the dataset folder.
    """
    bResult = Storage.IsExistingFile(self.DataSetFolder.ClassesFile)
    if not bResult:
        raise Exception("No dataset found under %s" % self.DataSetFolder.BaseFolder)

    oData = Storage.DeserializeObjectFromFile(self.DataSetFolder.ClassesFile)

    # Class identities.
    self.ClassCodes = oData["ClassCodes"]
    self.ClassDescr = oData["ClassDescr"]
    self.ClassCount = len(self.ClassCodes)
    assert len(self.ClassDescr) == self.ClassCount, \
        "incorrect count of class descriptions %d" % len(self.ClassDescr)

    # Per-subset folders and available sample counts.
    self.Train.ClassFolders = oData["ClassFoldersTrain"]
    self.Validation.ClassFolders = oData["ClassFoldersVal"]
    self.Testing.ClassFolders = oData["ClassFoldersTest"]
    self.Train.ClassSamplesAvailable = oData["ClassSamplesAvailableTrain"]
    self.Validation.ClassSamplesAvailable = oData["ClassSamplesAvailableVal"]
    self.Testing.ClassSamplesAvailable = oData["ClassSamplesAvailableTest"]

    # Which subsets are present in this dataset.
    self.Train.IsActive = oData["HasTrain"]
    self.Validation.IsActive = oData["HasVal"]
    self.Testing.IsActive = oData["HasTest"]

    # Auxiliary class lookups and paging configuration.
    self.CaltechClassDescr = oData["CaltechClassDescr"]
    self.ImageNetClassID = oData["ImageNetClassID"]
    self.ImageNetClassCodes = oData["ImageNetClassCodes"]
    self.ImageNetClassDescr = oData["ImageNetClassDescr"]
    self.TrainSamplesPerClass = oData["TrainSamplesPerClass"]
    self.PageSize = oData["PageSize"]

    self.Log.Print(" |__ Classes: %d" % self.ClassCount)
    return bResult
def IsExistingModelResults(self):
    # True when the results file for the current model epoch already exists.
    sResultsFileName = self.ExperimentSub.ModelResultsFileNameTemplate \
        % self.CurrentModelEpochNumber
    return Storage.IsExistingFile(sResultsFileName)
def Save(self, p_nImageDimensions):
    """
    Serializes all dataset samples to paged files on disk.

    For each of the training, validation and testing subsets, iterates the
    subset's pages, loads every sample image as an augmented square of the
    given dimensions, and serializes one page object (IDs, sample tensor,
    targets) per page file. Pages whose file already exists are skipped.

    Parameters
      p_nImageDimensions: target (height, width) for the sample images.
    """
    # The original implementation repeated the identical page loop three
    # times; the loop now lives in a single private helper.
    self._savePages(self.Train, p_nImageDimensions)
    self._savePages(self.Validation, p_nImageDimensions)
    self._savePages(self.Testing, p_nImageDimensions)

def _savePages(self, p_oSubset, p_nImageDimensions):
    # Writes every page of a single dataset subset (train/val/test) to disk.
    oIterator = p_oSubset.PageIterator(self.PageSize)
    for nPageIndex, oPage in enumerate(oIterator):
        # Page layout: [sPageFileName, nIDs, sSampleFiles, nTargets]
        sPageFileName = oPage[0]
        nIDs = oPage[1]
        sSampleFiles = oPage[2]
        nTargets = oPage[3]

        # Pre-allocate a TensorFlow-ready tensor: samples x H x W x RGB.
        nSamples = np.zeros((len(sSampleFiles), p_nImageDimensions[0],
                             p_nImageDimensions[1], 3), dtype=np.uint8)
        print("%d/%d samples:" % (nPageIndex + 1, oIterator.EstimatedPages),
              nSamples.shape)

        if not Storage.IsExistingFile(sPageFileName):
            for nIndex, sFileName in enumerate(sSampleFiles):
                img = timg.LoadImageAndMakeAugmentedSquare(
                    sFileName, p_tSize=p_nImageDimensions)
                # Place the RGB channels in the 4th dimension of the tensor
                # in order to be TensorFlow ready.
                nSamples[nIndex, :, :, :] = img[0][:, :, :]
            oData = {"IDs": nIDs, "Samples": nSamples, "Targets": nTargets}
            Storage.SerializeObjectToFile(
                sPageFileName, oData,
                p_nExtraLabel="%d/%d" % (nPageIndex + 1, oIterator.EstimatedPages))
        else:
            print(" {%d/%d} Exists %s" %
                  (nPageIndex + 1, oIterator.EstimatedPages, sPageFileName))