def parallelize(self, nbJobs=-1, tempFolder=None):
    """
    Switch this coordinator from serial to parallel execution.

    Parameters
    ----------
    nbJobs : int {>0, -1} (default : -1)
        The parallelization factor. If "-1", the maximum factor is used
    tempFolder : filepath (default : None)
        The temporary folder used for memmap. If None, some default folder
        will be used (see the :lib:`joblib` library)
    """
    # Replace the current executor with a parallel one; subsequent calls
    # to :meth:`process` will fan the work out over ``nbJobs`` workers.
    executor = ParallelExecutor(nbJobs, self.getLogger(),
                                self.verbosity, tempFolder)
    self._exec = executor
def __init__(self, logger=None, verbosity=None, dtype=np.float32,
             labeltype=np.uint8):
    """
    Initialize the coordinator with a serial executor.

    Parameters
    ----------
    logger : :class:`Logger` (default : None)
        The logger forwarded to :class:`Progressable`
    verbosity : int (default : None)
        The verbosity level
    dtype : numpy dtype (default : np.float32)
        The element type of the produced feature matrix
    labeltype : numpy dtype (default : np.uint8)
        The element type of the produced label vector
    """
    Progressable.__init__(self, logger, verbosity)
    # Serial by default; see ``parallelize`` to switch executors.
    self._exec = SerialExecutor(logger, verbosity)
    self._dtype = dtype
    self._labeltype = labeltype
    # Fix: ``np.float`` was merely a deprecated alias of the builtin
    # ``float`` and was removed in NumPy 1.24, so ``dtype is np.float``
    # now raises AttributeError. Comparing with ``float`` preserves the
    # original identity-test semantics on every NumPy version.
    if dtype is float:
        # Full-precision features need no clipping
        self._rescaler = Rescaler()
    else:
        # Narrow dtypes get clipped into their representable range
        self._rescaler = MaxoutRescaler(dtype)
class Coordinator(Progressable):
    """
    ===========
    Coordinator
    ===========

    :class:`Coordinator` are responsible for applying a feature extraction
    mechanism to all the data contained in an imageBuffer and keeping the
    consistency if it creates several feature vectors for one image.

    The extraction mechanism is class dependent. It is the class
    responsibility to document its policy.
    """

    # NOTE(review): Python-2-style abstract marker. Under Python 3 this
    # assignment is inert (``class Coordinator(Progressable,
    # metaclass=ABCMeta)`` would be needed) -- kept as-is to avoid changing
    # runtime behavior on the interpreter this project targets.
    __metaclass__ = ABCMeta

    def __init__(self, logger=None, verbosity=None, dtype=np.float32,
                 labeltype=np.uint8):
        """
        Parameters
        ----------
        logger : :class:`Logger` (default : None)
            The logger forwarded to :class:`Progressable`
        verbosity : int (default : None)
            The verbosity level
        dtype : numpy dtype (default : np.float32)
            The element type of the produced feature matrix
        labeltype : numpy dtype (default : np.uint8)
            The element type of the produced label vector
        """
        Progressable.__init__(self, logger, verbosity)
        # Serial by default; see :meth:`parallelize` to switch executors.
        self._exec = SerialExecutor(logger, verbosity)
        self._dtype = dtype
        self._labeltype = labeltype
        # Fix: ``np.float`` was merely a deprecated alias of the builtin
        # ``float`` and was removed in NumPy 1.24, so ``dtype is np.float``
        # now raises AttributeError. Comparing with ``float`` preserves the
        # original identity-test semantics on every NumPy version.
        if dtype is float:
            self._rescaler = Rescaler()
        else:
            self._rescaler = MaxoutRescaler(dtype)

    def parallelize(self, nbJobs=-1, tempFolder=None):
        """
        Parallelize the coordinator

        Parameters
        ----------
        nbJobs : int {>0, -1} (default : -1)
            The parallelization factor. If "-1", the maximum factor is used
        tempFolder : filepath (default : None)
            The temporary folder used for memmap. If None, some default
            folder will be used (see the :lib:`joblib` library)
        """
        self._exec = ParallelExecutor(nbJobs, self.getLogger(),
                                      self.verbosity, tempFolder)

    def process(self, imageBuffer, learningPhase=True):
        """
        Extracts the feature vectors for the images contained in the
        :class:`ImageBuffer`

        Parameters
        ----------
        imageBuffer : :class:`ImageBuffer`
            The data to process
        learningPhase : bool (default : True)
            Specifies whether it is the learning phase. For some
            :class:`Coordinator`, this is not important but it might
            be for the stateful ones

        Return
        ------
        X : a numpy 2D array
            the N x M feature matrix. Each of the N rows correspond to an
            object and each of the M columns correspond to a variable
        y : an iterable of int
            the N labels corresponding to the N objects of X

        Note
        ----
        The method might provide several feature vectors per original
        image. It ensures the consistency with the labels and is explicit
        about the mapping.

        Implementation
        --------------
        The method :meth:`process` only "schedules" the work. The
        implementation of what is to be done is the responsibility of the
        abstract method :meth:`_onProcess`; it is that method which should
        be overloaded.
        """
        self._nbColors = imageBuffer.nbBands()
        self.logMsg("Allocating the memory...", 35)
        nbFeatures = self.nbFeaturesPerObject(self._nbColors)
        nbObjs = self.nbObjects(imageBuffer)
        X = self._exec.createArray((nbObjs, nbFeatures), self._dtype)
        # Fix: ``(nbObjs)`` is just a parenthesized int, not a tuple; make
        # the intended 1D shape explicit (equivalent if createArray
        # forwards the shape to numpy, which accepts both forms).
        y = self._exec.createArray((nbObjs,), self._labeltype)
        self.logMsg("X shape : " + str(X.shape), 35)
        self.logMsg("X dtype : " + str(X.dtype), 35)
        self.logSize("X total size : ", (X.size * X.itemsize), 35)
        self._exec.executeWithStart("Extracting features", self._onProcess,
                                    imageBuffer,
                                    learningPhase=learningPhase,
                                    XResult=X, yResult=y)
        return X, y

    @abstractmethod
    def _onProcess(self, imageBuffer, startIndex, learningPhase,
                   XResult, yResult):
        """
        Extracts the feature vectors for the images contained in the
        :class:`ImageBuffer` and stores them in place.

        Abstract method to overload.

        Parameters
        ----------
        imageBuffer : :class:`ImageBuffer`
            The data to process
        startIndex : int
            The row of ``XResult``/``yResult`` at which this worker must
            start writing
        learningPhase : bool
            Specifies whether it is the learning phase. For some
            :class:`Coordinator`, this is not important but it might
            be for the stateful ones
        XResult : a numpy 2D array
            The preallocated N x M feature matrix to fill in place
        yResult : an iterable of int
            The preallocated N-label vector to fill in place

        Note
        ----
        The method might provide several feature vectors per original
        image. It ensures the consistency with the labels and is explicit
        about the mapping.
        """
        pass

    def __call__(self, imageBuffer, learningPhase):
        """Delegate to :meth:`process`"""
        return self.process(imageBuffer, learningPhase)

    def clean(self, *args):
        """Release the resources (e.g. memmaps) held by the executor."""
        self.setTask(1, "Cleaning up")
        for resource in args:
            self._exec.clean(resource)
        self.endTask()

    @abstractmethod
    def nbFeaturesPerObject(self, nbColors=1):
        """
        Return the number of features that this :class:`Coordinator`
        will produce per object
        """
        pass

    def nbObjects(self, imageBuffer):
        """
        Return the number of objects that this :class:`Coordinator`
        will produce
        """
        return len(imageBuffer)

    def getLogger(self):
        """
        Return
        ------
        logger : :class:`Logger`
            The internal logger (might be None)
        """
        return self._logger
class Coordinator(Progressable):
    """
    ===========
    Coordinator
    ===========

    :class:`Coordinator` are responsible for applying a feature extraction
    mechanism to all the data contained in an imageBuffer and keeping the
    consistency if it creates several feature vectors for one image.

    The extraction mechanism is class dependent. It is the class
    responsibility to document its policy.

    NOTE(review): this class definition is a byte-for-byte duplicate of the
    one earlier in the file and silently shadows it at import time --
    confirm which copy is intended and delete the other.
    """

    # NOTE(review): Python-2-style abstract marker. Under Python 3 this
    # assignment is inert (``class Coordinator(Progressable,
    # metaclass=ABCMeta)`` would be needed) -- kept as-is to avoid changing
    # runtime behavior on the interpreter this project targets.
    __metaclass__ = ABCMeta

    def __init__(self, logger=None, verbosity=None, dtype=np.float32,
                 labeltype=np.uint8):
        """
        Parameters
        ----------
        logger : :class:`Logger` (default : None)
            The logger forwarded to :class:`Progressable`
        verbosity : int (default : None)
            The verbosity level
        dtype : numpy dtype (default : np.float32)
            The element type of the produced feature matrix
        labeltype : numpy dtype (default : np.uint8)
            The element type of the produced label vector
        """
        Progressable.__init__(self, logger, verbosity)
        # Serial by default; see :meth:`parallelize` to switch executors.
        self._exec = SerialExecutor(logger, verbosity)
        self._dtype = dtype
        self._labeltype = labeltype
        # Fix: ``np.float`` was merely a deprecated alias of the builtin
        # ``float`` and was removed in NumPy 1.24, so ``dtype is np.float``
        # now raises AttributeError. Comparing with ``float`` preserves the
        # original identity-test semantics on every NumPy version.
        if dtype is float:
            self._rescaler = Rescaler()
        else:
            self._rescaler = MaxoutRescaler(dtype)

    def parallelize(self, nbJobs=-1, tempFolder=None):
        """
        Parallelize the coordinator

        Parameters
        ----------
        nbJobs : int {>0, -1} (default : -1)
            The parallelization factor. If "-1", the maximum factor is used
        tempFolder : filepath (default : None)
            The temporary folder used for memmap. If None, some default
            folder will be used (see the :lib:`joblib` library)
        """
        self._exec = ParallelExecutor(nbJobs, self.getLogger(),
                                      self.verbosity, tempFolder)

    def process(self, imageBuffer, learningPhase=True):
        """
        Extracts the feature vectors for the images contained in the
        :class:`ImageBuffer`

        Parameters
        ----------
        imageBuffer : :class:`ImageBuffer`
            The data to process
        learningPhase : bool (default : True)
            Specifies whether it is the learning phase. For some
            :class:`Coordinator`, this is not important but it might
            be for the stateful ones

        Return
        ------
        X : a numpy 2D array
            the N x M feature matrix. Each of the N rows correspond to an
            object and each of the M columns correspond to a variable
        y : an iterable of int
            the N labels corresponding to the N objects of X

        Note
        ----
        The method might provide several feature vectors per original
        image. It ensures the consistency with the labels and is explicit
        about the mapping.

        Implementation
        --------------
        The method :meth:`process` only "schedules" the work. The
        implementation of what is to be done is the responsibility of the
        abstract method :meth:`_onProcess`; it is that method which should
        be overloaded.
        """
        self._nbColors = imageBuffer.nbBands()
        self.logMsg("Allocating the memory...", 35)
        nbFeatures = self.nbFeaturesPerObject(self._nbColors)
        nbObjs = self.nbObjects(imageBuffer)
        X = self._exec.createArray((nbObjs, nbFeatures), self._dtype)
        # Fix: ``(nbObjs)`` is just a parenthesized int, not a tuple; make
        # the intended 1D shape explicit (equivalent if createArray
        # forwards the shape to numpy, which accepts both forms).
        y = self._exec.createArray((nbObjs,), self._labeltype)
        self.logMsg("X shape : " + str(X.shape), 35)
        self.logMsg("X dtype : " + str(X.dtype), 35)
        self.logSize("X total size : ", (X.size * X.itemsize), 35)
        self._exec.executeWithStart("Extracting features", self._onProcess,
                                    imageBuffer,
                                    learningPhase=learningPhase,
                                    XResult=X, yResult=y)
        return X, y

    @abstractmethod
    def _onProcess(self, imageBuffer, startIndex, learningPhase,
                   XResult, yResult):
        """
        Extracts the feature vectors for the images contained in the
        :class:`ImageBuffer` and stores them in place.

        Abstract method to overload.

        Parameters
        ----------
        imageBuffer : :class:`ImageBuffer`
            The data to process
        startIndex : int
            The row of ``XResult``/``yResult`` at which this worker must
            start writing
        learningPhase : bool
            Specifies whether it is the learning phase. For some
            :class:`Coordinator`, this is not important but it might
            be for the stateful ones
        XResult : a numpy 2D array
            The preallocated N x M feature matrix to fill in place
        yResult : an iterable of int
            The preallocated N-label vector to fill in place

        Note
        ----
        The method might provide several feature vectors per original
        image. It ensures the consistency with the labels and is explicit
        about the mapping.
        """
        pass

    def __call__(self, imageBuffer, learningPhase):
        """Delegate to :meth:`process`"""
        return self.process(imageBuffer, learningPhase)

    def clean(self, *args):
        """Release the resources (e.g. memmaps) held by the executor."""
        self.setTask(1, "Cleaning up")
        for resource in args:
            self._exec.clean(resource)
        self.endTask()

    @abstractmethod
    def nbFeaturesPerObject(self, nbColors=1):
        """
        Return the number of features that this :class:`Coordinator`
        will produce per object
        """
        pass

    def nbObjects(self, imageBuffer):
        """
        Return the number of objects that this :class:`Coordinator`
        will produce
        """
        return len(imageBuffer)

    def getLogger(self):
        """
        Return
        ------
        logger : :class:`Logger`
            The internal logger (might be None)
        """
        return self._logger