def resync(self): # Quicker version than the general purpose implementation
		"""Resync every sub-source, merge their results and report whether
		the total number of parameters changed.

		Returns a (redo, disable, sizeChange) tuple like ParameterSource.resync.
		"""
		merged = ParameterSource.resync(self)
		for sub in self._psourceList:
			merged = combineSyncResult(merged, sub.resync())
		# Recompute the parameter count; a change forces a size-change flag
		prevMaxParameters = self._maxParameters
		self._maxParameters = self.initMaxParameters()
		sizeChanged = (prevMaxParameters != self._maxParameters)
		return (merged[0], merged[1], sizeChanged)
	def resync(self):
		"""Resynchronize the parameter source with the current dataset content.

		Loads the cached dataset blocks, queries the data provider for the
		current blocks, and lets the data splitter map old partitions onto the
		new dataset. On changes, the splitting/cache files are rotated
		(optionally keeping timestamped backups) and the partition count is
		refreshed.

		Returns a (redo, disable, sizeChange) tuple like ParameterSource.resync.
		"""
		(result_redo, result_disable, result_sizeChange) = ParameterSource.resync(self)
		if self.resyncEnabled() and self._dataProvider:
			# Get old and new dataset information
			old = DataProvider.loadFromFile(self.getDataPath('cache.dat')).getBlocks()
			self._dataProvider.clearCache()
			new = self._dataProvider.getBlocks()
			self._dataProvider.saveToFile(self.getDataPath('cache-new.dat'), new)

			# Use old splitting information to synchronize with new dataset infos
			jobChanges = self._dataSplitter.resyncMapping(self.getDataPath('map-new.tar'), old, new)
			if jobChanges:
				# Move current splitting to backup and use the new splitting from now on
				def backupRename(old, cur, new):
					if self._keepOld:
						os.rename(self.getDataPath(cur), self.getDataPath(old))
					os.rename(self.getDataPath(new), self.getDataPath(cur))
				# Use a single timestamp so the map/cache backup pair always matches
				# (two time.time() calls could straddle a second boundary)
				backupTime = time.time()
				backupRename(  'map-old-%d.tar' % backupTime,   'map.tar',   'map-new.tar')
				backupRename('cache-old-%d.dat' % backupTime, 'cache.dat', 'cache-new.dat')
				old_maxN = self._dataSplitter.getMaxJobs()
				self._dataSplitter.importPartitions(self.getDataPath('map.tar'))
				self._maxN = self._dataSplitter.getMaxJobs()
				result_redo.update(jobChanges[0])
				result_disable.update(jobChanges[1])
				result_sizeChange = result_sizeChange or (old_maxN != self._maxN)
			self.resyncFinished()
		return (result_redo, result_disable, result_sizeChange)
	def resync(self):
		"""Resync the wrapped source and translate its results into this range.

		Only parameter numbers inside [_posStart, _posEnd] are kept; they are
		shifted so that the range starts at zero. A changed range end counts
		as a size change.
		"""
		(result_redo, result_disable, result_sizeChange) = ParameterSource.resync(self)
		(psource_redo, psource_disable, _) = self._psource.resync() # size change is irrelevant if outside of range
		def shiftInRange(pNums):
			# keep only pNums inside the configured window, rebased to zero
			for pNum in pNums:
				if (pNum >= self._posStart) and (pNum <= self._posEnd):
					yield pNum - self._posStart
		result_redo.update(shiftInRange(psource_redo))
		result_disable.update(shiftInRange(psource_disable))
		oldPosEnd = self._posEnd
		self._posEnd = utils.QM(self._posEndUser is None, self._psource.getMaxParameters() - 1, self._posEndUser)
		return (result_redo, result_disable, result_sizeChange or (oldPosEnd != self._posEnd))
	def resync(self):
		"""Resync the wrapped source and remap its results through the pspace.

		Each local parameter number maps via self._pSpace to a sub-source
		parameter number; redo/disable markers from the sub-source are
		translated back into local numbers.

		Returns a (redo, disable, sizeChange) tuple like ParameterSource.resync.
		"""
		(result_redo, result_disable, result_sizeChange) = ParameterSource.resync(self)
		# Bug fix: psource_sizeChange was only bound inside the if-branch,
		# causing a NameError in the return when resync is disabled
		psource_sizeChange = False
		if self.resyncEnabled():
			(psource_redo, psource_disable, psource_sizeChange) = self._psource.resync()
			self._pSpace = self.initPSpace()
			for pNum, pInfo in enumerate(self._pSpace):
				subNum, _ = pInfo # ignore lookupIndex
				if subNum in psource_redo:
					result_redo.add(pNum)
				if subNum in psource_disable:
					result_disable.add(pNum)
			self.resyncFinished()
		return (result_redo, result_disable, result_sizeChange or psource_sizeChange)
	def resync(self):
		"""Resync all sub-sources and translate their affected parameter
		numbers into the combined parameter space.

		Returns a (redo, disable, sizeChange) tuple like ParameterSource.resync.
		"""
		prevMaxParameters = self._maxParameters
		# Perform resync of subsources
		subResultList = [sub.resync() for sub in self._psourceList]
		# Update max for _translateNum
		self._psourceMaxList = [sub.getMaxParameters() for sub in self._psourceList]
		self._maxParameters = self.initMaxParameters()
		# translate affected pNums from subsources
		(result_redo, result_disable, dummy) = ParameterSource.resync(self)
		for idx, (sub_redo, sub_disable, dummy) in enumerate(subResultList):
			for pNum in sub_redo:
				result_redo.update(self._translateNum(idx, pNum))
			for pNum in sub_disable:
				result_disable.update(self._translateNum(idx, pNum))
		return (result_redo, result_disable, prevMaxParameters != self._maxParameters)
 def resync(self):
     """Resync the wrapped source and remap its results through the pspace.

     Each local parameter number maps via self._pSpace to a sub-source
     parameter number; redo/disable markers from the sub-source are
     translated back into local numbers.

     Returns a (redo, disable, sizeChange) tuple like ParameterSource.resync.
     """
     (result_redo, result_disable,
      result_sizeChange) = ParameterSource.resync(self)
     # Bug fix: psource_sizeChange was only bound inside the if-branch,
     # causing a NameError in the return when resync is disabled
     psource_sizeChange = False
     if self.resyncEnabled():
         (psource_redo, psource_disable,
          psource_sizeChange) = self._psource.resync()
         self._pSpace = self.initPSpace()
         for pNum, pInfo in enumerate(self._pSpace):
             subNum, _ = pInfo  # ignore lookupIndex
             if subNum in psource_redo:
                 result_redo.add(pNum)
             if subNum in psource_disable:
                 result_disable.add(pNum)
         self.resyncFinished()
     return (result_redo, result_disable, result_sizeChange
             or psource_sizeChange)
    def resync(self):
        """Resynchronize the parameter source with the current dataset content.

        Loads the cached dataset blocks, queries the data provider for the
        current blocks, and lets the data splitter map old partitions onto
        the new dataset. On changes, the splitting/cache files are rotated
        (optionally keeping timestamped backups) and the partition count is
        refreshed.

        Returns a (redo, disable, sizeChange) tuple like ParameterSource.resync.
        """
        (result_redo, result_disable,
         result_sizeChange) = ParameterSource.resync(self)
        if self.resyncEnabled() and self._dataProvider:
            # Get old and new dataset information
            old = DataProvider.loadFromFile(
                self.getDataPath('cache.dat')).getBlocks()
            self._dataProvider.clearCache()
            new = self._dataProvider.getBlocks()
            self._dataProvider.saveToFile(self.getDataPath('cache-new.dat'),
                                          new)

            # Use old splitting information to synchronize with new dataset infos
            jobChanges = self._dataSplitter.resyncMapping(
                self.getDataPath('map-new.tar'), old, new)
            if jobChanges:
                # Move current splitting to backup and use the new splitting from now on
                def backupRename(old, cur, new):
                    if self._keepOld:
                        os.rename(self.getDataPath(cur), self.getDataPath(old))
                    os.rename(self.getDataPath(new), self.getDataPath(cur))

                # Use a single timestamp so the map/cache backup pair always
                # matches (two time.time() calls could straddle a second boundary)
                backupTime = time.time()
                backupRename('map-old-%d.tar' % backupTime, 'map.tar',
                             'map-new.tar')
                backupRename('cache-old-%d.dat' % backupTime, 'cache.dat',
                             'cache-new.dat')
                old_maxN = self._dataSplitter.getMaxJobs()
                self._dataSplitter.importPartitions(
                    self.getDataPath('map.tar'))
                self._maxN = self._dataSplitter.getMaxJobs()
                result_redo.update(jobChanges[0])
                result_disable.update(jobChanges[1])
                result_sizeChange = result_sizeChange or (old_maxN !=
                                                          self._maxN)
            self.resyncFinished()
        return (result_redo, result_disable, result_sizeChange)