def asynchronousFinalize(self, batch):
    """
    Update all global estimators with the contributions of a finished batch.

    For each level of the given batch, merge the batch-local QoI and cost
    estimators into the corresponding global estimators (as a remote task),
    then release the COMPSs future objects of the batch-local estimators.
    Finally, refresh the predictor models and synchronize the global
    estimators, which are needed locally to check convergence and update
    the hierarchy.

    Parameters
    ----------
    batch : key into self.batchIndices identifying the finished batch.

    Returns
    -------
    None. Mutates self.indices in place.
    """
    # Postprocess on finished batches.
    # Iterate levels pairwise instead of indexing with range(len(...)):
    # each global index is matched with the batch-local index of the same level.
    for globalIndex, batchIndex in zip(self.indices, self.batchIndices[batch]):
        # Merge batch contributions into the global estimators (remote task)
        mda.updateGlobalMomentEstimator_Task(
            globalIndex.qoiEstimator,
            batchIndex.qoiEstimator,
            globalIndex.costEstimator,
            batchIndex.costEstimator,
            batch,
        )
        # Delete COMPSs future objects no longer needed
        delete_object(
            batchIndex.costEstimator,
            *batchIndex.qoiEstimator,
        )
    # Update model coefficients for cost, bias, variance with new observations
    self.updatePredictors()
    # Synchronize estimators needed for checking convergence and updating hierarchy
    for index in self.indices:
        index.qoiEstimator = get_value_from_remote(index.qoiEstimator)
        index.costEstimator = get_value_from_remote(index.costEstimator)
def estimation(self, assemblerCoordinates=None):
    """
    Assemble global estimations for the requested assemblers.

    For an entry of assembler, collect estimations from all indices
    corresponding to the same entry of estimatorsForAssembler, then pass
    this list to the assembleEstimation method of that entry of assembler.
    Duplicate index estimations are computed only once and shared.

    Parameters
    ----------
    assemblerCoordinates : iterable of int, optional
        Positions in self.assemblers to evaluate. Defaults to all of them.

    Returns
    -------
    list of the values returned by each requested assembler.
    """
    # If nothing is specified, assemble all estimations
    if assemblerCoordinates is None:
        assemblerCoordinates = range(len(self.assemblers))
    # Extract current hierarchy from list of indices
    hierarchy = self.hierarchy()

    ## Get list of estimations for assemblers without duplicate calls
    # List of which assembler needs what estimations
    args = [self.estimatorsForAssembler[c] for c in assemblerCoordinates]
    # Convert nested list to nested tuple so it can be used as a dict key
    mapArg = [((i, j), (v[0], tuple(v[1])))
              for i, a in enumerate(args) for j, v in enumerate(a)]
    # Create dictionary of {argument: [coord1, ...], ...}
    # such that args[coord1[0]][coord1[1]] = argument
    argMap = defaultdict(list)
    for coord, estArgs in mapArg:
        argMap[estArgs].append(coord)
    # Initialise and fill list of estimations for assemblers
    estimations = [[[] for _ in a] for a in args]
    for estArgs, coords in argMap.items():
        # Compute this unique estimation once
        est = self.indexEstimation(*estArgs)
        # Distribute it wherever it appeared in args
        for i, j in coords:
            estimations[i][j] = est

    # Run the corresponding estimation methods on these estimations
    globalEstimations = []
    to_delete = []
    # Iterate over couples (coordinate, estimation)
    for c, e in zip(assemblerCoordinates, estimations):
        globalEstimations.append(
            self.assemblers[c].assembleEstimation(hierarchy, e))
        # Shared estimation lists must be scheduled for deletion only once
        if e not in to_delete:
            to_delete.append(e)
    # Delete COMPSs objects
    # Flatten list of depth 2 then unpack
    delete_object(*chain.from_iterable(hierarchy), *to_delete)
    return globalEstimations
def test_delete_object():
    """Check that delete_object enforces its single-deletion contract."""
    init()
    # An object created with keep=True may be deleted exactly once;
    # a second deletion must raise.
    kept = func1(10, 20, keep=True)
    delete_object(kept)
    with pytest.raises(ExaquteException):
        delete_object(kept)
    # An object created without keep=True must not be deletable at all.
    transient = func1(10, 20)
    with pytest.raises(ExaquteException):
        delete_object(transient)
def update(self, newIndexAndSampleNumbers):
    """
    Update the Monte Carlo index to a new number of samples.

    First, decide the number of new samples to be generated, then generate
    each sample and the associated cost with newSample and pass them to the
    update methods of qoiEstimator and costEstimator.
    """
    # Compute number of new samples based on areSamplesRecycled
    # TODO Minimum number of new samples hard coded here to 6, since
    # program not planned for moment > 4 estimation. Accept/infer this
    # value later based on max(qoiEstimator[i].order)
    # NOTE(review): the TODO above says 6 but the code enforces a minimum
    # of 5 — confirm which value is intended.
    if self.areSamplesRecycled is True:
        number_new_samples = max(
            5, newIndexAndSampleNumbers[1] - self.sampleNumber())
    else:
        # Samples are not recycled: every estimator restarts from scratch
        number_new_samples = newIndexAndSampleNumbers[1]
        for i in range(len(self.qoiEstimator)):
            self.qoiEstimator[i].reset()
        self.costEstimator.reset()

    ### Drawing samples
    # Generate the required number of correlated samples and estimate cost
    samples = []
    times = []
    for _ in range(number_new_samples):
        # Generate a new sample (list of solver outputs for one random event)
        # See the documentation of method newSample for the data structure.
        new_sample, new_time = self.newSample()
        # append to corresponding list
        samples.append(new_sample)
        times.append(new_time)

    # Note on data structure of samples
    # --------------------------------
    # WARNING: This is adapted from the documentation of SampleGenerator and
    # may be outdated. It is recommended that you use the documentation instead.
    # Let us note S = samples, S_j = samples[j], etc.
    # S is a nested list of depth > 2.
    # len(S) == number_new_samples; len(S_j) == MonteCarloIndex.numberOfSolvers().
    # Elements of S_jk may be future objects.
    # If solver outputs are not split (i.e. not MonteCarloIndex.areSamplesSplit()),
    # len(S_jk) == MonteCarloIndex.sampleDimension().
    # Example, for any event i. If MonteCarloIndex.numberOfSolvers() == 2 and
    # SampleGenerator.sampleDimension() == 3:
    #   S_i1 == [s1_o1, s1_o2, s1_o3] (== [future, future, future] if parallel)
    #   (s: solver, o: output); idem for S_i2.
    # If solver outputs are split (i.e. MonteCarloIndex.areSamplesSplit()),
    # S_ij is of length len(MonteCarloIndex.sampleSplitSizes())
    # and S_ijk is of length MonteCarloIndex.sampleSplitSizes()[k].
    # Example, for any event i. If MonteCarloIndex.numberOfSolvers() == 2 and
    # MonteCarloIndex.sampleSplitSizes() == [3, 2]:
    #   S_i1 == [ S_(1,1), S_(1,2) ] == [ [s1_o1, s1_o2, s1_o3], [s1_o4, s1_o5] ]
    #        == [future, future] if parallel
    #   (s: solver, o: output); idem for solver S_i2.

    ### Estimator update
    # Get the multi-indices to the subsets of estimators, samples and times
    # that will be used to update the estimators
    indexEst, indexSamp, indexTime = self._updateSubsets(
        number_new_samples)
    # Iterator over the 'solver' dimension of the sample and time arrays
    # TODO Idea: include this in multi-indices returned by _updateSubsets?
    solverRg = range(self.numberOfSolvers())

    ## Case: solver outputs are not split
    if not self.areSamplesSplit():
        # Iterate over subsets of estimators
        # For the moment, we actually iterate over estimators (subsets are singletons)
        for g, iE in enumerate(indexEst):
            # Update for self.qoiEstimator
            # Iterate over subsets of events
            for indexS in indexSamp[g]:
                # Assemble subset of samples
                # TODO When MomentEstimator is updated to specs, one level of nesting will
                # have to be added above solver level: each element of sampleGroup is to
                # be a list of sample components; as of now, it is just a single component.
                sampleGroup = [[samples[i][j][k] for j in solverRg]
                               for i, k in indexS]
                # Update estimator with sample subset
                self.qoiEstimator[iE].update(sampleGroup)
        # Update self.costEstimator
        # Iterate over subsets of events
        for indexT in indexTime:
            # Assemble subset of samples
            # in bidimensional array expected by MomentEstimator.update
            # TODO There must be a better way to handle this
            timeGroup = [[summation_Task(*times[i])] for i in indexT]
            # Update estimator with sample subset
            self.costEstimator.update(timeGroup)
        # Done: the rest of this method handles the split-output case only
        return

    ## Case: solver outputs are split
    # Iterate over splits of solver outputs
    for g, iE in enumerate(indexEst):
        # Assemble subset of estimators for current split
        estimatorGroup = [self.qoiEstimator[ie] for ie in iE]
        # Update for self.qoiEstimator
        # Iterate over subsets of events
        for indexS in indexSamp[g]:
            # Assemble subset of samples
            sampleGroup = [[samples[i][j][k] for j in solverRg]
                           for i, k in indexS]
            # Update subset of estimators (remote task)
            mdu.updatePartialQoiEstimators_Task(estimatorGroup, sampleGroup)
            # Delete future objects no longer needed.
            # Pass an iterable over *future* objects (e.g. no list of futures);
            # therefore, flatten list of depth 2.
            delete_object(*it.chain.from_iterable(sampleGroup))
        # Re-assign estimators from updated subset
        mdu.assignByIndex_Task(self.qoiEstimator, estimatorGroup, iE)
        # Delete future objects no longer needed
        delete_object(*estimatorGroup)
    # Update self.costEstimator
    # Iterate over subsets of events
    for indexT in indexTime:
        # Assemble subset of samples as the list expected by updateCostEstimator_Task
        timeGroup = [times[i] for i in indexT]
        # Update cost estimator with sample subset
        # TODO Unnecessary, we should use MomentEstimator.update
        mdu.updateCostEstimator_Task(self.costEstimator, timeGroup)
        # Delete future objects no longer needed
        delete_object(*it.chain.from_iterable(timeGroup))