def _get_partition_specs(self, uniqueattrs):
    """Return partition specs as a list of ``(None, combination)`` pairs.

    Parameters
    ----------
    uniqueattrs : sequence
      Unique attribute values to draw combinations from.
      (presumably the unique values of the split attribute — confirm
      against the caller)
    """
    if isinstance(self.cvtype, float):
        # a fractional cvtype selects a proportion of the unique attributes
        n = int(self.cvtype * len(uniqueattrs))
    else:
        n = self.cvtype
    # random subsampling of combinations only makes sense when a count was
    # given and the strategy asks for it
    want_random = self.count is not None \
                  and self.selection_strategy == 'random'
    if want_random \
       and self.count < support.ncombinations(len(uniqueattrs), n):
        # due to selection_strategy=random they would be also
        # reshuffled by super class later on but that should be ok
        combs = support.xrandom_unique_combinations(uniqueattrs, n,
                                                   self.count)
    else:
        # all combinations were requested so no need for randomization
        combs = support.xunique_combinations(uniqueattrs, n)
    if not want_random:
        # we are doomed to return all of them
        return [(None, combo) for combo in combs]
    # It makes sense to limit number of returned combinations right away
    return [(None, combo)
            for idx, combo in enumerate(combs) if idx < self.count]
def _get_partition_specs(self, uniqueattrs):
    """Return partition specs as a list of ``(None, combination)`` pairs.

    Parameters
    ----------
    uniqueattrs : sequence
      Unique attribute values to draw combinations from.
      (presumably the unique values of the split attribute — confirm
      against the caller)

    Returns
    -------
    list of tuple
      One ``(None, combination)`` entry per selected combination.
    """
    if isinstance(self.cvtype, float):
        # a fractional cvtype selects a proportion of the unique attributes
        n = int(self.cvtype * len(uniqueattrs))
    else:
        n = self.cvtype
    if self.count is None \
       or self.selection_strategy != 'random' \
       or self.count >= support.ncombinations(len(uniqueattrs), n):
        # all combinations were requested so no need for
        # randomization
        combs = support.xunique_combinations(uniqueattrs, n)
    else:
        # due to selection_strategy=random they would be also
        # reshuffled by super class later on but that should be ok
        combs = support.xrandom_unique_combinations(uniqueattrs, n,
                                                   self.count)
    if self.count is None or self.selection_strategy != 'random':
        # we are doomed to return all of them
        return [(None, i) for i in combs]
    # It makes sense to limit number of returned combinations right away.
    # islice stops consuming the (potentially large) combination generator
    # as soon as `count` items were taken, unlike the previous
    # enumerate-based filter which exhausted the whole generator.
    from itertools import islice
    return [(None, i) for i in islice(combs, self.count)]
def generate(self, ds):
    """Yield shallow dataset copies with reassigned partitions.

    For each unique combination of targets present in the kept
    (testing) partition, samples are relabeled so that testing samples
    outside the combination and non-testing samples inside it receive
    ``self._partition_assign``.
    """
    parts = ds.sa[self._partitions_attr].value.copy()
    targets = ds.sa[self._targets_attr].value
    is_testing = parts == self._partitions_keep
    is_nontesting = np.logical_not(is_testing)
    seen_targets = np.unique(targets[is_testing])
    for combo in xunique_combinations(seen_targets, self._k):
        new_parts = parts.copy()
        in_combo = np.array([t in combo for t in targets])
        out_combo = np.logical_not(in_combo)
        # testing samples whose target is not in the combination, and
        # non-testing samples whose target is, both get reassigned
        new_parts[is_testing & out_combo] = self._partition_assign
        new_parts[is_nontesting & in_combo] = self._partition_assign
        out = ds.copy(deep=False)
        out.sa[self.space] = new_parts
        yield out
def generate(self, ds):
    """Yield one shallow copy of `ds` per combination of testing targets.

    Each yielded dataset carries a fresh partitioning in
    ``self.space``: testing samples whose target is not part of the
    current combination, and non-testing samples whose target is, are
    both relabeled to ``self.partition_assign``.
    """
    original = ds.sa[self.partitions_attr].value.copy()
    targets = ds.sa[self.targets_attr].value
    testing = original == self.partitions_keep
    training = np.logical_not(testing)
    for selected in support.xunique_combinations(
            np.unique(targets[testing]), self.k):
        membership = np.array([t in selected for t in targets])
        relabeled = original.copy()
        relabeled[np.logical_and(testing,
                                 np.logical_not(membership))] \
            = self.partition_assign
        relabeled[np.logical_and(training, membership)] \
            = self.partition_assign
        result = ds.copy(deep=False)
        result.sa[self.space] = relabeled
        yield result
def _get_partition_specs(self, uniqueattrs):
    """Return a ``(None, combination)`` spec for every unique combination
    of ``self.__cvtype`` elements drawn from `uniqueattrs`."""
    combos = support.xunique_combinations(uniqueattrs, self.__cvtype)
    return [(None, combo) for combo in combos]