def iterator():
    def one_epoch():
        i_data = list(map(lambda x: more_itertools.chunked(x, batch_size), data))
        return zip(*i_data)

    for i, m in enumerate(more_itertools.repeatfunc(one_epoch, times=1)):
        for t in m:
            yield t

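# A minimal self-contained sketch of the same chunked-plus-zip batching idiom,
# assuming `data` is a tuple of equal-length sequences and a small `batch_size`;
# the values below are illustrative stand-ins, not taken from the snippet above.
import more_itertools

data = ([1, 2, 3, 4], ['a', 'b', 'c', 'd'])
batch_size = 2

def one_epoch():
    # chunk each sequence, then zip so each yielded item pairs aligned batches
    return zip(*(more_itertools.chunked(seq, batch_size) for seq in data))

for batch in more_itertools.repeatfunc(one_epoch, times=1):
    for pair in batch:
        print(pair)  # ([1, 2], ['a', 'b']) then ([3, 4], ['c', 'd'])
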
def from_counts(cls: Type[_T], counts: Counter[CardType] = None) -> _T:
    if counts is None:
        counts = STANDARD_DECK_COUNTS
    cards = list(
        itt.chain.from_iterable(
            mitt.repeatfunc(card_type.card_class, count)
            for card_type, count in counts.items()
        )
    )
    random.shuffle(cards)
    return cls.from_cards(cards)

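# Sketch of the same "build N instances per type with repeatfunc" idea outside
# the card-game code; the classes and counts here are made-up stand-ins.
import itertools as itt
import more_itertools as mitt

class Guard: pass
class Priest: pass

counts = {Guard: 5, Priest: 2}
cards = list(itt.chain.from_iterable(
    mitt.repeatfunc(card_class, count) for card_class, count in counts.items()))
print(len(cards))  # 7 freshly constructed, independent card objects
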
class DeckCases:
    @pytest_cases.case()
    def case_empty_deck(self):
        return Deck.from_counts(Counter())

    @pytest_cases.case()
    @pytest_cases.parametrize(counts=mitt.repeatfunc(random_card_counts, 5))
    def case_random_deck(self, counts) -> Deck:
        return Deck.from_counts(counts)

    @pytest_cases.case()
    def case_full_deck(self):
        return Deck.from_counts(STANDARD_DECK_COUNTS)

def _cui_to_icu(cxuxi: Iterable[Tuple[int, BagValDict[_T, InstrState]]],
                instructions: int) -> List[Dict[int, _InstrPosition]]:
    """Convert a CxUxI utilization map to IxCxU format.

    `cxuxi` is the ClockxUnitxInstruction utilization map to convert.
    `instructions` is the total number of instructions.

    """
    ixcxu: List[Dict[int, _InstrPosition]] = list(
        more_itertools.repeatfunc(dict, instructions))
    for cur_cp, uxi_util in cxuxi:
        _fill_cp_util(cur_cp, uxi_util.items(), ixcxu)
    return ixcxu

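# Why list(repeatfunc(dict, n)) rather than [{}] * n: repeatfunc calls dict()
# n times, so every slot gets its own dictionary instead of n references to
# one shared object.
import more_itertools

aliased = [{}] * 3
independent = list(more_itertools.repeatfunc(dict, 3))

aliased[0]['x'] = 1
independent[0]['x'] = 1
print(aliased)      # [{'x': 1}, {'x': 1}, {'x': 1}] -- all three share one dict
print(independent)  # [{'x': 1}, {}, {}]
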
def process_forever(self, timeout=0.2):
    """Run an infinite loop, processing data from connections.

    This method repeatedly calls process_once.

    Arguments:

        timeout -- Parameter to pass to process_once.
    """
    # This loop should specifically *not* be mutex-locked.
    # Otherwise no other thread would ever be able to change
    # the shared state of a Reactor object running this function.
    log.debug("process_forever(timeout=%s)", timeout)
    one = functools.partial(self.process_once, timeout=timeout)
    consume(repeatfunc(one))

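# Minimal sketch of the consume(repeatfunc(...)) pattern above: run a
# side-effecting callable over and over. Bounded here with times=3 so the
# sketch terminates; drop it to loop until interrupted. `poll` is hypothetical.
import functools
from more_itertools import consume, repeatfunc

def poll(timeout):
    print("polling with timeout={}".format(timeout))

one = functools.partial(poll, timeout=0.2)
consume(repeatfunc(one, times=3))
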
def start_and_wait_for_jobs(processes, descriptions):
    assert len(processes) == len(descriptions)
    jobs = []
    processes_descriptions = list(zip(processes, descriptions))
    for i, (process, description) in enumerate(processes_descriptions):
        logging.info('Starting %s', description)
        jobs.append(process.run_bg())
        if i != len(processes_descriptions) - 1:
            time.sleep(1)
    # Wrap the count in a lambda so a fresh generator is summed on every poll;
    # passing a generator directly to repeatfunc would exhaust it after the
    # first iteration and report 0 running jobs from then on.
    for running_jobs in more_itertools.repeatfunc(
            lambda: sum(not j.ready() for j in jobs)):
        if running_jobs == 0:
            break
        logging.info('Still running ... {running_jobs} of {total_jobs}'.format(
            running_jobs=running_jobs, total_jobs=len(jobs)))
        time.sleep(60)
    logging.info('Done')

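# Why the lambda matters: passing a generator straight to repeatfunc reuses the
# same (soon exhausted) generator on every call, so the count drops to 0 after
# the first poll regardless of how many jobs are still running.
import more_itertools

reused = more_itertools.repeatfunc(sum, None, (x for x in [1, 1, 1]))
print(next(reused), next(reused))  # 3 0 -- generator exhausted after first sum

fresh = more_itertools.repeatfunc(lambda: sum(x for x in [1, 1, 1]))
print(next(fresh), next(fresh))    # 3 3 -- a new generator is built each call
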
def gen():
    nums = list(range(len(self.lines_a)))
    # Infinite generator: yields a freshly shuffled index list on every call
    iter = repeatfunc(random.sample, None, nums, len(nums))
    for num_list in iter:
        for i1 in num_list:
            # Tokenize the input text
            input_text = self.lines_q[i1]
            input_token_dic = self.tokenizer(input_text,
                                             truncation=True,
                                             max_length=512,
                                             return_tensors="tf")
            if len(input_token_dic['input_ids']) > self.config.MAX_LENGTH:
                input_token_dic['input_ids'] = input_token_dic[
                    'input_ids'][:, -self.config.MAX_LENGTH:]
                input_token_dic['attention_mask'] = input_token_dic[
                    'attention_mask'][:, -self.config.MAX_LENGTH:]
                input_token_dic['token_type_ids'] = input_token_dic[
                    'token_type_ids'][:, -self.config.MAX_LENGTH:]
            else:
                input_token_dic = self.tokenizer(
                    input_text,
                    padding='max_length',
                    truncation=True,
                    max_length=self.config.MAX_LENGTH,
                    return_tensors="tf")
            toot = KiriDataset.STR + self.lines_a[i1]
            idxs = np.zeros((self.config.MAX_CHAR_LEN + 1, ), dtype=int)
            for i2 in range(min([self.config.MAX_CHAR_LEN + 1, len(toot)])):
                if toot[i2] in self.char_idx:
                    idxs[i2] = self.char_idx[toot[i2]]
                else:
                    idxs[i2] = self.char_idx[KiriDataset.UNK]
            yield ((input_token_dic['input_ids'][0],
                    input_token_dic['attention_mask'][0], idxs[:-1]),
                   idxs[1:])

def test_finite_repeat(self):
    """ensure limited repeat when times is provided"""
    r = mi.repeatfunc(lambda: 5, times=5)
    self.assertEqual([5, 5, 5, 5, 5], list(r))

def test_null_times(self):
    """a times of 0 should return an empty iterator"""
    r = mi.repeatfunc(range, 0, 3)
    self.assertRaises(StopIteration, lambda: next(r))

def test_added_arguments(self):
    """ensure arguments are applied to the function"""
    r = mi.repeatfunc(lambda x: x, 2, 3)
    self.assertEqual([3, 3], list(r))

def test_simple_repeat(self):
    """test simple repeated functions"""
    r = mi.repeatfunc(lambda: 5)
    self.assertEqual([5, 5, 5, 5, 5], [next(r) for _ in range(5)])

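# The signature these tests exercise, at a glance:
# repeatfunc(func, times=None, *args) calls func(*args) `times` times, or
# forever when times is None.
import more_itertools as mi

print(list(mi.repeatfunc(lambda: 5, times=5)))  # [5, 5, 5, 5, 5]
print(list(mi.repeatfunc(lambda x: x, 2, 3)))   # [3, 3]
print(list(mi.repeatfunc(range, 0, 3)))         # []
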
from tally import Tally
from more_itertools import repeatfunc, first
from itertools import islice
import silly
import random

t = Tally()


def print_results(t):
    for i, namevotes in enumerate(islice(t.descending(), 0, 3)):
        name, votes = namevotes
        print("#{} is {} with {} votes. ".format(i + 1, name, votes), end='\t')
    print()


an_infinite_stream_of_random_names = repeatfunc(silly.firstname)

for num in an_infinite_stream_of_random_names:
    t.tally(num)
    print_results(t)
    if random.random() < .01:
        name, votes = first(t.descending())
        t.remove(name)
        print('OH NO!!! {} JUST GOT BLUE-SHELLED'.format(name))

import pytest  # noqa
import pytest_cases  # noqa

from collections import Counter

import more_itertools as mitt

# from ... import * imports are needed because of how fixtures are generated;
# see pytest-cases#174
import loveletter.cardpile
import loveletter.cards as cards
from loveletter.cardpile import CardPile, Deck, STANDARD_DECK_COUNTS  # noqa
from loveletter.cards import CardType  # noqa
from test_loveletter.unit.test_cardpile_cases import *
from test_loveletter.unit.test_cards_cases import *
from test_loveletter.utils import collect_subclasses, random_card_counts


@pytest_cases.parametrize(
    counts=mitt.repeatfunc(random_card_counts, 5),
    pile_class=collect_subclasses(CardPile, loveletter.cardpile),
)
def test_pileFromCounts_counts_hasCorrectCards(pile_class, counts):
    pile = pile_class.from_counts(counts)
    empiric_counts = Counter(map(CardType, pile))
    assert empiric_counts == counts
    assert pile.get_counts() == empiric_counts


def test_deckFromCounts_default_isStandardDeck():
    deck = Deck.from_counts()
    assert Counter(map(CardType, deck)) == STANDARD_DECK_COUNTS


def test_deck_containsSetAside():