def test_stroke(keys, expected):
    """Build a Stroke from keys and check it against the expected outcome.

    expected is either an exception class (construction must raise it) or a
    (steno_keys, rtfcre) pair the resulting stroke must expose.
    """
    if inspect.isclass(expected):
        # An exception class means construction is expected to fail.
        with pytest.raises(expected):
            Stroke(keys)
        return
    steno_keys, rtfcre = expected
    result = Stroke(keys)
    assert result.steno_keys == steno_keys
    assert result.rtfcre == rtfcre
def test_enable_stroke_logging():
    """Only strokes sent while stroke logging is enabled are recorded."""
    filename = stroke_filename('/fn')
    log.set_stroke_filename(filename)
    # Logging starts disabled: this stroke must be dropped.
    log.stroke(Stroke(('S-', )))
    log.enable_stroke_logging(True)
    log.stroke(Stroke(('T-', )))
    log.enable_stroke_logging(False)
    # Disabled again: this stroke must be dropped too.
    log.stroke(Stroke(('K-', )))
    assert FakeHandler.outputs == {filename: ["Stroke(T : ['T-'])"]}
def setup(system_name):
    """Load the named system plugin and publish its exports at module level."""
    system_mod = registry.get_plugin('system', system_name).obj
    exports = {
        symbol: init(system_mod)
        for symbol, init in _EXPORTS.items()
    }
    exports['NAME'] = system_name
    globals().update(exports)
    # Reconfigure the Stroke class for the newly selected key layout.
    Stroke.setup(KEYS, IMPLICIT_HYPHEN_KEYS, NUMBER_KEY, NUMBERS,
                 UNDO_STROKE_STENO)
def test_enable_stroke_logging(self):
    """Only strokes sent while stroke logging is enabled are recorded."""
    sf = self._stroke_filename('/fn')
    self.logger.set_stroke_filename(sf)
    # Logging starts disabled: this stroke must be dropped.
    self.logger.stroke(Stroke(('S-', )))
    self.logger.enable_stroke_logging(True)
    self.logger.stroke(Stroke(('T-', )))
    self.logger.enable_stroke_logging(False)
    # Disabled again: this stroke must be dropped too.
    self.logger.stroke(Stroke(('K-', )))
    self.assertEqual(FakeHandler.get_output(),
                     {sf: ["Stroke(T : ['T-'])"]})
def test_stroke():
    """Logged strokes use the RTF/CRE form plus the ordered key list."""
    filename = stroke_filename('/fn')
    log.set_stroke_filename(filename)
    log.enable_stroke_logging(True)
    log.stroke(Stroke(('S-', '-T', 'T-')))
    log.stroke(Stroke(('#', 'S-', '-T')))
    expected = [
        "Stroke(ST-T : ['S-', 'T-', '-T'])",
        "Stroke(1-9 : ['1-', '-9'])",
    ]
    assert FakeHandler.outputs == {filename: expected}
def test_stroke(self):
    """Logged strokes use the RTF/CRE form plus the ordered key list."""
    sf = self._stroke_filename('/fn')
    self.logger.set_stroke_filename(sf)
    self.logger.enable_stroke_logging(True)
    self.logger.stroke(Stroke(('S-', '-T', 'T-')))
    self.logger.stroke(Stroke(('#', 'S-', '-T')))
    expected = [
        "Stroke(ST-T : ['S-', 'T-', '-T'])",
        "Stroke(1-9 : ['1-', '-9'])",
    ]
    self.assertEqual(FakeHandler.get_output(), {sf: expected})
def test_set_filename():
    """Switching log files routes strokes accordingly; None disables logging."""
    first = stroke_filename('/fn1')
    log.set_stroke_filename('/fn1')
    log.enable_stroke_logging(True)
    log.stroke(Stroke(('S-', )))
    second = stroke_filename('/fn2')
    log.set_stroke_filename('/fn2')
    log.stroke(Stroke(('-T', )))
    # With no filename set, strokes are silently dropped.
    log.set_stroke_filename(None)
    log.stroke(Stroke(('P-', )))
    assert FakeHandler.outputs == {
        first: ["Stroke(S : ['S-'])"],
        second: ["Stroke(-T : ['-T'])"],
    }
def test_set_filename(self):
    """Switching log files routes strokes accordingly; None disables logging."""
    first = self._stroke_filename('/fn1')
    self.logger.set_stroke_filename('/fn1')
    self.logger.enable_stroke_logging(True)
    self.logger.stroke(Stroke(('S-', )))
    second = self._stroke_filename('/fn2')
    self.logger.set_stroke_filename('/fn2')
    self.logger.stroke(Stroke(('-T', )))
    # With no filename set, strokes are silently dropped.
    self.logger.set_stroke_filename(None)
    self.logger.stroke(Stroke(('P-', )))
    self.assertEqual(FakeHandler.get_output(), {
        first: ["Stroke(S : ['S-'])"],
        second: ["Stroke(-T : ['-T'])"],
    })
def _repeat_last_stroke(translations):
    """Return a copy of the last stroke of the last translation.

    translations -- list of Translation objects (most recent last).
    Returns a new Stroke, or None when there is nothing to repeat.
    """
    # Guard clause replaces the original's len(...)-1 slice-and-check dance.
    if not translations:
        return None
    last_stroke = translations[-1].strokes[-1]
    # Copy the key list so the caller cannot mutate the stored stroke.
    return Stroke(last_stroke.steno_keys[:])
def _lookup_affixes(self, rtfcre_seq, test_pairs, prefix=False):
    """ Look up variations on a stroke sequence due to prefixes and/or suffixes.
        rtfcre_seq is a stroke sequence in RTFCRE form. The stroke under test will not be used.
        test_pairs are containers of (key, removed) pairs representing stroke variations:
            key     - Affix in key form that is contained within the final stroke.
            removed - RTFCRE representation of the final stroke with that affix key removed.
        prefix  - If True, test for prefixes instead of suffixes. """
    lookup = self._dictionary.lookup
    # Prefixes vary the first stroke of the sequence; suffixes vary the last.
    variant_index = 0 if prefix else -1
    variant_seq = rtfcre_seq[:]
    for key, removed in test_pairs:
        # The sequence with the affix key stripped must be a valid entry.
        variant_seq[variant_index] = removed
        main_mapping = lookup(tuple(variant_seq))
        if main_mapping is None:
            continue
        # The affix key on its own must also be a valid entry.
        affix_mapping = lookup((Stroke([key]).rtfcre, ))
        if affix_mapping is None:
            continue
        # Join with a space; the formatter applies orthography rules there.
        if prefix:
            return affix_mapping + ' ' + main_mapping
        return main_mapping + ' ' + affix_mapping
def last_stroke(translator, stroke, cmdline):
    """Macro: re-send the most recent stroke."""
    history = translator.get_state().translations
    if not history:
        return
    repeat = Stroke(history[-1].strokes[-1].steno_keys)
    translator.translate_stroke(repeat)
def steno_to_stroke(steno):
    """Convert an RTF/CRE steno string into a Stroke.

    Raises ValueError (with a caret pointing at the offender) when a
    character cannot be matched against the system's key layout.
    """
    # Check if the system changed, or
    # we need to perform initial setup.
    # The lookup tables are cached as attributes on the function itself,
    # keyed by the active system's name.
    if steno_to_stroke.system != system.NAME:
        keys = []
        letters = ''
        has_hyphen = False
        for k in system.KEYS:
            if not has_hyphen and k.startswith('-'):
                # Insert a placeholder so the '-' in the steno string
                # lines up with a None entry in the key table.
                has_hyphen = True
                keys.append(None)
                letters += '-'
            keys.append(k)
            letters += k.strip('-')
        steno_to_stroke.keys = keys
        steno_to_stroke.letters = letters
        steno_to_stroke.system = system.NAME
        # Map digit characters back to the letter keys they shadow.
        steno_to_stroke.numbers = {
            v.strip('-'): k.strip('-')
            for k, v in system.NUMBERS.items()
        }
    n = -1
    keys = set()
    for li, l in enumerate(steno):
        rl = steno_to_stroke.numbers.get(l)
        if rl is not None:
            # A digit implies the number key plus its underlying letter.
            keys.add('#')
            l = rl
        # Scan forward only: steno letters must appear in layout order.
        n = steno_to_stroke.letters.find(l, n + 1)
        if n < 0:
            raise ValueError('invalid steno letter at index %u:\n%s\n%s^'
                             % (li, steno, ' ' * li))
        k = steno_to_stroke.keys[n]
        if k is not None:
            keys.add(k)
    return Stroke(keys)
def steno_to_stroke(steno):
    """Convert an RTF/CRE steno string into a Stroke (asserts on bad input)."""
    if steno_to_stroke.system != system.NAME:
        # (Re)build the cached letter/key tables for the active system.
        key_table = []
        letter_string = ''
        seen_hyphen = False
        for key in system.KEYS:
            if not seen_hyphen and key.startswith('-'):
                # Placeholder entry so '-' in the steno string maps to None.
                seen_hyphen = True
                key_table.append(None)
                letter_string += '-'
            key_table.append(key)
            letter_string += key.strip('-')
        steno_to_stroke.keys = key_table
        steno_to_stroke.letters = letter_string
        steno_to_stroke.system = system.NAME
        # Map digit characters back to the letter keys they shadow.
        steno_to_stroke.numbers = {
            digit.strip('-'): key.strip('-')
            for key, digit in system.NUMBERS.items()
        }
    position = -1
    stroke_keys = set()
    for letter in steno:
        substitute = steno_to_stroke.numbers.get(letter)
        if substitute is not None:
            # A digit implies the number key plus its underlying letter.
            stroke_keys.add('#')
            letter = substitute
        # Scan forward only: steno letters must appear in layout order.
        position = steno_to_stroke.letters.find(letter, position + 1)
        assert position >= 0, (steno_to_stroke.letters, letter, position)
        key = steno_to_stroke.keys[position]
        if key is not None:
            stroke_keys.add(key)
    return Stroke(stroke_keys)
def OnInit(self):
    """Show the stroke display dialog and feed it 100 random strokes."""
    StrokeDisplayDialog.display(None, fake_config())
    import random
    from plover.steno import Stroke
    # random.sample() requires a sequence; on Python 3 dict.keys() is a
    # view, so materialize it into a list first.
    keys = list(system.KEY_ORDER.keys())
    for _ in range(100):
        num = random.randint(1, len(keys))
        StrokeDisplayDialog.stroke_handler(Stroke(random.sample(keys, num)))
    return True
def test_force_lowercase_title(self):
    """A {>} (force lowercase) entry wins over an active TITLE mode."""
    self.dictionary.set(('T-LT', ), '{MODE:TITLE}')
    self.dictionary.set(('TEFT', ), '{>}test')
    strokes = (
        ('T-', '-L', '-T'),
        ('T-', '-E', '-F', '-T'),
    )
    for keys in strokes:
        self.translator.translate(Stroke(keys))
    self.assertEqual(self.output.text, u' test')
def _lookup(self, strokes, suffixes=()):
    """Look up a stroke sequence, retrying with suffix keys split off.

    strokes  -- sequence of Stroke objects.
    suffixes -- candidate suffix keys to try removing from the last stroke.
    Returns the translation string, or None when nothing matches.
    """
    # PEP 8: compare against None with 'is' / 'is not', never ==/!=.
    dict_key = tuple(s.rtfcre for s in strokes)
    result = self._dictionary.lookup(dict_key)
    if result is not None:
        return result
    for key in suffixes:
        if key not in strokes[-1].steno_keys:
            continue
        # The suffix key alone must map to something...
        suffix_mapping = self._dictionary.lookup((Stroke([key]).rtfcre, ))
        if suffix_mapping is None:
            continue
        # ...and so must the sequence with that key removed from the
        # last stroke.
        keys = strokes[-1].steno_keys[:]
        keys.remove(key)
        copy = strokes[:]
        copy[-1] = Stroke(keys)
        main_mapping = self._dictionary.lookup(tuple(s.rtfcre for s in copy))
        if main_mapping is None:
            continue
        return main_mapping + ' ' + suffix_mapping
    return None
def _test_and_remove_each(stroke, test_keys):
    """ Given a set of steno keys representing a stroke and a set of test keys each usable
        as a prefix/suffix, return a list of tuples containing each test key present in the
        stroke paired with the RTFCRE representation of that stroke after removing the given
        key from it. """
    pairs = []
    for key in (k for k in test_keys if k in stroke):
        remaining = set(stroke) - {key}
        pairs.append((key, Stroke(remaining).rtfcre))
    return pairs
def test_steno(self):
    """rtfcre normalizes key order, duplicates, and number strokes."""
    cases = (
        (['S-'], 'S'),
        (['S-', 'T-'], 'ST'),
        (['T-', 'S-'], 'ST'),
        (['-P', '-P'], '-P'),
        (['-P', 'X-'], 'X-P'),
        (['#', 'S-', '-T'], '1-9'),
    )
    for keys, expected in cases:
        self.assertEqual(Stroke(keys).rtfcre, expected)
def test_bug471(self):
    # Repeat-last-stroke after typing two numbers outputs the numbers
    # reversed for some combos.
    self.dictionary.set(('R*S', ), '{*+}')
    # Note: the implementation of repeat-last-stroke looks at the last
    # stroke keys, so we can't use the same trick as for other tests.
    strokes = (
        ('#', 'S-', 'T-'),  # 12
        ('R-', '*', '-S'),
    )
    for keys in strokes:
        self.translator.translate(Stroke(keys))
    self.assertEqual(self.output.text, u' 1212')
def toggle_asterisk(translator, stroke, cmdline):
    """Macro: re-send the previous stroke with its asterisk flipped."""
    history = translator.get_state().translations
    if not history:
        return
    previous = history[-1]
    translator.untranslate_translation(previous)
    # Symmetric difference flips the '*' membership in one step.
    keys = set(previous.strokes[-1].steno_keys) ^ {'*'}
    translator.translate_stroke(Stroke(keys))
def test_translator_state_handling(self):
    # Check that the translator curtailing the list of last translations
    # according to its dictionary's longest key does not affect things
    # like the retrospective repeat-last-stroke command.
    self.dictionary.set(('TEFT', ), 'test')
    self.dictionary.set(('R*S', ), '{*+}')
    # Note: the implementation of repeat-last-stroke looks at the last
    # stroke keys, so we can't use the same trick as for other tests.
    strokes = (
        ('T-', '-E', '-F', '-T'),
        ('R-', '*', '-S'),
    )
    for keys in strokes:
        self.translator.translate(Stroke(keys))
    self.assertEqual(self.output.text, u' test test')
def stroke(s):
    """Build a Stroke from a steno string such as 'STA*R' (test helper)."""
    keys = []
    on_left = True
    for char in s:
        if char in 'EU*-':
            # Everything from the first vowel/star/hyphen on is right-hand.
            on_left = False
        if char == '-':
            continue
        if char == '*':
            keys.append(char)
        else:
            keys.append(char + '-' if on_left else '-' + char)
    return Stroke(keys)
def _on_message(self, data: dict):
    """Handle a remote-control message: inject a stroke or a translation.

    Expected keys in data (all optional except the shared secret):
      secretkey              -- must match the configured secret, else ignored.
      forced                 -- temporarily force the engine on for this message.
      zero_last_stroke_length -- reset the machine's key-down counters.
      stroke                 -- list of steno keys to feed to the machine callback.
      translation            -- mapping string to translate directly.
    """
    # Reject messages that don't carry the shared secret.
    if data.get("secretkey", "") != self._config.secretkey:
        return
    with self._engine:
        forced_on = False
        if data.get('forced') and not self._engine._is_running:
            # Remember we forced the engine on so we can undo it below.
            forced_on = True
            self._engine._is_running = True
        if data.get('zero_last_stroke_length'):
            # NOTE(review): pokes private machine counters — presumably to
            # stop first-up chord logic from misfiring; confirm against the
            # machine implementation.
            self._engine._machine._last_stroke_key_down_count = 0
            self._engine._machine._stroke_key_down_count = 0
        import traceback
        if 'stroke' in data:
            steno_keys = data['stroke']
            if isinstance(steno_keys, list):
                try:
                    self._engine._machine_stroke_callback(steno_keys)
                except:
                    # Best-effort: report but keep processing the message.
                    traceback.print_exc()
        if 'translation' in data:
            mapping = data['translation']
            if isinstance(mapping, str):
                try:
                    from plover.steno import Stroke
                    from plover.translation import _mapping_to_macro, Translation
                    stroke = Stroke([])
                    # required, because otherwise Plover will try to merge the outlines together
                    # and the outline [] (instead of [Stroke([])]) can be merged to anything
                    macro = _mapping_to_macro(mapping, stroke)
                    if macro is not None:
                        self._engine._translator.translate_macro(macro)
                        return
                    t = (
                        #self._engine._translator._find_translation_helper(stroke) or
                        #self._engine._translator._find_translation_helper(stroke, system.SUFFIX_KEYS) or
                        Translation([stroke], mapping)
                    )
                    self._engine._translator.translate_translation(t)
                    self._engine._translator.flush()
                    #self._engine._trigger_hook('stroked', stroke)
                except:
                    # Best-effort: report but keep processing the message.
                    traceback.print_exc()
        if forced_on:
            # Restore the engine to its previous (stopped) state.
            self._engine._is_running = False
def _toggle_asterisk(translations, undo, do):
    """Toggle the '*' key on the last stroke of the last translation.

    Appends the removed translation to undo and its replaced translations
    to do, rewinds the translation list accordingly, and returns the new
    Stroke to re-send (or None when there is nothing to toggle).
    """
    # Guard clause replaces the original's len(...)-1 slice-and-check dance.
    if not translations:
        return None
    last = translations[-1]
    undo.append(last)
    redo = last.replaced
    do.extend(redo)
    # Rewind: drop the last translation and restore what it had replaced.
    translations.remove(last)
    translations.extend(redo)
    # Work on a copy of the key list so the stored stroke is untouched.
    keys = last.strokes[-1].steno_keys[:]
    if '*' in keys:
        keys.remove('*')
    else:
        keys.append('*')
    return Stroke(keys)
def stroke_negative(translator, stroke, cmdline):
    """Macro: re-send the previous stroke with every key inverted."""
    history = translator.get_state().translations
    if not history:
        return
    previous = history[-1]
    translator.untranslate_translation(previous)
    # Symmetric difference with the full key set flips each key's
    # membership — equivalent to toggling every key one by one.
    keys = set(previous.strokes[-1].steno_keys) ^ set(system.KEYS)
    translator.translate_stroke(Stroke(keys))
def toggle_key(translator, stroke, cmdline):
    """Macro: re-send the previous stroke with the listed keys toggled.

    cmdline is a comma-separated list of steno keys to flip.
    """
    toggles = [part.strip().replace(" ", "") for part in cmdline.split(',')]
    history = translator.get_state().translations
    if not history:
        return
    previous = history[-1]
    translator.untranslate_translation(previous)
    keys = set(previous.strokes[-1].steno_keys)
    valid_keys = set(system.KEYS)
    for key in toggles:
        if key in keys:
            keys.remove(key)
        elif key in valid_keys:
            # Only add keys the current system actually defines.
            keys.add(key)
    translator.translate_stroke(Stroke(keys))
def _on_stroked(self, steno_keys):
    """Log, translate, and broadcast a stroke coming from the machine."""
    new_stroke = Stroke(steno_keys)
    log.stroke(new_stroke)
    self._translator.translate(new_stroke)
    self._trigger_hook('stroked', new_stroke)
def steno_to_stroke(steno):
    """Build a minimal fake Stroke exposing rtfcre and is_correction."""
    fake = Stroke(())
    fake.rtfcre = steno
    # A lone '*' stroke counts as a correction.
    fake.is_correction = (steno == '*')
    return fake
def steno_to_stroke(steno):
    """Build a minimal fake Stroke exposing only its rtfcre string."""
    fake = Stroke(())
    fake.rtfcre = steno
    return fake
def test_empty_undo(self):
    """Undoing with nothing translated emits a lone back-string."""
    self.translate('*')
    self._check_translations([])
    expected_undo = [Translation([Stroke('*')], BACK_STRING)]
    self._check_output([], expected_undo, None)
def test_undo_tail(self):
    """Undoing past the buffer falls back onto the state's tail."""
    self.s.tail = self.t('T/A/EU/L')
    self.translate('*')
    self._check_translations([])
    expected_undo = [Translation([Stroke('*')], BACK_STRING)]
    self._check_output([], expected_undo, [self.s.tail])