def __init__(self):
    # assumes separate ~/.talon/user/private/ directory
    cwd = pathlib.Path(__file__).parent.parent.joinpath(
        "../private/settings/domains_private.json")
    self.domain_file = cwd.absolute()
    self.update_commands()
    fs.watch(self.domain_file, self.__on_fs_change)
def on_ready():
    update_overrides(None, None)
    fs.watch(overrides_directory, update_overrides)
    update_launch_list()
    update_running_list()
    update_visible_list()
    ui.register("", ui_event)
def setup_windows_ipc():
    print("platform: ", sys.platform)
    if sys.platform != "win32":
        return
    try:
        os.mkdir(WINDOWS_IPC_PATH)
    except Exception as e:
        print(e)
    fs.watch(WINDOWS_IPC_PATH, windows_ipc)
def __init__(self, dirs, callback):
    self.directories = dirs
    self.callback_function = callback
    # none = process all directories
    self.__update_all_snippets()
    for directory in self.directories.keys():
        if os.path.isdir(directory):
            fs.watch(directory, self.__on_fs_change)
def watch_directories(self):
    directories = self.theme.get_watch_directories()
    for directory in directories:
        fs.watch(directory, self.reload_theme)

    directories = self.preferences.get_watch_directories()
    for directory in directories:
        fs.watch(directory, self.debounce_environment_change)

    self.watching_directories = True
def decorate(listener):
    listener_noop = lambda: listener(filename)

    def new_listener(name, flags):
        if Path(name).parts[-1] != filename:
            return
        listener_noop()

    fs.watch(str(SETTINGS_DIR), new_listener)
    listener_noop()
    return listener_noop
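The closure above pulls `filename` and `SETTINGS_DIR` from an enclosing scope that is not shown. A minimal sketch of how such a decorator factory might be wired up and used, assuming Talon's `fs.watch` API; the factory name `watch_settings_file`, the `SETTINGS_DIR` location, and the `reload_alphabet` callback are hypothetical:

from pathlib import Path
from talon import fs

SETTINGS_DIR = Path.home() / ".talon" / "user" / "settings"  # assumed location


def watch_settings_file(filename: str):
    """Hypothetical factory that supplies `filename` to the `decorate` closure."""

    def decorate(listener):
        listener_noop = lambda: listener(filename)

        def new_listener(name, flags):
            # Ignore events for other files in the watched directory.
            if Path(name).parts[-1] != filename:
                return
            listener_noop()

        fs.watch(str(SETTINGS_DIR), new_listener)
        listener_noop()  # run once at registration time
        return listener_noop

    return decorate


@watch_settings_file("alphabet.csv")
def reload_alphabet(filename):
    print(f"{filename} changed; reloading")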
def on_ready():
    init_csv_and_watch_changes(
        "special_marks",
        {
            "special_mark": special_marks_defaults,
            "line_direction": DEFAULT_DIRECTIONS,
        },
    )
    setup_hat_styles_csv()

    vscode_settings_path: Path = actions.user.vscode_settings_path().resolve()

    def on_watch(path, flags):
        global fast_reload_job, slow_reload_job
        cron.cancel(fast_reload_job)
        cron.cancel(slow_reload_job)
        fast_reload_job = cron.after("500ms", setup_hat_styles_csv)
        slow_reload_job = cron.after("10s", setup_hat_styles_csv)

    fs.watch(vscode_settings_path, on_watch)
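The `on_watch` handler above debounces bursts of file-system events by cancelling and rescheduling cron jobs. A minimal standalone sketch of the same pattern, using Talon's `cron` and `fs` modules; the `rebuild()` callback and the watched path are hypothetical:

from talon import cron, fs

reload_job = None


def rebuild():
    """Hypothetical expensive reload that should run once changes settle."""
    print("rebuilding")


def on_change(path, flags):
    # Every event pushes the pending rebuild back another 500ms, so a burst of
    # writes to the watched directory results in a single rebuild at the end.
    global reload_job
    if reload_job:
        cron.cancel(reload_job)
    reload_job = cron.after("500ms", rebuild)


fs.watch("/tmp/watched-settings", on_change)  # hypothetical path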
    overrides = {}
    if name is None or name == overrides_path:
        LOGGER.info("Updating app switcher overrides")
        with open(overrides_path, "r") as f:
            for line in f:
                line = line.rstrip()
                line = line.split(",")
                if len(line) == 2:
                    overrides[line[0].lower()] = line[1].strip()
        update_lists()


update_overrides(None, None)
fs.watch(settings_dir, update_overrides)


@mod.action_class
class Actions:
    def switcher_focus(name: str):
        """Focus a new application by name"""
        wanted_app = name
        # we should use the capture result directly if it's already in the
        # list of running applications
        # otherwise, name is from <user.text> and we can be a bit fuzzier
        if name not in running_application_dict:
            # don't process silly things like "focus i"
    with open(homophones_file, "r") as f:
        for line in f:
            words = line.rstrip().split(",")
            canonical_list.append(max(words, key=len))
            for word in words:
                word = word.lower()
                old_words = phones.get(word, [])
                phones[word] = sorted(set(old_words + words))

    global all_homophones
    all_homophones = phones
    ctx.lists["self.homophones_canonicals"] = canonical_list


update_homophones(homophones_file, None)
fs.watch(cwd, update_homophones)
active_word_list = None
is_selection = False


def close_homophones():
    gui.hide()
    actions.mode.disable("user.homophones")


def raise_homophones(word, forced=False, selection=False):
    global quick_replace
    global active_word_list
    global show_help
    global force_raise
    global is_selection
    overrides = {}
    if name is None or name == override_file_path:
        print("update_overrides")
        with open(override_file_path, "r") as f:
            for line in f:
                line = line.rstrip()
                line = line.split(",")
                if len(line) == 2:
                    overrides[line[0].lower()] = line[1].strip()
        update_lists()


update_overrides(None, None)
fs.watch(overrides_directory, update_overrides)


@mod.action_class
class Actions:
    def switcher_focus(name: str):
        """Focus a new application by name"""
        wanted_app = name
        # we should use the capture result directly if it's already in the
        # list of running applications
        # otherwise, name is from <user.text> and we can be a bit fuzzier
        if name not in running_application_dict:
            # don't process silly things like "focus i"
    Note the list must be declared separately.

    """
    global _mapped_lists
    with _settings_lock:
        _update_list(list_name, csv_name, csv_headers, default_values)
        # If there were no errors, we can register it permanently.
        _mapped_lists[list_name] = (csv_name, csv_headers, default_values)


def bind_word_map_to_csv(
    csv_name: str,
    csv_headers: Tuple[str, str],
    default_values: Dict[str, str] = {},
) -> None:
    """Like `bind_list_to_csv`, but for the `dictate.word_map` setting.

    Since it is a setting, not a list, it has to be handled separately.

    """
    global _word_map_params
    # TODO: Maybe a generic system for binding the dicts to settings? Only
    # implement if it's needed.
    with _settings_lock:
        _update_word_map(csv_name, csv_headers, default_values)
        # If there were no errors, we can register it permanently.
        _word_map_params = (csv_name, csv_headers, default_values)


fs.watch(str(SETTINGS_DIR), _update_lists)
fs.watch(str(PRIVATE_SETTINGS_DIR), _update_lists)
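A hedged usage sketch of the two binding helpers. The parameter order for `bind_list_to_csv` is inferred from the function body above, and the list name, CSV names, headers, and default values are illustrative, not taken from this module:

# Back a Talon list (declared elsewhere via mod.list) with a user-editable CSV:
bind_list_to_csv(
    "website",
    "websites.csv",
    csv_headers=("Spoken form", "URL"),
    default_values={"git hub": "https://github.com"},
)

# Back the dictate.word_map setting with its own CSV:
bind_word_map_to_csv(
    "words_to_replace.csv",
    csv_headers=("Replace this", "With this"),
    default_values={"trough": "through"},
)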
def watch_walkthrough_file(self, watch=True):
    if (
        self.current_walkthrough_title is not None
        and self.current_walkthrough_title in self.walkthrough_files
    ):
        current_walkthrough_file = self.walkthrough_files[
            self.current_walkthrough_title
        ]
        fs.unwatch(current_walkthrough_file, self.reload_walkthrough)
        if watch:
            fs.watch(current_walkthrough_file, self.reload_walkthrough)
def watch_documentation_file(self, watch=True):
    if self.current_title is not None:
        fs.unwatch(self.files[self.current_title], self.debounce_reload_documentation)
        if watch:
            fs.watch(self.files[self.current_title], self.debounce_reload_documentation)
def __init__(self):
    cwd = os.path.dirname(os.path.realpath(__file__))
    self.personal_info_file = os.path.join(cwd, "personal_info.json")
    self.update_commands()
    fs.watch(self.personal_info_file, self.__on_fs_change)
    return words


context = Context()


def _update_custom_words(*args):
    global context
    LOGGER.info("Updating custom words")
    words = parse_dict_file(CUSTOM_WORDS_PATH)
    context.lists["user.custom_words"] = words
    return words


_update_custom_words()
fs.watch(SETTINGS_DIR, _update_custom_words)


@module.action
def update_custom_words():
    """Reload the custom words from disk."""
    words = _update_custom_words()
    app.notify(f"{len(words)} word(s) loaded.")


def join_punctuation(words: List[str]) -> str:
    # TODO: Implement proper punctuation joining.
    return " ".join(words)


def extract_dictation(phrase) -> List[str]:
current_tab_id = None


@mod.action_class
class Actions:
    def surf(hints: List[str]):
        """Surf to hint"""
        with (VOICESURF_PATH / "output" / "v0").open("w") as fp:
            # TODO(kvakil): use a temporary file to make this atomic?
            json.dump(
                {"Query": {"query": " ".join(hints), "tabId": current_tab_id}}, fp
            )


def update_surf(_, _2):
    global current_tab_id
    with (VOICESURF_PATH / "input" / "v0").open() as fp:
        message = json.load(fp)
        hints = message["UpdateTalonRequest"]["words"]
        current_tab_id = message["UpdateTalonRequest"]["tabId"]
        ctx.lists["self.hints"] = {hint_text: str(hint_text) for hint_text in hints}


@mod.capture(rule="{self.hints}+")
def hints(m) -> List[str]:
    return m.hints_list


fs.watch(str(VOICESURF_PATH / "input"), update_surf)
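The TODO above asks about making the write atomic. A minimal sketch of the usual approach: write to a temporary file in the same directory, then rename it over the target with `os.replace`, so a watcher never reads a half-written JSON document. The helper name and payload are illustrative:

import json
import os
import tempfile


def write_request_atomically(directory, payload):
    # Write to a temp file on the same filesystem, then atomically replace the
    # target file that the watcher reads.
    fd, tmp_path = tempfile.mkstemp(dir=directory, suffix=".tmp")
    try:
        with os.fdopen(fd, "w") as fp:
            json.dump(payload, fp)
        os.replace(tmp_path, os.path.join(directory, "v0"))
    except BaseException:
        os.unlink(tmp_path)
        raise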
##
# Startup
##

# Add all the known lists for all the loaded namespaces
for ns in json_codeword_table.values():
    add_namespace_list(ns, 'names', 'types')
    add_namespace_list(ns, 'templates', 'templates')


# Set up watch for new files
# TODO this doesn't actually redefine the grammar, so it doesn't really work
# without forcing a reload some other way.
def on_json_change(path, exists):
    newfile = pathlib.Path(path).relative_to(taxonomy_path)
    if newfile.exists():
        loaded_namespaces = load_json(newfile)


fs.watch(str(taxonomy_path), on_json_change)

##
# Module/Context declarations
##


@mod.capture
def cpp_known_namespaces(m) -> Dict:
    "Returns a JSON dict of the namespace."


@ctx.capture('self.cpp_known_namespaces', rule="{self.cpp_known_namespaces}")
def cpp_known_namespaces(m) -> Dict:
    return json_namespace_table[m.cpp_known_namespaces]


@mod.capture
def init_csv_and_watch_changes(
    filename: str,
    default_values: dict[str, dict],
    extra_ignored_values: list[str] = None,
    allow_unknown_values: bool = False,
    default_list_name: Optional[str] = None,
    headers: list[str] = [SPOKEN_FORM_HEADER, CURSORLESS_IDENTIFIER_HEADER],
    ctx: Context = Context(),
    no_update_file: bool = False,
):
    """
    Initialize a cursorless settings csv, creating it if necessary, and watch
    for changes to the csv. Talon lists will be generated based on the keys of
    `default_values`. For example, if there is a key `foo`, there will be a
    list created called `user.cursorless_foo` that will contain entries from
    the original dict at the key `foo`, updated according to customization in
    the csv at

        actions.path.talon_user() / "cursorless-settings" / filename

    Note that the settings directory location can be customized using the
    `user.cursorless_settings_directory` setting.

    Args:
        filename (str): The name of the csv file to be placed in the
            `cursorless-settings` dir
        default_values (dict[str, dict]): The default values for the lists to
            be customized in the given csv
        extra_ignored_values (list[str]): Don't throw an exception if any of
            these appear as values; just ignore them and don't add them to any
            list
        allow_unknown_values (bool): If unknown values appear, just put them in
            the list
        default_list_name (Optional[str]): If unknown values are allowed, put
            any unknown values in this list
        no_update_file (Optional[bool]): Set this to `True` to indicate that we
            should not update the csv. This is used generally in case there was
            an issue coming up with the default set of values so we don't want
            to persist those to disk
    """
    if extra_ignored_values is None:
        extra_ignored_values = []

    file_path = get_full_path(filename)
    super_default_values = get_super_values(default_values)

    file_path.parent.mkdir(parents=True, exist_ok=True)

    def on_watch(path, flags):
        if file_path.match(path):
            current_values, has_errors = read_file(
                file_path,
                headers,
                super_default_values.values(),
                extra_ignored_values,
                allow_unknown_values,
            )
            update_dicts(
                default_values,
                current_values,
                extra_ignored_values,
                allow_unknown_values,
                default_list_name,
                ctx,
            )

    fs.watch(file_path.parent, on_watch)

    if file_path.is_file():
        current_values = update_file(
            file_path,
            headers,
            super_default_values,
            extra_ignored_values,
            allow_unknown_values,
            no_update_file,
        )
        update_dicts(
            default_values,
            current_values,
            extra_ignored_values,
            allow_unknown_values,
            default_list_name,
            ctx,
        )
    else:
        if not no_update_file:
            create_file(file_path, headers, super_default_values)
        update_dicts(
            default_values,
            super_default_values,
            extra_ignored_values,
            allow_unknown_values,
            default_list_name,
            ctx,
        )

    def unsubscribe():
        fs.unwatch(file_path.parent, on_watch)

    return unsubscribe
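A hedged usage sketch of the function above. The filename, list key, spoken forms, and identifiers are made up for illustration; the shape of the call and the use of the returned `unsubscribe` follow the docstring:

# Creates/updates <cursorless settings dir>/actions_custom.csv and generates a
# Talon list called user.cursorless_my_action from the "my_action" key.
unsubscribe = init_csv_and_watch_changes(
    "actions_custom.csv",
    {
        "my_action": {
            "chuck": "remove",
            "copy": "clipboardCopy",
        },
    },
)

# Later (for example when the calling module is unloaded), stop watching the
# csv directory:
unsubscribe()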