def load_config_json(filename, default=dict):
    try:
        f = resource.open(filename, "r")
        f.close()
    except FileNotFoundError:
        print("creating missing resource file ", filename)
        with resource.open(filename, "w") as f:
            f.write(json.dumps(default()))
    try:
        return json.load(resource.open(filename))
    except Exception as e:
        print(f"error opening {filename}: {e}")
        return default()
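# Hedged usage sketch for load_config_json above: `default` is a zero-argument
# factory, so a lambda can seed the file the first time it is created. The
# filename and keys here are illustrative, not from the original config, and
# running this requires a Talon environment providing `resource`.
settings = load_config_json("my_settings.json", default=lambda: {"enabled": True})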
def _load_saved_macros():
    try:
        with resource.open("saved_macros.json") as f:
            saved_macros = json.load(f)
            saved_macros = {k: v for k, v in saved_macros.items() if k}
            return saved_macros
    except Exception:
        return {}
def load_config_json(filename):
    if not os.path.isfile(filename):
        with open(filename, "w") as f:
            f.write("{}")
    try:
        return json.load(resource.open(filename))
    except Exception as e:
        print(f"error opening {filename}: {e}")
        return {}
def load_json(path):
    loaded_namespaces = []
    with resource.open(str(pathlib.Path('taxonomy') / path), 'r') as f:
        j = json.load(f)
        for ns in j:
            ns['joiner'] = ns.get('joiner', '::')
            json_namespace_table[ns['namespace']] = ns
            json_codeword_table[ns['codeword']] = ns
            loaded_namespaces.append(ns)
    return loaded_namespaces
def get_list_from_csv(
    filename: str, headers: Tuple[str, str], default: Dict[str, str] = {}
):
    """Retrieves list from CSV"""
    path = SETTINGS_DIR / filename
    assert filename.endswith(".csv")

    if not path.is_file():
        with open(path, "w", encoding="utf-8", newline="") as file:
            writer = csv.writer(file)
            writer.writerow(headers)
            for key, value in default.items():
                writer.writerow([key] if key == value else [value, key])

    # Now read via resource to take advantage of talon's
    # ability to reload this script for us when the resource changes
    with resource.open(str(path), "r") as f:
        rows = list(csv.reader(f))

    # print(str(rows))
    mapping = {}
    if len(rows) >= 2:
        actual_headers = rows[0]
        if not actual_headers == list(headers):
            print(
                f'"{filename}": Malformed headers - {actual_headers}.'
                + f" Should be {list(headers)}. Ignoring row."
            )
        for row in rows[1:]:
            if len(row) == 0:
                # Windows newlines are sometimes read as empty rows. :champagne:
                continue
            if len(row) == 1:
                output = spoken_form = row[0]
            else:
                output, spoken_form = row[:2]
                if len(row) > 2:
                    print(
                        f'"{filename}": More than two values in row: {row}.'
                        + " Ignoring the extras."
                    )
            # Leading/trailing whitespace in spoken form can prevent recognition.
            spoken_form = spoken_form.strip()
            mapping[spoken_form] = output

    return mapping
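# Hedged call sketch for get_list_from_csv above: per the code, the default
# dict maps spoken form -> written output, and the headers tuple must match
# the CSV's first row. The filename and entries below are illustrative only.
abbreviations = get_list_from_csv(
    "abbreviations.csv",
    headers=("Abbreviation", "Spoken Form"),
    default={"pie torch": "PyTorch", "jason": "JSON"},
)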
from talon.voice import Str, press
import talon.clip as clip
from talon import resource
from .bundle_groups import FILETYPE_SENSITIVE_BUNDLES
import json

# overrides are used as a last resort to override the output. Some uses:
# - frequently misheard words
# - force homophone preference (alternate homophones can be accessed with homophones command)
# To add an override, add the word to override as the key and desired replacement as value in overrides.json
mapping = json.load(resource.open("overrides.json"))

# used for auto-spacing
punctuation = set(".,-!?")


def remove_dragon_junk(word):
    return str(word).lstrip("\\").split("\\", 1)[0]


def parse_word(word):
    word = remove_dragon_junk(word)
    word = mapping.get(word.lower(), word)
    return word


def join_words(words, sep=" "):
    out = ""
    for i, word in enumerate(words):
        # skip the separator before punctuation so spacing stays natural
        if i > 0 and word not in punctuation:
            out += sep
        out += word
    return out
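# Small usage sketch of join_words above (pure Python, illustrative): the
# separator is omitted before punctuation, which is what the "auto-spacing"
# comment refers to.
print(join_words(["hello", "world", "!"]))  # -> "hello world!"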
mapping = { "semicolon": ";", "new-line": "\n", "new-paragraph": "\n\n", "dot": ".", "…": "...", "comma": ",", "question": "?", "exclamation": "!", "dash": "-", } punctuation = set(".,-!?") try: vocab_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "vocab.json") with resource.open(vocab_path) as fh: vocab = json.load(fh) except FileNotFoundError: vocab = [] def add_vocab(words): global vocab vocab += [re.sub("[^a-zA-Z0-9]+", "", w) for w in words] vocab = sorted(list(set(vocab))) with open(vocab_path, "w") as f: json.dump(vocab, f, indent=0) def parse_word(w): w = str(w).lstrip("\\").split("\\", 1)[0]
import collections
from talon import clip, resource
from talon.voice import Context, Str, press
from time import sleep
import json
import os

from .bundle_groups import TERMINAL_BUNDLES, FILETYPE_SENSITIVE_BUNDLES

VIM_IDENTIFIER = "(Vim)"

INCLUDE_TEENS_IN_NUMERALS = False
INCLUDE_TENS_IN_NUMERALS = False

# mapping = json.load(open(os.path.join(os.path.dirname(__file__), "replace_words.json")))
mapping = json.load(resource.open("replace_words.json"))
mappings = collections.defaultdict(dict)
for k, v in mapping.items():
    mappings[len(k.split(" "))][k] = v

punctuation = set(".,-!?/")


def local_filename(file, name):
    return os.path.join(os.path.dirname(os.path.realpath(file)), name)


def parse_word(word, force_lowercase=True):
    if force_lowercase:
        word = word.lower()
    word = mapping.get(word, word)
ctx = Context("file_management", func=context) def copy_path_to_clipboard(m): x = applescript.run(""" tell application "Finder" set sel to the selection as text set the clipboard to POSIX path of sel end tell """) folders = {} folders_filename = utils.local_filename(__file__, "named_folders.json") folders.update(json.load(resource.open(folders_filename))) def add_named_folder(m): x = copy_path_to_clipboard(None) path = clip.get() # Stop if it isn't a folder if path[-1] != '/': return filename = path.split('/')[-2] keyword = filename.split(' ')[0] with FileInput(files=[folders_filename], inplace=True) as f: for line in f: line = line.rstrip() if line in '{}' or line[-1] is ',':
# pulled from community/misc/basic_keys.py
digits = {str(i): str(i) for i in range(10)}


# pulled from community/utils.py
def parse_word(word, force_lowercase=True):
    if force_lowercase:
        word = word.lower()
    # word = mapping.get(word, word)
    return word


phones = {}
canonical = []
with resource.open(homophones_file, "r") as f:
    for h in f:
        h = h.rstrip()
        h = h.split(",")
        canonical.append(max(h, key=len))
        for w in h:
            w = w.lower()
            others = phones.get(w, None)
            if others is None:
                phones[w] = sorted(h)
            else:
                # if there are multiple hits, collapse them into one list
                others += h
                others = set(others)
                others = sorted(others)
                phones[w] = others
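# Worked sketch of the homophone-merging logic above, driven by an in-memory
# list instead of the resource-backed homophones file (sample words are
# illustrative):
sample_lines = ["their,there,they're", "base,bass"]
sample_phones = {}
for h in (line.split(",") for line in sample_lines):
    for w in h:
        w = w.lower()
        merged = set(sample_phones.get(w, [])) | set(h)
        sample_phones[w] = sorted(merged)
# sample_phones["there"] -> ["their", "there", "they're"]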
"talon home": TALON_HOME, "talon user": TALON_USER, "talon plug-ins": TALON_PLUGINS, "talon community": "~/.talon/user/talon_community", } def cd_directory_shortcut(m): directory = directory_shortcuts[m[1]] insert(f"cd {directory}; ls") for _ in range(4): press("left") try: servers = json.load(resource.open("servers.json")) except Exception as e: print(f"error opening servers.json: {e}") servers = {} def get_server(m): return servers[" ".join(m["global_terminal.servers"])] def mosh_servers(m): insert(f"mosh {get_server(m)}") def ssh_servers(m): insert(f"ssh {get_server(m)}")
import collections
import json
import os
import string
import time
from time import sleep

from talon import clip, resource
from talon.voice import Context, Str, Key, press

from ..config import vocab
from ..config.bundle_groups import FILETYPE_SENSITIVE_BUNDLES, TERMINAL_BUNDLES

VIM_IDENTIFIER = "(Vim)"

INCLUDE_TEENS_IN_NUMERALS = True
INCLUDE_TENS_IN_NUMERALS = True

# mapping = json.load(open(os.path.join(os.path.dirname(__file__), "replace_words.json")))
mapping = json.load(resource.open("../config/replace_words.json"))
mapping.update({k.lower(): v for k, v in vocab.vocab_alternate.items()})
mappings = collections.defaultdict(dict)
for k, v in mapping.items():
    mappings[len(k.split(" "))][k] = v

punctuation = set(".,-!?/")


def local_filename(file, name):
    return os.path.join(os.path.dirname(os.path.realpath(file)), name)


def parse_word(word, force_lowercase=True):
    if force_lowercase:
        word = word.lower()
def save_config_json(filename, config):
    with resource.open(filename, "w") as f:
        f.write(json.dumps(config, indent=4))
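# Hedged round-trip sketch pairing save_config_json with the load_config_json
# variants above (the filename and key are illustrative; requires a Talon
# environment for `resource`):
config = load_config_json("macro_settings.json")
config["last_used"] = "dictation"
save_config_json("macro_settings.json", config)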
ON_WINDOWS = platform.system() == "Windows"
ON_LINUX = platform.system() == "Linux"
ON_MAC = platform.system() == "Darwin"

# TODO: Switch this to `actions.path.talon_user` when possible
user_dir = Path(__file__).parents[1]

module = Module()

# overrides are used as a last resort to override the output. Some uses:
# - frequently misheard words
# - force homophone preference (alternate homophones can be accessed with homophones command)
# To add an override, add the word to override as the key and desired replacement as value in overrides.json
try:
    with resource.open(user_dir / "overrides.json") as f:
        mapping = json.load(f)
except Exception as e:
    app.notify(str(e))
    mapping = {}

# used for auto-spacing
punctuation = set(".,-!?")


# TODO: Remove all the old functions here I'm no longer using.
# TODO: Probably remove
def remove_dragon_junk(word):
    return str(word).lstrip("\\").split("\\", 1)[0]
import os
import json
import time
import contextlib

from talon import ui, resource, ctrl, cron
from talon.voice import Key, Context, press

from .. import utils

# from . import last_phrase

single_digits = "0123456789"
NAMED_DESKTOPS = {digit: int(digit) for digit in single_digits}
desktops_filename = utils.local_filename(__file__, "named_desktops.json")
NAMED_DESKTOPS.update(json.load(resource.open(desktops_filename)))


@contextlib.contextmanager
def drag_window(win=None):
    if win is None:
        win = ui.active_window()
    fs = win.children.find(AXSubrole="AXFullScreenButton")[0]
    rect = fs.AXFrame["$rect2d"]
    x = rect["x"] + rect["width"] + 5
    y = rect["y"] + rect["height"] / 2
    ctrl.mouse_move(x, y)
    ctrl.mouse_click(button=0, down=True)
    yield
    time.sleep(0.1)
    ctrl.mouse_click(button=0, up=True)
import json
import os.path
import re

from talon import resource, app
import talon.clip as clip
from talon.voice import Context, Word, press

from ..utils import parse_word, surround, vocab, parse_words, insert

jargon_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "jargon.json")
jargon_substitutions = {}
with resource.open(jargon_path) as fh:
    jargon_substitutions.update(json.load(fh))

ACRONYM = (True, lambda i, word, _: word[0:1].upper())
FIRST_THREE = (True, lambda i, word, _: word[0:3])
FIRST_FOUR = (True, lambda i, word, _: word[0:4])
DUNDER = (
    True,
    lambda i, word, last: ("__%s" % word if i == 0 else word) + ("__" if last else ""),
)
CAMELCASE = (True, lambda i, word, _: word if i == 0 else word.capitalize())
SLASH_SEPARATED = (True, lambda i, word, _: "/" + word)
DOT_SEPARATED = (True, lambda i, word, _: word if i == 0 else "." + word)
GOLANG_PRIVATE = (
    True,
    lambda i, word, _: word.lower()
    if i == 0
    else word if word.upper() == word else word.capitalize(),
)
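# Worked sketch of how the formatter tuples above can be applied: each pairs a
# flag with a per-word transform taking (index, word, is_last). The helper
# below is illustrative and not part of the original module.
def apply_formatter(formatter, words):
    _, transform = formatter
    return "".join(
        transform(i, word, i == len(words) - 1) for i, word in enumerate(words)
    )

# apply_formatter(CAMELCASE, ["parse", "word", "fast"]) -> "parseWordFast"
# apply_formatter(DUNDER, ["init"]) -> "__init__"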
import json
import os
from collections import defaultdict

from talon.voice import Context
from talon import app, ctrl, ui, resource

from .. import utils

warps_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), "warps.json")
with resource.open(warps_file) as fh:
    resource_data = json.load(fh)


class MouseWarp:
    def __init__(self):
        self.data = defaultdict(dict)
        self.data.update(resource_data)

    def mark(self, name):
        name = name.lower()
        window = ui.active_window()
        bundle = window.app.bundle
        x, y = ctrl.mouse_pos()
        rect = window.rect
        center_x, center_y = rect.center
        x_offset = x - (rect.left if x < center_x else rect.right)
        y_offset = y - (rect.top if y < center_y else rect.bot)
        app.notify(f"Marked: {name}")
        # self.load()
numeral_map["oh"] = 0 # synonym for zero numeral_map["and"] = None # drop me numerals = " (" + " | ".join(sorted(numeral_map.keys())) + ")+" optional_numerals = " (" + " | ".join(sorted(numeral_map.keys())) + ")*" # mapping = { # "semicolon": ";", # "new-line": "\n", # "new-paragraph": "\n\n", # "yamel": "yaml", # } # mapping = json.load(open(os.path.join(os.path.dirname(__file__), "replace_words.json"))) with resource.open('replace_words.json') as f: mapping = json.load(f) mappings = collections.defaultdict(dict) for k, v in mapping.items(): mappings[len(k.split(" "))][k] = v punctuation = set(".,-!?") # def parse_word(word): # word = str(word).lstrip("\\").split("\\", 1)[0].lower() # return word # token_replace = { # <-- add this block in std.py # 'i\\pronoun': 'I', # 'i\'m': 'I\'m',
import json

from talon import resource

try:
    config = json.load(resource.open("config.json"))
except Exception:
    config = {}
print("creating missing resource file ", filename) with resource.open(filename, "w") as f: f.write(json.dumps(default())) try: return json.load(resource.open(filename)) except Exception as e: print(f"error opening {filename}: {e}") return default() def save_config_json(filename, config): with resource.open(filename, "w") as f: f.write(json.dumps(config, indent=4)) openai.api_key = json.load(resource.open("openai_key.json", "rb"))["api_key"] def make_input_output_context(patterns, tag): prompt = "" for pattern in patterns: if tag in pattern["tags"]: prompt += "input: {input}\noutput: {output}\n\n".format(**pattern) return prompt last_input = None last_tag = None last_output = None