def filter(self, keep: gkeepapi.Keep) -> List[gkeepapi.node.TopLevelNode]:
    """Query *keep* for the top-level notes matching this filter's criteria.

    All of the instance's filter fields (query, labels, colors, pinned,
    archived, trashed) are forwarded verbatim to ``Keep.find``.
    """
    criteria = {
        "query": self.query,
        "labels": self.labels,
        "colors": self.colors,
        "pinned": self.pinned,
        "archived": self.archived,
        "trashed": self.trashed,
    }
    return keep.find(**criteria)
def add_todo(keep: Keep, todo: Todo, labels: list[node.Label]) -> node.List:
    """Create a Google Keep list note from *todo*.

    Args:
        keep: Keep client used to create the note (note is not synced here).
        todo: Source TODO providing the title, the items, and the color.
        labels: Keep labels to attach to the new note.

    Returns:
        The newly created list node.
    """
    # Every item starts unchecked (False); comprehension replaces the
    # manual append loop.
    items = [(item, False) for item in todo.items]
    note = keep.createList(todo.title, items)
    note.color = todo.color
    for label in labels:
        note.labels.add(label)
    return note
def find(args: argparse.Namespace, keep: gkeepapi.Keep, config: dict):
    """Sync, then print the id of every note matching the CLI filter flags."""
    _sync(args, keep, config, True)
    matches = keep.find(
        query=args.query,
        labels=args.labels,
        colors=args.colors,
        pinned=args.pinned,
        archived=args.archived,
        trashed=args.trashed,
    )
    for match in matches:
        print(match.id)
def test_sync(self, perform_master_login, perform_oauth):
    """Login with mocked auth, replay canned API responses, and verify the
    note count after a full sync."""
    keep = Keep()
    k_api, r_api, m_api = mock_keep(keep)
    # Fake the two-step Google auth handshake.
    perform_master_login.return_value = {
        'Token': 'FAKETOKEN',
    }
    perform_oauth.return_value = {
        'Auth': 'FAKEAUTH',
    }
    # Canned JSON fixtures consumed in order by the sync calls.
    k_api.request().json.side_effect = [
        resp('keep-00'),
    ]
    r_api.request().json.side_effect = [
        resp('reminder-00'),
        resp('reminder-01'),
    ]
    keep.login('user', 'pass')
    # The keep-00 fixture is expected to yield exactly 39 notes.
    self.assertEqual(39, len(keep.all()))
def load_metric_datapoints(keep: Keep, m: Metric) -> Metric:
    """Populate *m* with one data point per list note matching its keyword.

    Args:
        keep: Keep client used to search for notes.
        m: Metric whose ``keyword`` selects the notes; mutated in place.

    Returns:
        The same metric *m*, for convenient chaining.
    """
    notes = keep.find(query=m.keyword)
    for note in notes:
        # Skip plain notes; only list notes have checked/unchecked items.
        if not hasattr(note, 'items'):
            continue
        # Strip keyword and spaces from the title to get the data-point id.
        # Renamed from `id`, which shadowed the builtin.
        point_id = note.title.removeprefix(m.keyword).removesuffix(m.keyword).strip()
        m.add_data_point(point_id, len(note.checked), len(note.unchecked))
    return m
def __init__(self):
    """Prepare the synchronizer thread's state; nothing runs until run()."""
    threading.Thread.__init__(self)
    self.__running = False
    # Filenames waiting to be uploaded to Keep.
    self.__file_queue = deque()
    # deque operations are atomic on their own, but the lock prevents odd
    # interleavings when the upload loop drains the queue at the same
    # moment a new file is appended.
    self.__file_queue_lock = threading.Lock()
    # The thread parks on this event and wakes only when new files arrive.
    self.__event = threading.Event()
    work_dir = Controller.WORKING_DIRECTORY
    self.__file_queue_path = f"{work_dir}/file_queue.pickle"
    self.__notes_path = f"{work_dir}/notes"
    self.__auth_path = f"{work_dir}/google_auth/.auth"
    self.__keep = Keep()
    self.__logged_in = False
def plan(config: ConfigParser, keep: Keep, from_date: str, to_date: str):
    """Generate TODO notes in Keep from configured templates over a date range.

    ``from_date``/``to_date`` are ISO date strings; either may be empty, in
    which case it defaults to today. Each template's generated todos are
    created via add_todo and synced per template.
    """
    print('Planning TODOs')
    # Default both ends of the range to today when not provided.
    if from_date:
        from_date = date.fromisoformat(from_date)
    else:
        from_date = date.today()
    if to_date:
        to_date = date.fromisoformat(to_date)
    else:
        to_date = date.today()
    if from_date > to_date:
        print('--to-date should be later than --from-date')
        return
    formats = get_formats(config)
    templates = get_templates_from_config(config, formats)
    for key in templates:
        tpl = templates[key]
        print(f'Template {tpl.name}')
        # Resolve template label names to existing Keep labels; names with
        # no matching label are silently skipped.
        labels = []
        for name in tpl.labels:
            label = keep.findLabel(name)
            if label:
                labels.append(label)
        todos = tpl.generate(from_date, to_date)
        for t in todos:
            print(t.title, t.items, t.labels)
            add_todo(keep, t, labels)
        # Push this template's new notes to the server before moving on.
        # NOTE(review): nesting reconstructed from collapsed source —
        # sync/separator are assumed to be per-template; confirm.
        keep.sync()
        print('----')
def create_card(
    keep: Keep,
    assign: list[str],
    color: str,
    labels: list[gkeepapi.node.Label],
    pin: bool,
    title: str,
    body: str,
) -> gkeepapi.node.Note:
    """Create a Keep note with the given color/pin/labels and share it.

    *color* is the name of a ColorValue member; *assign* is a list of
    collaborator email addresses. The note is created locally and returned
    unsynced.
    """
    card = keep.createNote(title=title, text=body)
    card.color = getattr(ColorValue, color)
    card.pinned = pin
    for lbl in labels:
        card.labels.add(lbl)
    for email in assign:
        card.collaborators.add(email)
    return card
def export(args: argparse.Namespace, keep: gkeepapi.Keep, config: dict):
    """Sync, then write every note as a markdown file under ``args.dir``.

    Files whose note no longer exists are parked in a "deleted"
    subdirectory rather than removed; renamed notes are moved to their new
    path before being rewritten.
    """
    _sync(args, keep, config, True)
    _init_export_dir(args.dir)
    _, untracked_files, existing_files = _enum_export_fns(args.dir, keep)

    # Park files with no matching note in the "deleted" directory.
    for stale in untracked_files:
        target = os.path.join(args.dir, "deleted", os.path.basename(stale))
        os.rename(stale, target)
        logger.warning("Removed deleted note: %s", stale)

    # Sync down existing notes.
    for note in keep.all():
        # Target filename is derived from the note itself.
        path = _get_export_path(note, args.dir)
        previous = existing_files.get(note.id)
        # A retitled note moves to its new filename first.
        if previous is not None and previous != path:
            os.rename(previous, path)
        with open(path, "w") as fh:
            _write_export_file(fh, note)
class Synchronizer(threading.Thread):
    """Background thread that uploads locally created note files to Google Keep.

    Files are queued via alert_new_file() and drained whenever the thread is
    woken; the queue is pickled on shutdown so unsynced files survive
    restarts.
    """

    def __init__(self):
        threading.Thread.__init__(self)
        self.__running = False
        self.__file_queue = deque()
        # This is for ensuring safe access to the file queue,
        # which is updated whenever a new note file is created.
        # deques are thread-safe/atomic, but I wanted to prevent
        # any weird stuff from happening if the upload loop
        # is running and a new file is added at the same time.
        self.__file_queue_lock = threading.Lock()
        # Using this event we pause the thread, allowing it to
        # execute only when there are new files to be synced.
        self.__event = threading.Event()
        self.__file_queue_path = "%s/file_queue.pickle" % (Controller.WORKING_DIRECTORY)
        self.__notes_path = "%s/notes" % (Controller.WORKING_DIRECTORY)
        self.__auth_path = "%s/google_auth/.auth" % (Controller.WORKING_DIRECTORY)
        self.__keep = Keep()
        self.__logged_in = False

    def run(self):
        """Main loop: restore any persisted queue, upload as files arrive,
        and persist whatever is left on shutdown."""
        if not os.path.exists(self.__notes_path):
            os.mkdir(self.__notes_path)
        # Restore files queued during a previous session.
        if os.path.exists(self.__file_queue_path):
            with open(self.__file_queue_path, 'rb') as f:
                self.__file_queue = pickle.load(f)
        self.__running = True
        while self.__running:
            if not self.__logged_in:
                self.__logged_in = self.__login()
            if self.__logged_in:
                self.__upload_file_queue()
            self.__event.wait()
        # Try one last time to upload unsynced files.
        # (Previously a `pass` stub; now performs the attempt its comment
        # described, but only when a login succeeded.)
        if self.__logged_in and len(self.__file_queue) > 0:
            self.__upload_file_queue()
        # Persist anything still unsynced for the next run.
        with open(self.__file_queue_path, 'wb') as f:
            pickle.dump(self.__file_queue, f, pickle.HIGHEST_PROTOCOL)

    # Add a filename to be synchronized with Keep
    # Each call to this method will be run in its own
    # unique Thread by Controller.
    def alert_new_file(self, filename):
        with self.__file_queue_lock:
            self.__file_queue.append(filename)
            # Pulse the event so the run() loop wakes exactly once.
            self.__event.set()
            self.__event.clear()

    # Allow thread to join
    def close(self):
        self.__running = False
        self.__event.set()

    # Try to upload each queued file once.
    # Requeue if something fails
    # We never want to get stuck in here or this thread can't join,
    # so ensure we don't by putting timeouts on the sync attempts
    def __upload_file_queue(self):
        with self.__file_queue_lock:
            # Bounded by the queue length at entry: failed uploads are
            # requeued on the left and not retried this round.
            for _ in range(len(self.__file_queue)):
                self.__upload_file(self.__file_queue.pop())

    # Try to upload the contents of a file to Google Notes
    # Use the filename as the title
    # If default filename was used, do no title
    def __upload_file(self, filename):
        file_path = "%s/%s.txt" % (self.__notes_path, filename)
        if os.path.exists(file_path):
            try:
                with open(file_path, 'r') as f:
                    print("Uploading: %s" % (filename))
                    # Auto-generated timestamp filenames get an empty title.
                    title = "" if match("[0-9]{2}-[0-9]{2}-[0-9]{4}-[0-9]{6}", filename) else filename
                    note = self.__keep.createNote(title, f.read())
                    self.__keep.sync()
            except Exception:
                # BUG FIX: requeue the *filename*, not `title` — title may
                # be "" or unbound if open() itself raised. Narrowed from a
                # bare except so KeyboardInterrupt/SystemExit still propagate.
                self.__file_queue.appendleft(filename)

    # Use provided authentication to get an instance of the Keep API
    def __login(self):
        user = ""
        pswrd = ""
        if os.path.exists(self.__auth_path):
            try:
                # Auth file format: "user,password\n" on the first line.
                with open(self.__auth_path, 'r') as f:
                    user_pass = f.readline()[:-1].split(',')
                    user = user_pass[0]
                    pswrd = user_pass[1]
            except Exception:
                return False
        try:
            return self.__keep.login(user, pswrd)
        except Exception:
            return False
def _test_sync(self, perform_master_login):
    """Smoke test: login against mocked APIs and run a sync without errors."""
    keep = Keep()
    k_api, r_api, m_api = mock_keep(keep)
    # Every request returns an empty payload; sync should tolerate it.
    k_api.request.return_value = {}
    keep.login('user', 'pass')
    keep.sync()
help='Dry run, do not write any files') argparser.add_argument('-v', '--verbose', action='count', default=0, help='Print verbose output') args = argparser.parse_args() config = get_config(args.config) email = config['user']['email'] if args.email: email = args.email keep = Keep() def handle_stats(): resume(keep, email) stats(config, keep, from_date=args.from_date, to_date=args.to_date, dry=args.dry, verbose=args.verbose) def handle_login(): login(keep, email)
def _read_export_file(
    keep: gkeepapi.Keep,
    fh: io.IOBase,
    note: Optional[gkeepapi.node.TopLevelNode] = None,
) -> gkeepapi.node.TopLevelNode:
    """Parse an exported markdown file and apply it to *note*.

    Reads title, option lines (pinned/archived/color/label), and either
    list items or plain text from *fh*, then mutates *note* to match —
    creating a new List or Note when *note* is None. Returns the note.
    """
    lines = fh.readlines()
    title = ""
    color = gkeepapi.node.ColorValue.White
    pinned = False
    archived = False
    labels = set()
    items = []
    # Extract the title
    i = 0
    m = TITLE_RE.search(lines[i])
    if m:
        title = m.group(1)
        i += 1
    # Extract all the options
    options = []
    while i < len(lines):
        m = OPTION_RE.search(lines[i])
        if not m:
            break
        options.append(m.group(1))
        i += 1
    # Process the options
    for option in options:
        # Options are "key" or "key value" pairs.
        parts = option.split(" ", 1)
        if parts[0] == "pinned":
            pinned = True
        elif parts[0] == "archived":
            archived = True
        elif parts[0] == "color":
            if len(parts) == 2:
                try:
                    color = gkeepapi.node.ColorValue(parts[1].upper())
                except ValueError:
                    logger.warning("Unknown color option: %s", parts[1])
        elif parts[0] == "label":
            labels.add(parts[1])
        else:
            logger.warning("Unknown option: %s", parts[0])
    # Initialize note (if necessary)
    if note is None:
        # NOTE(review): `args` is not a parameter of this function — this
        # relies on an `args` from an outer scope; confirm it is in scope
        # wherever _read_export_file is called with note=None.
        labels.add(args.label)
        # A leading list-item line means the file represents a List.
        if len(lines) > i and LISTITEM_RE.search(lines[i]):
            note = gkeepapi.node.List()
        else:
            note = gkeepapi.node.Note()
    # Extract content
    if isinstance(note, gkeepapi.node.List):
        # Extract list items; non-matching lines continue the previous item.
        first = True
        item = []
        indented = False
        checked = False
        while i < len(lines):
            m = LISTITEM_RE.search(lines[i])
            if not m:
                if first:
                    logger.warning("Invalid listitem entry: %s", lines[i])
                else:
                    item.append(lines[i])
            else:
                # Flush the previous item before starting a new one.
                if not first:
                    items.append((indented, checked, "\n".join(item)))
                    item = []
                indented_str, checked_str, content = m.groups()
                indented = bool(indented_str)
                # Any checkbox character other than a space means checked.
                checked = " " != checked_str
                item.append(content)
                first = False
            i += 1
        if not first:
            items.append((indented, checked, "\n".join(item)))
        # Sync up items to the list: walk parsed items and existing list
        # items in lockstep, updating/creating/deleting as needed.
        i = 0
        list_items = note.items
        # Random starting sort key for items appended past the end.
        sort = random.randint(1000000000, 9999999999)
        while True:
            a_ok = i < len(items)
            b_ok = i < len(list_items)
            # Update an existing item
            if a_ok and b_ok:
                indented, checked, content = items[i]
                list_item = list_items[i]
                if indented != list_item.indented:
                    list_item.indented = indented
                if checked != list_item.checked:
                    list_item.checked = checked
                if content != list_item.text:
                    list_item.text = content
                sort = int(list_item.sort)
            # Create a new item
            elif a_ok:
                indented, checked, content = items[i]
                list_item = note.add(content, checked, sort)
                if indented:
                    list_item.indent()
                # Descending sort keeps appended items in file order.
                sort -= gkeepapi.node.List.SORT_DELTA
            # Remove a deleted item
            elif b_ok:
                list_items[i].delete()
            else:
                break
            i += 1
    else:
        # Plain note: everything after the header lines is the body.
        text = "\n".join(lines[i:])
        if note.text != text:
            note.text = text
    # Apply labels
    note_labels = set((label.name for label in note.labels.all()))
    new_labels = labels - note_labels
    del_labels = note_labels - labels
    for label in new_labels:
        # findLabel(..., True) creates the label if it does not exist.
        note.labels.add(keep.findLabel(label, True))
    for label in del_labels:
        note.labels.remove(keep.findLabel(label))
    # Apply all other changes (guarded to avoid dirtying unchanged fields)
    if note.title != title:
        note.title = title
    if note.pinned != pinned:
        note.pinned = pinned
    if note.archived != archived:
        note.archived = archived
    if note.color != color:
        note.color = color
    return note
def _ensure_note(keep: gkeepapi.Keep, note: str) -> gkeepapi.node.TopLevelNode:
    """Look up a note by id, exiting the process (status 2) when absent."""
    found = keep.get(note)
    if found is None:
        logger.error('Note not found')
        sys.exit(2)
    return found