def save_parent_for_position(self, position):
    """Save the parent id for text at given position and move the text to
    the corresponding folder

    :param position: A non-negative integer position of item to be changed
    """
    item = self.get_item(position)
    with db.connect() as connection:
        item.last_modified = db.get_datetime()
        id = item.id
        parent = item.parent_id
        text = data.text_for_id(connection, id)
        text_file_name = text['hash_id']
        text_file_parents = text['parents']
        group_dir_parents = []
        if parent is not None:
            group = data.group_for_id(connection, parent)
            group_dir_name = group['hash_id']
            group_dir_parents = group['parents']
            group_dir_parents.append(group_dir_name)

            # update group just to update its last modified status
            data.update_group(connection, parent, group)

        file.move_file(text_file_name, text_file_parents, group_dir_parents)
        item.parents = group_dir_parents
        data.update_text(connection, id, item.to_dict())
        self.dequeue_final_save(id)

    # compare against the group's id, not the group dict itself; if the
    # text has moved to a different group it no longer belongs in this list
    if self._parent_group and parent != self._parent_group['id']:
        self.remove(position)

def do_work(self):
    """Loop over the queue and perform the `execution_fn` operation for
    each item."""
    if self.execution_fn is None:
        self.active = False
        return

    while True:
        id, values = self.dequeue()
        if not (id and values):
            break

        with db.connect() as connection:
            if self.fetch_fn:
                try:
                    current_values = self.fetch_fn(connection, id)
                    last_modified = db.get_datetime_from_string(
                        current_values['last_modified'])
                    new_last_modified = db.get_datetime_from_string(
                        values['last_modified'])
                    # skip stale payloads: never overwrite a row that was
                    # modified after this update was queued
                    if last_modified > new_last_modified:
                        continue
                except Exception:
                    # (notify): maybe text with given id does not exist,
                    # or some other sqlite exception; regardless it is
                    # unsafe to proceed with update execution -- skip this.
                    continue

            self.execution_fn(connection, id, values)

    self.active = False

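# A minimal usage sketch for the worker above. `worker` is assumed to be an
# instance of this queue-backed class; the wiring below is illustrative, not
# part of this module. `fetch_fn` supplies the currently stored row so that
# stale queued payloads are skipped, and `execution_fn` performs the write.
def _example_text_save_worker(worker, text_id, text_row):
    worker.fetch_fn = data.text_for_id        # source for the staleness check
    worker.execution_fn = data.update_text    # the write that lands in the db
    worker.enqueue(text_id, text_row.to_dict())
    worker.do_work()
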
def restore_item_at_position(self, position):
    """Restore an item from trash, assuming it is already there

    :param position: integer position at which item is located
    """
    if not self.trashed_texts_only:
        return

    item = self.get_item(position)
    id = item.id
    hash_id = item.hash_id
    parent_hashes = item.parents
    item.last_modified = db.get_datetime()
    self.remove(position)

    with db.connect() as connection:
        if item.parent_id:
            group = data.group_for_id(connection, item.parent_id)
            # if the parent group is still in trash, restore the text as
            # a top-level item instead
            if group['in_trash']:
                item.parent_id = None
                parent_hashes = []

        file.trash_file(hash_id, parent_hashes, untrash=True)
        data.update_text(connection, id, item.to_dict())
        self.dequeue_final_save(id)

def _obtain_group_index_dirname(group_id):
    """Obtain the directory where index is stored for the given group_id"""
    if group_id is None:
        return file.BASE_INDEX_DIR

    with db.connect() as connection:
        group = data.group_for_id(connection, group_id)
        dirname = file.create_index_dir(group['hash_id'], group['parents'])

    return dirname

def finalize_group_creation(self, treeiter, name):
    """Finalize creation of the decoy group at @treeiter with the new
    @name"""
    parent_id = self[treeiter][Column.PARENT_ID]
    with db.connect() as connection:
        group_id = data.create_group(connection, name, parent_id)
        group = data.group_for_id(connection, group_id)
        file.create_dir(group['hash_id'], group['parents'])
        self._update_group(treeiter, group)
        return group

def count_groups_for_iter(self, treeiter):
    """Count the number of groups contained within group at @treeiter"""
    group = self.get_group_for_iter(treeiter)
    group_id = group['id']
    in_trash = group['in_trash']
    with db.connect() as connection:
        if self._iter_is_top_level(treeiter) and in_trash:
            return self.iter_n_children(self._top_level_iter)
        return data.count_groups(connection, group_id, in_trash)

def new_text_request(self):
    """Request for a new text"""
    if self._parent_group is None:
        return

    with db.connect() as connection:
        text_id = data.create_text(connection,
                                   _("Untitled"),
                                   self._parent_group['id'])
        text = data.text_for_id(connection, text_id)
        text_row = self._row_data_for_text(text)
        return self.append(text_row)

def count_texts_for_iter(self, treeiter):
    """Count the number of texts contained within group at @treeiter"""
    group = self.get_group_for_iter(treeiter)
    group_id = group['id']
    in_trash = group['in_trash']
    count = 0
    with db.connect() as connection:
        count += data.count_texts(connection, group_id, in_trash)
        # orphan trashed texts (whose parent group is not itself trashed)
        # count towards the top level of the trash
        if self._iter_is_top_level(treeiter) and in_trash:
            count += data.count_texts_in_trash_but_not_parent(connection)
    return count

def get_parent_for_position(self, position):
    """Obtain parent group data for the given position

    :param position: integer position at which item is located
    :returns: group metadata as a dictionary
    :rtype: dict
    """
    item = self.get_item(position)
    if item.parent_id:
        with db.connect() as connection:
            group = data.group_for_id(connection, item.parent_id)
            return group

    return None

def do_work(self):
    """Loop over the queue and perform `deletion_fn` for each of the
    items"""
    if self.deletion_fn is None:
        self.active = False
        return

    while True:
        id, values = self.dequeue()
        if id is None:
            break

        with db.connect() as connection:
            self.deletion_fn(connection, id)

    self.active = False

def _load_texts(self):
    """Asks db to fetch the set of texts according to init conditions"""
    self.remove_all()
    with db.connect() as connection:
        load_fn = None
        kwargs = {}
        load_orphan_trash = False

        if self._list_type == TextListType.GROUP_TEXTS:
            if self._parent_group['id']:
                load_fn = data.texts_in_group
                kwargs = {
                    'conn': connection,
                    'group_id': self._parent_group['id']
                }
            else:
                load_fn = data.texts_not_in_groups
                kwargs = {'conn': connection}
                if self.trashed_texts_only:
                    load_orphan_trash = True
        elif self._list_type == TextListType.RECENT_TEXTS:
            load_fn = data.texts_recently_modified
            kwargs = {'conn': connection}
        elif self._list_type == TextListType.RESULT_TEXTS:
            def texts_in_results(conn, results):
                for text_id in results:
                    text_id = int(text_id)
                    yield data.text_for_id(conn, text_id)

            load_fn = texts_in_results
            kwargs = {'conn': connection, 'results': self._results}
        else:
            load_fn = data.fetch_texts
            kwargs = {'conn': connection}

        for text in load_fn(**kwargs):
            row = self._row_data_for_text(text)
            if self.trashed_texts_only and row.in_trash:
                self.append(row)
            elif not self.trashed_texts_only and not row.in_trash:
                self.append(row)

        if load_orphan_trash:
            for text in data.texts_in_trash_but_not_parent(connection):
                row = self._row_data_for_text(text)
                self.append(row)

def permanently_delete_group_at_iter(self, treeiter):
    """Remove the row @treeiter from model and delete the group for this
    entry from the DB as well, unless it's the root node."""
    values = self.get_group_for_iter(treeiter)
    group_id = values['id']
    texts_to_be_deleted = []
    with db.connect() as connection:
        texts_to_be_deleted = self._all_texts_in_group(connection,
                                                       group_id,
                                                       in_trash=True)

    for text_name in texts_to_be_deleted:
        file.delete_file_permanently(text_name)

    if group_id is not None:
        db.async_group_deleter.enqueue(group_id, None)

    if not self._iter_is_top_level(treeiter):
        self.remove(treeiter)

def delete_item_at_postion(self, position):
    """Delete item at ``position`` in model

    :param position: integer position at which item is located
    """
    item = self.get_item(position)
    id = item.id
    hash_id = item.hash_id
    parent_hashes = item.parents
    item.last_modified = db.get_datetime()
    self.remove(position)
    file.trash_file(hash_id, parent_hashes)
    with db.connect() as connection:
        self.queue_save(item)
        self.dequeue_final_save(id)

def create_index_for_group(group_id, in_trash):
    """Create an index of texts contained in given group or its children"""
    with db.connect() as connection:
        dirname = _obtain_group_index_dirname(group_id)
        ix = index.create_in(dirname, schema, INDEX_NAME)
        writer = ix.writer()

        def add_all_texts_in_group(group_id, in_trash=False):

            def add_text(text):
                if text['in_trash'] != in_trash:
                    return

                fields = {'id': str(text['id']),
                          'title': text['title'],
                          'tags': ' '.join(text['tags'])}
                contents = file.read_from_file(text['hash_id'],
                                               text['parents'],
                                               in_trash)
                if contents:
                    fields['content'] = contents

                # index title, tags and body as a single document so that
                # one entry per text carries all searchable fields
                writer.add_document(**fields)

            def add_texts_in_group(id):
                for text in data.texts_in_group(connection, id):
                    add_text(text)
                for group in data.groups_in_group(connection, id):
                    add_texts_in_group(group['id'])

            def add_all_texts_in_library():
                for text in data.fetch_texts(connection):
                    add_text(text)

            if group_id is not None:
                add_texts_in_group(group_id)
            else:
                add_all_texts_in_library()

        add_all_texts_in_group(group_id, in_trash)
        writer.commit()
        return ix

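# A minimal sketch of querying the index built above, using the standard
# whoosh query API. The helper name and the choice of the `content` field
# are assumptions for illustration; `create_index_for_group` comes from
# this module, and the `id` field is assumed stored in the schema, as the
# writes above imply.
def _example_search_group(group_id, query_string, in_trash=False):
    from whoosh.qparser import QueryParser

    ix = create_index_for_group(group_id, in_trash)
    with ix.searcher() as searcher:
        query = QueryParser("content", ix.schema).parse(query_string)
        # collect matching text ids; the stored field was written as
        # str(text['id']) above
        return [int(hit['id']) for hit in searcher.search(query)]
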
def request_save_html_for_group(group):
    """Save all texts within a group as appended html content. While
    exporting, the group is flattened, i.e. all texts within subgroups are
    appended one after another, treated as belonging to one single
    group."""
    group_id = group['id']
    texts_in_group = {}
    with db.connect() as connection:

        def append_texts_in_group(id):
            if id is None:
                texts = data.texts_not_in_groups(connection)
            else:
                texts = data.texts_in_group(connection, id)

            for text in texts:
                if not text['in_trash']:
                    texts_in_group[text['hash_id']] = text['parents']

            for subgroup in data.groups_in_group(connection, id):
                append_texts_in_group(subgroup['id'])

        append_texts_in_group(group_id)

    request_save_html_for_files(texts_in_group, group['name'])

def migrate_db(desired_version):
    """Update db schema to match the version that can be used by
    @desired_version of the application; arg value is the version of the
    application, not db version."""
    desired_db_version = db_versions[desired_version]
    current_db_version = db.version()

    # default action is to upgrade schema
    action = 'up'
    key_offset = 0
    if desired_db_version < current_db_version:
        action = 'down'
        key_offset = 1

    while current_db_version != desired_db_version:
        # the correct key for migration_scripts
        scriptkey = current_db_version - key_offset
        with db.connect() as conn:
            cursor = conn.cursor()
            cursor.executescript(migration_scripts[scriptkey][action])
        current_db_version = db.version()

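# An illustrative sketch of the `migration_scripts` layout assumed by the
# loop above (the real scripts live elsewhere): entry i holds the SQL that
# moves the schema between db version i and i + 1, which is why a downgrade
# reads the entry at `current_db_version - 1`. Each script is also expected
# to update the stored version that `db.version()` reports.
_example_migration_scripts = [
    {
        'up': "CREATE TABLE tag (id INTEGER PRIMARY KEY, name TEXT);",
        'down': "DROP TABLE tag;",
    },
    # entry 1 would migrate between db versions 1 and 2, and so on
]
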
def set_prop_for_iter(self, treeiter, prop, value):
    """Set the property @prop to @value for the row given by @treeiter.

    This method does not make changes to the model if the parent id is
    changed. Instead, use the `move_to_group` method which, along with
    making the necessary changes, will also automatically call this method
    to update the db."""
    if self._iter_is_top_level(treeiter):
        return

    assert prop in ['name', 'last_modified', 'in_trash', 'parent_id']

    old_parent = self[treeiter][Column.PARENT_ID]
    new_parent = self[treeiter][Column.PARENT_ID]
    trashed = False
    if prop == 'name':
        self[treeiter][Column.NAME] = value
    elif prop == 'last_modified':
        self[treeiter][Column.LAST_MODIFIED] = value
    elif prop == 'in_trash':
        self[treeiter][Column.IN_TRASH] = trashed = value
    elif prop == 'parent_id':
        self[treeiter][Column.PARENT_ID] = new_parent = value

    values = self._dict_for_row(treeiter)
    group_id = values['id']

    with db.connect() as connection:
        if old_parent != new_parent:
            group = data.group_for_id(connection, group_id)
            group_dir_name = group['hash_id']
            group_dir_parents = group['parents']
            new_parent_dir_parents = []
            if new_parent:
                new_parent_group = data.group_for_id(connection, new_parent)
                new_parent_dir_name = new_parent_group['hash_id']
                new_parent_dir_parents = new_parent_group['parents']
                new_parent_dir_parents.append(new_parent_dir_name)

                # update the new parent's last modified status
                data.update_group(connection, new_parent, new_parent_group)

            if old_parent:
                # update the old parent's last modified status
                old_parent_group = data.group_for_id(connection, old_parent)
                data.update_group(connection, old_parent, old_parent_group)

            file.move_file(group_dir_name,
                           group_dir_parents,
                           new_parent_dir_parents)

        if trashed and self.tree_type == GroupTreeType.COLLECTION_GROUPS:
            group = data.group_for_id(connection, group_id)
            texts_to_be_trashed = self._all_texts_in_group(connection,
                                                           group['id'],
                                                           in_trash=False)
            for text in texts_to_be_trashed:
                parent_list = texts_to_be_trashed[text]
                file.trash_file(text, parent_list)

        if not trashed and self.tree_type == GroupTreeType.TRASHED_GROUPS:
            group = data.group_for_id(connection, group_id)
            if group['parent_id'] is not None:
                parent_group = data.group_for_id(connection,
                                                 group['parent_id'])
                # if a group is being restored whose parents are still in
                # trash, then make this group exist on its own in the main
                # collection i.e., with no parents.
                if parent_group['in_trash']:
                    values['parent_id'] = None

            texts_to_be_restored = self._all_texts_in_group(connection,
                                                            group['id'],
                                                            in_trash=True)
            for text in texts_to_be_restored:
                parent_list = texts_to_be_restored[text]
                # if group's parent is being set to NULL, then restore the
                # files within to correct locations accordingly
                if not values['parent_id']:
                    parent_list = parent_list[
                        parent_list.index(group['hash_id']):]
                file.trash_file(text, parent_list, untrash=True)

        data.update_group(connection, group_id, values)

    if trashed and self.tree_type != GroupTreeType.TRASHED_GROUPS:
        self.remove(treeiter)

    if not trashed and self.tree_type == GroupTreeType.TRASHED_GROUPS:
        self.remove(treeiter)

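# A short sketch of the intended call pattern per the docstring above:
# direct property writes for simple fields, `move_to_group` (whose exact
# signature is assumed here) for reparenting. `model`, `treeiter` and
# `new_parent_iter` are assumed to come from the caller.
def _example_group_updates(model, treeiter, new_parent_iter):
    model.set_prop_for_iter(treeiter, 'name', 'Journal')  # simple field write
    model.move_to_group(treeiter, new_parent_iter)        # reparent the row
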
def get_last_modified_parent_id(self):
    """Obtain the id of the group containing the most recently modified
    text, or None if there is no such group"""
    with db.connect() as connection:
        parent_id = data.group_for_last_modified_text(connection)
        if parent_id is not None:
            return parent_id[0]
    return None

def _load_data(self):
    """Load group data into tree model, according to group tree type"""
    with db.connect() as connection:
        for parent_group in data.groups_not_in_groups(connection):
            self._append_group_and_children(connection, parent_group)