def _update_inlinks(self, added_outlinks, removed_outlinks):
    """Propagate this page's outlink changes to the inlinks of related pages.

    :param added_outlinks: {rel: [title, ...]} links newly added to this page
    :param removed_outlinks: {rel: [title, ...]} links removed from this page
    """
    # handle added links
    updates = []
    for rel, titles in added_outlinks.items():
        for title in titles:
            page = WikiPage.get_by_title(title, follow_redirect=True)
            page.add_inlink(self.title, rel)
            updates.append(page)
    if updates:
        ndb.put_multi(updates)
        for page in updates:
            caching.del_rendered_body(page.title)
            caching.del_hashbangs(page.title)

    # handle removed links
    updates = []
    deletes = []
    for rel, titles in removed_outlinks.items():
        for title in titles:
            page = WikiPage.get_by_title(title, follow_redirect=True)
            page.del_inlink(self.title, rel)
            # a page with no revisions and no remaining inlinks is an
            # orphan placeholder: remove it from the datastore entirely
            if len(page.inlinks) == 0 and page.revision == 0 and page.key:
                deletes.append(page)
            else:
                updates.append(page)
    if updates:
        ndb.put_multi(updates)
    if deletes:
        # BUG FIX: the original appended page.key to deletes and then read
        # page.title off those Keys in the cache-invalidation loop below;
        # ndb Keys have no .title. Keep the entities, delete by key.
        ndb.delete_multi([page.key for page in deletes])
    for page in updates + deletes:
        caching.del_rendered_body(page.title)
        caching.del_hashbangs(page.title)
def _update_redirected_links(self, new_redir, old_redir):
    """Change in/out links of self and related pages according to new redirect metadata"""
    if old_redir == new_redir:
        return

    source = WikiPage.get_by_title(old_redir, follow_redirect=True) if old_redir else self
    if len(source.inlinks) == 0:
        return
    target = WikiPage.get_by_title(new_redir, follow_redirect=True) if new_redir else self

    updates = [source, target]
    # BUG FIX: iterate over a snapshot — the loop body deletes keys from
    # source.inlinks, which breaks iteration over a live items() view
    # (RuntimeError on Python 3).
    for rel, titles in list(source.inlinks.items()):
        # repoint every linking page from the old redirect source to the target
        for t in titles:
            page = WikiPage.get_by_title(t)
            page.del_outlink(source.title, rel)
            page.add_outlink(target.title, rel)
            updates.append(page)
        # move the inlinks themselves from source to target
        target.add_inlinks(source.inlinks[rel], rel)
        del source.inlinks[rel]

    for p in updates:
        p.save()

    # invalidate render caches of every touched page
    for page in updates:
        caching.del_rendered_body(page.title)
        caching.del_hashbangs(page.title)
def _update_redirected_links(self, new_redir, old_redir):
    """Change in/out links of self and related pages according to new redirect metadata"""
    if old_redir == new_redir:
        return

    source = WikiPage.get_by_title(old_redir, follow_redirect=True) if old_redir else self
    if len(source.inlinks) == 0:
        return
    target = WikiPage.get_by_title(new_redir, follow_redirect=True) if new_redir else self

    updates = [source, target]
    # BUG FIX: iterate over a snapshot — the loop body deletes keys from
    # source.inlinks, which breaks iteration over a live items() view
    # (RuntimeError on Python 3).
    for rel, titles in list(source.inlinks.items()):
        # repoint every linking page from the old redirect source to the target
        for t in titles:
            page = WikiPage.get_by_title(t)
            page.del_outlink(source.title, rel)
            page.add_outlink(target.title, rel)
            updates.append(page)
        # move the inlinks themselves from source to target
        target.add_inlinks(source.inlinks[rel], rel)
        del source.inlinks[rel]

    ndb.put_multi(updates)

    # invalidate render caches of every touched page
    for page in updates:
        caching.del_rendered_body(page.title)
        caching.del_hashbangs(page.title)
def _update_content_all(self, body, base_revision, comment, user, force_update, dont_create_rev, dont_defer):
    """Apply a body edit to this page: validate, persist, create a revision
    and update derived indexes.

    :param body: new wikitext body
    :param base_revision: revision the edit was based on (used for merging)
    :param comment: edit comment stored on the page and its revision
    :param user: the modifying user (stored as self.modifier)
    :param force_update: persist even if the body is unchanged; skips the
        updated_at refresh
    :param dont_create_rev: skip the revision bump and WikiPageRevision row
    :param dont_defer: passed through to update_links_and_data
    :return: True if the page was updated, False if nothing changed
    """
    # do not update if the body is not changed
    if not force_update and self.body == body:
        return False

    # validate and prepare new contents
    new_data, new_md = self.validate_new_content(base_revision, body, user)
    new_body = self._merge_if_needed(base_revision, body)

    # get old data and metadata; guard against unparsable current content,
    # consistent with the sibling implementations of this method
    try:
        old_md = self.metadata.copy()
    except ValueError:
        old_md = {}
    try:
        old_data = self.data.copy()
    except ValueError:
        old_data = {}

    # delete caches
    caching.del_rendered_body(self.title)
    caching.del_hashbangs(self.title)
    caching.del_metadata(self.title)
    caching.del_data(self.title)

    # update model and save
    self.body = new_body
    self.modifier = user
    self.description = PageOperationMixin.make_description(new_body)
    self.acl_read = new_md.get('read', '')
    self.acl_write = new_md.get('write', '')
    self.comment = comment
    self.itemtype_path = schema.get_itemtype_path(new_md['schema'])
    self._update_pub_state(new_md, old_md)
    if not dont_create_rev:
        self.revision += 1
    if not force_update:
        self.updated_at = datetime.now()
    self.put()

    # create revision
    if not dont_create_rev:
        rev_key = self._rev_key()
        rev = WikiPageRevision(parent=rev_key, title=self.title, body=self.body,
                               created_at=self.updated_at, revision=self.revision,
                               comment=self.comment, modifier=self.modifier,
                               acl_read=self.acl_read, acl_write=self.acl_write)
        rev.put()

    # update inlinks, outlinks and schema data index
    self.update_links_and_data(old_md.get('redirect'), new_md.get('redirect'),
                               old_data, new_data, dont_defer)

    # delete config cache
    if self.title == '.config':
        caching.del_config()

    # delete title cache if it's a new page
    if self.revision == 1:
        caching.del_titles()

    return True
def _unpublish(self, save):
    """Withdraw this page from the stream it was published to and splice
    its older/newer neighbours back together.

    :param save: persist this page via self.save() after clearing its
        publish fields
    """
    # not published: nothing to undo
    if self.published_at is None:
        return

    # invalidate caches of this page and its neighbours in the stream
    caching.del_rendered_body(self.title)
    caching.del_hashbangs(self.title)
    if self.newer_title:
        caching.del_rendered_body(self.newer_title)
        caching.del_hashbangs(self.newer_title)
    if self.older_title:
        caching.del_rendered_body(self.older_title)
        caching.del_hashbangs(self.older_title)

    # FIX: only look up neighbours that exist — the original called
    # get_by_title(None) unconditionally, doing useless datastore reads
    older = WikiPage.get_by_title(self.older_title) if self.older_title is not None else None
    newer = WikiPage.get_by_title(self.newer_title) if self.newer_title is not None else None

    # relink the doubly-linked publish chain around this page
    if self.older_title is not None and self.newer_title is not None:
        newer.older_title = self.older_title
        older.newer_title = self.newer_title
        newer.save()
        older.save()
    elif self.older_title is not None:
        older.newer_title = None
        older.save()
    elif self.newer_title is not None:
        newer.older_title = None
        newer.save()

    # clear publish state on this page
    self.published_at = None
    self.published_to = None
    self.older_title = None
    self.newer_title = None
    if save:
        self.save()
def _publish(self, title, save):
    """Publish this page into the post stream identified by *title*,
    linking it in front of that stream's current newest post."""
    # already published to this stream: nothing to do
    if self.published_at is not None and self.published_to == title:
        return

    # attach this page after the stream's current newest post, if any
    posts = WikiPage.get_posts_of(title, index=0, count=1)
    if posts:
        newest = posts[0]
        newest.newer_title = self.title
        newest.save()
        self.older_title = newest.title

    self.published_to = title
    self.published_at = datetime.utcnow().replace(tzinfo=utc)
    if save:
        self.save()

    # drop cached renderings of this page and both stream neighbours
    caching.del_rendered_body(self.title)
    caching.del_hashbangs(self.title)
    for neighbour in (self.newer_title, self.older_title):
        if neighbour:
            caching.del_rendered_body(neighbour)
            caching.del_hashbangs(neighbour)
def _unpublish(self, save):
    """Withdraw this page from the stream it was published to and splice
    its older/newer neighbours back together.

    :param save: persist this page via self.put() after clearing its
        publish fields
    """
    # not published: nothing to undo
    if self.published_at is None:
        return

    # invalidate caches of this page and its neighbours in the stream
    caching.del_rendered_body(self.title)
    caching.del_hashbangs(self.title)
    if self.newer_title:
        caching.del_rendered_body(self.newer_title)
        caching.del_hashbangs(self.newer_title)
    if self.older_title:
        caching.del_rendered_body(self.older_title)
        caching.del_hashbangs(self.older_title)

    # FIX: only look up neighbours that exist — the original called
    # get_by_title(None) unconditionally, doing useless datastore reads
    older = WikiPage.get_by_title(self.older_title) if self.older_title is not None else None
    newer = WikiPage.get_by_title(self.newer_title) if self.newer_title is not None else None

    # relink the doubly-linked publish chain around this page
    if self.older_title is not None and self.newer_title is not None:
        newer.older_title = self.older_title
        older.newer_title = self.newer_title
        newer.put()
        older.put()
    elif self.older_title is not None:
        older.newer_title = None
        older.put()
    elif self.newer_title is not None:
        newer.older_title = None
        newer.put()

    # clear publish state on this page
    self.published_at = None
    self.published_to = None
    self.older_title = None
    self.newer_title = None
    if save:
        self.put()
def _publish(self, title, save):
    """Publish this page into the post stream identified by *title*,
    linking it in front of that stream's current newest post."""
    # already published to this stream: nothing to do
    if self.published_at is not None and self.published_to == title:
        return

    # attach this page after the stream's current newest post, if any
    posts = WikiPage.get_posts_of(title, index=0, count=1)
    if posts:
        newest = posts[0]
        newest.newer_title = self.title
        newest.put()
        self.older_title = newest.title

    self.published_to = title
    self.published_at = datetime.now()
    if save:
        self.put()

    # drop cached renderings of this page and both stream neighbours
    caching.del_rendered_body(self.title)
    caching.del_hashbangs(self.title)
    for neighbour in (self.newer_title, self.older_title):
        if neighbour:
            caching.del_rendered_body(neighbour)
            caching.del_hashbangs(neighbour)
def _publish(self, title, save):
    """Publish this page into the post stream identified by *title*,
    linking it in front of that stream's current newest post."""
    # already published to this stream: nothing to do
    if self.published_at is not None and self.published_to == title:
        return

    # attach this page after the stream's current newest post, if any
    posts = WikiPage.get_posts_of(title, 1)
    if posts:
        newest = posts[0]
        newest.newer_title = self.title
        newest.put()
        self.older_title = newest.title

    self.published_to = title
    self.published_at = datetime.now()
    if save:
        self.put()

    # drop cached renderings of this page and both stream neighbours
    caching.del_rendered_body(self.title)
    caching.del_hashbangs(self.title)
    for neighbour in (self.newer_title, self.older_title):
        if neighbour:
            caching.del_rendered_body(neighbour)
            caching.del_hashbangs(neighbour)
def _update_inlinks(self, added_outlinks, removed_outlinks):
    """Synchronize inlinks of related pages after this page's outlinks changed."""
    # newly added outlinks: register a matching inlink on each target page
    touched = []
    for rel, titles in added_outlinks.items():
        for title in titles:
            target = WikiPage.get_by_title(title, follow_redirect=True)
            target.add_inlink(self.title, rel)
            target.save()
            touched.append(target)
    for target in touched:
        caching.del_rendered_body(target.title)
        caching.del_hashbangs(target.title)

    # removed outlinks: drop the matching inlink on each target page
    to_save = []
    to_delete = []
    for rel, titles in removed_outlinks.items():
        for title in titles:
            target = WikiPage.get_by_title(title, follow_redirect=True)
            target.del_inlink(self.title, rel)
            # an existing page with no revisions and no remaining inlinks
            # is an orphan placeholder and is deleted instead of re-saved
            is_orphan = len(target.inlinks) == 0 and target.revision == 0 and target.id
            (to_delete if is_orphan else to_save).append(target)

    for target in to_save + to_delete:
        caching.del_rendered_body(target.title)
        caching.del_hashbangs(target.title)
    for target in to_save:
        target.save()
    for target in to_delete:
        target.set_cur_user(self.cur_user)
        target.delete(self.cur_user)
def _update_content_all(self, body, base_revision, comment, user, force_update, dont_create_rev):
    """Apply a body edit to this page: validate, persist, create a revision
    and update derived indexes.

    :param body: new wikitext body
    :param base_revision: revision the edit was based on (used for merging)
    :param comment: edit comment stored on the page and its revision
    :param user: the modifying user; only recorded when non-anonymous
    :param force_update: persist even if the body is unchanged; skips the
        updated_at refresh
    :param dont_create_rev: skip the revision bump and WikiPageRevision row
    :return: True if the page was updated, False if nothing changed
    """
    # do not update if the body is not changed
    if not force_update and self.body == body:
        return False

    # capture the timestamp once so page and revision agree on it
    now = datetime.utcnow().replace(tzinfo=utc)

    # validate and prepare new contents
    new_data, new_md = self.validate_new_content(base_revision, body, user)
    new_body = self._merge_if_needed(base_revision, body)

    # get old data and metadata; the current content may fail to parse,
    # in which case fall back to empty dicts
    try:
        old_md = self.metadata.copy()
    except ValueError:
        old_md = {}
    try:
        old_data = self.data.copy()
    except ValueError:
        old_data = {}

    # delete caches before mutating, so stale renderings cannot be served
    caching.del_rendered_body(self.title)
    caching.del_hashbangs(self.title)
    caching.del_metadata(self.title)
    caching.del_data(self.title)

    # update model and save
    self.body = new_body
    if user and not user.is_anonymous():
        self.modifier = user
    self.description = PageOperationMixin.make_description(new_body)
    self.acl_read = new_md.get('read', '')
    self.acl_write = new_md.get('write', '')
    self.comment = comment
    self.itemtype_path = schema.get_itemtype_path(new_md['schema'])
    self._update_pub_state(new_md, old_md)
    if not dont_create_rev:
        self.revision += 1
    if not force_update:
        self.updated_at = now
    self.save()

    # create revision snapshotting the just-saved state
    if not dont_create_rev:
        rev = WikiPageRevision(page=self, title=self.title, body=self.body,
                               created_at=self.updated_at, revision=self.revision,
                               comment=self.comment, modifier=self.modifier)
        rev.save()

    # update inlinks, outlinks and schema data index
    self.update_links_and_data(old_md.get('redirect'), new_md.get('redirect'),
                               old_data, new_data)

    # delete config cache
    if self.title == '.config':
        caching.del_config()

    # delete title cache if it's a new page
    if self.revision == 1:
        caching.del_titles()

    return True
def _update_content_all(self, body, base_revision, comment, user, force_update, dont_create_rev, dont_defer):
    """Apply a body edit to this page: validate, persist, create a revision
    and update derived indexes.

    :param body: new wikitext body
    :param base_revision: revision the edit was based on (used for merging)
    :param comment: edit comment stored on the page and its revision
    :param user: the modifying user (stored as self.modifier)
    :param force_update: persist even if the body is unchanged; skips the
        updated_at refresh
    :param dont_create_rev: skip the revision bump and WikiPageRevision row
    :param dont_defer: passed through to update_links_and_data
    :return: True if the page was updated, False if nothing changed
    """
    # do not update if the body is not changed
    if not force_update and self.body == body:
        return False

    # capture the timestamp once so page and revision agree on it
    now = datetime.now()

    # validate and prepare new contents
    new_data, new_md = self.validate_new_content(base_revision, body, user)
    new_body = self._merge_if_needed(base_revision, body)

    # get old data and metadata; the current content may fail to parse,
    # in which case fall back to empty dicts
    try:
        old_md = self.metadata.copy()
    except ValueError:
        old_md = {}
    try:
        old_data = self.data.copy()
    except ValueError:
        old_data = {}

    # delete caches before mutating, so stale renderings cannot be served
    caching.del_rendered_body(self.title)
    caching.del_hashbangs(self.title)
    caching.del_metadata(self.title)
    caching.del_data(self.title)

    # update model and save
    self.body = new_body
    self.modifier = user
    self.description = PageOperationMixin.make_description(new_body)
    self.acl_read = new_md.get('read', '')
    self.acl_write = new_md.get('write', '')
    self.comment = comment
    self.itemtype_path = schema.get_itemtype_path(new_md['schema'])
    self._update_pub_state(new_md, old_md)
    if not dont_create_rev:
        self.revision += 1
    if not force_update:
        self.updated_at = now
    self.put()

    # create revision snapshotting the just-saved state
    if not dont_create_rev:
        rev_key = self._rev_key()
        rev = WikiPageRevision(parent=rev_key, title=self.title, body=self.body,
                               created_at=self.updated_at, revision=self.revision,
                               comment=self.comment, modifier=self.modifier,
                               acl_read=self.acl_read, acl_write=self.acl_write)
        rev.put()

    # update inlinks, outlinks and schema data index
    self.update_links_and_data(old_md.get('redirect'), new_md.get('redirect'),
                               old_data, new_data, dont_defer)

    # delete config cache
    if self.title == '.config':
        caching.del_config()

    # delete title cache if it's a new page
    if self.revision == 1:
        caching.del_titles()

    return True