def validate_new_content(self, base_revision, new_body, user):
    """Validate *new_body* before it replaces the current page content.

    Checks, in order: ACL self-revocation, circular redirects, schema
    data validity, revision freshness, and heading uniqueness.

    Returns:
        (new_data, new_md) parsed from *new_body*.
    Raises:
        ValueError: describing the first problem found.
    """
    # check metadata
    new_md = PageOperationMixin.parse_metadata(new_body)

    # prevent self-revoke: the editor must keep both read and write access
    acl_r = new_md.get('read', '')
    acl_r = acl_r.split(',') if acl_r else []
    acl_w = new_md.get('write', '')
    acl_w = acl_w.split(',') if acl_w else []
    if not self.can_read(user, acl_r=acl_r, acl_w=acl_w):
        raise ValueError('Cannot restrict your permission')
    if not self.can_write(user, acl_r=acl_r, acl_w=acl_w):
        raise ValueError('Cannot restrict your permission')

    # prevent circular-redirection; _follow_redirect raises ValueError on
    # a cycle, which propagates as-is (the old try/re-raise was a no-op)
    WikiPage._follow_redirect(self, new_md.get(u'redirect'))

    # check data
    new_data = PageOperationMixin.parse_data(self.title, new_body,
                                             new_md['schema'])
    if any(isinstance(value, schema.InvalidProperty)
           for value in new_data.values()):
        raise ValueError('Invalid schema data')

    # check revision: the edit must be based on the latest revision
    if self.revision < base_revision:
        raise ValueError('Invalid revision number: %d' % base_revision)

    # check headings
    if not TocGenerator(md.convert(new_body)).validate():
        raise ValueError("Duplicate paths not allowed")

    return new_data, new_md
def get_preview_instance(self, preview_body):
    """Return a detached page object carrying *preview_body*.

    The preview copies this page's navigational attributes so it can be
    rendered without touching the stored page.
    """
    preview = PageOperationMixin()
    preview.body = preview_body
    # mirror the attributes the renderer reads from a real page
    for attr in ('title', 'revision', 'inlinks', 'outlinks',
                 'related_links', 'older_title', 'newer_title'):
        setattr(preview, attr, getattr(self, attr))
    return preview
def get_preview_instance(self, preview_body):
    """Build a throwaway page object for rendering *preview_body*.

    Copies title, revision, link data and timestamps from this page so
    the preview renders like the stored page would.
    """
    preview = PageOperationMixin()
    preview.body = preview_body
    copied_attrs = ('title', 'revision', 'inlinks', 'outlinks',
                    'related_links', 'older_title', 'newer_title',
                    'updated_at')
    for name in copied_attrs:
        setattr(preview, name, getattr(self, name))
    return preview
def validate_new_content(self, base_revision, new_body, user):
    """Validate *new_body* against ACLs, redirects, schema data,
    revision freshness and heading structure.

    Returns:
        (new_data, new_md) parsed from *new_body*.
    Raises:
        ValueError: describing the first problem found, including the
        offending keys for invalid schema data.
    """
    # check metadata
    new_md = PageOperationMixin.parse_metadata(new_body)

    # prevent self-revoke: the editor must keep both read and write access
    acl_r = new_md.get('read', '')
    acl_r = acl_r.split(',') if acl_r else []
    acl_w = new_md.get('write', '')
    acl_w = acl_w.split(',') if acl_w else []
    if not self.can_read(user, acl_r=acl_r, acl_w=acl_w):
        raise ValueError('Cannot restrict your permission')
    if not self.can_write(user, acl_r=acl_r, acl_w=acl_w):
        raise ValueError('Cannot restrict your permission')

    # prevent circular-redirection; _follow_redirect raises ValueError on
    # a cycle, which propagates as-is (the old try/re-raise was a no-op)
    WikiPage._follow_redirect(self, new_md.get(u'redirect'))

    # check data: collect the offending keys in a single pass instead of
    # scanning once with any() and again to build the key list
    new_data = PageOperationMixin.parse_data(self.title, new_body,
                                             new_md['schema'])
    invalid_keys = [key for key, value in new_data.iteritems()
                    if isinstance(value, schema.InvalidProperty)]
    if invalid_keys:
        raise ValueError('Invalid schema data: %s' % ', '.join(invalid_keys))

    # check revision: the edit must be based on the latest revision
    if self.revision < base_revision:
        raise ValueError('Invalid revision number: %d' % base_revision)

    # check headings
    invalid_reason = TocGenerator(md.convert(new_body)).is_invalid()
    if invalid_reason:
        raise ValueError(invalid_reason)

    return new_data, new_md
def test_additional_sections(self):
    """Bodies may declare extra sections after the default articleBody."""
    parsed = PageOperationMixin.parse_sections(
        u'Hello\n\nsection1::---\n\nHello\n\nthere\n\nsection2::---\n\nGood\n\nbye\n'
    )
    self.assertEqual({'articleBody', 'section1', 'section2'},
                     set(parsed.keys()))
    self.assertEqual(u'Hello', parsed['articleBody'])
    self.assertEqual(u'Hello\n\nthere', parsed['section1'])
    self.assertEqual(u'Good\n\nbye', parsed['section2'])
def test_normal(self):
    """Dotted header lines parse into a metadata mapping."""
    parsed = PageOperationMixin.parse_metadata(
        u'.hello a b c\n.x\n.z what?\nblahblah')
    expected = {
        u'hello': u'a b c',
        u'x': None,
        u'z': u'what?',
    }
    # defaults take precedence on key collisions, as in production
    expected.update(self.default_md)
    self.assertEqual(expected, parsed)
def test_normal(self):
    """Metadata lines (.key value) become entries over the defaults."""
    body = u'.hello a b c\n.x\n.z what?\nblahblah'
    parsed = PageOperationMixin.parse_metadata(body)
    expected = {
        u'hello': u'a b c',
        u'x': None,
        u'z': u'what?',
    }
    expected.update(self.default_md)
    self.assertEqual(expected, parsed)
def _update_content_all(self, body, base_revision, comment, user,
                        force_update, dont_create_rev, dont_defer):
    """Validate, persist and index new page content.

    Returns:
        False when the body is unchanged (and force_update is off),
        True after a successful update.
    """
    # do not update if the body is not changed
    if not force_update and self.body == body:
        return False

    # capture one timestamp so page and revision agree exactly
    now = datetime.now()

    # validate and prepare new contents
    new_data, new_md = self.validate_new_content(base_revision, body, user)
    new_body = self._merge_if_needed(base_revision, body)

    # get old data and metadata; a page whose current body fails to
    # parse raises ValueError — fall back to empty mappings so the
    # update can still proceed (matches the sibling implementation)
    try:
        old_md = self.metadata.copy()
    except ValueError:
        old_md = {}
    try:
        old_data = self.data.copy()
    except ValueError:
        old_data = {}

    # delete caches
    caching.del_rendered_body(self.title)
    caching.del_hashbangs(self.title)
    caching.del_metadata(self.title)
    caching.del_data(self.title)

    # update model and save
    self.body = new_body
    self.modifier = user
    self.description = PageOperationMixin.make_description(new_body)
    self.acl_read = new_md.get('read', '')
    self.acl_write = new_md.get('write', '')
    self.comment = comment
    self.itemtype_path = schema.get_itemtype_path(new_md['schema'])
    self._update_pub_state(new_md, old_md)
    if not dont_create_rev:
        self.revision += 1
    if not force_update:
        self.updated_at = now
    self.put()

    # create revision
    if not dont_create_rev:
        rev_key = self._rev_key()
        rev = WikiPageRevision(parent=rev_key, title=self.title,
                               body=self.body, created_at=self.updated_at,
                               revision=self.revision, comment=self.comment,
                               modifier=self.modifier,
                               acl_read=self.acl_read,
                               acl_write=self.acl_write)
        rev.put()

    # update inlinks, outlinks and schema data index
    self.update_links_and_data(old_md.get('redirect'),
                               new_md.get('redirect'),
                               old_data, new_data, dont_defer)

    # delete config cache
    if self.title == '.config':
        caching.del_config()

    # delete title cache if it's a new page
    if self.revision == 1:
        caching.del_titles()

    return True
def get_config(cls):
    """Return the effective configuration: DEFAULT_CONFIG overlaid
    with the user-editable '.config' page, memoized via caching.
    """
    result = caching.get_config()
    if result is None:
        # deep-copy so the recursive merge below never mutates the
        # module-level DEFAULT_CONFIG in place
        import copy
        result = copy.deepcopy(main.DEFAULT_CONFIG)
        try:
            config_page = cls.get_by_title('.config')
            # NOTE(review): yaml.load is unsafe on untrusted page
            # content; consider yaml.safe_load
            user_config = yaml.load(PageOperationMixin.remove_metadata(config_page.body))
        except Exception:
            # best-effort: missing or malformed config page means
            # "no user overrides" (narrowed from a bare except)
            user_config = None
        user_config = user_config or {}

        def merge_dict(target_dict, source_dict):
            # recursively overlay source onto target, descending into
            # nested dicts instead of replacing them wholesale
            for (key, value) in source_dict.iteritems():
                if not isinstance(value, dict):
                    target_dict[key] = value
                else:
                    merge_dict(target_dict.setdefault(key, {}), value)

        merge_dict(result, user_config)
        caching.set_config(result)
    return result
def get_config(cls):
    """Compute and cache the merged wiki configuration.

    Starts from main.DEFAULT_CONFIG and overlays any YAML found in the
    '.config' page; the result is stored in the cache layer.
    """
    result = caching.get_config()
    if result is None:
        # copy deeply: the merge writes into nested dicts, and sharing
        # them with DEFAULT_CONFIG would corrupt the defaults
        import copy
        result = copy.deepcopy(main.DEFAULT_CONFIG)
        try:
            config_page = cls.get_by_title('.config')
            # NOTE(review): yaml.load on page content is unsafe for
            # untrusted input; yaml.safe_load is preferable
            user_config = yaml.load(
                PageOperationMixin.remove_metadata(config_page.body))
        except Exception:
            # deliberate best-effort: any failure reading/parsing the
            # config page yields no overrides (was a bare except)
            user_config = None
        user_config = user_config or {}

        def merge_dict(target_dict, source_dict):
            # overlay source onto target, recursing into nested dicts
            for (key, value) in source_dict.iteritems():
                if not isinstance(value, dict):
                    target_dict[key] = value
                else:
                    merge_dict(target_dict.setdefault(key, {}), value)

        merge_dict(result, user_config)
        caching.set_config(result)
    return result
def test_specifying_default_section(self):
    """The caller may override the default section name."""
    parsed = PageOperationMixin.parse_sections(u'Hello', u'longText')
    self.assertEqual({'longText'}, set(parsed.keys()))
    self.assertEqual(u'Hello', parsed['longText'])
def test_should_ignore_yaml_schema_block(self):
    """Inline yaml/schema blocks are excluded from the description."""
    body = u'.schema Book\n #!yaml/schema\n author: A\n\nHello'
    description = PageOperationMixin.make_description(body, 20)
    self.assertEqual(u'Hello', description)
def test_cut_off(self):
    """Long descriptions are truncated to the limit with an ellipsis."""
    description = PageOperationMixin.make_description(
        u'Hi Hello World Sentences.', 20)
    self.assertEqual(u'Hi Hello World Se...', description)
def test_try_newline(self):
    """The description stops at the first newline."""
    description = PageOperationMixin.make_description(u'Hello\nWorld', 20)
    self.assertEqual(u'Hello', description)
def test_empty_string(self):
    """An empty body yields only the default metadata."""
    parsed = PageOperationMixin.parse_metadata(u'')
    expected = {}
    expected.update(self.default_md)
    self.assertEqual(expected, parsed)
def test_no_schema(self):
    """A body without a yaml/schema block parses to an empty dict."""
    parsed = PageOperationMixin.parse_schema_yaml(u'Hello')
    self.assertEqual({}, parsed)
def test_re_match(self):
    """The yaml/schema block is matched even with a tab-indented marker."""
    src = u'''\t#!yaml/schema\n url: "http://anotherfam.kr/"\n\n\n[[\uc81c\uc791\ub450\ub808]]\ub97c ...\n'''
    parsed = PageOperationMixin.parse_schema_yaml(src)
    self.assertEqual(parsed['url'], 'http://anotherfam.kr/')
def test_try_newline(self):
    """Only the first line of the body is used for the description."""
    actual = PageOperationMixin.make_description(u'Hello\nWorld', 20)
    self.assertEqual(u'Hello', actual)
def test_no_metadata(self):
    """A body with no dotted lines yields only the default metadata."""
    parsed = PageOperationMixin.parse_metadata(u'Hello\nThere')
    expected = {}
    expected.update(self.default_md)
    self.assertEqual(expected, parsed)
def test_multiple_authors(self):
    """Repeated properties accumulate into a list of values."""
    parsed = PageOperationMixin.parse_data(
        u'Hello', u'[[author::AK]] and [[author::TK]]', u'Book')
    authors = [v.pvalue for v in parsed['author']]
    self.assertEqual([u'AK', u'TK'], authors)
def absolute_url(self):
    """URL path for this specific revision of the page."""
    path = PageOperationMixin.title_to_path(self.title)
    rev = int(self.revision)
    return u'/%s?rev=%d' % (path, rev)
def test_no_data(self):
    """Without embedded properties only the implicit keys are present."""
    parsed = PageOperationMixin.parse_data(u'Hello', u'Hello')
    self.assertEqual(['articleBody', 'name', 'schema'], parsed.keys())
    self.assertEqual(u'Hello', parsed['name'].pvalue)
    self.assertEqual(u'Thing/CreativeWork/Article/',
                     parsed['schema'].pvalue)
    self.assertEqual(u'Hello', parsed['articleBody'].pvalue)
def test_multiple_authors(self):
    """Two author properties yield a list of two values."""
    body = u'[[author::AK]] and [[author::TK]]'
    parsed = PageOperationMixin.parse_data(u'Hello', body, u'Book')
    self.assertEqual([u'AK', u'TK'],
                     [v.pvalue for v in parsed['author']])
def test_try_period(self):
    """Truncation prefers a sentence boundary before the hard limit."""
    actual = PageOperationMixin.make_description(
        u'Hi. Hello. World. Sentences.', 20)
    self.assertEqual(u'Hi. Hello. World.', actual)
def assertRenderedText(self, markdown, html):
    """Assert that rendering *markdown* for page u'Hello' gives *html*."""
    rendered = PageOperationMixin.render_body(u'Hello', markdown)
    self.assertEqual(html, rendered)
def test_cut_off(self):
    """A body longer than the limit is cut and suffixed with '...'."""
    actual = PageOperationMixin.make_description(
        u'Hi Hello World Sentences.', 20)
    self.assertEqual(u'Hi Hello World Se...', actual)
def test_should_ignore_metadata(self):
    """Metadata lines do not appear in the generated description."""
    actual = PageOperationMixin.make_description(u'.pub\n\nHello', 20)
    self.assertEqual(u'Hello', actual)
def test_should_ignore_yaml_schema_block(self):
    """The yaml/schema block never leaks into the description."""
    source = u'.schema Book\n #!yaml/schema\n author: A\n\nHello'
    actual = PageOperationMixin.make_description(source, 20)
    self.assertEqual(u'Hello', actual)
def test_try_period(self):
    """The description ends at the last full sentence within the limit."""
    description = PageOperationMixin.make_description(
        u'Hi. Hello. World. Sentences.', 20)
    self.assertEqual(u'Hi. Hello. World.', description)
def test_author_and_isbn(self):
    """Both [[..]] and {{..}} property syntaxes are parsed."""
    parsed = PageOperationMixin.parse_data(
        u'Hello', u'[[author::AK]]\n{{isbn::1234567890}}', u'Book')
    self.assertEqual(u'AK', parsed['author'].pvalue)
    self.assertEqual(u'1234567890', parsed['isbn'].pvalue)
def test_should_ignore_metadata(self):
    """Leading metadata lines are stripped before summarizing."""
    description = PageOperationMixin.make_description(u'.pub\n\nHello', 20)
    self.assertEqual(u'Hello', description)
def test_tab_and_space_mixed(self):
    """Tab and space indentation may be mixed inside the schema block."""
    src = u'\t#!yaml/schema\n alternateName: hi\n\turl: http://x.com\n name: "Hello"\n'
    parsed = PageOperationMixin.parse_schema_yaml(src)
    self.assertEqual(parsed['name'], u'Hello')
    self.assertEqual(parsed['alternateName'], u'hi')
    self.assertEqual(parsed['url'], u'http://x.com')
def test_default_section(self):
    """A plain body goes entirely into articleBody."""
    parsed = PageOperationMixin.parse_sections(u'Hello')
    self.assertEqual({'articleBody'}, set(parsed.keys()))
    self.assertEqual(u'Hello', parsed['articleBody'])
def test_additional_sections(self):
    """Extra `name::---` markers split the body into named sections."""
    body = u'Hello\n\nsection1::---\n\nHello\n\nthere\n\nsection2::---\n\nGood\n\nbye\n'
    sections = PageOperationMixin.parse_sections(body)
    self.assertEqual({'articleBody', 'section1', 'section2'},
                     set(sections.keys()))
    self.assertEqual(u'Hello', sections['articleBody'])
    self.assertEqual(u'Hello\n\nthere', sections['section1'])
    self.assertEqual(u'Good\n\nbye', sections['section2'])
def test_author_and_isbn(self):
    """Wiki-link and brace properties are parsed into distinct keys."""
    body = u'[[author::AK]]\n{{isbn::1234567890}}'
    parsed = PageOperationMixin.parse_data(u'Hello', body, u'Book')
    self.assertEqual(u'AK', parsed['author'].pvalue)
    self.assertEqual(u'1234567890', parsed['isbn'].pvalue)
def absolute_url(self):
    """Return the page path with an explicit ?rev= query."""
    encoded_title = PageOperationMixin.title_to_path(self.title)
    return u'/%s?rev=%d' % (encoded_title, int(self.revision))
def _update_content_all(self, body, base_revision, comment, user,
                        force_update, dont_create_rev, dont_defer):
    """Core update path: validate new content, persist the page,
    optionally record a revision, and refresh links, data and caches.

    Returns False when the body is unchanged (and force_update is off),
    True after a successful update.
    """
    # do not update if the body is not changed
    if not force_update and self.body == body:
        return False

    # single timestamp so the page and its revision record agree
    now = datetime.now()

    # validate and prepare new contents
    new_data, new_md = self.validate_new_content(base_revision, body, user)
    new_body = self._merge_if_needed(base_revision, body)

    # get old data and metadata; parsing the current body can raise
    # ValueError — fall back to empty mappings so the update proceeds
    try:
        old_md = self.metadata.copy()
    except ValueError:
        old_md = {}
    try:
        old_data = self.data.copy()
    except ValueError:
        old_data = {}

    # delete caches
    caching.del_rendered_body(self.title)
    caching.del_hashbangs(self.title)
    caching.del_metadata(self.title)
    caching.del_data(self.title)

    # update model and save
    self.body = new_body
    self.modifier = user
    self.description = PageOperationMixin.make_description(new_body)
    self.acl_read = new_md.get('read', '')
    self.acl_write = new_md.get('write', '')
    self.comment = comment
    self.itemtype_path = schema.get_itemtype_path(new_md['schema'])
    self._update_pub_state(new_md, old_md)
    if not dont_create_rev:
        self.revision += 1
    if not force_update:
        self.updated_at = now
    self.put()

    # create revision
    if not dont_create_rev:
        rev_key = self._rev_key()
        rev = WikiPageRevision(parent=rev_key, title=self.title,
                               body=self.body, created_at=self.updated_at,
                               revision=self.revision, comment=self.comment,
                               modifier=self.modifier,
                               acl_read=self.acl_read,
                               acl_write=self.acl_write)
        rev.put()

    # update inlinks, outlinks and schema data index
    self.update_links_and_data(old_md.get('redirect'),
                               new_md.get('redirect'), old_data, new_data,
                               dont_defer)

    # delete config cache
    if self.title == '.config':
        caching.del_config()

    # delete title cache if it's a new page
    if self.revision == 1:
        caching.del_titles()

    return True