def test_userDocumentLastChanged():
    """
    Exercise the last-changed list. It stores only metadata keys, so
    matching metadata must be created alongside each entry.
    """
    user_slug = random_slug('test-user-')
    key = data.userDocumentLastChanged_key(user_slug)
    assert user_slug in key
    assert data.userDocumentLastChanged_list(user_slug) == []
    doc_slug = random_slug('test-document-')
    metadata = {'doc_slug': doc_slug, 'word_count': '3000'}  # numbers stored as str
    data.userDocumentLastChanged_set(user_slug, doc_slug, doc_slug)
    data.userDocumentMetadata_set(user_slug, doc_slug, metadata)
    assert data.userDocumentLastChanged_list(user_slug) == [metadata]
    # Setting the same slug again must not create a duplicate entry.
    data.userDocumentLastChanged_set(user_slug, doc_slug, doc_slug)
    assert data.userDocumentLastChanged_list(user_slug) == [metadata]
    data.userDocumentLastChanged_delete(user_slug, doc_slug)
    assert data.userDocumentLastChanged_list(user_slug) == []
    # Clean up the metadata created above.
    data.userDocumentMetadata_delete(user_slug, doc_slug)
    assert not data.userDocumentMetadata_exists(user_slug, doc_slug)
def test_userDocuments():
    """
    UserDocuments are Redis hashes of {part_slug: wiki_text}. The
    userDocument_ methods generally manage userSet data as well.
    """
    user_slug = random_slug('test-user-')
    doc_slug = random_slug('test-document-')
    key = data.userDocument_key(user_slug, doc_slug)
    assert user_slug in key
    assert doc_slug in key
    # Round-trip: absent -> set -> present -> delete -> absent.
    assert not data.userDocument_exists(user_slug, doc_slug)
    data.userDocument_set(user_slug, doc_slug, minimal_document)
    assert data.userDocument_exists(user_slug, doc_slug)
    assert data.userDocument_get(user_slug, doc_slug) == minimal_document
    assert data.userDocument_hash(user_slug) == {doc_slug: minimal_document}
    # A unique slug for an existing doc_slug must differ from it.
    new_slug = data.userDocument_unique_slug(user_slug, doc_slug)
    assert new_slug != doc_slug
    data.userDocument_delete(user_slug, doc_slug)
    assert not data.userDocument_exists(user_slug, doc_slug)
    assert data.userDocument_get(user_slug, doc_slug) is None
def decorate(self, pattern, part_slug):
    """
    Render a demo block as source wikitext next to its processed
    output. Each demo is processed under a fresh random
    config:document name; micro chance of a collision remains —
    a singleton tracking random IDs in use would remove it.
    """
    self.settings.set('config:document', random_slug('demo'))
    wiki = Wiki(self.settings)
    options = match_demo_options(pattern)
    fragment = 'index' not in options
    # NOTE(review): the part_slug parameter is always overwritten here;
    # confirm callers do not rely on the value they pass in.
    part_slug = random_slug('demo-') if fragment else 'index'
    body_lines = pattern.splitlines()
    source = "\n".join(body_lines[1:-1])  # drop opening/closing fence lines
    output = wiki.process(None, None, {part_slug: source}, fragment)
    env = Environment(autoescape=True)
    # Wide demos stack source above output; the default is side-by-side.
    if 'wide' in options:
        template_text = trim("""
            <div class="wiki-demo-wide space">
            <table>
            <tbody>
            <tr>
            <td>
            <pre>{{ source|safe }}</pre>
            </td>
            </tr>
            <tr>
            <td class="wiki-demo-output">
            {{ output|safe }}
            </td>
            </tr>
            </tbody>
            </table>
            </div>
            """)
    else:
        template_text = trim("""
            <div class="wiki-demo space">
            <table>
            <tbody>
            <tr>
            <td width="50%">
            <pre>{{ source|safe }}</pre>
            </td>
            <td>
            </td>
            <td width="48%" class="wiki-demo-output">
            {{ output|safe }}
            </td>
            </tr>
            </tbody>
            </table>
            </div>
            """)
    tpl = env.from_string(template_text)
    return tpl.render(source=escape(source), output=output)
def test_import_and_export_document():
    """
    Round-trip: export a document to an archive file, import it into
    a new Document, and confirm the two documents are equal.
    """
    doc1 = Document(data)
    user_slug = random_slug('test-user-')
    doc_slug = random_slug('test-doc-')
    doc1.set_parts(user_slug, doc_slug, minimal_document)
    file_name, file_text = doc1.export_txt_file()
    # The exported file name identifies both the user and the document.
    assert user_slug in file_name
    assert doc_slug in file_name
    doc2 = Document(data)
    doc2.import_txt_file(user_slug, doc_slug, file_text)
    assert doc1 == doc2
def html(self, renderer):
    """
    Render grid text as an HTML table. Rows are separated by '==='
    and cells within a row by '---'; each cell is rendered as its
    own block list under a random slug.
    """
    rows = []
    for row_text in self.text.split("==="):
        cells = []
        for cell_text in row_text.strip().split("---"):
            blocks = BlockList(clean_text(cell_text.strip()))
            slug = random_slug('grid-')
            cell_html = blocks.html(
                [0], slug, renderer.settings, fragment=True
            )
            cells.append(cell_html)
        rows.append(cells)
    env = Environment(autoescape=True)
    tpl = env.from_string(
        trim("""
            <table class="table table-condensed">
            <tbody>
            {% for html_row in html_rows %}
            <tr>
            {% for html_cell in html_row %}
            <td>{{ html_cell|safe }}</td>
            {% endfor %}
            </tr>
            {% endfor %}
            </tbody>
            </table>
            """))
    return tpl.render(html_rows=rows)
def test_userDocumentCache():
    """Round-trip a cached HTML rendering: set, get, delete."""
    user_slug = random_slug('test-user-')
    doc_slug = random_slug('test-document-')
    key = data.userDocumentCache_key(user_slug, doc_slug)
    assert user_slug in key
    assert doc_slug in key
    assert not data.userDocumentCache_exists(user_slug, doc_slug)
    html = "<article>...</article>"
    data.userDocumentCache_set(user_slug, doc_slug, html)
    assert data.userDocumentCache_exists(user_slug, doc_slug)
    assert data.userDocumentCache_get(user_slug, doc_slug) == html
    data.userDocumentCache_delete(user_slug, doc_slug)
    assert not data.userDocumentCache_exists(user_slug, doc_slug)
    assert data.userDocumentCache_get(user_slug, doc_slug) is None
def __init__(self, settings=None):
    """Thin wrapper around Placeholders; options are parsed in replace()."""
    self.placeholders = Placeholders(self.regex, 'demo')
    # Copy caller-supplied settings so we can mutate them freely;
    # otherwise start from a fresh default Settings object.
    self.settings = settings.copy() if settings else Settings()
    self.settings.set('config:user', '_')
    self.settings.set('config:document', random_slug('demo'))
def test_utility_functions():
    """
    Create a hash, find its key by prefix, then delete it.
    """
    prefix = 'test-utilities-'
    test_slug = random_slug(prefix)
    # hset(name, mapping=...) replaces the deprecated hmset()
    # (deprecated since redis-py 3.5; behavior is identical).
    data.redis.hset(test_slug, mapping=minimal_document)
    assert data.redis.exists(test_slug)
    assert data.get_hashes([test_slug]) == [minimal_document]
    assert data.keys_by_prefix(prefix) == [test_slug]
    data.del_keys([test_slug])
    assert not data.redis.exists(test_slug)
def test_userDocumentSet():
    """
    UserDocumentSet is a set of doc_slugs per user.
    v.0.1.0 is SINGLE_USER, so there is no pagination yet.
    """
    user_slug = random_slug('test-user-')
    key = data.userDocumentSet_key(user_slug)
    assert user_slug in key
    doc_slug = random_slug('test-document-')
    assert not data.userDocumentSet_exists(user_slug, doc_slug)
    # Add one slug; count and list must reflect exactly that member.
    data.userDocumentSet_set(user_slug, doc_slug)
    assert data.userDocumentSet_exists(user_slug, doc_slug)
    assert data.userDocumentSet_count(user_slug) == 1
    assert data.userDocumentSet_list(user_slug) == [doc_slug]
    # Remove it; the set must be empty again.
    data.userDocumentSet_delete(user_slug, doc_slug)
    assert not data.userDocumentSet_exists(user_slug, doc_slug)
    assert data.userDocumentSet_count(user_slug) == 0
    assert data.userDocumentSet_list(user_slug) == []
def test_userDocumentMetadata():
    """
    UserDocumentMetadata are Redis hashes of {key: val}. The
    userDocument_ methods generally manage userSet data as well.
    """
    user_slug = random_slug('test-user-')
    doc_slug = random_slug('test-document-')
    key = data.userDocumentMetadata_key(user_slug, doc_slug)
    assert user_slug in key
    assert doc_slug in key
    assert not data.userDocumentMetadata_exists(user_slug, doc_slug)
    metadata = {'doc_slug': doc_slug, 'word_count': '3000'}  # numbers stored as str
    data.userDocumentMetadata_set(user_slug, doc_slug, metadata)
    assert data.userDocumentMetadata_get(user_slug, doc_slug) == metadata
    assert data.userDocumentMetadata_exists(user_slug, doc_slug)
    data.userDocumentMetadata_delete(user_slug, doc_slug)
    assert data.userDocumentMetadata_get(user_slug, doc_slug) is None
    assert not data.userDocumentMetadata_exists(user_slug, doc_slug)
def __init__(self, footnotes, prefix=None):
    """
    Create bibliography entries and set up placeholder identifiers.
    When no prefix is supplied, a random one is chosen.
    """
    assert isinstance(footnotes, Footnotes)
    self.footnotes = footnotes
    self.prefix = prefix if prefix is not None else random_slug('random_')
    # Footnote marker plus optional trailing punctuation — see decorator.
    regex = r'\^\[[^\]]+\][.,!?;:·]?'
    self.placeholders = Placeholders(regex, 'link')
def userDocument_unique_slug(self, user_slug: str, base_slug: str) -> str:
    """
    Return a doc_slug unique for this user: try base_slug itself
    first, then randomised suffixes on each collision. Collisions
    should be rare; give up after a 1000-tries sanity limit.
    """
    candidate = base_slug
    for _ in range(1000):
        if not self.userDocument_exists(user_slug, candidate):
            return candidate
        candidate = random_slug(base_slug + "-")
    raise ValueError('A unique doc_slug could not be created.')
def test_user_archive():
    """
    Generate an archive zip file and check it exists. The temporary
    directory is removed automatically afterward.
    """
    # NOTE(review): an unused archive_data dict ({'test': ...,
    # 'test-2': ...}) and a commented-out pdb trace were removed here;
    # write_archive_dir only ever received minimal_document — confirm
    # multi-document archiving is covered elsewhere.
    with tempfile.TemporaryDirectory() as dir_path:
        write_archive_dir(dir_path, minimal_document)
        user_slug = random_slug('test-user-')
        zip_name = make_zip_name(user_slug)
        zip_path = compress_archive_dir(dir_path, zip_name)
        assert os.path.exists(zip_path)
def test_userSet():
    """
    User Set -- a set of user_slugs, usable to construct keys for
    user dicts and other objects. Add a user to the set; delete.
    v.0.1.0 is SINGLE_USER, so there is no pagination yet.
    """
    test_slug = random_slug('test-user-')
    count = data.userSet_count()
    data.userSet_set(test_slug)
    data.userSet_set(test_slug)  # idempotent: second add has no effect
    assert data.userSet_exists(test_slug)
    assert test_slug in data.userSet_list()
    assert count + 1 == data.userSet_count()
    data.userSet_delete(test_slug)
    assert not data.userSet_exists(test_slug)
    assert test_slug not in data.userSet_list()
    assert count == data.userSet_count()
def __init__(self, settings=None):
    """
    Settings hold all necessary context information.

    Args:
        settings: optional Settings instance. When None, a default
            Settings() is created — previously self.settings was left
            as None even though the assert permits None, so the
            self.settings.get() call below raised AttributeError.
    """
    assert isinstance(settings, Settings) or settings is None
    self.settings = settings if settings is not None else Settings()
    self.html = Html(self.settings)
    # Use the configured document name, else a random per-wiki prefix.
    self.id_prefix = self.settings.get(
        'config:document', random_slug('wiki_')
    )
    # Renderer components, populated during processing.
    self.outline = None
    self.cross_references = None
    self.footnotes = None
    self.links = None
    self.index = None
    self.tags = None
    self.bibliography = None
    self.citations = None
def test_users():
    """
    Users are Redis hashes of user names and admin status.
    v.0.1.0 is SINGLE_USER, so not super-important yet.
    """
    user_slug = random_slug('test-user-')
    key = data.user_key(user_slug)
    assert user_slug in key
    test_user = {'slug': 'my-name', 'is_admin': 'NO', 'password': '******'}
    assert not data.user_exists(user_slug)
    data.user_set(user_slug, test_user)
    assert data.user_exists(user_slug)
    assert data.user_get(user_slug) == test_user
    assert data.user_get('nonexistent-user') is None
    assert data.user_hash()[user_slug] == test_user
    data.user_delete(user_slug)
    assert not data.user_exists(user_slug)
    assert data.user_get(user_slug) is None
def test_repr_save_load_delete():
    """ Confirms data in data out. Builds upon data.py. """
    data.redis.flushdb()
    user_slug = random_slug('test-user-')
    doc_slug = random_slug('test-doc-')
    doc = Document(data)
    doc.set_host('http://example.org')
    doc.set_parts(user_slug, doc_slug, minimal_document)
    # Create: saving generates the slug from the document title and
    # populates every associated record (set, metadata, cache, etc.).
    new_doc_slug = doc.save(pregenerate=True, update_doc_slug=True)
    assert user_slug in str(doc)
    assert new_doc_slug in str(doc)
    assert "(3 parts)" in str(doc)
    assert new_doc_slug == 'example-document'
    assert data.userSet_exists(user_slug)
    assert data.userDocument_exists(user_slug, new_doc_slug)
    latest_slugs = [
        _['slug'] for _ in data.userDocumentLastChanged_list(user_slug)
    ]
    assert new_doc_slug in latest_slugs
    assert data.userDocumentMetadata_exists(user_slug, new_doc_slug)
    assert data.userDocumentCache_exists(user_slug, new_doc_slug)
    assert data.userDocumentSet_exists(user_slug, new_doc_slug)
    # Rename: changing the index title changes the derived doc_slug
    # on the next save.
    doc.set_index(
        trim("""
            New Example Document

            Text Goes Here!
            """))
    new_doc_slug = doc.save(pregenerate=True, update_doc_slug=True)
    assert new_doc_slug == "new-example-document"
    # All records under the old slug must be gone...
    assert not data.userDocumentSet_exists(user_slug, doc_slug)
    assert not data.userDocument_exists(user_slug, doc_slug)
    assert not data.userDocumentMetadata_exists(user_slug, doc_slug)
    assert not data.userDocumentCache_exists(user_slug, doc_slug)
    latest_metadata = data.userDocumentLastChanged_list(user_slug)
    assert not any([_.get('slug') == doc_slug for _ in latest_metadata])
    assert any([_.get('slug') == new_doc_slug for _ in latest_metadata])
    # ...and all records under the new slug present.
    assert data.userDocumentSet_exists(user_slug, new_doc_slug)
    assert data.userDocument_exists(user_slug, new_doc_slug)
    assert data.userDocumentMetadata_exists(user_slug, new_doc_slug)
    assert data.userDocumentCache_exists(user_slug, new_doc_slug)
    # NOTE(review): duplicate of the userDocumentSet_exists assert above.
    assert data.userDocumentSet_exists(user_slug, new_doc_slug)
    # Load: a second Document reads back identical parts.
    doc2 = Document(data)
    doc2.load(user_slug, new_doc_slug)
    assert doc.user_slug == doc2.user_slug
    assert doc.doc_slug == doc2.doc_slug
    assert doc.parts == doc2.parts
    # Delete: every associated record is removed.
    doc.delete()
    assert not data.userDocument_exists(user_slug, new_doc_slug)
    assert not data.userDocumentSet_exists(user_slug, new_doc_slug)
    assert not data.userDocumentMetadata_exists(user_slug, new_doc_slug)
    latest_metadata = data.userDocumentLastChanged_list(user_slug)
    assert not any([_.get('slug') == new_doc_slug for _ in latest_metadata])
    assert not data.userDocumentCache_exists(user_slug, new_doc_slug)