def test(self, cursor):
    """Publish a collated book and verify tree stamping and file content.

    Collation itself is mocked so the test controls exactly which
    composite objects and content changes get introduced.
    """
    binder = use_cases.setup_COMPLEX_BOOK_ONE_in_archive(self, cursor)
    cursor.connection.commit()
    publisher = 'ream'
    msg = 'part of collated publish'

    # Derive metadata for the composite document from an existing page.
    docs = list(cnxepub.flatten_to_documents(binder))
    metadata = docs[0].metadata.copy()
    for key in ('cnx-archive-uri', 'version'):
        del metadata[key]
    metadata['created'] = None
    metadata['revised'] = None
    metadata['title'] = "Made up of other things"

    # Fake collation objects to inject into the book.
    content = '<p>composite</p>'
    composite_doc = cnxepub.CompositeDocument(None, content, metadata)
    composite_section = cnxepub.TranslucentBinder(
        nodes=[composite_doc],
        metadata={'title': "Other things"})
    collated_doc_content = '<p>collated</p>'

    def collate(binder_model, ruleset=None, includes=None):
        # Mutate one page's content and append the composite section.
        binder_model[0][0].content = collated_doc_content
        binder_model.append(composite_section)
        return binder_model

    with mock.patch('cnxpublishing.collation.collate_models') as mock_collate:
        mock_collate.side_effect = collate
        errors = self.target(binder, publisher, msg)

    # No errors should have been reported.
    self.assertEqual(errors, [])

    # The original (non-collated) tree must not contain the composite.
    cursor.execute("SELECT tree_to_json(%s, %s, FALSE)::json;",
                   (binder.id, binder.metadata['version'],))
    tree = cursor.fetchone()[0]
    self.assertNotIn(composite_doc.ident_hash,
                     cnxepub.flatten_tree_to_ident_hashes(tree))

    # The collated tree must contain the composite.
    cursor.execute("SELECT tree_to_json(%s, %s, TRUE)::json;",
                   (binder.id, binder.metadata['version'],))
    collated_tree = cursor.fetchone()[0]
    self.assertIn(composite_doc.ident_hash,
                  cnxepub.flatten_tree_to_ident_hashes(collated_tree))

    # The content changes were persisted to the collated files.
    checks = [
        (binder[0][0], collated_doc_content),
        (composite_doc, content),
    ]
    for doc, expected in checks:
        self.assertIn(expected, self._get_collated_file(cursor, doc, binder))
def get_page_ident_hash(self, page_uuid, page_version,
                        book_uuid, book_version, latest=None):
    """Return the uuid of the page and full ident_hash of the page.

    The ident_hash is prefixed with the book's uuid (or versioned
    ident_hash) only when the page is found within the given book's
    tree; otherwise the bare page ident_hash is returned.

    :param page_uuid: uuid of the page
    :param page_version: version of the page (``None`` matches any)
    :param book_uuid: uuid of the containing book
    :param book_version: version of the containing book
    :param latest: when truthy, prefix with the bare book uuid rather
        than the versioned book ident_hash
    :returns: tuple of (uuid, ident_hash)
    """
    from cnxepub import flatten_tree_to_ident_hashes  # XXX
    # Also import split_ident_hash so this function does not rely on
    # it being injected into the surrounding (plpython) scope.
    from ..utils import join_ident_hash, split_ident_hash  # XXX
    plan = self.plpy.prepare('SELECT tree_to_json($1, $2, FALSE)::json',
                             ('uuid', 'text'))
    tree = self.plpy.execute(
        plan, (book_uuid, book_version))[0]['tree_to_json']
    pages = list(flatten_tree_to_ident_hashes(tree))
    book_ident_hash = join_ident_hash(book_uuid, book_version)
    page_ident_hash = join_ident_hash(page_uuid, page_version)
    for p_ident_hash in pages:
        p_id, p_version = split_ident_hash(p_ident_hash)
        if (p_id == page_uuid and
                (page_version is None or page_version == p_version)):
            # Page is inside the book: qualify it with the book.
            prefix = book_uuid if latest else book_ident_hash
            return book_uuid, '{}:{}'.format(prefix, page_ident_hash)
    # The page isn't in the given book, so only return the page.
    return page_uuid, page_ident_hash
def test_get(self):
    """The tree for an ident_hash flattens to the expected ident_hashes."""
    ident_hash = '[email protected]'
    tree = self.target(ident_hash)
    expected_flattened_tree = [
        u'[email protected]',
        u'209deb1f-1a46-4369-9e0d-18674cf58a3e@7',
        u'f3c9ab70-a916-4d8c-9256-42953287b4e9@3',
        u'd395b566-5fe3-4428-bcb2-19016e3aa3ce@4',
        u'c8bdbabc-62b1-4a5f-b291-982ab25756d7@6',
        u'5152cea8-829a-4aaf-bcc5-c58a416ecb66@7',
        u'5838b105-41cd-4c3d-a957-3ac004a48af3@5',
        u'24a2ed13-22a6-47d6-97a3-c8aa8d54ac6d@2',
        u'ea271306-f7f2-46ac-b2ec-1d80ff186a59@5',
        u'26346a42-84b9-48ad-9f6a-62303c16ad41@6',
        u'56f1c5c1-4014-450d-a477-2121e276beca@8',
        u'f6024d8a-1868-44c7-ab65-45419ef54881@3',
        u'7250386b-14a7-41a2-b8bf-9e9ab872f0dc@2',
        u'c0a76659-c311-405f-9a99-15c71af39325@5',
        u'ae3e18de-638d-4738-b804-dc69cd4db3a3@5',
    ]
    # list() instead of a copy-comprehension: same result, clearer intent.
    self.assertEqual(
        list(cnxepub.flatten_tree_to_ident_hashes(tree)),
        expected_flattened_tree)
def test(self, cursor):
    """Bake a book containing exercises and check the baked output.

    ``collate_models`` is wrapped (not replaced) so its return value
    can be captured and inspected after the bake completes.
    """
    recipes = use_cases.setup_RECIPES_in_archive(self, cursor)
    binder = use_cases.setup_EXERCISES_BOOK_in_archive(self, cursor)
    cursor.connection.commit()
    publisher = 'ream'
    msg = 'part of collated publish'

    # Capture the result of collate_models for later inspection.
    captured = []
    from cnxpublishing.bake import collate_models

    def recording_collate(binder, ruleset=None, includes=None):
        result = collate_models(binder, ruleset=ruleset, includes=includes)
        captured.append(result)
        return result

    with mock.patch('cnxpublishing.bake.collate_models') as mock_collate:
        mock_collate.side_effect = recording_collate
        self.target(binder, recipes[1], publisher, msg, cursor=cursor)
    composite_doc = captured[0]

    # The baked tree must contain the composite document.
    cursor.execute("SELECT tree_to_json(%s, %s, TRUE)::json;",
                   (binder.id, binder.metadata['version'],))
    baked_tree = cursor.fetchone()[0]
    self.assertIn(composite_doc.ident_hash,
                  cnxepub.flatten_tree_to_ident_hashes(baked_tree))

    # The exercises were pulled into the content.
    content = composite_doc[0].content
    for snippet in (
            '<div>What is kinematics?</div>',
            'No, the gravitational force is a field force and does not',
            '<div>What kind of physical quantity is force?</div>',
            '<li>Both internal and external forces</li>'):
        self.assertIn(snippet, content)
def test(self, cursor):
    """Publish a composite doc into a book and stamp the collated tree."""
    binder = use_cases.setup_COMPLEX_BOOK_ONE_in_archive(self, cursor)

    # Derive metadata for the composite document from an existing page.
    first_doc = list(cnxepub.flatten_to_documents(binder))[0]
    metadata = first_doc.metadata.copy()
    for key in ('cnx-archive-uri', 'version'):
        del metadata[key]
    metadata['title'] = "Made up of other things"
    publisher = metadata['publishers'][0]['id']
    message = "Composite addition"

    # Fake collation object for the book.
    content = '<p class="para">composite</p>'
    composite_doc = cnxepub.CompositeDocument(None, content, metadata)

    from cnxpublishing.publish import publish_composite_model
    ident_hash = publish_composite_model(cursor, composite_doc, binder,
                                         publisher, message)

    # Shim the composite document into the binder and stamp the tree.
    binder.append(composite_doc)
    self.target(cursor, cnxepub.model_to_tree(binder))

    cursor.execute("SELECT tree_to_json(%s, %s, TRUE)::json;",
                   (binder.id, binder.metadata['version'],))
    collated_tree = cursor.fetchone()[0]
    self.assertIn(composite_doc.ident_hash,
                  cnxepub.flatten_tree_to_ident_hashes(collated_tree))
def get_page_ident_hash(self, page_uuid, page_version,
                        book_uuid, book_version, latest=None):
    """Return the uuid of the page and full ident_hash of the page.

    The ident_hash is prefixed with the book's uuid (or versioned
    ident_hash) only when the page is found within the given book's
    tree; otherwise the bare page ident_hash is returned.

    :param page_uuid: uuid of the page
    :param page_version: version of the page (``None`` matches any)
    :param book_uuid: uuid of the containing book
    :param book_version: version of the containing book
    :param latest: when truthy, prefix with the bare book uuid rather
        than the versioned book ident_hash
    :returns: tuple of (uuid, ident_hash)
    """
    from cnxepub import flatten_tree_to_ident_hashes  # XXX
    # Also import split_ident_hash so this function does not rely on
    # it being injected into the surrounding (plpython) scope.
    from ..utils import join_ident_hash, split_ident_hash  # XXX
    plan = self.plpy.prepare(
        'SELECT tree_to_json($1, $2, FALSE)::json', ('uuid', 'text'))
    tree = self.plpy.execute(
        plan, (book_uuid, book_version))[0]['tree_to_json']
    pages = list(flatten_tree_to_ident_hashes(tree))
    book_ident_hash = join_ident_hash(book_uuid, book_version)
    page_ident_hash = join_ident_hash(page_uuid, page_version)
    for p_ident_hash in pages:
        p_id, p_version = split_ident_hash(p_ident_hash)
        if (p_id == page_uuid and
                (page_version is None or page_version == p_version)):
            # Page is inside the book: qualify it with the book.
            prefix = book_uuid if latest else book_ident_hash
            return book_uuid, '{}:{}'.format(prefix, page_ident_hash)
    # The page isn't in the given book, so only return the page.
    return page_uuid, page_ident_hash
def test_get(self):
    """The tree for an ident_hash flattens to the expected ident_hashes."""
    ident_hash = '[email protected]'
    tree = self.target(ident_hash)
    expected_flattened_tree = [
        u'[email protected]',
        u'209deb1f-1a46-4369-9e0d-18674cf58a3e@7',
        u'[email protected]',
        u'f3c9ab70-a916-4d8c-9256-42953287b4e9@3',
        u'd395b566-5fe3-4428-bcb2-19016e3aa3ce@4',
        u'c8bdbabc-62b1-4a5f-b291-982ab25756d7@6',
        u'5152cea8-829a-4aaf-bcc5-c58a416ecb66@7',
        u'5838b105-41cd-4c3d-a957-3ac004a48af3@5',
        u'[email protected]',
        u'24a2ed13-22a6-47d6-97a3-c8aa8d54ac6d@2',
        u'ea271306-f7f2-46ac-b2ec-1d80ff186a59@5',
        u'26346a42-84b9-48ad-9f6a-62303c16ad41@6',
        u'56f1c5c1-4014-450d-a477-2121e276beca@8',
        u'f6024d8a-1868-44c7-ab65-45419ef54881@3',
        u'7250386b-14a7-41a2-b8bf-9e9ab872f0dc@2',
        u'c0a76659-c311-405f-9a99-15c71af39325@5',
        u'ae3e18de-638d-4738-b804-dc69cd4db3a3@5',
    ]
    # list() instead of a copy-comprehension: same result, clearer intent.
    self.assertEqual(
        list(cnxepub.flatten_tree_to_ident_hashes(tree)),
        expected_flattened_tree)
def test(self, cursor):
    """Removing baked content leaves the original tree but no baked one."""
    self.target(self.ident_hash, cursor=cursor)

    from cnxpublishing.utils import split_ident_hash
    uuid_, version = split_ident_hash(self.ident_hash)

    # The original (non-baked) tree is intact and lacks the composite.
    cursor.execute("SELECT tree_to_json(%s, %s, FALSE)::json;",
                   (uuid_, version,))
    tree = cursor.fetchone()[0]
    self.assertNotIn(self.composite_ident_hash,
                     cnxepub.flatten_tree_to_ident_hashes(tree))

    # There is no longer a baked tree.
    cursor.execute("SELECT tree_to_json(%s, %s, TRUE)::json;",
                   (uuid_, version,))
    baked_tree = cursor.fetchone()[0]
    self.assertEqual(baked_tree, None)

    # The collated/baked files relationship is removed; fetchone()
    # returns None, so subscripting it raises TypeError.
    cursor.execute(
        "SELECT * FROM collated_file_associations AS cfa NATURAL JOIN modules AS m "
        "WHERE ident_hash(m.uuid, m.major_version, m.minor_version) = %s",
        (self.ident_hash,))
    with self.assertRaises(TypeError):
        cursor.fetchone()[0]
def test(self, cursor):
    """Publish a composite document and verify it lands in the collated tree."""
    binder = use_cases.setup_COMPLEX_BOOK_ONE_in_archive(self, cursor)

    # Build composite-document metadata from the book's first page.
    metadata = [doc.metadata.copy()
                for doc in cnxepub.flatten_to_documents(binder)][0]
    del metadata['cnx-archive-uri']
    del metadata['version']
    metadata['title'] = "Made up of other things"
    publisher = metadata['publishers'][0]['id']
    message = "Composite addition"

    # Create and publish the fake collation object.
    content = '<p class="para">composite</p>'
    composite_doc = cnxepub.CompositeDocument(None, content, metadata)
    from cnxpublishing.publish import publish_composite_model
    ident_hash = publish_composite_model(
        cursor, composite_doc, binder, publisher, message)

    # Shim the composite document into the binder, then stamp the tree.
    binder.append(composite_doc)
    tree = cnxepub.model_to_tree(binder)
    self.target(cursor, tree)

    cursor.execute("SELECT tree_to_json(%s, %s, TRUE)::json;",
                   (binder.id, binder.metadata['version'],))
    collated_tree = cursor.fetchone()[0]
    flattened = cnxepub.flatten_tree_to_ident_hashes(collated_tree)
    self.assertIn(composite_doc.ident_hash, flattened)
def test(self, cursor):
    """Removing collated content leaves the original tree but no collated one."""
    self.target(self.ident_hash, cursor=cursor)

    from cnxpublishing.utils import split_ident_hash
    id, version = split_ident_hash(self.ident_hash)

    # Original (non-collated) tree is intact, without the composite.
    cursor.execute("SELECT tree_to_json(%s, %s, FALSE)::json;",
                   (id, version,))
    original_tree = cursor.fetchone()[0]
    self.assertNotIn(self.composite_ident_hash,
                     cnxepub.flatten_tree_to_ident_hashes(original_tree))

    # The collated tree no longer exists.
    cursor.execute("SELECT tree_to_json(%s, %s, TRUE)::json;",
                   (id, version,))
    self.assertEqual(cursor.fetchone()[0], None)

    # The collated files relationship is removed; fetchone() returns
    # None, so subscripting it raises TypeError.
    cursor.execute(
        "SELECT * FROM collated_file_associations AS cfa NATURAL JOIN modules AS m WHERE m.uuid = %s AND concat_ws('.', m.major_version, m.minor_version) = %s",
        (id, version,))
    with self.assertRaises(TypeError):
        cursor.fetchone()[0]
def test(self, cursor):
    """Bake a book with mocked collation and verify trees and baked files."""
    binder = use_cases.setup_COMPLEX_BOOK_ONE_in_archive(self, cursor)
    cursor.connection.commit()
    publisher = 'ream'
    msg = 'part of collated publish'

    # Composite-document metadata derived from an existing page.
    source_doc = list(cnxepub.flatten_to_documents(binder))[0]
    metadata = source_doc.metadata.copy()
    for key in ('cnx-archive-uri', 'version'):
        del metadata[key]
    metadata['created'] = None
    metadata['revised'] = None
    metadata['title'] = "Made up of other things"

    # Fake collation objects to inject during the bake.
    content = '<p>composite</p>'
    composite_doc = cnxepub.CompositeDocument(None, content, metadata)
    composite_section = cnxepub.TranslucentBinder(
        nodes=[composite_doc],
        metadata={'title': "Other things"})
    baked_doc_content = '<p>collated</p>'

    def fake_collate(binder_model, ruleset=None, includes=None):
        # Mutate one page's content and append the composite section.
        binder_model[0][0].content = baked_doc_content
        binder_model.append(composite_section)
        return binder_model

    fake_recipe_id = 1
    with mock.patch('cnxpublishing.bake.collate_models') as mock_collate:
        mock_collate.side_effect = fake_collate
        errors = self.target(binder, fake_recipe_id, publisher, msg)

    # No errors should have been reported.
    self.assertEqual(errors, [])

    # The original (non-baked) tree must not contain the composite.
    cursor.execute("SELECT tree_to_json(%s, %s, FALSE)::json;",
                   (binder.id, binder.metadata['version'],))
    tree = cursor.fetchone()[0]
    self.assertNotIn(composite_doc.ident_hash,
                     cnxepub.flatten_tree_to_ident_hashes(tree))

    # The baked tree must contain the composite.
    cursor.execute("SELECT tree_to_json(%s, %s, TRUE)::json;",
                   (binder.id, binder.metadata['version'],))
    baked_tree = cursor.fetchone()[0]
    self.assertIn(composite_doc.ident_hash,
                  cnxepub.flatten_tree_to_ident_hashes(baked_tree))

    # The content changes were persisted to the baked files.
    for doc, expected in ((binder[0][0], baked_doc_content),
                          (composite_doc, content)):
        self.assertIn(expected, self._get_baked_file(cursor, doc, binder))