def test(self, cursor):
    """A modified document's collated content is persisted and associated."""
    binder = use_cases.setup_COMPLEX_BOOK_ONE_in_archive(self, cursor)
    # Modify the content of a document to mock the collation changes.
    document = list(cnxepub.flatten_to_documents(binder))[0]
    # Add some fake collation objects to the book.
    collated_content = '<body><p class="para">collated</p></body>'
    document.content = collated_content

    self.target(cursor, document, binder)

    # Ensure the file entry and association entry.
    cursor.execute("""\
SELECT f.file
FROM collated_file_associations AS cfa NATURAL JOIN files AS f,
     modules AS m1, -- context
     modules AS m2 -- item
WHERE
  (ident_hash(m1.uuid, m1.major_version, m1.minor_version) = %s
   AND m1.module_ident = cfa.context)
  AND
  (ident_hash(m2.uuid, m2.major_version, m2.minor_version) = %s
   AND m2.module_ident = cfa.item)""",
                   (binder.ident_hash, document.ident_hash,))
    persisted_content = cursor.fetchone()[0][:]
    self.assertIn(collated_content, persisted_content)
def test(self, cursor):
    """A published composite document appears in the collated tree."""
    binder = use_cases.setup_COMPLEX_BOOK_ONE_in_archive(self, cursor)

    # Build some new metadata for the composite document,
    # derived from an existing document's metadata.
    source_doc = list(cnxepub.flatten_to_documents(binder))[0]
    metadata = source_doc.metadata.copy()
    del metadata['cnx-archive-uri']
    del metadata['version']
    metadata['title'] = "Made up of other things"
    publisher = metadata['publishers'][0]['id']
    message = "Composite addition"

    # Add some fake collation objects to the book.
    content = '<p class="para">composite</p>'
    composite_doc = cnxepub.CompositeDocument(None, content, metadata)
    from cnxpublishing.publish import publish_composite_model
    ident_hash = publish_composite_model(cursor, composite_doc, binder,
                                         publisher, message)

    # Shim the composite document into the binder.
    binder.append(composite_doc)
    tree = cnxepub.model_to_tree(binder)

    self.target(cursor, tree)

    cursor.execute("SELECT tree_to_json(%s, %s, TRUE)::json;",
                   (binder.id, binder.metadata['version'],))
    collated_tree = cursor.fetchone()[0]
    self.assertIn(composite_doc.ident_hash,
                  cnxepub.flatten_tree_to_ident_hashes(collated_tree))
def _validate_required_data(model):
    """Does the model have the required data?

    Returns a list of validation error constants; empty when the model
    is valid.

    :raises ValueError: when ``model`` is neither a Document nor a Binder
    """
    validation_errors = []
    if isinstance(model, cnxepub.Document):
        # Check for content...
        # Wrap the content so that we can parse it.
        content = u"<html><body>{}</body></html>".format(model.content)
        tree = etree.parse(io.StringIO(content))
        # Any non-empty text node in the body counts as content.
        contains_content = any(
            element_text != ''
            for element_text in tree.xpath('/html/body//text()'))
        if not contains_content:
            validation_errors.append(VALIDATION_NO_CONTENT)
    elif isinstance(model, cnxepub.Binder):
        # Does the binder have documents?
        documents_generator = cnxepub.flatten_to_documents(
            model, include_pointers=True)
        # The generator only needs to yield once; don't materialize the
        # whole document sequence just to count it.
        contains_docs = next(iter(documents_generator), None) is not None
        if not contains_docs:
            validation_errors.append(VALIDATION_NO_CONTENT)
    else:
        raise ValueError('{} is not a Document or a Binder'.format(model))
    return validation_errors
def _validate_required_data(model):
    """Does the model have the required data?"""
    validation_errors = []
    if isinstance(model, cnxepub.Document):
        # Check for content... Wrap it so that we can parse it.
        wrapped = u"<html><body>{}</body></html>".format(model.content)
        parsed = etree.parse(io.StringIO(wrapped))
        has_content = False
        for text_node in parsed.xpath('/html/body//text()'):
            if text_node != '':
                has_content = True
                break
        if not has_content:
            validation_errors.append(VALIDATION_NO_CONTENT)
    elif isinstance(model, cnxepub.Binder):
        # The binder must contain at least one document (or pointer).
        docs = cnxepub.flatten_to_documents(model, include_pointers=True)
        if len(list(docs)) < 1:
            validation_errors.append(VALIDATION_NO_CONTENT)
    else:
        raise ValueError('{} is not a Document or a Binder'.format(model))
    return validation_errors
def test(self, cursor):
    """Collating a tree containing a composite document records it."""
    binder = use_cases.setup_COMPLEX_BOOK_ONE_in_archive(self, cursor)

    # Build some new metadata for the composite document.
    all_metadata = [x.metadata.copy()
                    for x in cnxepub.flatten_to_documents(binder)]
    metadata = all_metadata[0]
    del metadata['cnx-archive-uri']
    del metadata['version']
    metadata['title'] = "Made up of other things"
    publisher = metadata['publishers'][0]['id']
    message = "Composite addition"

    # Add some fake collation objects to the book.
    content = '<p class="para">composite</p>'
    composite_doc = cnxepub.CompositeDocument(None, content, metadata)
    from cnxpublishing.publish import publish_composite_model
    ident_hash = publish_composite_model(cursor, composite_doc, binder,
                                         publisher, message)

    # Shim the composite document into the binder.
    binder.append(composite_doc)
    tree = cnxepub.model_to_tree(binder)

    self.target(cursor, tree)

    cursor.execute("SELECT tree_to_json(%s, %s, TRUE)::json;",
                   (binder.id, binder.metadata['version'],))
    collated_tree = cursor.fetchone()[0]
    self.assertIn(composite_doc.ident_hash,
                  cnxepub.flatten_tree_to_ident_hashes(collated_tree))
def test_no_change_to_contents(self, cursor):
    """An unmodified document still gets a collated file association."""
    binder = use_cases.setup_COMPLEX_BOOK_ONE_in_archive(self, cursor)
    # Use a document as-is, without mocking any collation changes.
    document = list(cnxepub.flatten_to_documents(binder))[0]

    self.target(cursor, document, binder)

    # Ensure the file entry and association entry.
    cursor.execute(
        """\
SELECT f.file
FROM collated_file_associations AS cfa NATURAL JOIN files AS f,
     modules AS m1, -- context
     modules AS m2 -- item
WHERE
  (ident_hash(m1.uuid, m1.major_version, m1.minor_version) = %s
   AND m1.module_ident = cfa.context)
  AND
  (ident_hash(m2.uuid, m2.major_version, m2.minor_version) = %s
   AND m2.module_ident = cfa.item)""",
        (binder.ident_hash, document.ident_hash,))
    stored_content = cursor.fetchone()[0][:]
    self.assertIn(document.content, stored_content)
def test(self, cursor):
    """Full collated publish: composite page added, content persisted.

    Collation is mocked so the test controls exactly what the "collated"
    book looks like; the assertions then verify the original tree is
    untouched, the collated tree is stamped, and the changed content
    was written to the collated files.
    """
    binder = use_cases.setup_COMPLEX_BOOK_ONE_in_archive(self, cursor)
    cursor.connection.commit()
    publisher = 'ream'
    msg = 'part of collated publish'

    # Build some new metadata for the composite document.
    metadata = [x.metadata.copy()
                for x in cnxepub.flatten_to_documents(binder)][0]
    del metadata['cnx-archive-uri']
    del metadata['version']
    metadata['created'] = None
    metadata['revised'] = None
    metadata['title'] = "Made up of other things"

    # Add some fake collation objects to the book.
    content = '<p>composite</p>'
    composite_doc = cnxepub.CompositeDocument(None, content, metadata)
    composite_section = cnxepub.TranslucentBinder(
        nodes=[composite_doc],
        metadata={'title': "Other things"})

    collated_doc_content = '<p>collated</p>'

    def collate(binder_model, ruleset=None, includes=None):
        # Mimic a collation pass: alter one page, add a composite section.
        binder_model[0][0].content = collated_doc_content
        binder_model.append(composite_section)
        return binder_model

    with mock.patch('cnxpublishing.collation.collate_models') as mock_collate:
        mock_collate.side_effect = collate
        errors = self.target(binder, publisher, msg)

    # Ensure the output of the errors.
    self.assertEqual(errors, [])

    # Ensure the original tree is intact.
    cursor.execute("SELECT tree_to_json(%s, %s, FALSE)::json;",
                   (binder.id, binder.metadata['version'],))
    tree = cursor.fetchone()[0]
    self.assertNotIn(composite_doc.ident_hash,
                     cnxepub.flatten_tree_to_ident_hashes(tree))

    # Ensure the tree as been stamped.
    cursor.execute("SELECT tree_to_json(%s, %s, TRUE)::json;",
                   (binder.id, binder.metadata['version'],))
    collated_tree = cursor.fetchone()[0]
    self.assertIn(composite_doc.ident_hash,
                  cnxepub.flatten_tree_to_ident_hashes(collated_tree))

    # Ensure the changes to a document content were persisted.
    # Loop variable renamed from ``content`` to avoid shadowing the
    # composite content bound above (which the pairs list still uses).
    content_to_check = [
        (binder[0][0], collated_doc_content,),
        (composite_doc, content,),
    ]
    for doc, expected_content in content_to_check:
        self.assertIn(expected_content,
                      self._get_collated_file(cursor, doc, binder))
def test(self, cursor):
    """Publishing a composite model stores metadata, file and association."""
    binder = use_cases.setup_COMPLEX_BOOK_ONE_in_archive(self, cursor)

    # Build some new metadata for the composite document.
    source_doc = list(cnxepub.flatten_to_documents(binder))[0]
    metadata = source_doc.metadata.copy()
    del metadata['cnx-archive-uri']
    del metadata['version']
    metadata['title'] = "Made up of other things"
    publisher = metadata['publishers'][0]['id']
    message = "Composite addition"

    # Add some fake collation objects to the book.
    content = '<p class="para">composite</p>'
    composite_doc = cnxepub.CompositeDocument(None, content, metadata)

    ident_hash = self.target(cursor, composite_doc, binder,
                             publisher, message)

    # Ensure the model's identifiers has been set.
    self.assertEqual(ident_hash, composite_doc.ident_hash)
    self.assertEqual(ident_hash, composite_doc.get_uri('cnx-archive'))

    # The only thing different in the module metadata insertion is
    # the `portal_type` value
    cursor.execute(
        "SELECT portal_type "
        "FROM modules "
        "WHERE ident_hash(uuid, major_version, minor_version) = %s",
        (ident_hash,))
    portal_type = cursor.fetchone()[0]
    self.assertEqual(portal_type, 'CompositeModule')

    # Ensure the file entry and association entry.
    cursor.execute(
        """\
SELECT f.file
FROM collated_file_associations AS cfa NATURAL JOIN files AS f,
     modules AS m1, -- context
     modules AS m2 -- item
WHERE
  (ident_hash(m1.uuid, m1.major_version, m1.minor_version) = %s
   AND m1.module_ident = cfa.context)
  AND
  (ident_hash(m2.uuid, m2.major_version, m2.minor_version) = %s
   AND m2.module_ident = cfa.item)""",
        (binder.ident_hash, ident_hash,))
    persisted_content = cursor.fetchone()[0][:]
    self.assertIn(content, persisted_content)
def test(self, cursor):
    """Composite publish persists a 'CompositeModule' and its collated file."""
    binder = use_cases.setup_COMPLEX_BOOK_ONE_in_archive(self, cursor)

    # Build some new metadata for the composite document.
    docs_metadata = [x.metadata.copy()
                     for x in cnxepub.flatten_to_documents(binder)]
    metadata = docs_metadata[0]
    del metadata['cnx-archive-uri']
    del metadata['version']
    metadata['title'] = "Made up of other things"
    publisher = metadata['publishers'][0]['id']
    message = "Composite addition"

    # Add some fake collation objects to the book.
    content = '<body><p class="para">composite</p></body>'
    composite_doc = cnxepub.CompositeDocument(None, content, metadata)

    ident_hash = self.target(cursor, composite_doc, binder,
                             publisher, message)

    # Ensure the model's identifiers has been set.
    self.assertEqual(ident_hash, composite_doc.ident_hash)
    self.assertEqual(ident_hash, composite_doc.get_uri('cnx-archive'))

    # The only thing different in the module metadata insertion is
    # the `portal_type` value
    cursor.execute(
        "SELECT portal_type "
        "FROM modules "
        "WHERE ident_hash(uuid, major_version, minor_version) = %s",
        (ident_hash,))
    self.assertEqual(cursor.fetchone()[0], 'CompositeModule')

    # Ensure the file entry and association entry.
    cursor.execute("""\
SELECT f.file
FROM collated_file_associations AS cfa NATURAL JOIN files AS f,
     modules AS m1, -- context
     modules AS m2 -- item
WHERE
  (ident_hash(m1.uuid, m1.major_version, m1.minor_version) = %s
   AND m1.module_ident = cfa.context)
  AND
  (ident_hash(m2.uuid, m2.major_version, m2.minor_version) = %s
   AND m2.module_ident = cfa.item)""",
                   (binder.ident_hash, ident_hash,))
    persisted_content = cursor.fetchone()[0][:]
    self.assertIn(content, persisted_content)
def setUp(self, cursor):
    """Bake a book containing a composite page and record its identifiers.

    Stores ``ident_hash``, ``composite_ident_hash`` and the sha1s of the
    baked files on ``self`` for the removal tests to use.
    """
    super(RemoveBakedTestCase, self).setUp()
    binder = use_cases.setup_COMPLEX_BOOK_ONE_in_archive(self, cursor)
    cursor.connection.commit()
    publisher = 'ream'
    msg = 'part of collated publish'

    # Build some new metadata for the composite document.
    metadata = [x.metadata.copy()
                for x in cnxepub.flatten_to_documents(binder)][0]
    del metadata['cnx-archive-uri']
    del metadata['version']
    metadata['created'] = None
    metadata['revised'] = None
    metadata['title'] = "Made up of other things"

    # Add some fake collation objects to the book.
    content = '<p>composite</p>'
    composite_doc = cnxepub.CompositeDocument(None, content, metadata)
    composite_section = cnxepub.TranslucentBinder(
        nodes=[composite_doc],
        metadata={'title': "Other things"})

    baked_doc_content = '<p>collated</p>'

    def cnxepub_collate(binder_model, ruleset=None, includes=None):
        # Mimic a bake pass: alter one page, append a composite section.
        binder_model[0][0].content = baked_doc_content
        binder_model.append(composite_section)
        return binder_model

    with mock.patch('cnxpublishing.bake.collate_models') as mock_collate:
        mock_collate.side_effect = cnxepub_collate
        from cnxpublishing.bake import bake
        fake_recipe_id = 1
        # The returned errors are intentionally ignored in this fixture.
        bake(binder, fake_recipe_id, publisher, msg, cursor=cursor)

    self.ident_hash = binder.ident_hash
    self.composite_ident_hash = composite_doc.ident_hash
    self.baked_doc_sha1 = self._get_file_sha1(cursor, binder[0][0], binder)
    self.composite_doc_sha1 = self._get_file_sha1(cursor, composite_doc,
                                                  binder)
def setUp(self, cursor):
    """Bake a book with a composite page; remember identifiers and sha1s."""
    super(RemoveBakedTestCase, self).setUp()
    binder = use_cases.setup_COMPLEX_BOOK_ONE_in_archive(self, cursor)
    cursor.connection.commit()
    publisher = 'ream'
    msg = 'part of collated publish'

    # Build some new metadata for the composite document.
    source_doc = list(cnxepub.flatten_to_documents(binder))[0]
    metadata = source_doc.metadata.copy()
    del metadata['cnx-archive-uri']
    del metadata['version']
    metadata['created'] = None
    metadata['revised'] = None
    metadata['title'] = "Made up of other things"

    # Add some fake collation objects to the book.
    content = '<body><p>composite</p></body>'
    composite_doc = cnxepub.CompositeDocument(None, content, metadata)
    composite_section = cnxepub.TranslucentBinder(
        nodes=[composite_doc],
        metadata={'title': "Other things"})

    baked_doc_content = '<body><p>collated</p></body>'

    def cnxepub_collate(binder_model, ruleset=None, includes=None):
        binder_model[0][0].content = baked_doc_content
        binder_model.append(composite_section)
        return binder_model

    with mock.patch('cnxpublishing.bake.collate_models') as mock_collate:
        mock_collate.side_effect = cnxepub_collate
        from cnxpublishing.bake import bake
        fake_recipe_id = 1
        bake(binder, fake_recipe_id, publisher, msg, cursor=cursor)

    self.ident_hash = binder.ident_hash
    self.composite_ident_hash = composite_doc.ident_hash
    self.baked_doc_sha1 = self._get_file_sha1(cursor, binder[0][0], binder)
    self.composite_doc_sha1 = self._get_file_sha1(cursor, composite_doc,
                                                  binder)
def update_containment(binder, deletion=False):
    """Update the containment status of all draft documents in this binder.

    :param binder: the binder whose documents' ``contained_in`` metadata
        should be synchronized
    :param deletion: when True, the binder is being deleted, so only
        removals are performed (no documents gain this binder)
    """
    from .storage import storage
    b_id = binder.id
    doc_ids = []
    # PEP 8: no spaces around '=' in keyword arguments.
    old_docs = storage.get_all(contained_in=b_id)
    # additions
    if not deletion:
        docs = cnxepub.flatten_to_documents(binder)
        for doc in docs:
            doc_ids.append(doc.id)  # gather for subtractions below
            if b_id not in doc.metadata['contained_in']:
                doc.metadata['contained_in'].append(b_id)
                storage.update(doc)
    # subtractions
    for doc in old_docs:
        if doc.id not in doc_ids:
            if b_id in doc.metadata['contained_in']:
                doc.metadata['contained_in'].remove(b_id)
                storage.update(doc)
def update_containment(binder, deletion=False):
    """updates the containment status of all draft documents in this binder"""
    from .storage import storage
    binder_id = binder.id
    current_doc_ids = []
    previously_contained = storage.get_all(contained_in=binder_id)
    # additions: every document still in the binder should reference it
    if not deletion:
        for document in cnxepub.flatten_to_documents(binder):
            current_doc_ids.append(document.id)  # gather for subtractions
            if binder_id not in document.metadata['contained_in']:
                document.metadata['contained_in'].append(binder_id)
                storage.update(document)
    # subtractions: documents no longer in the binder drop the reference
    for document in previously_contained:
        if (document.id not in current_doc_ids
                and binder_id in document.metadata['contained_in']):
            document.metadata['contained_in'].remove(binder_id)
            storage.update(document)
def test_no_change_to_contents(self, cursor):
    """An untouched document still produces a collated file association."""
    binder = use_cases.setup_COMPLEX_BOOK_ONE_in_archive(self, cursor)
    # Take a document as-is; no mock collation changes are applied.
    document = list(cnxepub.flatten_to_documents(binder))[0]

    self.target(cursor, document, binder)

    # Ensure the file entry and association entry.
    cursor.execute("""\
SELECT f.file
FROM collated_file_associations AS cfa NATURAL JOIN files AS f,
     modules AS m1, -- context
     modules AS m2 -- item
WHERE
  (m1.uuid||'@'||concat_ws('.', m1.major_version, m1.minor_version) = %s
   AND m1.module_ident = cfa.context)
  AND
  (m2.uuid||'@'||concat_ws('.', m2.major_version, m2.minor_version) = %s
   AND m2.module_ident = cfa.item)""",
                   (binder.ident_hash, document.ident_hash,))
    stored_content = cursor.fetchone()[0][:]
    self.assertIn(document.content, stored_content)
def republish_binders(cursor, models):
    """Republish the Binders that share Documents in the publication context.

    This needs to be given all the models in the publication context.
    """
    documents = set()
    binders = set()
    history_mapping = {}  # <previous-ident-hash>: <current-ident-hash>
    if not isinstance(models, (list, tuple, set,)):
        raise TypeError("``models`` Must be a sequence of model objects."
                        "We were given: {}".format(models))

    # Partition the context into binder uuids and document (uuid, version)
    # pairs; documents inside a binder are registered individually.
    for model in models:
        if isinstance(model, (cnxepub.Binder,)):
            binders.add(split_ident_hash(model.ident_hash)[0])
            for doc in cnxepub.flatten_to_documents(model):
                documents.add(split_ident_hash(doc.ident_hash))
        else:
            documents.add(split_ident_hash(model.ident_hash))

    to_be_republished = []
    # What binders are these documents a part of?
    for (uuid, version) in documents:
        ident_hash = join_ident_hash(uuid, version)
        previous_ident_hash = get_previous_publication(cursor, ident_hash)
        if previous_ident_hash is None:
            # Has no prior existence.
            continue
        history_mapping[previous_ident_hash] = ident_hash
        # Walk the tree upward from the previous publication to find the
        # root (parentless) latest collections containing the document.
        cursor.execute("""\
WITH RECURSIVE t(nodeid, parent_id, documentid, path) AS (
  SELECT tr.nodeid, tr.parent_id, tr.documentid, ARRAY[tr.nodeid]
  FROM trees tr
  WHERE tr.documentid = (
    SELECT module_ident FROM modules
    WHERE ident_hash(uuid, major_version, minor_version) = %s)
UNION ALL
  SELECT c.nodeid, c.parent_id, c.documentid, path || ARRAY[c.nodeid]
  FROM trees c JOIN t ON (c.nodeid = t.parent_id)
  WHERE not c.nodeid = ANY(t.path)
)
SELECT ident_hash(uuid, major_version, minor_version)
FROM t JOIN latest_modules m ON (t.documentid = m.module_ident)
WHERE t.parent_id IS NULL
""", (previous_ident_hash,))
        to_be_republished.extend(
            split_ident_hash(row[0]) for row in cursor.fetchall())
    to_be_republished = set(to_be_republished)

    republished_ident_hashes = []
    # Republish the Collections set.
    for (uuid, version) in to_be_republished:
        if uuid in binders:
            # This binder is already in the publication context,
            # don't try to publish it again.
            continue
        ident_hash = join_ident_hash(uuid, version)
        bumped_version = bump_version(cursor, uuid, is_minor_bump=True)
        republished_ident_hash = republish_collection(
            cursor, ident_hash, version=bumped_version)
        # Set the identifier history.
        history_mapping[ident_hash] = republished_ident_hash
        rebuild_collection_tree(cursor, ident_hash, history_mapping)
        republished_ident_hashes.append(republished_ident_hash)

    return republished_ident_hashes
def republish_binders(cursor, models):
    """Republish the Binders that share Documents in the publication context.

    This needs to be given all the models in the publication context.
    """
    documents = set()
    binders = set()
    history_mapping = {}  # <previous-ident-hash>: <current-ident-hash>
    if not isinstance(models, (list, tuple, set,)):
        raise TypeError("``models`` Must be a sequence of model objects."
                        "We were given: {}".format(models))

    # NOTE: unlike some variants, this implementation keys ``binders`` by
    # the full (uuid, version) pair rather than the bare uuid.
    for model in models:
        if isinstance(model, (cnxepub.Binder,)):
            binders.add(split_ident_hash(model.ident_hash))
            for doc in cnxepub.flatten_to_documents(model):
                documents.add(split_ident_hash(doc.ident_hash))
        else:
            documents.add(split_ident_hash(model.ident_hash))

    to_be_republished = []
    # What binders are these documents a part of?
    for (uuid, version) in documents:
        ident_hash = join_ident_hash(uuid, version)
        previous_ident_hash = get_previous_publication(cursor, ident_hash)
        if previous_ident_hash is None:
            # Has no prior existence.
            continue
        history_mapping[previous_ident_hash] = ident_hash
        # Recursive tree walk from the previous publication up to the
        # root (parentless) latest collections containing the document.
        cursor.execute("""\
WITH RECURSIVE t(nodeid, parent_id, documentid, path) AS (
  SELECT tr.nodeid, tr.parent_id, tr.documentid, ARRAY[tr.nodeid]
  FROM trees tr
  WHERE tr.documentid = (
    SELECT module_ident FROM modules
    WHERE uuid||'@'||concat_ws('.', major_version, minor_version) = %s)
UNION ALL
  SELECT c.nodeid, c.parent_id, c.documentid, path || ARRAY[c.nodeid]
  FROM trees c JOIN t ON (c.nodeid = t.parent_id)
  WHERE not c.nodeid = ANY(t.path)
)
SELECT uuid||'@'||concat_ws('.', major_version, minor_version)
FROM t JOIN latest_modules m ON (t.documentid = m.module_ident)
WHERE t.parent_id IS NULL
""", (previous_ident_hash,))
        to_be_republished.extend(
            split_ident_hash(row[0]) for row in cursor.fetchall())
    to_be_republished = set(to_be_republished)

    republished_ident_hashes = []
    # Republish the Collections set.
    for (uuid, version) in to_be_republished:
        if (uuid, version,) in binders:
            # This binder is already in the publication context,
            # don't try to publish it again.
            continue
        ident_hash = join_ident_hash(uuid, version)
        bumped_version = bump_version(cursor, uuid, is_minor_bump=True)
        republished_ident_hash = republish_collection(
            cursor, ident_hash, version=bumped_version)
        # Set the identifier history.
        history_mapping[ident_hash] = republished_ident_hash
        rebuild_collection_tree(cursor, ident_hash, history_mapping)
        republished_ident_hashes.append(republished_ident_hash)

    return republished_ident_hashes
def test(self, cursor):
    """Full bake run: composite page added, trees stamped, files persisted."""
    binder = use_cases.setup_COMPLEX_BOOK_ONE_in_archive(self, cursor)
    cursor.connection.commit()
    publisher = 'ream'
    msg = 'part of collated publish'

    # Build some new metadata for the composite document.
    source_doc = list(cnxepub.flatten_to_documents(binder))[0]
    metadata = source_doc.metadata.copy()
    del metadata['cnx-archive-uri']
    del metadata['version']
    metadata['created'] = None
    metadata['revised'] = None
    metadata['title'] = "Made up of other things"

    # Add some fake collation objects to the book.
    content = '<p>composite</p>'
    composite_doc = cnxepub.CompositeDocument(None, content, metadata)
    composite_section = cnxepub.TranslucentBinder(
        nodes=[composite_doc],
        metadata={'title': "Other things"})

    baked_doc_content = '<p>collated</p>'

    def cnxepub_collate(binder_model, ruleset=None, includes=None):
        binder_model[0][0].content = baked_doc_content
        binder_model.append(composite_section)
        return binder_model

    fake_recipe_id = 1
    with mock.patch('cnxpublishing.bake.collate_models') as mock_collate:
        mock_collate.side_effect = cnxepub_collate
        errors = self.target(binder, fake_recipe_id, publisher, msg)

    # Ensure the output of the errors.
    self.assertEqual(errors, [])

    # Ensure the original tree is intact.
    cursor.execute("SELECT tree_to_json(%s, %s, FALSE)::json;",
                   (binder.id, binder.metadata['version'],))
    tree = cursor.fetchone()[0]
    self.assertNotIn(composite_doc.ident_hash,
                     cnxepub.flatten_tree_to_ident_hashes(tree))

    # Ensure the tree as been stamped.
    cursor.execute("SELECT tree_to_json(%s, %s, TRUE)::json;",
                   (binder.id, binder.metadata['version'],))
    baked_tree = cursor.fetchone()[0]
    self.assertIn(composite_doc.ident_hash,
                  cnxepub.flatten_tree_to_ident_hashes(baked_tree))

    # Ensure the changes to a document content were persisted.
    checks = [
        (binder[0][0], baked_doc_content,),
        (composite_doc, content,),
    ]
    for doc, expected in checks:
        self.assertIn(expected, self._get_baked_file(cursor, doc, binder))