def test_versions_across_collections_after_publish(self):
    self.app.data.insert('archive_versions', self.article_versions)

    # Publishing an Article
    doc = self.articles[3]
    original = doc.copy()

    published_version_number = original[config.VERSION] + 1
    get_resource_service(ARCHIVE_PUBLISH).patch(
        id=doc[config.ID_FIELD],
        updates={ITEM_STATE: CONTENT_STATE.PUBLISHED, config.VERSION: published_version_number})

    article_in_production = get_resource_service(ARCHIVE).find_one(req=None, _id=original[config.ID_FIELD])
    self.assertIsNotNone(article_in_production)
    self.assertEqual(article_in_production[ITEM_STATE], CONTENT_STATE.PUBLISHED)
    self.assertEqual(article_in_production[config.VERSION], published_version_number)

    enqueue_published()

    lookup = {'item_id': original[config.ID_FIELD], 'item_version': published_version_number}
    queue_items = list(get_resource_service(PUBLISH_QUEUE).get(req=None, lookup=lookup))
    assert len(queue_items) > 0, \
        "Transmission Details are empty for published item %s" % original[config.ID_FIELD]

    lookup = {'item_id': original[config.ID_FIELD], config.VERSION: published_version_number}
    request = ParsedRequest()
    request.args = {'aggregations': 0}
    items_in_published_collection = list(get_resource_service(PUBLISHED).get(req=request, lookup=lookup))
    assert len(items_in_published_collection) > 0, \
        "Item not found in published collection %s" % original[config.ID_FIELD]
def test_versions_across_collections_after_publish(self):
    self.app.data.insert('archive_versions', self.article_versions)

    # Publishing an Article
    doc = self.articles[7]
    original = doc.copy()

    published_version_number = original[config.VERSION] + 1
    get_resource_service(ARCHIVE_PUBLISH).patch(
        id=doc[config.ID_FIELD],
        updates={ITEM_STATE: CONTENT_STATE.PUBLISHED, config.VERSION: published_version_number})

    article_in_production = get_resource_service(ARCHIVE).find_one(req=None, _id=original[config.ID_FIELD])
    self.assertIsNotNone(article_in_production)
    self.assertEqual(article_in_production[ITEM_STATE], CONTENT_STATE.PUBLISHED)
    self.assertEqual(article_in_production[config.VERSION], published_version_number)

    enqueue_published()

    lookup = {'item_id': original[config.ID_FIELD], 'item_version': published_version_number}
    queue_items = list(get_resource_service(PUBLISH_QUEUE).get(req=None, lookup=lookup))
    assert len(queue_items) > 0, \
        "Transmission Details are empty for published item %s" % original[config.ID_FIELD]

    lookup = {'item_id': original[config.ID_FIELD], config.VERSION: published_version_number}
    request = ParsedRequest()
    request.args = {'aggregations': 0}
    items_in_published_collection = list(get_resource_service(PUBLISHED).get(req=request, lookup=lookup))
    assert len(items_in_published_collection) > 0, \
        "Item not found in published collection %s" % original[config.ID_FIELD]
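# `_is_publish_queue_empty` is called by several of the queue tests below but is
# not defined in this excerpt. A minimal sketch, assuming it simply asserts that
# the publish queue collection is empty, using the same find/count idiom the
# tests themselves use:
def _is_publish_queue_empty(self):
    queue_items = self.app.data.find(PUBLISH_QUEUE, None, None)
    self.assertEqual(0, queue_items.count())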
def test_queue_transmission_for_item_scheduled_elapsed(self):
    self._is_publish_queue_empty()

    doc = copy(self.articles[5])
    doc['item_id'] = doc['_id']
    schedule_date = utcnow() + timedelta(minutes=10)

    updates = {
        'publish_schedule': schedule_date,
        'schedule_settings': {
            'utc_publish_schedule': schedule_date
        }
    }
    get_resource_service(ARCHIVE).patch(id=doc['_id'], updates=updates)
    get_resource_service(ARCHIVE_PUBLISH).patch(id=doc['_id'], updates={ITEM_STATE: CONTENT_STATE.SCHEDULED})

    queue_items = self.app.data.find(PUBLISH_QUEUE, None, None)
    self.assertEqual(0, queue_items.count())

    schedule_in_past = utcnow() - timedelta(minutes=10)
    get_resource_service(PUBLISHED).update_published_items(
        doc['_id'], 'schedule_settings', {'utc_publish_schedule': schedule_in_past})
    get_resource_service(PUBLISHED).update_published_items(
        doc['_id'], 'publish_schedule', schedule_in_past)

    enqueue_published()

    queue_items = self.app.data.find(PUBLISH_QUEUE, None, None)
    self.assertEqual(1, queue_items.count())
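# The scheduled-elapsed tests call get_resource_service(PUBLISHED).update_published_items(...)
# to back-date the schedule. A minimal sketch of that contract, assuming the published
# service applies a single-field update to every published copy of an item via the base
# service's get_from_mongo/system_update (an assumption, not taken from this excerpt):
def update_published_items(self, item_id, field, value):
    # find every published copy of the item and rewrite the given field on each
    for item in self.get_from_mongo(req=None, lookup={'item_id': item_id}):
        self.system_update(item[config.ID_FIELD], {field: value}, item)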
def test_maintain_latest_version_for_published(self):
    def get_publish_items(item_id, last_version):
        query = {'query': {'filtered': {'filter': {'and': [
            {'term': {'item_id': item_id}},
            {'term': {LAST_PUBLISHED_VERSION: last_version}}
        ]}}}}
        request = ParsedRequest()
        request.args = {'source': json.dumps(query), 'aggregations': 0}
        return self.app.data.find(PUBLISHED, req=request, lookup=None)

    AppPopulateCommand().run(self.filename)
    get_resource_service(ARCHIVE).patch(id=self.articles[1][config.ID_FIELD],
                                        updates={'publish_schedule': None})

    doc = get_resource_service(ARCHIVE).find_one(req=None, _id=self.articles[1][config.ID_FIELD])
    get_resource_service(ARCHIVE_PUBLISH).patch(id=doc[config.ID_FIELD],
                                                updates={ITEM_STATE: CONTENT_STATE.PUBLISHED})
    enqueue_published()

    queue_items = self.app.data.find(PUBLISH_QUEUE, None, None)
    self.assertEqual(1, queue_items.count())

    request = ParsedRequest()
    request.args = {'aggregations': 0}
    published_items = self.app.data.find(PUBLISHED, request, None)
    self.assertEqual(1, published_items.count())
    published_doc = next((item for item in published_items
                          if item.get('item_id') == doc[config.ID_FIELD]), None)
    self.assertEqual(published_doc[LAST_PUBLISHED_VERSION], True)

    get_resource_service(ARCHIVE_CORRECT).patch(id=doc[config.ID_FIELD],
                                                updates={ITEM_STATE: CONTENT_STATE.CORRECTED})
    enqueue_published()

    queue_items = self.app.data.find(PUBLISH_QUEUE, None, None)
    self.assertEqual(2, queue_items.count())

    published_items = self.app.data.find(PUBLISHED, request, None)
    self.assertEqual(2, published_items.count())
    last_published = get_publish_items(published_doc['item_id'], True)
    self.assertEqual(1, last_published.count())
def test_targeted_for_includes_digital_subscribers(self):
    ValidatorsPopulateCommand().run(self.filename)
    updates = {'target_regions': [{'qcode': 'NSW', 'name': 'New South Wales', 'allow': True}]}
    doc_id = self.articles[9][config.ID_FIELD]
    get_resource_service(ARCHIVE).patch(id=doc_id, updates=updates)

    get_resource_service(ARCHIVE_PUBLISH).patch(id=doc_id, updates={ITEM_STATE: CONTENT_STATE.PUBLISHED})
    enqueue_published()

    queue_items = self.app.data.find(PUBLISH_QUEUE, None, None)
    self.assertEqual(7, queue_items.count())
    expected_subscribers = ['1', '2', '3', '4', '5']
    for item in queue_items:
        self.assertIn(item["subscriber_id"], expected_subscribers, 'item {}'.format(item))
def test_targeted_for_includes_digital_subscribers(self):
    ValidatorsPopulateCommand().run(self.filename)
    updates = {'target_regions': [{'qcode': 'NSW', 'name': 'New South Wales', 'allow': True}]}
    doc_id = self.articles[5][config.ID_FIELD]
    get_resource_service(ARCHIVE).patch(id=doc_id, updates=updates)

    get_resource_service(ARCHIVE_PUBLISH).patch(id=doc_id, updates={ITEM_STATE: CONTENT_STATE.PUBLISHED})
    enqueue_published()

    queue_items = self.app.data.find(PUBLISH_QUEUE, None, None)
    self.assertEqual(6, queue_items.count())
    expected_subscribers = ['1', '2', '3', '4', '5']
    for item in queue_items:
        self.assertIn(item["subscriber_id"], expected_subscribers, 'item {}'.format(item))
def test_delete_from_queue_by_article_id(self):
    self._is_publish_queue_empty()

    doc = copy(self.articles[7])
    doc['item_id'] = doc['_id']

    archive_publish = get_resource_service(ARCHIVE_PUBLISH)
    archive_publish.patch(id=doc['_id'], updates={ITEM_STATE: CONTENT_STATE.PUBLISHED})

    enqueue_published()
    queue_items = self.app.data.find(PUBLISH_QUEUE, None, None)
    self.assertEqual(4, queue_items.count())

    # this deletes the queue transmissions for the wire article, not the takes package
    publish_queue.PublishQueueService(PUBLISH_QUEUE, superdesk.get_backend()).delete_by_article_id(doc['_id'])

    self._is_publish_queue_empty()
def test_queue_transmission_for_item_scheduled_future(self):
    self._is_publish_queue_empty()

    doc = copy(self.articles[9])
    doc['item_id'] = doc['_id']
    schedule_date = utcnow() + timedelta(hours=2)

    updates = {
        'publish_schedule': schedule_date,
        'schedule_settings': {
            'utc_publish_schedule': schedule_date
        }
    }
    get_resource_service(ARCHIVE).patch(id=doc['_id'], updates=updates)
    get_resource_service(ARCHIVE_PUBLISH).patch(id=doc['_id'], updates={ITEM_STATE: CONTENT_STATE.SCHEDULED})

    enqueue_published()

    queue_items = self.app.data.find(PUBLISH_QUEUE, None, None)
    self.assertEqual(0, queue_items.count())
def test_queue_transmission_for_item_scheduled_future(self):
    self._is_publish_queue_empty()

    doc = copy(self.articles[5])
    doc['item_id'] = doc['_id']
    schedule_date = utcnow() + timedelta(hours=2)

    updates = {
        'publish_schedule': schedule_date,
        'schedule_settings': {
            'utc_publish_schedule': schedule_date
        }
    }
    get_resource_service(ARCHIVE).patch(id=doc['_id'], updates=updates)
    get_resource_service(ARCHIVE_PUBLISH).patch(id=doc['_id'], updates={ITEM_STATE: CONTENT_STATE.SCHEDULED})

    enqueue_published()

    queue_items = self.app.data.find(PUBLISH_QUEUE, None, None)
    self.assertEqual(0, queue_items.count())
def test_maintain_latest_version_for_published(self):
    def get_publish_items(item_id, last_version):
        query = {'query': {'filtered': {'filter': {'and': [
            {'term': {'item_id': item_id}},
            {'term': {LAST_PUBLISHED_VERSION: last_version}}
        ]}}}}
        request = ParsedRequest()
        request.args = {'source': json.dumps(query), 'aggregations': 0}
        return self.app.data.find(PUBLISHED, req=request, lookup=None)

    ValidatorsPopulateCommand().run(self.filename)
    get_resource_service(ARCHIVE).patch(id=self.articles[1][config.ID_FIELD],
                                        updates={'publish_schedule': None})

    doc = get_resource_service(ARCHIVE).find_one(req=None, _id=self.articles[1][config.ID_FIELD])
    get_resource_service(ARCHIVE_PUBLISH).patch(id=doc[config.ID_FIELD],
                                                updates={ITEM_STATE: CONTENT_STATE.PUBLISHED})
    enqueue_published()

    queue_items = self.app.data.find(PUBLISH_QUEUE, None, None)
    self.assertEqual(2, queue_items.count())

    request = ParsedRequest()
    request.args = {'aggregations': 0}
    published_items = self.app.data.find(PUBLISHED, request, None)
    self.assertEqual(2, published_items.count())
    published_digital_doc = next((item for item in published_items
                                  if item.get(PACKAGE_TYPE) == TAKES_PACKAGE), None)
    published_doc = next((item for item in published_items
                          if item.get('item_id') == doc[config.ID_FIELD]), None)
    self.assertEqual(published_doc[LAST_PUBLISHED_VERSION], True)
    self.assertEqual(published_digital_doc[LAST_PUBLISHED_VERSION], True)

    get_resource_service(ARCHIVE_CORRECT).patch(id=doc[config.ID_FIELD],
                                                updates={ITEM_STATE: CONTENT_STATE.CORRECTED})
    enqueue_published()

    queue_items = self.app.data.find(PUBLISH_QUEUE, None, None)
    self.assertEqual(4, queue_items.count())

    published_items = self.app.data.find(PUBLISHED, request, None)
    self.assertEqual(4, published_items.count())
    last_published_digital = get_publish_items(published_digital_doc['item_id'], True)
    self.assertEqual(1, last_published_digital.count())
    last_published = get_publish_items(published_doc['item_id'], True)
    self.assertEqual(1, last_published.count())
def test_queue_transmission_for_item_scheduled_elapsed(self):
    self._is_publish_queue_empty()

    doc = copy(self.articles[9])
    doc['item_id'] = doc['_id']
    schedule_date = utcnow() + timedelta(minutes=10)

    updates = {
        'publish_schedule': schedule_date,
        'schedule_settings': {
            'utc_publish_schedule': schedule_date
        }
    }
    get_resource_service(ARCHIVE).patch(id=doc['_id'], updates=updates)
    get_resource_service(ARCHIVE_PUBLISH).patch(id=doc['_id'], updates={ITEM_STATE: CONTENT_STATE.SCHEDULED})

    queue_items = self.app.data.find(PUBLISH_QUEUE, None, None)
    self.assertEqual(0, queue_items.count())

    schedule_in_past = utcnow() - timedelta(minutes=10)
    get_resource_service(PUBLISHED).update_published_items(
        doc['_id'], 'schedule_settings', {'utc_publish_schedule': schedule_in_past})
    get_resource_service(PUBLISHED).update_published_items(
        doc['_id'], 'publish_schedule', schedule_in_past)

    enqueue_published()

    queue_items = self.app.data.find(PUBLISH_QUEUE, None, None)
    self.assertEqual(1, queue_items.count())
def test_soft_timeout_gets_re_queued(self, mock):
    self.app.data.insert('published', self.published_items)

    enqueue_published()

    published = self.app.data.find(PUBLISHED, None, None)
    # a soft timeout during enqueue should leave the item re-queued in the
    # 'pending' state; assertEqual checks the state explicitly (assertTrue
    # would have treated 'pending' as a failure message, not a comparison)
    self.assertEqual(published[0].get('queue_state'), 'pending')