def setUp(self):
    """Seed the in-memory data layer with fixtures, write the publish
    validator definitions to disk, and mock out media storage."""
    super().setUp()
    self.init_data()

    # Load every shared fixture collection into the test data layer.
    for resource, docs in (('users', self.users),
                           ('desks', self.desks),
                           ('products', self.products),
                           ('subscribers', self.subscribers),
                           (ARCHIVE, self.articles)):
        self.app.data.insert(resource, docs)

    # Validator definitions consumed by ValidatorsPopulateCommand below.
    self.filename = os.path.join(os.path.abspath(os.path.dirname(__file__)), "validators.json")
    self.json_data = [
        {"_id": "kill_text", "act": "kill", "type": "text",
         "schema": {"headline": {"type": "string"}}},
        {"_id": "publish_text", "act": "publish", "type": "text", "schema": {}},
        {"_id": "correct_text", "act": "correct", "type": "text", "schema": {}},
        {"_id": "publish_composite", "act": "publish", "type": "composite", "schema": {}},
    ]
    self.article_versions = self._init_article_versions()

    with open(self.filename, "w+") as fh:
        json.dump(self.json_data, fh)
    init_app(self.app)
    ValidatorsPopulateCommand().run(self.filename)

    # Media storage is stubbed so publishing never touches real storage.
    self.app.media.url_for_media = MagicMock(return_value='url_for_media')
    self.app.media.put = MagicMock(return_value='media_id')
def test_maintain_latest_version_for_published(self):
    """Publishing then correcting an item must keep LAST_PUBLISHED_VERSION
    flagged True on exactly one published row per item (text and its
    digital/takes package)."""
    def get_publish_items(item_id, last_version):
        # Build an Elasticsearch 'filtered' query matching published rows
        # for item_id with the given LAST_PUBLISHED_VERSION flag.
        query = {'query': {'filtered': {'filter': {'and': [
            {'term': {'item_id': item_id}},
            {'term': {LAST_PUBLISHED_VERSION: last_version}}
        ]}}}}
        request = ParsedRequest()
        request.args = {'source': json.dumps(query)}
        return self.app.data.find(PUBLISHED, req=request, lookup=None)

    ValidatorsPopulateCommand().run(self.filename)
    # Clear any embargo so the item publishes immediately.
    get_resource_service(ARCHIVE).patch(
        id=self.articles[1][config.ID_FIELD], updates={'publish_schedule': None})
    doc = get_resource_service(ARCHIVE).find_one(
        req=None, _id=self.articles[1][config.ID_FIELD])

    # First publish: expect 7 queue entries and 2 published rows
    # (the text item plus its digital takes package).
    get_resource_service(ARCHIVE_PUBLISH).patch(
        id=doc[config.ID_FIELD], updates={ITEM_STATE: CONTENT_STATE.PUBLISHED})
    queue_items = self.app.data.find(PUBLISH_QUEUE, None, None)
    self.assertEqual(7, queue_items.count())
    published_items = self.app.data.find(PUBLISHED, None, None)
    self.assertEqual(2, published_items.count())
    published_digital_doc = next(
        (item for item in published_items if item.get(PACKAGE_TYPE) == TAKES_PACKAGE), None)
    published_doc = next((item for item in published_items
                          if item.get('item_id') == doc[config.ID_FIELD]), None)
    # Both rows are the latest published version after the first publish.
    self.assertEqual(published_doc[LAST_PUBLISHED_VERSION], True)
    self.assertEqual(published_digital_doc[LAST_PUBLISHED_VERSION], True)

    # Correction doubles the queue/published counts; only the corrected
    # rows may now carry LAST_PUBLISHED_VERSION == True.
    get_resource_service(ARCHIVE_CORRECT).patch(
        id=doc[config.ID_FIELD], updates={ITEM_STATE: CONTENT_STATE.CORRECTED})
    queue_items = self.app.data.find(PUBLISH_QUEUE, None, None)
    self.assertEqual(14, queue_items.count())
    published_items = self.app.data.find(PUBLISHED, None, None)
    self.assertEqual(4, published_items.count())
    last_published_digital = get_publish_items(
        published_digital_doc['item_id'], True)
    self.assertEqual(1, last_published_digital.count())
    last_published = get_publish_items(published_doc['item_id'], True)
    self.assertEqual(1, last_published.count())
def test_targeted_for_includes_digital_subscribers(self):
    """An item targeted at a region must still be queued for all five
    subscribers, digital ones included."""
    ValidatorsPopulateCommand().run(self.filename)
    doc_id = self.articles[9][config.ID_FIELD]
    region = {'qcode': 'NSW', 'name': 'New South Wales', 'allow': True}
    get_resource_service(ARCHIVE).patch(
        id=doc_id, updates={'target_regions': [region]})
    get_resource_service(ARCHIVE_PUBLISH).patch(
        id=doc_id, updates={ITEM_STATE: CONTENT_STATE.PUBLISHED})
    enqueue_published()

    queue_items = self.app.data.find(PUBLISH_QUEUE, None, None)
    self.assertEqual(7, queue_items.count())
    expected_subscribers = ['1', '2', '3', '4', '5']
    # Every queued transmission must belong to one of the expected subscribers.
    for item in queue_items:
        self.assertIn(item["subscriber_id"], expected_subscribers, 'item {}'.format(item))
def setUp(self):
    """Seed fixtures, install the publish validators from a temp JSON file,
    and prepare a PackageService for package-related tests."""
    super().setUp()
    self._init_data()

    # Push all fixture collections into the test data layer.
    for resource, docs in (('users', self.users),
                           ('desks', self.desks),
                           ('vocabularies', self.vocabularies),
                           ('subscribers', self.subscribers),
                           (ARCHIVE, self.articles)):
        self.app.data.insert(resource, docs)

    self.filename = os.path.join(
        os.path.abspath(os.path.dirname(__file__)), "validators.json")
    # Validator definitions consumed by ValidatorsPopulateCommand below.
    self.json_data = [
        {"_id": "kill_text", "act": "kill", "type": "text",
         "schema": {"headline": {"type": "string"}}},
        {"_id": "publish_text", "act": "publish", "type": "text", "schema": {}},
        {"_id": "correct_text", "act": "correct", "type": "text", "schema": {}},
        {"_id": "publish_composite", "act": "publish", "type": "composite", "schema": {}},
    ]

    with open(self.filename, "w+") as fh:
        json.dump(self.json_data, fh)
    init_app(self.app)
    ValidatorsPopulateCommand().run(self.filename)
    self.package_service = PackageService()
def test_targeted_for_excludes_digital_subscribers(self):
    """A 'targeted_for' item must queue only for wire subscribers ('1', '2'),
    never for digital ones."""
    ValidatorsPopulateCommand().run(self.filename)
    updates = {
        'targeted_for': [{
            'name': 'New South Wales',
            'allow': True
        }]
    }
    doc_id = self.articles[9][config.ID_FIELD]
    get_resource_service(ARCHIVE).patch(id=doc_id, updates=updates)
    get_resource_service(ARCHIVE_PUBLISH).patch(
        id=doc_id, updates={ITEM_STATE: CONTENT_STATE.PUBLISHED})

    queue_items = self.app.data.find(PUBLISH_QUEUE, None, None)
    self.assertEqual(4, queue_items.count())
    expected_subscribers = ['1', '2']
    # Fix: the original only asserted items [0]-[2], so the fourth queued
    # item was never checked; iterate over every item (as the sibling
    # "includes" test does) so no unexpected subscriber slips through.
    for item in queue_items:
        self.assertIn(item["subscriber_id"], expected_subscribers, 'item {}'.format(item))