def test_populate_vocabularies(self):
        cmd = AppPopulateCommand()
        cmd.run(self.filename)
        service = get_resource_service("vocabularies")

        for item in self.json_data:
            data = service.find_one(_id=item["_id"], req=None)
            self.assertEqual(data["_id"], item["_id"])
            self.assertListEqual(data["items"], item["items"])
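A minimal sketch of the fixture shape this test implies: AppPopulateCommand is given a JSON file whose entries carry at least "_id" and "items" (only that shape is taken from the assertions above; the concrete ids and values below are illustrative assumptions, not the real vocabularies.json):

import json
import os
import tempfile

# hypothetical vocabulary documents; only the "_id"/"items" keys are implied by the test
vocabularies = [
    {
        "_id": "urgency",
        "items": [
            {"name": "1", "qcode": 1, "is_active": True},
            {"name": "2", "qcode": 2, "is_active": True},
        ],
    },
]

with tempfile.TemporaryDirectory() as tmp:
    filename = os.path.join(tmp, "vocabularies.json")
    with open(filename, "w") as f:
        json.dump(vocabularies, f)
    # AppPopulateCommand().run(filename) would then load these documents into
    # the "vocabularies" resource, which is what the assertions above check.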
Example #2
    def test_populate_validators(self):
        cmd = AppPopulateCommand()
        with self.app.test_request_context(self.app.config.get("URL_PREFIX")):
            cmd.run(self.filename)
            service = get_resource_service("validators")

            for item in self.json_data:
                data = service.find_one(_id=item["_id"], req=None)
                self.assertEqual(data["_id"], item["_id"])
                self.assertDictEqual(data["schema"], item["schema"])
Example #3
    def test_populate_types(self):
        cmd = AppPopulateCommand()
        with self.app.app_context():
            service = get_resource_service("planning_types")
            cmd.run(self.filename)

            for item in self.json_data:
                data = service.find_one(_id=item['_id'], req=None)
                self.assertEqual(data["_id"], item["_id"])
                self.assertEqual(data["editor"]['definition_long'], item["editor"]['definition_long'])
                self.assertDictEqual(data["schema"]['definition_long'], item["schema"]['definition_long'])
Example #4
def setup_ntb_vocabulary(context):
    with context.app.app_context():
        # prepopulate vocabularies
        voc_file = os.path.join(
            os.path.abspath(os.path.dirname(os.path.dirname(ntb.__file__))),
            'data', 'vocabularies.json')
        AppPopulateCommand().run(voc_file)
Example #5
    def test_maintain_latest_version_for_published(self):
        def get_publish_items(item_id, last_version):
            query = {
                'query': {
                    'filtered': {
                        'filter': {
                            'and': [{
                                'term': {
                                    'item_id': item_id
                                }
                            }, {
                                'term': {
                                    LAST_PUBLISHED_VERSION: last_version
                                }
                            }]
                        }
                    }
                }
            }
            request = ParsedRequest()
            request.args = {'source': json.dumps(query), 'aggregations': 0}
            return self.app.data.find(PUBLISHED, req=request, lookup=None)

        AppPopulateCommand().run(self.filename)
        get_resource_service(ARCHIVE).patch(
            id=self.articles[1][config.ID_FIELD],
            updates={'publish_schedule': None})

        doc = get_resource_service(ARCHIVE).find_one(
            req=None, _id=self.articles[1][config.ID_FIELD])
        get_resource_service(ARCHIVE_PUBLISH).patch(
            id=doc[config.ID_FIELD],
            updates={ITEM_STATE: CONTENT_STATE.PUBLISHED})

        enqueue_published()

        queue_items = self.app.data.find(PUBLISH_QUEUE, None, None)
        self.assertEqual(1, queue_items.count())
        request = ParsedRequest()
        request.args = {'aggregations': 0}
        published_items = self.app.data.find(PUBLISHED, request, None)
        self.assertEqual(1, published_items.count())
        published_doc = next((item for item in published_items
                              if item.get('item_id') == doc[config.ID_FIELD]),
                             None)
        self.assertEqual(published_doc[LAST_PUBLISHED_VERSION], True)

        get_resource_service(ARCHIVE_CORRECT).patch(
            id=doc[config.ID_FIELD],
            updates={ITEM_STATE: CONTENT_STATE.CORRECTED})

        enqueue_published()

        queue_items = self.app.data.find(PUBLISH_QUEUE, None, None)
        self.assertEqual(2, queue_items.count())
        published_items = self.app.data.find(PUBLISHED, request, None)
        self.assertEqual(2, published_items.count())
        last_published = get_publish_items(published_doc['item_id'], True)
        self.assertEqual(1, last_published.count())
Example #6
 def test_check_similar(self):
     f = get_resource_service('filter_conditions')
     filter_condition1 = {
         'field': 'urgency',
         'operator': 'in',
         'value': '2'
     }
     filter_condition2 = {
         'field': 'urgency',
         'operator': 'in',
         'value': '3'
     }
     filter_condition3 = {
         'field': 'urgency',
         'operator': 'in',
         'value': '1'
     }
     filter_condition4 = {
         'field': 'urgency',
         'operator': 'in',
         'value': '5'
     }
     filter_condition5 = {
         'field': 'urgency',
         'operator': 'nin',
         'value': '5'
     }
     filter_condition6 = {
         'field': 'headline',
         'operator': 'like',
         'value': 'tor'
     }
     with self.app.app_context():
         cmd = AppPopulateCommand()
         filename = os.path.join(
             os.path.abspath(
                 os.path.dirname(
                     "apps/prepopulate/data_init/vocabularies.json")),
             "vocabularies.json")
         cmd.run(filename)
         self.assertTrue(len(f.check_similar(filter_condition1)) == 2)
         self.assertTrue(len(f.check_similar(filter_condition2)) == 1)
         self.assertTrue(len(f.check_similar(filter_condition3)) == 0)
         self.assertTrue(len(f.check_similar(filter_condition4)) == 3)
         self.assertTrue(len(f.check_similar(filter_condition5)) == 1)
         self.assertTrue(len(f.check_similar(filter_condition6)) == 1)
Example #7
 def test_check_similar(self):
     f = get_resource_service("filter_conditions")
     filter_condition1 = {
         "field": "urgency",
         "operator": "in",
         "value": "2"
     }
     filter_condition2 = {
         "field": "urgency",
         "operator": "in",
         "value": "3"
     }
     filter_condition3 = {
         "field": "urgency",
         "operator": "in",
         "value": "1"
     }
     filter_condition4 = {
         "field": "urgency",
         "operator": "in",
         "value": "5"
     }
     filter_condition5 = {
         "field": "urgency",
         "operator": "nin",
         "value": "5"
     }
     filter_condition6 = {
         "field": "headline",
         "operator": "like",
         "value": "tor"
     }
     with self.app.app_context():
         cmd = AppPopulateCommand()
         filename = os.path.join(
             os.path.abspath(
                 os.path.dirname(
                     "apps/prepopulate/data_init/vocabularies.json")),
             "vocabularies.json")
         cmd.run(filename)
         self.assertTrue(len(f.check_similar(filter_condition1)) == 2)
         self.assertTrue(len(f.check_similar(filter_condition2)) == 1)
         self.assertTrue(len(f.check_similar(filter_condition3)) == 0)
         self.assertTrue(len(f.check_similar(filter_condition4)) == 3)
         self.assertTrue(len(f.check_similar(filter_condition5)) == 1)
         self.assertTrue(len(f.check_similar(filter_condition6)) == 1)
Example #8
 def setUp(self):
     super().setUp()
     # load vocabularies
     with self.app.app_context():
         voc_file = os.path.join(
             os.path.abspath(os.path.dirname(os.path.dirname(
                 anp.__file__))), 'data', 'vocabularies.json')
         AppPopulateCommand().run(voc_file)
     # load fixtures
     self.fixtures = {}
     dirname = os.path.dirname(os.path.realpath(__file__))
     # sources
     sources = os.path.normpath(
         os.path.join(dirname, '../fixtures', 'anp_news_api-sources.json'))
     with open(sources, 'r') as f:
         self.fixtures['sources'] = json.load(f)
     # items
     for source_id in ('5af9a2e4-3825-45d6-8445-419b1cb365dc',
                       '3dc77946-38dc-4469-b0a9-c10519035824',
                       '5b8bc05d-2421-454d-b363-b043d503c6b7',
                       '03b7a184-f6f4-4879-85f6-b43f21acb940',
                       '03b7a184-f6f4-4879-85f6-b43f21acb940',
                       '4ad32715-3221-49b1-b93b-30b02c1c6eb6'):
         _path = os.path.normpath(
             os.path.join(dirname, '../fixtures',
                          'anp_news_api-items-{}.json'.format(source_id)))
         with open(_path, 'r') as f:
             self.fixtures.setdefault('items', {})[source_id] = json.load(f)
     # item details
     for item_id in ('ac3dc857e87ea0a0b98635b314941d12',
                     'bd34da5aa71ea490639e5601f98b238a',
                     'ac47563d3fe56f62972f0f7e55d323cd',
                     '7404db79e88ae6483f56941204943a4a',
                     '38bdbbbdae1320f77049b5a32538e09c',
                     'c4b893fec041a87ee340b513e8b11860'):
         _path = os.path.normpath(
             os.path.join(
                 dirname, '../fixtures',
                 'anp_news_api-item-detail-{}.json'.format(item_id)))
         with open(_path, 'r') as f:
             self.fixtures.setdefault('item-details',
                                      {})[item_id] = json.load(f)
     # media
     for item_id in ('38bdbbbdae1320f77049b5a32538e09c', ):
         _path = os.path.normpath(
             os.path.join(dirname, '../fixtures',
                          'anp_news_api-media-{}.json'.format(item_id)))
         with open(_path, 'r') as f:
             self.fixtures.setdefault('item-media',
                                      {})[item_id] = json.load(f)
     # image
     for item_id in ('38bdbbbdae1320f77049b5a32538e09c', ):
         _path = os.path.normpath(
             os.path.join(dirname, '../fixtures',
                          'image-{}.jpeg'.format(item_id)))
         with open(_path, 'rb') as f:
             self.fixtures.setdefault('image', {})[item_id] = f.read()
Example #9
def setup_before_scenario(context, scenario, config, app_factory):
    if scenario.status != 'skipped' and 'notesting' in scenario.tags:
        config['SUPERDESK_TESTING'] = False

    tests.setup(context, config, app_factory, bool(config))

    context.headers = [('Content-Type', 'application/json'),
                       ('Origin', 'localhost')]

    if 'dbauth' in scenario.tags and LDAP_SERVER:
        scenario.mark_skipped()

    if 'ldapauth' in scenario.tags and not LDAP_SERVER:
        scenario.mark_skipped()

    if 'alchemy' in scenario.tags and not context.app.config.get(
            'KEYWORDS_KEY_API'):
        scenario.mark_skipped()

    if 'clean_snapshots' in scenario.tags:
        tests.use_snapshot.cache.clear()

    setup_search_provider(context.app)

    if scenario.status != 'skipped' and 'auth' in scenario.tags:
        setup_auth_user(context)

    if scenario.status != 'skipped' and 'provider' in scenario.tags:
        setup_providers(context)

    if scenario.status != 'skipped' and 'vocabulary' in scenario.tags:
        with context.app.app_context():
            cmd = AppPopulateCommand()
            filename = os.path.join(
                os.path.abspath(os.path.dirname("features/steps/fixtures/")),
                "vocabularies.json")
            cmd.run(filename)

    if scenario.status != 'skipped' and 'content_type' in scenario.tags:
        with context.app.app_context():
            cmd = AppPopulateCommand()
            filename = os.path.join(
                os.path.abspath(os.path.dirname("features/steps/fixtures/")),
                "content_types.json")
            cmd.run(filename)

    if scenario.status != 'skipped' and 'notification' in scenario.tags:
        tests.setup_notification(context)

    if scenario.status != 'skipped' and 'app_init' in scenario.tags:
        with context.app.app_context():
            command = AppInitializeWithDataCommand()
            command.run()
Example #10
def setup_before_scenario(context, scenario, config, app_factory):
    if scenario.status != "skipped" and "notesting" in scenario.tags:
        config["SUPERDESK_TESTING"] = False

    tests.setup(context, config, app_factory, bool(config))

    context.headers = [("Content-Type", "application/json"),
                       ("Origin", "localhost")]

    if "dbauth" in scenario.tags and LDAP_SERVER:
        scenario.mark_skipped()

    if "ldapauth" in scenario.tags and not LDAP_SERVER:
        scenario.mark_skipped()

    if "alchemy" in scenario.tags and not context.app.config.get(
            "KEYWORDS_KEY_API"):
        scenario.mark_skipped()

    if "clean_snapshots" in scenario.tags:
        tests.use_snapshot.cache.clear()

    setup_search_provider(context.app)

    if scenario.status != "skipped" and "auth" in scenario.tags:
        setup_auth_user(context)

    if scenario.status != "skipped" and "provider" in scenario.tags:
        setup_providers(context)

    if scenario.status != "skipped" and "vocabulary" in scenario.tags:
        with context.app.app_context():
            cmd = AppPopulateCommand()
            filename = os.path.join(
                os.path.abspath(os.path.dirname("features/steps/fixtures/")),
                "vocabularies.json")
            cmd.run(filename)

    if scenario.status != "skipped" and "content_type" in scenario.tags:
        with context.app.app_context():
            cmd = AppPopulateCommand()
            filename = os.path.join(
                os.path.abspath(os.path.dirname("features/steps/fixtures/")),
                "content_types.json")
            cmd.run(filename)

    if scenario.status != "skipped" and "notification" in scenario.tags:
        tests.setup_notification(context)

    if scenario.status != "skipped" and "app_init" in scenario.tags:
        with context.app.app_context():
            command = AppInitializeWithDataCommand()
            command.run()
Example #11
    def setUp(self):
        self.init_data()

        self.app.data.insert('users', self.users)
        self.app.data.insert('desks', self.desks)
        self.app.data.insert('products', self.products)
        self.app.data.insert('subscribers', self.subscribers)
        self.app.data.insert(ARCHIVE, self.articles)

        self.article_versions = self._init_article_versions()

        with tempfile.TemporaryDirectory() as tmp:
            json_data = [
                {
                    "_id": "kill_text",
                    "act": "kill",
                    "type": "text",
                    "schema": {
                        "headline": {
                            "type": "string"
                        }
                    }
                },
                {
                    "_id": "publish_text",
                    "act": "publish",
                    "type": "text",
                    "schema": {}
                },
                {
                    "_id": "correct_text",
                    "act": "correct",
                    "type": "text",
                    "schema": {}
                },
                {
                    "_id": "publish_composite",
                    "act": "publish",
                    "type": "composite",
                    "schema": {}
                },
            ]

            filename = os.path.join(tmp, 'validators.json')
            with open(filename, 'w') as file:
                json.dump(json_data, file)

            init_app(self.app)
            AppPopulateCommand().run(filename)

        self.app.media.url_for_media = MagicMock(return_value='url_for_media')
        self._put = self.app.media.put
        self.app.media.put = MagicMock(return_value='media_id')
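The setUp above stores the original self.app.media.put in self._put before replacing it with a MagicMock; a matching tearDown (assumed here, it is not part of the snippet) would restore it so the mock does not leak into other tests:

    def tearDown(self):
        # hypothetical companion to the setUp above: restore the saved media.put
        self.app.media.put = self._put
        super().tearDown()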
Example #12
    def test_get_subscribers_by_filter_condition(self):
        filter_condition1 = {
            "field": "urgency",
            "operator": "in",
            "value": "2"
        }
        filter_condition2 = {
            "field": "urgency",
            "operator": "in",
            "value": "1"
        }
        filter_condition3 = {
            "field": "headline",
            "operator": "like",
            "value": "tor"
        }
        filter_condition4 = {
            "field": "urgency",
            "operator": "nin",
            "value": "3"
        }

        with self.app.app_context():
            cmd = AppPopulateCommand()
            filename = os.path.join(
                os.path.abspath(
                    os.path.dirname(
                        "apps/prepopulate/data_init/vocabularies.json")),
                "vocabularies.json")
            cmd.run(filename)
            r1 = self.s._get_subscribers_by_filter_condition(filter_condition1)
            r2 = self.s._get_subscribers_by_filter_condition(filter_condition2)
            r3 = self.s._get_subscribers_by_filter_condition(filter_condition3)
            r4 = self.s._get_subscribers_by_filter_condition(filter_condition4)
            self.assertTrue(len(r1[0]["selected_subscribers"]) == 1)
            self.assertTrue(len(r2[0]["selected_subscribers"]) == 0)
            self.assertTrue(len(r3[0]["selected_subscribers"]) == 2)
            self.assertTrue(len(r4[0]["selected_subscribers"]) == 1)
Example #13
    def test_get_subscribers_by_filter_condition(self):
        filter_condition1 = {
            'field': 'urgency',
            'operator': 'in',
            'value': '2'
        }
        filter_condition2 = {
            'field': 'urgency',
            'operator': 'in',
            'value': '1'
        }
        filter_condition3 = {
            'field': 'headline',
            'operator': 'like',
            'value': 'tor'
        }
        filter_condition4 = {
            'field': 'urgency',
            'operator': 'nin',
            'value': '3'
        }

        with self.app.app_context():
            cmd = AppPopulateCommand()
            filename = os.path.join(
                os.path.abspath(
                    os.path.dirname(
                        "apps/prepopulate/data_init/vocabularies.json")),
                "vocabularies.json")
            cmd.run(filename)
            r1 = self.s._get_subscribers_by_filter_condition(filter_condition1)
            r2 = self.s._get_subscribers_by_filter_condition(filter_condition2)
            r3 = self.s._get_subscribers_by_filter_condition(filter_condition3)
            r4 = self.s._get_subscribers_by_filter_condition(filter_condition4)
            self.assertTrue(len(r1[0]['selected_subscribers']) == 1)
            self.assertTrue(len(r2[0]['selected_subscribers']) == 0)
            self.assertTrue(len(r3[0]['selected_subscribers']) == 2)
            self.assertTrue(len(r4[0]['selected_subscribers']) == 1)
Example #14
    def setUp(self):
        super().setUp()
        # we need to prepopulate vocabularies to get qcodes
        voc_file = os.path.join(
            os.path.abspath(os.path.dirname(os.path.dirname(ntb.__file__))),
            'data', 'vocabularies.json')
        AppPopulateCommand().run(voc_file)

        # settings are needed in order to take NITF_MAPPING into account
        for key in dir(settings):
            if key.isupper():
                setattr(config, key, getattr(settings, key))

        self._run_parse()
Example #15
 def setUp(self):
     super().setUp()
     # load vocabularies
     with self.app.app_context():
         voc_file = os.path.join(
             os.path.abspath(os.path.dirname(os.path.dirname(
                 anp.__file__))), 'data', 'vocabularies.json')
         AppPopulateCommand().run(voc_file)
     # load fixture
     dirname = os.path.dirname(os.path.realpath(__file__))
     fixture_path = os.path.normpath(
         os.path.join(dirname, '../fixtures', self.filename))
     with open(fixture_path, 'r') as f:
         self.article = json.loads(f.read())['data']
     # parse article
     parser = ANPNewsApiFeedParser()
     provider = {'name': 'test'}
     self.item = parser.parse(self.article, provider)
Example #16
    def test_targeted_for_includes_digital_subscribers(self):
        AppPopulateCommand().run(self.filename)
        updates = {
            'target_regions': [{
                'qcode': 'NSW',
                'name': 'New South Wales',
                'allow': True
            }]
        }
        doc_id = self.articles[5][config.ID_FIELD]
        get_resource_service(ARCHIVE).patch(id=doc_id, updates=updates)

        get_resource_service(ARCHIVE_PUBLISH).patch(
            id=doc_id, updates={ITEM_STATE: CONTENT_STATE.PUBLISHED})
        enqueue_published()
        queue_items = self.app.data.find(PUBLISH_QUEUE, None, None)
        self.assertEqual(6, queue_items.count())
        expected_subscribers = ['1', '2', '3', '4', '5']
        for item in queue_items:
            self.assertIn(item["subscriber_id"], expected_subscribers,
                          'item {}'.format(item))
Example #17
    def setUp(self):
        super().setUp()

        with self.app.app_context():
            # prepopulate vocabularies
            voc_file = os.path.join(
                os.path.abspath(os.path.dirname(os.path.dirname(
                    ntb.__file__))), 'data', 'vocabularies.json')
            AppPopulateCommand().run(voc_file)
            # by default events resource is not available
            init_events_app(self.app)

        # NTBEventsApiFeedingService makes 4 requests during a single update;
        # self._side_effect is used to mock requests.get returning a different result each time
        self.feeds = []
        for i in range(4):
            dirname = os.path.dirname(os.path.realpath(__file__))
            fixture = os.path.normpath(
                os.path.join(dirname, '../fixtures', 'ntb_events_api',
                             '{}.xml'.format(i)))

            with open(fixture, 'rb') as f:
                self.feeds.append(f.read())
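The comment in setUp says the service makes four requests per update and that self._side_effect hands back a different fixture each time; a minimal sketch of such a helper, assuming it lives on the same test class and is passed as side_effect to a mock of requests.get (the attribute name and response shape are assumptions, only the self.feeds payloads come from the setUp above):

    def _side_effect(self, *args, **kwargs):
        # hypothetical helper: wrap the next canned XML feed from self.feeds
        # in a minimal fake response object (requires `from unittest import mock`)
        response = mock.MagicMock()
        response.ok = True
        response.content = self.feeds.pop(0)
        return response

A patch such as mock.patch('requests.get', side_effect=self._side_effect) would then hand each of the four feeds to the feeding service in order.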
Example #18
 def setUp(self):
     # we need to prepopulate vocabularies to get qcodes
     voc_file = os.path.join(
         os.path.abspath(os.path.dirname(os.path.dirname(belga.__file__))),
         'data', 'vocabularies.json')
     AppPopulateCommand().run(voc_file)