Example 1
def _get_field_values(self):
     values = {}
     vocabularies_resource = get_resource_service('vocabularies')
     values['anpa_category'] = vocabularies_resource.find_one(req=None, _id='categories')['items']
     req = ParsedRequest()
     req.where = json.dumps({'$or': [{"schema_field": "genre"}, {"_id": "genre"}]})
     genre = vocabularies_resource.get(req=req, lookup=None)
     if genre.count():
         values['genre'] = genre[0]['items']
     values['urgency'] = vocabularies_resource.find_one(req=None, _id='urgency')['items']
     values['priority'] = vocabularies_resource.find_one(req=None, _id='priority')['items']
     values['type'] = vocabularies_resource.find_one(req=None, _id='type')['items']
     subject = vocabularies_resource.find_one(req=None, schema_field='subject')
     if subject:
         values['subject'] = subject['items']
     else:
         values['subject'] = get_subjectcodeitems()
     values['desk'] = list(get_resource_service('desks').get(None, {}))
     values['stage'] = self._get_stage_field_values(values['desk'])
     values['sms'] = [{'qcode': 0, 'name': 'False'}, {'qcode': 1, 'name': 'True'}]
     values['embargo'] = [{'qcode': 0, 'name': 'False'}, {'qcode': 1, 'name': 'True'}]
     req = ParsedRequest()
     req.where = json.dumps({'$or': [{"schema_field": "place"}, {"_id": "place"}, {"_id": "locators"}]})
     place = vocabularies_resource.get(req=req, lookup=None)
     if place.count():
         values['place'] = place[0]['items']
     values['ingest_provider'] = list(get_resource_service('ingest_providers').get(None, {}))
     return values
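The idiom above recurs throughout this listing: build an eve.utils.ParsedRequest, JSON-encode a Mongo query into req.where, and hand it to a resource service. A minimal sketch of the same pattern, assuming it runs inside a Superdesk application context (the 'vocabularies' service name is taken from the snippet above):

import json

from eve.utils import ParsedRequest
from superdesk import get_resource_service

def find_vocabulary_items(vocab_id):
    # Match a CV either by schema_field or by _id, as the snippet above does for genre.
    req = ParsedRequest()
    req.where = json.dumps({'$or': [{'schema_field': vocab_id}, {'_id': vocab_id}]})
    cursor = get_resource_service('vocabularies').get(req=req, lookup=None)
    return cursor[0]['items'] if cursor.count() else []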
Example 2
    def get(self, req, lookup):
        """Filter out personal activity on personal items if inquired by another user."""
        if req is None:
            req = ParsedRequest()
        user = getattr(g, 'user', None)
        if not user:
            raise SuperdeskApiError.notFoundError('Can not determine user')
        where_cond = {}
        if req.where:
            if req.where[0] != '{':
                req.where = '{' + req.where + '}'
            where_cond = json.loads(req.where)
        for_user = where_cond.get('user', str(user.get('_id')))
        if for_user != str(user.get('_id')):
            where_item = {
                '$and': [{
                    'desk': {
                        '$ne': None
                    }
                }, {
                    'desk': {
                        '$exists': True
                    }
                }, {
                    'resource': 'archive'
                }]
            }
            where_cond['$or'] = [where_item, {'resource': {'$ne': 'archive'}}]
            req.where = json.dumps(where_cond)

        return self.backend.get(self.datasource, req=req, lookup=lookup)
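Note that req.where may arrive as a bare '"key": value' fragment rather than a full JSON object; the wrapping trick above normalizes it before json.loads. A runnable sketch of just that step:

import json

def normalize_where(where):
    # Wrap a bare '"user": "abc"' fragment into a JSON object before parsing.
    if where and where[0] != '{':
        where = '{' + where + '}'
    return json.loads(where) if where else {}

assert normalize_where('"user": "abc"') == {'user': 'abc'}
assert normalize_where('{"user": "abc"}') == {'user': 'abc'}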
Example 3
    def get(self, req, lookup):
        """Filter out personal activity on personal items if inquired by another user."""
        if req is None:
            req = ParsedRequest()
        user = getattr(g, "user", None)
        if not user:
            raise SuperdeskApiError.notFoundError("Can not determine user")
        where_cond = {}
        if req.where:
            if req.where[0] != "{":
                req.where = "{" + req.where + "}"
            where_cond = json.loads(req.where)
        for_user = where_cond.get("user", str(user.get("_id")))
        if for_user != str(user.get("_id")):
            where_item = {
                "$and": [{
                    "desk": {
                        "$ne": None
                    }
                }, {
                    "desk": {
                        "$exists": True
                    }
                }, {
                    "resource": "archive"
                }]
            }
            where_cond["$or"] = [where_item, {"resource": {"$ne": "archive"}}]
            req.where = json.dumps(where_cond)

        return self.backend.get(self.datasource, req=req, lookup=lookup)
Example 4
 def _get_field_values(self):
     values = {}
     vocabularies_resource = get_resource_service("vocabularies")
     categories_cv = vocabularies_resource.find_one(req=None, _id="categories")
     values["anpa_category"] = categories_cv.get("items") if categories_cv else []
     req = ParsedRequest()
     req.where = json.dumps({"$or": [{"schema_field": "genre"}, {"_id": "genre"}]})
     genre = vocabularies_resource.get(req=req, lookup=None)
     if genre.count():
         values["genre"] = genre[0]["items"]
     for voc_id in ("urgency", "priority", "type"):
         try:
             values[voc_id] = vocabularies_resource.find_one(req=None, _id=voc_id)["items"]
         except TypeError:
             values[voc_id] = []
     subject = vocabularies_resource.find_one(req=None, schema_field="subject")
     if subject:
         values["subject"] = subject["items"]
     else:
         values["subject"] = get_subjectcodeitems()
     values["desk"] = list(get_resource_service("desks").get(None, {}))
     values["stage"] = self._get_stage_field_values(values["desk"])
     values["sms"] = [{"qcode": 0, "name": "False"}, {"qcode": 1, "name": "True"}]
     values["embargo"] = [{"qcode": 0, "name": "False"}, {"qcode": 1, "name": "True"}]
     req = ParsedRequest()
     req.where = json.dumps({"$or": [{"schema_field": "place"}, {"_id": "place"}, {"_id": "locators"}]})
     place = vocabularies_resource.get(req=req, lookup=None)
     if place.count():
         values["place"] = place[0]["items"]
     else:
         values["place"] = []
     values["ingest_provider"] = list(get_resource_service("ingest_providers").get(None, {}))
     values["featuremedia"] = [{"qcode": 1, "name": "True"}, {"qcode": 0, "name": "False"}]
     return values
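The try/except around each find_one exists because the service returns None when no vocabulary matches, and subscripting None raises TypeError. The guard in isolation, as a runnable sketch:

def items_or_empty(doc):
    # find_one() yields None when nothing matches; treat that as an empty vocabulary.
    try:
        return doc['items']
    except TypeError:
        return []

assert items_or_empty({'items': ['a']}) == ['a']
assert items_or_empty(None) == []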
Example 6
    def get_items(self, now):
        """Get the items from the archive collection that have expiry in future
        and state is published, corrected, killed

        :param datetime now: current date time
        :return list: list of expired items
        """
        logger.info('Fetching expired items from archive collection.')
        now = now + timedelta(minutes=self.expiry_minutes)

        query = {
            'expiry': {
                '$gte': date_to_str(now)
            },
            ITEM_STATE: {
                '$in': [
                    CONTENT_STATE.PUBLISHED, CONTENT_STATE.CORRECTED,
                    CONTENT_STATE.KILLED, CONTENT_STATE.RECALLED
                ]
            }
        }

        req = ParsedRequest()
        req.sort = '[("unique_id", 1)]'
        req.where = json.dumps(query)
        cursor = get_resource_service(ARCHIVE).get_from_mongo(req=req,
                                                              lookup=None)
        count = cursor.count()
        no_of_pages = 0
        if count:
            no_of_pages = len(range(0, count, self.default_page_size))
            unique_id = cursor[0]['unique_id']
            logger.info('Number of items to modify: {}, pages={}'.format(
                count, no_of_pages))
        else:
            logger.info('No items to modify.')

        for page in range(0, no_of_pages):
            logger.info(
                'Fetching items for page number: {} unique_id: {}'.format(
                    (page + 1), unique_id))
            req = ParsedRequest()
            req.sort = '[("unique_id", 1)]'
            if page == 0:
                query['unique_id'] = {'$gte': unique_id}
            else:
                query['unique_id'] = {'$gt': unique_id}

            req.where = json.dumps(query)
            req.max_results = self.default_page_size
            cursor = get_resource_service(ARCHIVE).get_from_mongo(req=req,
                                                                  lookup=None)
            items = list(cursor)
            if len(items) > 0:
                unique_id = items[len(items) - 1]['unique_id']

            logger.info('Fetched No. of Items: {} for page: {}'.format(
                len(items), (page + 1)))
            yield items
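Both get_items variants page with a keyset: remember the last unique_id, query $gte on the first page and $gt afterwards, so batches stay stable even while documents are being written. The same logic over an in-memory list, as a sketch:

def keyset_pages(rows, page_size):
    # rows are sorted by 'unique_id', mirroring req.sort = '[("unique_id", 1)]'.
    last_id = None
    while True:
        if last_id is None:
            batch = rows[:page_size]                                           # first page: $gte
        else:
            batch = [r for r in rows if r['unique_id'] > last_id][:page_size]  # later pages: $gt
        if not batch:
            break
        last_id = batch[-1]['unique_id']
        yield batch

rows = [{'unique_id': i} for i in range(1, 8)]
assert [len(b) for b in keyset_pages(rows, 3)] == [3, 3, 1]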
Example 7
    def get_publish_queue_items(self, page_size, expired_items=None):
        """Get publish queue items that are not moved to legal

        :param int page_size: batch size
        :param list expired_items:
        :return list: publish queue items
        """
        if expired_items is None:
            expired_items = []

        query = {"moved_to_legal": False}

        if expired_items:
            query["item_id"] = {"$in": expired_items}
        else:
            query["state"] = {
                "$in": [
                    QueueState.SUCCESS.value, QueueState.CANCELED.value,
                    QueueState.FAILED.value
                ]
            }

        service = get_resource_service("publish_queue")
        req = ParsedRequest()
        req.sort = '[("_id", 1)]'
        req.where = json.dumps(query)
        cursor = service.get(req=req, lookup=None)
        count = cursor.count()
        no_of_pages = 0
        if count:
            no_of_pages = len(range(0, count, page_size))
            queue_id = cursor[0][config.ID_FIELD]
        logger.info(
            "Number of items to move to legal archive publish queue: {}, pages={}"
            .format(count, no_of_pages))

        for page in range(0, no_of_pages):
            logger.info("Fetching publish queue items "
                        "for page number: {}. queue_id: {}".format((page + 1),
                                                                   queue_id))
            req = ParsedRequest()
            req.sort = '[("_id", 1)]'
            query["_id"] = {"$gte": str(queue_id)}
            req.where = json.dumps(query)
            req.max_results = page_size
            cursor = service.get(req=req, lookup=None)
            items = list(cursor)
            if len(items) > 0:
                queue_id = items[len(items) - 1][config.ID_FIELD]
            logger.info("Fetched No. of Items: {} for page: {} "
                        "For import in to legal archive publish_queue.".format(
                            len(items), (page + 1)))
            yield items
Example 8
    def get_items(self, now):
        """Get the items from the archive collection that have expiry in future
        and state is published, corrected, killed

        :param datetime now: current date time
        :return list: list of expired items
        """
        logger.info('Fetching expired items from archive collection.')
        now = now + timedelta(minutes=self.expiry_minutes)

        query = {
            'expiry': {'$gte': date_to_str(now)},
            ITEM_STATE: {'$in': [
                CONTENT_STATE.PUBLISHED,
                CONTENT_STATE.CORRECTED,
                CONTENT_STATE.KILLED,
                CONTENT_STATE.RECALLED
            ]}
        }

        req = ParsedRequest()
        req.sort = '[("unique_id", 1)]'
        req.where = json.dumps(query)
        cursor = get_resource_service(ARCHIVE).get_from_mongo(req=req, lookup=None)
        count = cursor.count()
        no_of_pages = 0
        if count:
            no_of_pages = len(range(0, count, self.default_page_size))
            unique_id = cursor[0]['unique_id']
            logger.info('Number of items to modify: {}, pages={}'.format(count, no_of_pages))
        else:
            logger.info('No items to modify.')

        for page in range(0, no_of_pages):
            logger.info('Fetching items for page number: {} unique_id: {}'.format((page + 1), unique_id))
            req = ParsedRequest()
            req.sort = '[("unique_id", 1)]'
            if page == 0:
                query['unique_id'] = {'$gte': unique_id}
            else:
                query['unique_id'] = {'$gt': unique_id}

            req.where = json.dumps(query)
            req.max_results = self.default_page_size
            cursor = get_resource_service(ARCHIVE).get_from_mongo(req=req, lookup=None)
            items = list(cursor)
            if len(items) > 0:
                unique_id = items[len(items) - 1]['unique_id']

            logger.info('Fetched No. of Items: {} for page: {}'.format(len(items), (page + 1)))
            yield items
Example 9
    def get(self, req, lookup):
        """
        Overriding to pass user as a search parameter
        """
        session_user = str(get_user_id(required=True))
        if not req:
            req = ParsedRequest()

        if lookup:
            req.where = json.dumps({'$or': [{'is_global': True}, {'user': session_user}, lookup]})
        else:
            req.where = json.dumps({'$or': [{'is_global': True}, {'user': session_user}]})

        return super().get(req, lookup=None)
Example 10
    def _get_items(self):
        """Get items from the LegalArchive that belong to the COMMISSION desk

        :return list: list of legal archive content
        """
        logger.info('Fetching legal archive content for the {} desk'.format(
            self.default_desk))

        query = {'task.desk': self.default_desk, 'type': 'text'}
        req = ParsedRequest()
        req.where = json.dumps(query)

        legal_archive_service = get_resource_service(LEGAL_ARCHIVE_NAME)

        legal_items = list(
            legal_archive_service.get_from_mongo(req=req, lookup=None))

        if legal_items:
            logger.info(
                'Found {} items in the legal archive for the {} desk'.format(
                    len(legal_items), self.default_desk))
        else:
            logger.warning(
                'Failed to find any {} desk items in the legal archive'.format(
                    self.default_desk))
            legal_items = []

        return legal_items
Example 11
    def get(self, req, lookup):
        """Retrieve a list of items that match the filter criteria (if any)
        passed along the HTTP request.

        :param req: object representing the HTTP request
        :type req: `eve.utils.ParsedRequest`
        :param dict lookup: sub-resource lookup from the endpoint URL

        :return: database results cursor object
        :rtype: `pymongo.cursor.Cursor`
        """
        if req is None:
            req = ParsedRequest()

        request_params = req.args or {}
        allowed_params = ('q',)

        for param in request_params.keys():
            if param not in allowed_params:
                raise UnexpectedParameterError(
                    desc="Unexpected parameter ({})".format(param)
                )

        if 'q' in request_params:
            req.where = request_params['q']

        return super().get(req, lookup)
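The whitelist guard rejects any query parameter it does not recognize before touching the datasource. Extracted into a plain function (ValueError stands in here for Superdesk's UnexpectedParameterError), as a sketch:

def check_params(request_params, allowed=('q',)):
    # Reject anything outside the whitelist, mirroring the loop above.
    for param in request_params:
        if param not in allowed:
            raise ValueError('Unexpected parameter ({})'.format(param))

check_params({'q': '{"headline": "dog"}'})  # accepted
try:
    check_params({'page': '2'})
except ValueError as err:
    assert 'page' in str(err)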
Example 12
    def run(self, page_size=None):
        logger.info("Import to Legal Archive")
        lock_name = get_lock_id("legal_archive", "import_to_legal_archive")
        page_size = int(page_size) if page_size else self.default_page_size
        if not lock(lock_name, "", expire=1800):
            return
        try:
            legal_archive_import = LegalArchiveImport()
            publish_queue = get_resource_service("publish_queue")
            for items in self.get_expired_items(page_size):
                for item in items:
                    try:
                        legal_archive_import.upsert_into_legal_archive(item.get("item_id"))
                        req = ParsedRequest()
                        req.where = json.dumps({"item_id": item["item_id"]})
                        queue_items = list(publish_queue.get(req=req, lookup=None))
                        if queue_items:
                            try:
                                logger.info("Import to Legal Publish Queue")
                                legal_archive_import.process_queue_items(queue_items, True)
                            except Exception:
                                logger.exception(
                                    "Failed to import into legal publish queue "
                                    "via command {}.".format(item.get("item_id"))
                                )
                    except Exception:
                        logger.exception(
                            "Failed to import into legal archive via command {}.".format(item.get("item_id"))
                        )

        except Exception:
            logger.exception("Failed to import into legal archive.")
        finally:
            unlock(lock_name, "")
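The command body is wrapped in a named lock with a try/finally unlock, so a failed run releases the lock instead of blocking the next run until expiry. A skeleton of that pattern, mirroring the lock/unlock calls above (their exact signatures are taken on trust from the snippet):

import logging

from superdesk.lock import get_lock_id, lock, unlock

logger = logging.getLogger(__name__)

def run_guarded(task):
    lock_name = get_lock_id("legal_archive", "import_to_legal_archive")
    if not lock(lock_name, "", expire=1800):
        return  # another worker already holds the lock
    try:
        task()
    except Exception:
        logger.exception("Task failed.")
    finally:
        unlock(lock_name, "")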
Example 13
    def _get_items(self):
        """Get items from the LegalArchive that belong to the COMMISSION desk

        :return list: list of legal archive content
        """
        logger.info('Fetching legal archive content for the {} desk'.format(self.default_desk))

        query = {'task.desk': self.default_desk, 'type': 'text'}
        req = ParsedRequest()
        req.where = json.dumps(query)

        legal_archive_service = get_resource_service(LEGAL_ARCHIVE_NAME)

        legal_items = list(legal_archive_service.get_from_mongo(req=req, lookup=None))

        if legal_items:
            logger.info(
                'Found {} items in the legal archive for the {} desk'.format(
                    len(legal_items),
                    self.default_desk)
            )
        else:
            logger.warning('Failed to find any {} desk items in the legal archive'.format(self.default_desk))
            legal_items = []

        return legal_items
Example 14
    def get_history_items(self, last_id, gte, item_id, chunk_size=0):
        history_service = get_resource_service('archive_history')

        last_processed_id = last_id

        while True:
            req = ParsedRequest()
            req.sort = '[("_id", 1), ("version", 1)]'

            query = {'$and': []}

            if gte:
                query['$and'].append({'_created': {'$gte': date_to_str(gte)}})

            if item_id:
                query['$and'].append({'item_id': str(item_id)})

            if last_processed_id:
                query['$and'].append({'_id': {'$gt': str(last_processed_id)}})

            req.where = json.dumps(query)

            if chunk_size > 0:
                req.max_results = int(chunk_size)

            items = list(history_service.get(req=req, lookup=None))

            if len(items) < 1:
                break

            last_processed_id = items[-1][config.ID_FIELD]
            yield items
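Callers drain the generator batch by batch; each loop issues a fresh query keyed on the last _id seen. A hedged usage sketch (command, process and the one-week window are hypothetical):

from datetime import timedelta
from superdesk.utc import utcnow

# Hypothetical caller: walk last week's archive history in chunks of 500.
for batch in command.get_history_items(None, utcnow() - timedelta(days=7), None, chunk_size=500):
    for entry in batch:
        process(entry)  # process() is a stand-in for real handling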
Example 16
    def update_media_references(self, updates, original, published=False):
        """Update the media references collection.
         When item (media item or associated media) is updated or created,
         media_references are created. These media_references are updated to published state
         once the item is published.

        :param dict updates: Updates of the item
        :param dict original: Original item
        :param boolean published: True if publishing the item else False
        """
        item_id = original.get(config.ID_FIELD)
        references = {}
        if updates.get('renditions', original.get('renditions', {})):
            references = {
                item_id: updates.get('renditions', original.get('renditions', {}))
            }

        if original.get(ITEM_TYPE) not in MEDIA_TYPES:
            associations = updates.get(ASSOCIATIONS) or original.get(ASSOCIATIONS)
            if not associations:
                return

            references = {assoc.get(config.ID_FIELD): assoc.get('renditions')
                          for assoc in associations.values() if assoc and assoc.get('renditions')}

        if not references:
            return

        for assoc_id, renditions in references.items():
            associated_id = assoc_id if assoc_id != item_id else None
            for rendition in renditions.values():
                if not rendition.get('media'):
                    continue

                media = str(rendition.get('media'))
                reference = get_resource_service('media_references').find_one(req=None, item_id=item_id,
                                                                              media_id=media)
                if not reference:
                    try:
                        get_resource_service('media_references').post([{'item_id': item_id,
                                                                        'media_id': media,
                                                                        'associated_id': associated_id,
                                                                        'published': False}])
                    except Exception:
                        logger.exception('Failed to insert media reference item {} media {}'.format(item_id, media))

        # item is published
        if not published:
            return

        req = ParsedRequest()
        req.where = json.dumps({'item_id': item_id, 'published': False})
        refs = list(get_resource_service('media_references').get(req=req, lookup=None))
        for ref in refs:
            try:
                get_resource_service('media_references').patch(ref.get(config.ID_FIELD),
                                                               updates={'published': True})
            except Exception:
                logger.exception('Failed to update media '
                                 'reference item {} media {}'.format(ref.get("item_id"), ref.get("media_id")))
Example 17
    def test_search_capi(self):
        subscriber = {"_id": "sub1"}

        self.content_api.publish(
            {
                "_id": "foo",
                "guid": "foo",
                "type": "text",
                "anpa_category": [{"qcode": "i", "name": "International News"}],
                "headline": "Man bites dog",
            },
            [subscriber],
        )
        self.content_api.publish({"_id": "bar", "guid": "bar", "type": "text"}, [{"_id": "sub2"}])

        test = superdesk.get_resource_service("search_capi")
        req = ParsedRequest()
        req.args = MultiDict([("subscribers", "sub1")])
        resp = test.get(req=req, lookup=None)
        self.assertEqual(resp.count(), 1)

        resp = test.get(req=None, lookup=None)
        self.assertEqual(resp.count(), 2)

        req = ParsedRequest()
        req.args = MultiDict()
        req.where = '{"headline":"dog"}'
        resp = test.get(req=req, lookup=None)
        self.assertEqual(resp.count(), 1)
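req.args is a Werkzeug MultiDict, which is why the test constructs one instead of a plain dict: it supports repeated keys. A quick runnable illustration:

from werkzeug.datastructures import MultiDict

args = MultiDict([('subscribers', 'sub1'), ('subscribers', 'sub2')])
assert args['subscribers'] == 'sub1'                    # item access returns the first value
assert args.getlist('subscribers') == ['sub1', 'sub2']  # getlist returns all of them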
Example 18
    def get(self, req, lookup):
        """
        Overriding because the resource URL differs and user_id is part of the URL
        """

        req = ParsedRequest()
        req.where = json.dumps(lookup)
        return super().get(req, lookup=None)
Example 20
def test_find_where(data_layer):
    resource = 'musicians'
    sub_resource_lookup = None
    req = ParsedRequest()
    req.where = 'name == "Bill Evans"'
    req.max_results = 1
    results = data_layer.find(resource, req, sub_resource_lookup)
    assert len(results) == 1
    assert results[0]['name'] == 'Bill Evans'
Example 21
    def get(self, req, lookup):
        """
        Overriding because the resource URL differs and user_id is part of the URL
        """
        if not req:
            req = ParsedRequest()

        req.where = json.dumps({'$or': [lookup, {'is_global': True}]})
        return super().get(req, lookup=None)
Example 23
    def get_publish_queue_items(self, page_size, expired_items=None):
        """Get publish queue items that are not moved to legal

        :param int page_size: batch size
        :param list expired_items:
        :return list: publish queue items
        """
        if expired_items is None:  # avoid a shared mutable default argument
            expired_items = []

        query = {"moved_to_legal": False}

        if expired_items:
            query["item_id"] = {"$in": expired_items}
        else:
            query["state"] = {"$in": [QueueState.SUCCESS.value, QueueState.CANCELED.value, QueueState.FAILED.value]}

        service = get_resource_service("publish_queue")
        req = ParsedRequest()
        req.sort = '[("_id", 1)]'
        req.where = json.dumps(query)
        cursor = service.get(req=req, lookup=None)
        count = cursor.count()
        no_of_pages = 0
        if count:
            no_of_pages = len(range(0, count, page_size))
            queue_id = cursor[0][config.ID_FIELD]
        logger.info("Number of items to move to legal archive publish queue: {}, pages={}".format(count, no_of_pages))

        for page in range(0, no_of_pages):
            logger.info(
                "Fetching publish queue items " "for page number: {}. queue_id: {}".format((page + 1), queue_id)
            )
            req = ParsedRequest()
            req.sort = '[("_id", 1)]'
            query["_id"] = {"$gte": str(queue_id)}
            req.where = json.dumps(query)
            req.max_results = page_size
            cursor = service.get(req=req, lookup=None)
            items = list(cursor)
            if len(items) > 0:
                queue_id = items[len(items) - 1][config.ID_FIELD]
            logger.info(
                "Fetched No. of Items: {} for page: {} "
                "For import in to legal archive publish_queue.".format(len(items), (page + 1))
            )
            yield items
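The signature above takes a None default rather than expired_items=[] because a mutable default is created once and shared across every call. The pitfall in miniature, runnable:

def bad(acc=[]):       # one list object shared by every call
    acc.append(1)
    return acc

def good(acc=None):    # a fresh list per call
    acc = [] if acc is None else acc
    acc.append(1)
    return acc

assert bad() == [1] and bad() == [1, 1]   # state leaks between calls
assert good() == [1] and good() == [1]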
Example 25
    def test_import_into_legal_archive(self):
        archive_publish = get_resource_service('archive_publish')
        archive_correct = get_resource_service('archive_correct')
        legal_archive = get_resource_service('legal_archive')
        archive = get_resource_service('archive_publish')
        published = get_resource_service('published')
        publish_queue = get_resource_service('publish_queue')

        self.original_method = LegalArchiveImport.upsert_into_legal_archive
        LegalArchiveImport.upsert_into_legal_archive = MagicMock()

        for item in self.archive_items:
            archive_publish.patch(item['_id'], {'headline': 'publishing', 'abstract': 'publishing'})

        for item in self.archive_items:
            legal_item = legal_archive.find_one(req=None, _id=item['_id'])
            self.assertIsNone(legal_item, 'Item: {} is not none.'.format(item['_id']))

        archive_correct.patch(self.archive_items[1]['_id'], {'headline': 'correcting', 'abstract': 'correcting'})

        LegalArchiveImport.upsert_into_legal_archive = self.original_method
        self.class_under_test().run(1)

        # items are not expired
        for item in self.archive_items:
            legal_item = legal_archive.find_one(req=None, _id=item['_id'])
            self.assertIsNone(legal_item, 'Item: {} is not none.'.format(item['_id']))

        # expire the items
        for item in self.archive_items:
            original = archive.find_one(req=None, _id=item['_id'])
            archive.system_update(item['_id'], {'expiry': utcnow() - timedelta(minutes=30)}, original)
            published.update_published_items(item['_id'], 'expiry', utcnow() - timedelta(minutes=30))

        # run the command after expiry
        self.class_under_test().run(1)

        # items are expired
        for item in self.archive_items:
            legal_item = legal_archive.find_one(req=None, _id=item['_id'])
            self.assertEqual(legal_item['_id'], item['_id'], 'item {} not imported to legal'.format(item['_id']))

        # items are moved to legal
        for item in self.archive_items:
            published_items = list(published.get_other_published_items(item['_id']))
            for published_item in published_items:
                self.assertEqual(published_item['moved_to_legal'], True)

        # items are moved to legal publish queue
        for item in self.archive_items:
            req = ParsedRequest()
            req.where = json.dumps({'item_id': item['_id']})
            queue_items = list(publish_queue.get(req=req, lookup=None))
            self.assertGreaterEqual(len(queue_items), 1)
            for queue_item in queue_items:
                self.assertEqual(queue_item['moved_to_legal'], True)
Example 26
def internal_destination_auto_publish(item, **kwargs):
    """Auto publish the item using internal destination

    :param dict item: item to be published
    :param kwargs:
    :raises StopDuplication: to tell superdesk.internal_destination.handle_item_published
    to skip duplication, as duplication is handled by this method.
    """
    if item.get(ITEM_STATE) not in PUBLISH_STATES:
        raise InvalidStateTransitionError(message='Internal Destination auto publish macro can '
                                                  'only be called after publishing the item.')
    operation = item.get(ITEM_OPERATION)
    archive_action_service = get_resource_service(publish_services.get(operation))
    archive_service = get_resource_service('archive')
    extra_fields = [PUBLISH_SCHEDULE, SCHEDULE_SETTINGS]
    # if any macro is doing publishing then we need the duplicate item that was published earlier
    req = ParsedRequest()
    req.where = json.dumps({
        '$and': [
            {PROCESSED_FROM: item.get(config.ID_FIELD)},
            {'task.desk': str(item.get('task').get('desk'))}
        ]
    })
    req.max_results = 1
    overwrite_item = next((archive_service.get_from_mongo(req=req, lookup=None)), None)

    # keep publish_schedule and schedule_settings in updates so that state can be set to scheduled
    updates = {
        PUBLISH_SCHEDULE: item[PUBLISH_SCHEDULE],
        SCHEDULE_SETTINGS: item[SCHEDULE_SETTINGS]
    }
    if item.get(ITEM_STATE) == CONTENT_STATE.PUBLISHED or not overwrite_item:
        new_id = archive_service.duplicate_content(item, state='routed', extra_fields=extra_fields)
        updates[ITEM_STATE] = item.get(ITEM_STATE)
        updates[PROCESSED_FROM] = item[config.ID_FIELD]

        get_resource_service('archive_publish').patch(id=new_id, updates=updates)
    else:
        if overwrite_item:
            # get the schema fields
            schema_item = apply_schema(deepcopy(item))
            keys_to_delete = ['source', 'unique_id', 'unique_name', 'original_id',
                              'expiry', 'correction_sequence']
            # remove the keys
            archive_service.remove_after_copy(schema_item, delete_keys=keys_to_delete)
            # get the diff
            updates.update({key: val for key, val in schema_item.items()
                            if overwrite_item.get(key) != val and not key.startswith("_")})

            archive_action_service.patch(id=overwrite_item[config.ID_FIELD],
                                         updates=updates)

    # raise stop duplication on successful completion so that
    # internal destination superdesk.internal_destination.handle_item_published
    # will not duplicate the item.
    raise StopDuplication()
Example 27
    def test_import_into_legal_archive(self):
        archive_publish = get_resource_service("archive_publish")
        archive_correct = get_resource_service("archive_correct")
        legal_archive = get_resource_service("legal_archive")
        archive = get_resource_service("archive_publish")
        published = get_resource_service("published")
        publish_queue = get_resource_service("publish_queue")

        self.original_method = LegalArchiveImport.upsert_into_legal_archive
        LegalArchiveImport.upsert_into_legal_archive = MagicMock()

        for item in self.archive_items:
            archive_publish.patch(item["_id"], {"headline": "publishing", "abstract": "publishing"})

        for item in self.archive_items:
            legal_item = legal_archive.find_one(req=None, _id=item["_id"])
            self.assertIsNone(legal_item, "Item: {} is not none.".format(item["_id"]))

        archive_correct.patch(self.archive_items[1]["_id"], {"headline": "correcting", "abstract": "correcting"})

        LegalArchiveImport.upsert_into_legal_archive = self.original_method
        self.class_under_test().run(1)

        # items are not expired
        for item in self.archive_items:
            legal_item = legal_archive.find_one(req=None, _id=item["_id"])
            self.assertIsNone(legal_item, "Item: {} is not none.".format(item["_id"]))

        # expire the items
        for item in self.archive_items:
            original = archive.find_one(req=None, _id=item["_id"])
            archive.system_update(item["_id"], {"expiry": utcnow() - timedelta(minutes=30)}, original)
            published.update_published_items(item["_id"], "expiry", utcnow() - timedelta(minutes=30))

        # run the command after expiry
        self.class_under_test().run(1)

        # items are expired
        for item in self.archive_items:
            legal_item = legal_archive.find_one(req=None, _id=item["_id"])
            self.assertEqual(legal_item["_id"], item["_id"], "item {} not imported to legal".format(item["_id"]))

        # items are moved to legal
        for item in self.archive_items:
            published_items = list(published.get_other_published_items(item["_id"]))
            for published_item in published_items:
                self.assertEqual(published_item["moved_to_legal"], True)

        # items are moved to legal publish queue
        for item in self.archive_items:
            req = ParsedRequest()
            req.where = json.dumps({"item_id": item["_id"]})
            queue_items = list(publish_queue.get(req=req, lookup=None))
            self.assertGreaterEqual(len(queue_items), 1)
            for queue_item in queue_items:
                self.assertEqual(queue_item["moved_to_legal"], True)
Example 28
    def get(self, req, lookup):
        """Filter out personal activity on personal items if inquired by another user."""
        if req is None:
            req = ParsedRequest()
        user = getattr(g, 'user', None)
        if not user:
            raise SuperdeskApiError.notFoundError('Can not determine user')
        where_cond = {}
        if req.where:
            if req.where[0] != '{':
                req.where = '{' + req.where + '}'
            where_cond = json.loads(req.where)
        for_user = where_cond.get('user', str(user.get('_id')))
        if for_user != str(user.get('_id')):
            where_item = {'$and': [{'desk': {'$ne': None}}, {'desk': {'$exists': True}},
                                   {'resource': 'archive'}]}
            where_cond['$or'] = [where_item, {'resource': {'$ne': 'archive'}}]
            req.where = json.dumps(where_cond)

        return self.backend.get(self.datasource, req=req, lookup=lookup)
Example 29
    def get(self, req, lookup):
        """
        Overriding because the resource URL differs and user_id is part of the URL
        """

        req = ParsedRequest()
        req.where = json.dumps({'$or': [lookup, {'is_global': True}]})
        items = list(super().get(req, lookup=None))
        for item in items:
            item['filter'] = json.loads(item.get('filter'))

        return ListCursor(items)
Example 30
    def get(self, req, lookup):
        """
        Overriding because the resource URL differs and user_id is part of the URL
        """

        req = ParsedRequest()
        req.where = json.dumps({'$or': [lookup, {'is_global': True}]})
        items = list(super().get(req, lookup=None))
        for item in items:
            item['filter'] = decode_filter(item.get('filter'))

        return ListCursor(items)
Example 31
    def find(self, endpoint_name, where, max_results=0):
        """Find items for given endpoint using mongo query in python dict object.

        It handles request creation here so no need to do this in service.

        :param string endpoint_name
        :param dict where
        :param int max_results
        """
        req = ParsedRequest()
        req.where = MongoJSONEncoder().encode(where)
        req.max_results = max_results
        return self.get_from_mongo(endpoint_name, req, None)
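With this helper, callers pass a plain dict and skip the ParsedRequest boilerplate; MongoJSONEncoder rather than plain json.dumps presumably keeps ObjectId and datetime values encodable. A hedged usage sketch (service and the 'archive' endpoint are assumptions):

# Hypothetical usage: fetch up to 10 text items straight from mongo.
for item in service.find('archive', {'type': 'text'}, max_results=10):
    print(item['_id'])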
Example 33
    def get_expired_items(self,
                          expiry_datetime=None,
                          expiry_days=None,
                          max_results=None,
                          include_children=True):
        """Get the expired items.

        Returns a generator for the list of expired items, sorting by `_id` and returning `max_results` per iteration.

        :param datetime expiry_datetime: Expiry date/time used to retrieve the list of items, defaults to `utcnow()`
        :param int expiry_days: Number of days content expires, defaults to `CONTENT_API_EXPIRY_DAYS`
        :param int max_results: Maximum results to retrieve per iteration, defaults to `MAX_EXPIRY_QUERY_LIMIT`
        :param boolean include_children: Include only root item if False, otherwise include the entire item chain
        :return list: expired content_api items
        """

        if expiry_datetime is None:
            expiry_datetime = utcnow()

        if expiry_days is None:
            expiry_days = app.settings["CONTENT_API_EXPIRY_DAYS"]

        if max_results is None:
            max_results = app.settings["MAX_EXPIRY_QUERY_LIMIT"]

        last_id = None
        expire_at = date_to_str(expiry_datetime - timedelta(days=expiry_days))

        while True:
            query = {"$and": [{"_updated": {"$lte": expire_at}}]}

            if last_id is not None:
                query["$and"].append({"_id": {"$gt": last_id}})

            if not include_children:
                query["$and"].append({"ancestors": {"$exists": False}})

            req = ParsedRequest()
            req.sort = "_id"
            req.where = json.dumps(query)
            req.max_results = max_results

            items = list(self.get_from_mongo(req=req, lookup=None))

            if not items:
                break

            last_id = items[-1]["_id"]
            yield items
Example 34
    def get_expired_items(self, expiry_datetime, invalid_only=False):
        """Get the expired items.

        Where content state is not scheduled and the item matches given parameters

        :param datetime expiry_datetime: expiry datetime
        :param bool invalid_only: True only invalid items
        :return pymongo.cursor: expired non published items.
        """
        unique_id = 0

        while True:
            req = ParsedRequest()
            req.sort = 'unique_id'
            query = {
                '$and': [{
                    'expiry': {
                        '$lte': date_to_str(expiry_datetime)
                    }
                }, {
                    '$or': [{
                        'task.desk': {
                            '$ne': None
                        }
                    }, {
                        ITEM_STATE: CONTENT_STATE.SPIKED,
                        'task.desk': None
                    }]
                }]
            }

            query['$and'].append({'unique_id': {'$gt': unique_id}})

            if invalid_only:
                query['$and'].append({'expiry_status': 'invalid'})
            else:
                query['$and'].append({'expiry_status': {'$ne': 'invalid'}})

            req.where = json.dumps(query)

            req.max_results = config.MAX_EXPIRY_QUERY_LIMIT
            items = list(self.get_from_mongo(req=req, lookup=None))

            if not len(items):
                break

            unique_id = items[-1]['unique_id']
            yield items
Example 35
    def get_expired_items(self, expiry_datetime=None, expiry_days=None, max_results=None, include_children=True):
        """Get the expired items.

        Returns a generator for the list of expired items, sorting by `_id` and returning `max_results` per iteration.

        :param datetime expiry_datetime: Expiry date/time used to retrieve the list of items, defaults to `utcnow()`
        :param int expiry_days: Number of days content expires, defaults to `CONTENT_API_EXPIRY_DAYS`
        :param int max_results: Maximum results to retrieve per iteration, defaults to `MAX_EXPIRY_QUERY_LIMIT`
        :param boolean include_children: Include only root item if False, otherwise include the entire item chain
        :return list: expired content_api items
        """

        if expiry_datetime is None:
            expiry_datetime = utcnow()

        if expiry_days is None:
            expiry_days = app.settings['CONTENT_API_EXPIRY_DAYS']

        if max_results is None:
            max_results = app.settings['MAX_EXPIRY_QUERY_LIMIT']

        last_id = None
        expire_at = date_to_str(expiry_datetime - timedelta(days=expiry_days))

        while True:
            query = {'$and': [{'_updated': {'$lte': expire_at}}]}

            if last_id is not None:
                query['$and'].append({'_id': {'$gt': last_id}})

            if not include_children:
                query['$and'].append({'ancestors': {'$exists': False}})

            req = ParsedRequest()
            req.sort = '_id'
            req.where = json.dumps(query)
            req.max_results = max_results

            items = list(self.get_from_mongo(req=req, lookup=None))

            if not items:
                break

            last_id = items[-1]['_id']
            yield items
Example 36
def hook_on_updated(resource, updates, original):
    """ called after to run on update operation """
    config = get_config(resource)

    # couple of situations where we have to do nothing because
    # `PATH_BUILD_FROM` field has not changed
    if config['PATH_BUILD_FROM'] not in updates:
        return

    if updates[config['PATH_BUILD_FROM']] ==\
            original[config['PATH_BUILD_FROM']]:
        return

    # get all children that belong to the path of the document
    path = original[config['PATH']] +\
        original[config['PATH_BUILD_FROM']] +\
        config['SEPARATOR']

    r = ParsedRequest()
    r.where = dumps({config['PATH']: {'$regex': "^{}".format(path)}})
    r.max_results = 0
    children = current_app.data.find(resource, r, None)

    # Then update each child, where the old path has to be updated
    # with the right one.
    path_build_from = updates[config['PATH_BUILD_FROM']]
    level_path_parent = len(original[config['PATH']].split(".")) - 1\
        if original[config['PATH']] else 0

    for child in children:
        # build the new path using the old path as a first part
        # and the last part, only replacement of the field value changed
        old_path = child[config['PATH']].split(config['SEPARATOR']) or []
        new_path = config['SEPARATOR'].join(
            old_path[:level_path_parent] +
            [path_build_from] +
            old_path[level_path_parent+1:])

        # finally update the document
        current_app.data.update(
            resource,
            child[current_app.config['ID_FIELD']],
            {config['PATH']: new_path},
            child
        )
Example 37
    def get_desk():
        """
        Search for a desk on the system with the name "Copytakers"
        :return:
        """
        logger.info('Fetching the ObjectID for the desk "Copytakers".')
        query = {'name': 'Copytakers'}
        req = ParsedRequest()
        req.where = json.dumps(query)

        desk_service = get_resource_service('desks')
        desk_item = list(desk_service.get_from_mongo(req=req, lookup=None))
        if not desk_item:
            raise LookupError('Failed to find a desk called "Copytakers".')

        desk_id = desk_item[0]['_id']
        logger.info('ObjectID for the desk Copytakers is {}.'.format(desk_id))
        return desk_item[0]
Example 38
    def _get_future_events(self, event):
        """Utility method to get future events for the supplied event"""
        query = {
            '$and': [
                # All the events created from the same recurring rules
                {'recurrence_id': event['recurrence_id']},

                # Only future events
                {'dates.start': {'$gt': date_to_str(event['dates']['start'])}},

                # Except the provided event
                {'_id': {'$ne': event[config.ID_FIELD]}}
            ]
        }

        req = ParsedRequest()
        req.sort = '[("dates.start", 1)]'
        req.where = json.dumps(query)
        return self.get_from_mongo(req, {})
Example 39
    def _get_desk_id(self):
        """Returns the ObjectID of the desk

        :return str: The ObjectID for the desk provided, or None if the desk was not found
        """
        logger.info('Fetching the ObjectID for the desk {}.'.format(self.default_desk))
        query = {'name': self.default_desk}
        req = ParsedRequest()
        req.where = json.dumps(query)

        desk_service = get_resource_service('desks')
        desk_item = list(desk_service.get_from_mongo(req=req, lookup=None))
        if not desk_item:
            logger.error('Failed to find the ObjectID for the provided desk {}'.format(self.default_desk))
            return None

        desk_id = desk_item[0]['_id']
        logger.info('ObjectID for the desk {} is {}.'.format(self.default_desk, desk_id))
        return desk_id
Example 40
    def get_expired_items(self, expiry_datetime, invalid_only=False):
        """Get the expired items.

        Where content state is not scheduled and the item matches given parameters

        :param datetime expiry_datetime: expiry datetime
        :param bool invalid_only: True only invalid items
        :return pymongo.cursor: expired non published items.
        """
        unique_id = 0

        while True:
            req = ParsedRequest()
            req.sort = 'unique_id'
            query = {
                '$and': [
                    {'expiry': {'$lte': date_to_str(expiry_datetime)}},
                    {'$or': [
                        {'task.desk': {'$ne': None}},
                        {ITEM_STATE: CONTENT_STATE.SPIKED, 'task.desk': None}
                    ]}
                ]
            }

            query['$and'].append({'unique_id': {'$gt': unique_id}})

            if invalid_only:
                query['$and'].append({'expiry_status': 'invalid'})
            else:
                query['$and'].append({'expiry_status': {'$ne': 'invalid'}})

            req.where = json.dumps(query)

            req.max_results = config.MAX_EXPIRY_QUERY_LIMIT
            items = list(self.get_from_mongo(req=req, lookup=None))

            if not len(items):
                break

            unique_id = items[-1]['unique_id']
            yield items
Example 41
    def get_series(self, query, sort, max_results):
        page = 1

        while True:
            # Get the results from mongo
            req = ParsedRequest()
            req.sort = sort
            req.where = json.dumps(query)
            req.max_results = max_results
            req.page = page
            results = self.get_from_mongo(req=req, lookup=None)

            docs = list(results)
            if not docs:
                break

            page += 1

            # Yield the results for iteration by the caller
            for doc in docs:
                yield doc
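Unlike the keyset loops above, get_series pages by page number (req.page), which is simpler but can skip or repeat rows if documents are inserted mid-iteration. The same loop shape over a list, as a runnable sketch:

def pages(rows, max_results):
    page = 1
    while True:
        docs = rows[(page - 1) * max_results:page * max_results]
        if not docs:
            break
        page += 1
        yield from docs  # yield individual docs, as get_series does

assert list(pages(list(range(5)), 2)) == [0, 1, 2, 3, 4]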
Example 42
    def test_search_capi(self):
        subscriber = {'_id': 'sub1'}

        self.content_api.publish({'_id': 'foo', 'guid': 'foo', 'type': 'text',
                                  'anpa_category': [{'qcode': 'i', 'name': 'International News'}],
                                  'headline': 'Man bites dog'}, [subscriber])
        self.content_api.publish({'_id': 'bar', 'guid': 'bar', 'type': 'text'}, [{'_id': 'sub2'}])

        test = superdesk.get_resource_service('search_capi')
        req = ParsedRequest()
        req.args = MultiDict([('subscribers', 'sub1')])
        resp = test.get(req=req, lookup=None)
        self.assertEqual(resp.count(), 1)

        resp = test.get(req=None, lookup=None)
        self.assertEqual(resp.count(), 2)

        req = ParsedRequest()
        req.args = MultiDict()
        req.where = '{"headline":"dog"}'
        resp = test.get(req=req, lookup=None)
        self.assertEqual(resp.count(), 1)
Example 43
    def get(self, req, lookup):
        """Retrieve a list of items that match the filter criteria (if any)
        passed along the HTTP request.

        :param req: object representing the HTTP request
        :type req: `eve.utils.ParsedRequest`
        :param dict lookup: sub-resource lookup from the endpoint URL

        :return: database results cursor object
        :rtype: `pymongo.cursor.Cursor`
        """
        if req is None:
            req = ParsedRequest()

        allowed_params = (
            'q', 'start_date', 'end_date',
            'include_fields', 'exclude_fields'
        )
        self._check_for_unknown_params(req, whitelist=allowed_params)

        # set the "q" filter
        request_params = req.args or {}
        query_filter = {}

        if 'q' in request_params:
            # TODO: add validation for the "q" parameter when we define its
            # format and implement the corresponding actions
            query_filter = json.loads(request_params['q'])

        # set the date range filter
        start_date, end_date = self._get_date_range(request_params)
        date_filter = self._create_date_range_filter(start_date, end_date)
        query_filter.update(date_filter)

        req.where = json.dumps(query_filter)

        self._set_fields_filter(req)  # Eve's "projection"

        return super().get(req, lookup)
Example 45
 def _get_field_values(self):
     values = {}
     vocabularies_resource = get_resource_service("vocabularies")
     values["anpa_category"] = vocabularies_resource.find_one(req=None, _id="categories")["items"]
     req = ParsedRequest()
     req.where = json.dumps({"$or": [{"schema_field": "genre"}, {"_id": "genre"}]})
     genre = vocabularies_resource.get(req=req, lookup=None)
     if genre.count():
         values["genre"] = genre[0]["items"]
     values["urgency"] = vocabularies_resource.find_one(req=None, _id="urgency")["items"]
     values["priority"] = vocabularies_resource.find_one(req=None, _id="priority")["items"]
     values["type"] = vocabularies_resource.find_one(req=None, _id="type")["items"]
     subject = vocabularies_resource.find_one(req=None, schema_field="subject")
     if subject:
         values["subject"] = subject["items"]
     else:
         values["subject"] = get_subjectcodeitems()
     values["desk"] = list(get_resource_service("desks").get(None, {}))
     values["stage"] = self._get_stage_field_values(values["desk"])
     values["sms"] = [{"qcode": 0, "name": "False"}, {"qcode": 1, "name": "True"}]
     values["place"] = vocabularies_resource.find_one(req=None, _id="locators")["items"]
     return values
Example 46
    def _get_desk_id(self):
        """Returns the ObjectID of the desk

        :return str: The ObjectID for the desk provided, or None if the desk was not found
        """
        logger.info('Fetching the ObjectID for the desk {}.'.format(
            self.default_desk))
        query = {'name': self.default_desk}
        req = ParsedRequest()
        req.where = json.dumps(query)

        desk_service = get_resource_service('desks')
        desk_item = list(desk_service.get_from_mongo(req=req, lookup=None))
        if not desk_item:
            logger.error(
                'Failed to find the ObjectID for the provided desk {}'.format(
                    self.default_desk))
            return None

        desk_id = desk_item[0]['_id']
        logger.info('ObjectID for the desk {} is {}.'.format(
            self.default_desk, desk_id))
        return desk_id
Example 47
    def get(self, req, lookup):
        """
        Overriding to pass user as a search parameter
        """
        session_user = str(get_user_id(required=True))

        if not req:
            req = ParsedRequest()

        where = json.loads(req.where) if req.where else {}

        if lookup:
            where.update(lookup)

        if '$or' not in where:
            where['$or'] = []

        # Restrict the saved reports to either global or owned by current user
        where['$or'].extend([{'is_global': True}, {'user': session_user}])

        req.where = json.dumps(where)

        return super().get(req, lookup=None)
Example 48
    def get_recurring_timeline(self, selected):
        """Utility method to get all events in the series

        This splits up the series of events into 3 separate arrays.
        Historic: event.dates.end < utcnow()
        Past: event.dates.start < selected.dates.start (and not historic)
        Future: event.dates.start > selected.dates.start
        """
        historic = []
        past = []
        future = []

        selected_start = selected.get('dates', {}).get('start', utcnow())

        req = ParsedRequest()
        req.sort = '[("dates.start", 1)]'
        req.where = json.dumps({
            '$and': [{
                'recurrence_id': selected['recurrence_id']
            }, {
                '_id': {
                    '$ne': selected[config.ID_FIELD]
                }
            }]
        })

        for event in list(self.get_from_mongo(req, {})):
            end = event['dates']['end']
            start = event['dates']['start']
            if end < utcnow():
                historic.append(event)
            elif start < selected_start:
                past.append(event)
            elif start > selected_start:
                future.append(event)

        return historic, past, future
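The three-way split depends only on datetime comparisons, so it can be exercised in isolation; note that an event starting exactly at selected_start falls into none of the buckets, exactly as in the code above. A runnable sketch:

from datetime import datetime, timedelta, timezone

def classify(events, selected_start, now):
    # events are (start, end) pairs; buckets mirror get_recurring_timeline.
    historic, past, future = [], [], []
    for start, end in events:
        if end < now:
            historic.append((start, end))
        elif start < selected_start:
            past.append((start, end))
        elif start > selected_start:
            future.append((start, end))
    return historic, past, future

now = datetime.now(timezone.utc)
sel = now + timedelta(days=2)
events = [
    (now - timedelta(days=2), now - timedelta(days=1)),           # ended already -> historic
    (now + timedelta(days=1), now + timedelta(days=1, hours=2)),  # before selected -> past
    (sel + timedelta(days=1), sel + timedelta(days=1, hours=2)),  # after selected -> future
]
h, p, f = classify(events, sel, now)
assert (len(h), len(p), len(f)) == (1, 1, 1)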