def on_update(self, updates, original):
    """Pre-save hook applied before an item update is persisted.

    Runs, in order: refresh of associated items on the stored doc,
    validation of the incoming changes, stamping of update metadata,
    normalization of task ids, metadata transtyping, publish-specific
    processing, media-usage marking, and reference updates.

    NOTE(review): the statement order matters — validation runs against
    ``original``/``updates`` before ``_set_updates`` mutates ``updates``.
    NOTE(review): a sibling variant of this method also passes a
    ``preserve_state`` flag to ``_set_updates``; this one does not — confirm
    which version is current.

    :param updates: dict of changed fields (mutated in place)
    :param original: stored item the updates apply to
    """
    self._refresh_associated_items(original)
    self._validate(original, updates)
    # Use the client-supplied LAST_UPDATED when present, otherwise "now".
    self._set_updates(original, updates, updates.get(config.LAST_UPDATED, utcnow()))
    # Presumably converts task-related id strings to ObjectId for storage
    # (original comment here was just "# ???") — TODO confirm.
    convert_task_attributes_to_objectId(updates)
    transtype_metadata(updates, original)
    self._process_publish_updates(original, updates)
    self._mark_media_item_as_used(updates, original)
    update_refs(updates, original)
def on_update(self, updates, original):
    """Pre-save hook applied before an item update is persisted.

    Runs, in order: refresh of associated items on the stored doc,
    validation of the incoming changes, stamping of update metadata,
    normalization of task ids, metadata transtyping, publish-specific
    processing, media-usage marking, and reference updates.

    NOTE(review): statement order matters — validation sees the raw
    ``updates`` before ``_set_updates`` stamps metadata onto them.

    :param updates: dict of changed fields (mutated in place)
    :param original: stored item the updates apply to
    """
    self._refresh_associated_items(original)
    self._validate(original, updates)
    self._set_updates(
        original,
        updates,
        # Use the client-supplied LAST_UPDATED when present, otherwise "now".
        updates.get(config.LAST_UPDATED, utcnow()),
        # Keep the item's workflow state when it is scheduled and the update
        # does not touch "pubstatus" — presumably so an ordinary edit cannot
        # knock a scheduled item out of its scheduled state; TODO confirm.
        preserve_state=original.get("state") in (CONTENT_STATE.SCHEDULED,) and "pubstatus" not in updates,
    )
    # Presumably converts task-related id strings to ObjectId for storage
    # (original comment here was just "# ???") — TODO confirm.
    convert_task_attributes_to_objectId(updates)
    transtype_metadata(updates, original)
    self._process_publish_updates(original, updates)
    self._mark_media_item_as_used(updates, original)
    update_refs(updates, original)
def fetch(self, docs, id=None, **kwargs):
    """Fetch ingest items into the archive.

    For each doc: look up the ingest item, optionally run a macro on it,
    copy it into the archive on the requested desk/stage, mark the ingest
    item as archived, fetch any package/associated items, apply the desk's
    default content profile, persist + version the new archive item, and
    emit fetched/move notifications.

    :param docs: list of dicts with at least "desk" and "stage"; each is
        updated in place with the created archive doc
    :param id: optional ingest id used for every doc instead of the doc's
        own ID_FIELD
    :param kwargs: supports "macro_kwargs" (extra args for the macro) and
        "notify" (default True — send the item:fetch move notification)
    :return: list of ids of the newly created archive items
    :raises SuperdeskApiError: when the ingest item does not exist
    :raises InvalidStateTransitionError: when the ingest item's state does
        not allow the fetch_from_ingest transition
    """
    id_of_fetched_items = []
    for doc in docs:
        # Explicit ``id`` argument overrides the per-doc id.
        id_of_item_to_be_fetched = doc.get(config.ID_FIELD) if id is None else id
        desk_id = doc.get("desk")
        stage_id = doc.get("stage")
        ingest_service = get_resource_service("ingest")
        ingest_doc = ingest_service.find_one(req=None, _id=id_of_item_to_be_fetched)
        if not ingest_doc:
            raise SuperdeskApiError.notFoundError(
                _("Fail to found ingest item with _id: {id}").format(id=id_of_item_to_be_fetched)
            )
        if not is_workflow_state_transition_valid("fetch_from_ingest", ingest_doc[ITEM_STATE]):
            raise InvalidStateTransitionError()
        if doc.get("macro"):  # there is a macro so transform it
            macro_kwargs = kwargs.get("macro_kwargs") or {}
            ingest_doc = get_resource_service("macros").execute_macro(
                ingest_doc,
                doc.get("macro"),
                dest_desk_id=desk_id,
                dest_stage_id=stage_id,
                **macro_kwargs,
            )
        dest_doc = fetch_item(
            ingest_doc,
            desk_id,
            stage_id,
            # we might want to change state or target from the macro:
            # a macro-set state (anything other than the plain INGESTED
            # state) wins over the state requested in the doc.
            state=ingest_doc[ITEM_STATE]
            if ingest_doc.get(ITEM_STATE) and ingest_doc[ITEM_STATE] != CONTENT_STATE.INGESTED
            else doc.get(ITEM_STATE),
            target=ingest_doc.get("target", doc.get("target")),
        )
        id_of_fetched_items.append(dest_doc[config.ID_FIELD])
        # Stamp the ingest item with the archive item's creation time.
        ingest_service.patch(id_of_item_to_be_fetched, {"archived": dest_doc["versioncreated"]})
        # Link the new archive doc back to its ingest origin.
        dest_doc[FAMILY_ID] = ingest_doc[config.ID_FIELD]
        dest_doc[INGEST_ID] = self.__strip_version_from_guid(ingest_doc[GUID_FIELD], ingest_doc.get("version"))
        dest_doc[INGEST_VERSION] = ingest_doc.get("version")
        # Recursively fetch package members and associations with the same
        # target state (default FETCHED).
        self.__fetch_items_in_package(dest_doc, desk_id, stage_id, doc.get(ITEM_STATE, CONTENT_STATE.FETCHED))
        self.__fetch_associated_items(dest_doc, desk_id, stage_id, doc.get(ITEM_STATE, CONTENT_STATE.FETCHED))
        desk = get_resource_service("desks").find_one(req=None, _id=desk_id)
        if desk and desk.get("default_content_profile"):
            # Only fill in a profile when the item does not carry one.
            dest_doc.setdefault("profile", desk["default_content_profile"])
        if dest_doc.get("type", "text") in MEDIA_TYPES:
            # Media items never get a content profile.
            dest_doc["profile"] = None
        update_refs(dest_doc, {})
        get_resource_service(ARCHIVE).post([dest_doc])
        insert_into_versions(doc=dest_doc)
        build_custom_hateoas(custom_hateoas, dest_doc)
        superdesk.item_fetched.send(self, item=dest_doc, ingest_item=ingest_doc)
        # Reflect the created archive doc back into the request payload.
        doc.update(dest_doc)
        if kwargs.get("notify", True):
            ingest_doc.update({"task": dest_doc.get("task")})
            push_item_move_notification(ingest_doc, doc, "item:fetch")
    return id_of_fetched_items
def fetch(self, docs, id=None, **kwargs):
    """Fetch ingest items into the archive.

    For each doc: look up the ingest item, optionally run a macro on it,
    copy it into the archive on the requested desk/stage, mark the ingest
    item as archived, fetch any package/associated items, apply the desk's
    default content profile, persist + version the new archive item, and
    emit fetched/move notifications.

    NOTE(review): unlike a sibling variant of this method, here the new
    item's state/target come straight from ``doc`` and are not overridable
    by the macro's output — confirm which behavior is current.

    :param docs: list of dicts with at least 'desk' and 'stage'; each is
        updated in place with the created archive doc
    :param id: optional ingest id used for every doc instead of the doc's
        own ID_FIELD
    :param kwargs: supports 'macro_kwargs' (extra args for the macro) and
        'notify' (default True — send the item:fetch move notification)
    :return: list of ids of the newly created archive items
    :raises SuperdeskApiError: when the ingest item does not exist
    :raises InvalidStateTransitionError: when the ingest item's state does
        not allow the fetch_from_ingest transition
    """
    id_of_fetched_items = []
    for doc in docs:
        # Explicit ``id`` argument overrides the per-doc id.
        id_of_item_to_be_fetched = doc.get(config.ID_FIELD) if id is None else id
        desk_id = doc.get('desk')
        stage_id = doc.get('stage')
        ingest_service = get_resource_service('ingest')
        ingest_doc = ingest_service.find_one(req=None, _id=id_of_item_to_be_fetched)
        if not ingest_doc:
            raise SuperdeskApiError.notFoundError(
                _('Fail to found ingest item with _id: {id}').format(id=id_of_item_to_be_fetched))
        if not is_workflow_state_transition_valid('fetch_from_ingest', ingest_doc[ITEM_STATE]):
            raise InvalidStateTransitionError()
        if doc.get('macro'):  # there is a macro so transform it
            macro_kwargs = kwargs.get('macro_kwargs') or {}
            ingest_doc = get_resource_service('macros').execute_macro(
                ingest_doc,
                doc.get('macro'),
                dest_desk_id=desk_id,
                dest_stage_id=stage_id,
                **macro_kwargs,
            )
        dest_doc = fetch_item(ingest_doc, desk_id, stage_id, state=doc.get(ITEM_STATE), target=doc.get('target'))
        id_of_fetched_items.append(dest_doc[config.ID_FIELD])
        # Stamp the ingest item with the archive item's creation time.
        ingest_service.patch(id_of_item_to_be_fetched, {'archived': dest_doc['versioncreated']})
        # Link the new archive doc back to its ingest origin.
        dest_doc[FAMILY_ID] = ingest_doc[config.ID_FIELD]
        dest_doc[INGEST_ID] = self.__strip_version_from_guid(ingest_doc[GUID_FIELD], ingest_doc.get('version'))
        dest_doc[INGEST_VERSION] = ingest_doc.get('version')
        # Recursively fetch package members and associations with the same
        # target state (default FETCHED).
        self.__fetch_items_in_package(dest_doc, desk_id, stage_id, doc.get(ITEM_STATE, CONTENT_STATE.FETCHED))
        self.__fetch_associated_items(dest_doc, desk_id, stage_id, doc.get(ITEM_STATE, CONTENT_STATE.FETCHED))
        desk = get_resource_service('desks').find_one(req=None, _id=desk_id)
        if desk and desk.get('default_content_profile'):
            # Only fill in a profile when the item does not carry one.
            dest_doc.setdefault('profile', desk['default_content_profile'])
        if dest_doc.get('type', 'text') in MEDIA_TYPES:
            # Media items never get a content profile.
            dest_doc['profile'] = None
        update_refs(dest_doc, {})
        get_resource_service(ARCHIVE).post([dest_doc])
        insert_into_versions(doc=dest_doc)
        build_custom_hateoas(custom_hateoas, dest_doc)
        superdesk.item_fetched.send(self, item=dest_doc, ingest_item=ingest_doc)
        # Reflect the created archive doc back into the request payload.
        doc.update(dest_doc)
        if kwargs.get('notify', True):
            ingest_doc.update({'task': dest_doc.get('task')})
            push_item_move_notification(ingest_doc, doc, 'item:fetch')
    return id_of_fetched_items
def test_content_filtering(self):
    """Items endpoint filtering: where/q/urgency/item_source and the
    related_to / related_source association filters."""
    parent = {
        "guid": "u3",
        "type": "text",
        "source": "foo",
        "urgency": 3,
        "associations": {
            "one": {"guid": "u2", "type": "text", "source": "bar"},
            "two": {"guid": "u1", "type": "text", "source": "baz"},
        },
    }
    update_refs(parent, {})
    self.content_api.publish(parent, [self.subscriber])
    self.content_api.publish({"guid": "u2", "type": "text", "source": "bar", "urgency": 2}, [self.subscriber])
    headers = self._auth_headers()
    with self.capi.test_client() as client:

        def query(url, check_status=False):
            # Issue a GET and return the decoded JSON payload, optionally
            # asserting a 200 response first.
            resp = client.get(url, headers=headers)
            if check_status:
                self.assertEqual(200, resp.status_code)
            return json.loads(resp.data)

        # Mongo-style `where` filter.
        payload = query('items?where={"urgency":3}')
        self.assertEqual(1, payload["_meta"]["total"])
        self.assertEqual(3, payload["_items"][0]["urgency"])

        # Elastic query-string filter.
        payload = query("items?q=urgency:3")
        self.assertEqual(1, payload["_meta"]["total"])
        self.assertEqual(3, payload["_items"][0]["urgency"])

        # Direct field filter, scalar and list forms.
        payload = query("items?urgency=3")
        self.assertEqual(1, payload["_meta"]["total"])
        self.assertEqual(3, payload["_items"][0]["urgency"])

        payload = query("items?urgency=[3,2]")
        self.assertEqual(2, payload["_meta"]["total"])

        # Source filter, scalar and list forms.
        payload = query("items?item_source=foo")
        self.assertEqual(1, payload["_meta"]["total"])
        self.assertEqual("foo", payload["_items"][0]["source"])

        payload = query('items?item_source=["foo","bar"]')
        self.assertEqual(2, payload["_meta"]["total"])

        # Association filters: by related guid, hit and miss.
        payload = query("items?related_to=u2", check_status=True)
        self.assertEqual(1, payload["_meta"]["total"])

        payload = query("items?related_to=empty", check_status=True)
        self.assertEqual(0, payload["_meta"]["total"])

        # Association filters: by related item source, hit and miss.
        payload = query("items?related_source=bar", check_status=True)
        self.assertEqual(1, payload["_meta"]["total"])

        payload = query("items?related_source=empty", check_status=True)
        self.assertEqual(0, payload["_meta"]["total"])