def on_create(self, docs):
    """Archive on_create hook: prepare each incoming doc before insert.

    Runs the shared ``on_create_item`` handler for the whole batch, then
    per doc: records an initial (POST) document version, stamps times,
    sets the stage, and normalises task id fields to ``ObjectId``.

    :param docs: list of item dicts about to be created
    """
    on_create_item(docs)
    for doc in docs:
        # record the initial version entry for the archive collection
        resolve_document_version(doc, ARCHIVE, 'POST')
        self.update_times(doc)
        self.update_stage(doc)
        # task user/desk/stage ids may arrive as strings; store as ObjectId
        convert_task_attributes_to_objectId(doc)
def on_update(self, updates, original):
    """Publish-service on_update hook.

    Refreshes associated items on the original, validates the requested
    publish action, applies the standard publish-time field updates, then
    runs publish post-processing and marks referenced media as used.

    :param updates: dict of changed fields for this publish action
    :param original: the stored item being published
    """
    self._refresh_associated_items(original)
    self._validate(original, updates)
    self._set_updates(original, updates, updates.get(config.LAST_UPDATED, utcnow()))
    # normalise task user/desk/stage ids to ObjectId before persisting
    convert_task_attributes_to_objectId(updates)
    self._process_publish_updates(original, updates)
    self._mark_media_item_as_used(updates, original)
def on_update(self, updates, original):
    """Publish-service on_update hook (variant without media marking).

    Refreshes associated items on the original, validates the requested
    publish action, applies the standard publish-time field updates, then
    runs publish post-processing.

    :param updates: dict of changed fields for this publish action
    :param original: the stored item being published
    """
    self._refresh_associated_items(original)
    self._validate(original, updates)
    self._set_updates(original, updates, updates.get(config.LAST_UPDATED, utcnow()))
    # normalise task user/desk/stage ids to ObjectId before persisting
    convert_task_attributes_to_objectId(updates)
    self._process_publish_updates(original, updates)
def _move(self, archived_doc, doc):
    """Move ``archived_doc`` to the desk/stage given in ``doc['task']``.

    Routes the item via ``send_to``, adjusts state/operation/sign-off,
    records a PATCH version, persists the update, and emits move
    notifications.

    :param archived_doc: the stored archive item to move (mutated in place)
    :param doc: request payload carrying the target ``task.desk``/``task.stage``
    """
    archive_service = get_resource_service(ARCHIVE)
    original = deepcopy(archived_doc)
    user = get_user()
    send_to(doc=archived_doc,
            desk_id=doc.get('task', {}).get('desk'),
            stage_id=doc.get('task', {}).get('stage'),
            user_id=user.get(config.ID_FIELD))
    # published/scheduled/killed items keep their state; everything else
    # becomes "submitted" on move
    if archived_doc[ITEM_STATE] not in {CONTENT_STATE.PUBLISHED,
                                        CONTENT_STATE.SCHEDULED,
                                        CONTENT_STATE.KILLED}:
        archived_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
    archived_doc[ITEM_OPERATION] = ITEM_MOVE
    # set the change in desk type when content is moved.
    self.set_change_in_desk_type(archived_doc, original)
    archived_doc.pop(SIGN_OFF, None)
    set_sign_off(archived_doc, original=original)
    convert_task_attributes_to_objectId(archived_doc)
    resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)
    # the id must not be part of the update payload
    del archived_doc[config.ID_FIELD]
    archive_service.update(original[config.ID_FIELD], archived_doc, original)
    insert_into_versions(id_=original[config.ID_FIELD])
    push_item_move_notification(original, archived_doc)
    app.on_archive_item_updated(archived_doc, original, ITEM_MOVE)
def on_update(self, updates, original):
    """Publish-service on_update hook (variant with metadata transtyping).

    Refreshes associated items, validates the publish action, applies the
    standard publish-time field updates, transtypes metadata, then runs
    publish post-processing, marks referenced media as used and updates
    item references.

    :param updates: dict of changed fields for this publish action
    :param original: the stored item being published
    """
    self._refresh_associated_items(original)
    self._validate(original, updates)
    self._set_updates(original, updates, updates.get(config.LAST_UPDATED, utcnow()))
    # normalise task user/desk/stage ids to ObjectId before persisting
    convert_task_attributes_to_objectId(updates)
    transtype_metadata(updates, original)
    self._process_publish_updates(original, updates)
    self._mark_media_item_as_used(updates, original)
    update_refs(updates, original)
def move_content(self, id, doc):
    """Move the item identified by ``id`` to the desk/stage in ``doc['task']``.

    Validates that the item exists, that the move is not within the same
    stage, and that the workflow transition is allowed; then routes the
    item, adjusts state/operation/sign-off, records a PATCH version,
    persists the update, notifies, and applies on-stage rules/macros.

    :param id: guid/_id of the archive item to move
    :param doc: request payload carrying the target ``task.desk``/``task.stage``
    :return: the updated (moved) item dict
    :raises SuperdeskApiError: when the item is missing or the move targets
        the current stage
    :raises InvalidStateTransitionError: when the workflow forbids the move
    """
    archive_service = get_resource_service(ARCHIVE)
    archived_doc = archive_service.find_one(req=None, _id=id)
    if not archived_doc:
        # fixed error message (was: "Fail to found item with guid: %s")
        raise SuperdeskApiError.notFoundError(
            'Failed to find item with guid: %s' % id)
    current_stage_of_item = archived_doc.get('task', {}).get('stage')
    if current_stage_of_item and str(current_stage_of_item) == str(
            doc.get('task', {}).get('stage')):
        raise SuperdeskApiError.preconditionFailedError(
            message='Move is not allowed within the same stage.')
    if not is_workflow_state_transition_valid('submit_to_desk',
                                              archived_doc[ITEM_STATE]):
        raise InvalidStateTransitionError()
    original = deepcopy(archived_doc)
    user = get_user()
    send_to(doc=archived_doc,
            desk_id=doc.get('task', {}).get('desk'),
            stage_id=doc.get('task', {}).get('stage'),
            user_id=user.get(config.ID_FIELD))
    # published/scheduled/killed items keep their state; everything else
    # becomes "submitted" on move
    if archived_doc[ITEM_STATE] not in {CONTENT_STATE.PUBLISHED,
                                        CONTENT_STATE.SCHEDULED,
                                        CONTENT_STATE.KILLED}:
        archived_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
    archived_doc[ITEM_OPERATION] = ITEM_MOVE
    # set the change in desk type when content is moved.
    self.set_change_in_desk_type(archived_doc, original)
    archived_doc.pop(SIGN_OFF, None)
    set_sign_off(archived_doc, original=original)
    convert_task_attributes_to_objectId(archived_doc)
    resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)
    # the id must not be part of the update payload
    del archived_doc[config.ID_FIELD]
    archive_service.update(original[config.ID_FIELD], archived_doc, original)
    insert_into_versions(id_=original[config.ID_FIELD])
    push_content_notification([archived_doc, original])
    # finally apply any on stage rules/macros
    apply_onstage_rule(archived_doc, original[config.ID_FIELD])
    return archived_doc
def on_update(self, updates, original):
    """Tasks-service on_update hook.

    Stamps update times, refreshes workflow state when the item is
    assigned to a desk, and — when the stage changes — routes the item to
    the new stage, records a PATCH version and bumps the item version.

    :param updates: dict of changed fields
    :param original: the stored item being updated
    """
    self.update_times(updates)
    if is_assigned_to_a_desk(updates):
        self.__update_state(updates, original)
    new_stage_id = str(updates.get('task', {}).get('stage', ''))
    old_stage_id = str(original.get('task', {}).get('stage', ''))
    new_user_id = updates.get('task', {}).get('user', '')
    # only re-route and re-version when the stage actually changed
    if new_stage_id and new_stage_id != old_stage_id:
        updates[ITEM_OPERATION] = ITEM_SEND
        send_to(doc=original, update=updates, desk_id=None,
                stage_id=new_stage_id, user_id=new_user_id)
        resolve_document_version(updates, ARCHIVE, 'PATCH', original)
        convert_task_attributes_to_objectId(updates)
        update_version(updates, original)
def on_update(self, updates, original):
    """Publish-service on_update hook (variant preserving scheduled state).

    Same flow as the basic publish hook, but keeps the original item state
    when it is scheduled and the update does not touch ``pubstatus``.

    :param updates: dict of changed fields for this publish action
    :param original: the stored item being published
    """
    self._refresh_associated_items(original)
    self._validate(original, updates)
    self._set_updates(
        original,
        updates,
        updates.get(config.LAST_UPDATED, utcnow()),
        # keep "scheduled" state unless the update changes pubstatus
        preserve_state=original.get("state") in (CONTENT_STATE.SCHEDULED,) and "pubstatus" not in updates,
    )
    # normalise task user/desk/stage ids to ObjectId before persisting
    convert_task_attributes_to_objectId(updates)
    transtype_metadata(updates, original)
    self._process_publish_updates(original, updates)
    self._mark_media_item_as_used(updates, original)
    update_refs(updates, original)
def test_if_task_attributes_converted_to_objectid(self):
    """convert_task_attributes_to_objectId converts the user id to an
    ObjectId while leaving an existing ObjectId, a non-id string, an int
    and a None value untouched."""
    doc = {
        'task': {
            'user': '******',
            'desk': ObjectId("562435241d41c835d7b5fb5d"),
            'stage': 'test',
            'last_authoring_desk': 3245,
            'last_production_desk': None
        }
    }
    convert_task_attributes_to_objectId(doc)
    # user id string becomes an ObjectId
    self.assertIsInstance(doc['task']['user'], ObjectId)
    # already-converted / non-convertible values are unchanged
    self.assertEqual(doc['task']['desk'], ObjectId("562435241d41c835d7b5fb5d"))
    self.assertEqual(doc['task']['stage'], 'test')
    self.assertEqual(doc['task']['last_authoring_desk'], 3245)
    self.assertIsNone(doc['task']['last_production_desk'])
def test_if_task_attributes_converted_to_objectid(self):
    """The user id is coerced to ObjectId; other task values stay as given."""
    original_desk_id = ObjectId("562435241d41c835d7b5fb5d")
    task = {
        "user": "******",
        "desk": original_desk_id,
        "stage": "test",
        "last_authoring_desk": 3245,
        "last_production_desk": None,
    }
    doc = {"task": task}
    convert_task_attributes_to_objectId(doc)
    converted = doc["task"]
    self.assertIsInstance(converted["user"], ObjectId)
    self.assertEqual(converted["desk"], original_desk_id)
    self.assertEqual(converted["stage"], "test")
    self.assertEqual(converted["last_authoring_desk"], 3245)
    self.assertIsNone(converted["last_production_desk"])
def on_update(self, updates, original):
    """Base publish-service on_update hook (takes-package aware).

    Checks publication marking and state transition, validates
    take/schedule/embargo rules per publish type, runs schema validation
    on the merged item (and its package contents when it is a package),
    then applies publish-time field updates.

    :param updates: dict of changed fields for this publish action
    :param original: the stored item being published
    :raises PublishQueueError: when earlier takes are not yet published
    :raises SuperdeskApiError: for embargo/dateline violations on correct/kill
    :raises ValidationError: when item or package validation fails
    """
    self.raise_if_not_marked_for_publication(original)
    self.raise_if_invalid_state_transition(original)
    # merged view of the item as it would look after the update
    updated = original.copy()
    updated.update(updates)
    takes_package = self.takes_package_service.get_take_package(original)
    if self.publish_type == "publish":
        # validate if take can be published
        if takes_package and not self.takes_package_service.can_publish_take(
            takes_package, updates.get(SEQUENCE, original.get(SEQUENCE, 1))
        ):
            raise PublishQueueError.previous_take_not_published_error(
                Exception("Previous takes are not published.")
            )
        validate_schedule(updated.get("publish_schedule"),
                          takes_package.get(SEQUENCE, 1) if takes_package else 1)
        # embargo only applies to non-composite items on first publish
        if original[ITEM_TYPE] != CONTENT_TYPE.COMPOSITE and updates.get(EMBARGO):
            get_resource_service(ARCHIVE).validate_embargo(updated)
    if self.publish_type in ["correct", "kill"]:
        if updates.get(EMBARGO):
            raise SuperdeskApiError.badRequestError("Embargo can't be set after publishing")
        if updates.get("dateline"):
            raise SuperdeskApiError.badRequestError("Dateline can't be modified after publishing")
    validate_item = {"act": self.publish_type, "type": original["type"], "validate": updated}
    validation_errors = get_resource_service("validate").post([validate_item])
    if validation_errors[0]:
        raise ValidationError(validation_errors)
    # validate the package if it is one
    package_validation_errors = []
    self._validate_package_contents(original, takes_package, package_validation_errors)
    if len(package_validation_errors) > 0:
        raise ValidationError(package_validation_errors)
    self._set_updates(original, updates, updates.get(config.LAST_UPDATED, utcnow()))
    updates[ITEM_OPERATION] = ITEM_PUBLISH
    # normalise task user/desk/stage ids to ObjectId before persisting
    convert_task_attributes_to_objectId(updates)
def _move(self, archived_doc, doc):
    """Move ``archived_doc`` to the desk/stage given in ``doc['task']``.

    Routes the item via ``send_to``, adjusts state/operation/sign-off,
    records a PATCH version, persists the update, and pushes a move
    notification.

    :param archived_doc: the stored archive item to move (mutated in place)
    :param doc: request payload carrying the target ``task.desk``/``task.stage``
    """
    archive_service = get_resource_service(ARCHIVE)
    original = deepcopy(archived_doc)
    user = get_user()
    send_to(doc=archived_doc,
            desk_id=doc.get('task', {}).get('desk'),
            stage_id=doc.get('task', {}).get('stage'),
            user_id=user.get(config.ID_FIELD))
    # published/scheduled/killed items keep their state; everything else
    # becomes "submitted" on move
    if archived_doc[ITEM_STATE] not in {CONTENT_STATE.PUBLISHED, CONTENT_STATE.SCHEDULED, CONTENT_STATE.KILLED}:
        archived_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
    archived_doc[ITEM_OPERATION] = ITEM_MOVE
    # set the change in desk type when content is moved.
    self.set_change_in_desk_type(archived_doc, original)
    archived_doc.pop(SIGN_OFF, None)
    set_sign_off(archived_doc, original=original)
    convert_task_attributes_to_objectId(archived_doc)
    resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)
    # the id must not be part of the update payload
    del archived_doc[config.ID_FIELD]
    archive_service.update(original[config.ID_FIELD], archived_doc, original)
    insert_into_versions(id_=original[config.ID_FIELD])
    push_item_move_notification(original, archived_doc)
def _move(self, archived_doc, doc):
    """Move ``archived_doc`` to the desk/stage given in ``doc['task']``.

    Routes the item via ``send_to``, adjusts state/operation/sign-off,
    records a PATCH version, forces a fresh etag/versioncreated, fires the
    move signals, persists the update and emits notifications.

    :param archived_doc: the stored archive item to move (mutated in place)
    :param doc: request payload carrying the target ``task.desk``/``task.stage``
    """
    archive_service = get_resource_service(ARCHIVE)
    original = deepcopy(archived_doc)
    user = get_user()
    send_to(
        doc=archived_doc,
        desk_id=doc.get("task", {}).get("desk"),
        stage_id=doc.get("task", {}).get("stage"),
        user_id=user.get(config.ID_FIELD),
    )
    # terminal/published-like states survive a move; everything else
    # becomes "submitted"
    if archived_doc[ITEM_STATE] not in ({
        CONTENT_STATE.PUBLISHED,
        CONTENT_STATE.SCHEDULED,
        CONTENT_STATE.KILLED,
        CONTENT_STATE.RECALLED,
        CONTENT_STATE.CORRECTION,
    }):
        archived_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
    archived_doc[ITEM_OPERATION] = ITEM_MOVE
    # set the change in desk type when content is moved.
    self.set_change_in_desk_type(archived_doc, original)
    archived_doc.pop(SIGN_OFF, None)
    set_sign_off(archived_doc, original=original)
    convert_task_attributes_to_objectId(archived_doc)
    resolve_document_version(archived_doc, ARCHIVE, "PATCH", original)
    # the id must not be part of the update payload
    del archived_doc[config.ID_FIELD]
    del archived_doc[config.ETAG]  # force etag update
    archived_doc["versioncreated"] = utcnow()
    signals.item_move.send(self, item=archived_doc, original=original)
    archive_service.update(original[config.ID_FIELD], archived_doc, original)
    insert_into_versions(id_=original[config.ID_FIELD])
    push_item_move_notification(original, archived_doc)
    app.on_archive_item_updated(archived_doc, original, ITEM_MOVE)
    # make sure `item._id` is there in signal
    moved_item = archived_doc.copy()
    moved_item[config.ID_FIELD] = original[config.ID_FIELD]
    signals.item_moved.send(self, item=moved_item, original=original)
def move_content(self, id, doc):
    """Move the item identified by ``id`` to the desk/stage in ``doc['task']``.

    Validates that the item exists, that the move is not within the same
    stage, and that the workflow transition is allowed; then routes the
    item, adjusts state/operation/sign-off, records a PATCH version,
    persists the update, notifies, and applies on-stage rules/macros.

    :param id: guid/_id of the archive item to move
    :param doc: request payload carrying the target ``task.desk``/``task.stage``
    :return: the updated (moved) item dict
    :raises SuperdeskApiError: when the item is missing or the move targets
        the current stage
    :raises InvalidStateTransitionError: when the workflow forbids the move
    """
    archive_service = get_resource_service(ARCHIVE)
    archived_doc = archive_service.find_one(req=None, _id=id)
    if not archived_doc:
        # fixed error message (was: "Fail to found item with guid: %s")
        raise SuperdeskApiError.notFoundError('Failed to find item with guid: %s' % id)
    current_stage_of_item = archived_doc.get('task', {}).get('stage')
    if current_stage_of_item and str(current_stage_of_item) == str(doc.get('task', {}).get('stage')):
        raise SuperdeskApiError.preconditionFailedError(message='Move is not allowed within the same stage.')
    if not is_workflow_state_transition_valid('submit_to_desk', archived_doc[ITEM_STATE]):
        raise InvalidStateTransitionError()
    original = deepcopy(archived_doc)
    user = get_user()
    send_to(doc=archived_doc, desk_id=doc.get('task', {}).get('desk'),
            stage_id=doc.get('task', {}).get('stage'), user_id=user.get(config.ID_FIELD))
    # published/scheduled/killed items keep their state; everything else
    # becomes "submitted" on move
    if archived_doc[ITEM_STATE] not in {CONTENT_STATE.PUBLISHED, CONTENT_STATE.SCHEDULED, CONTENT_STATE.KILLED}:
        archived_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
    archived_doc[ITEM_OPERATION] = ITEM_MOVE
    # set the change in desk type when content is moved.
    self.set_change_in_desk_type(archived_doc, original)
    archived_doc.pop(SIGN_OFF, None)
    set_sign_off(archived_doc, original=original)
    convert_task_attributes_to_objectId(archived_doc)
    resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)
    # the id must not be part of the update payload
    del archived_doc[config.ID_FIELD]
    archive_service.update(original[config.ID_FIELD], archived_doc, original)
    insert_into_versions(id_=original[config.ID_FIELD])
    push_content_notification([archived_doc, original])
    # finally apply any on stage rules/macros
    apply_onstage_rule(archived_doc, original[config.ID_FIELD])
    return archived_doc
def on_update(self, updates, original):
    """Base publish-service on_update hook (takes-package aware).

    Checks publication marking and state transition, validates
    take/schedule/embargo rules per publish type, runs schema validation
    on the merged item (and its package contents when it is a package),
    then applies publish-time field updates.

    :param updates: dict of changed fields for this publish action
    :param original: the stored item being published
    :raises PublishQueueError: when earlier takes are not yet published
    :raises SuperdeskApiError: for embargo/dateline violations on correct/kill
    :raises ValidationError: when item or package validation fails
    """
    self.raise_if_not_marked_for_publication(original)
    self.raise_if_invalid_state_transition(original)
    # merged view of the item as it would look after the update
    updated = original.copy()
    updated.update(updates)
    takes_package = self.takes_package_service.get_take_package(original)
    if self.publish_type == 'publish':
        # validate if take can be published
        if takes_package and not self.takes_package_service.can_publish_take(
                takes_package, updates.get(SEQUENCE, original.get(SEQUENCE, 1))):
            raise PublishQueueError.previous_take_not_published_error(
                Exception("Previous takes are not published."))
        validate_schedule(updated.get('publish_schedule'),
                          takes_package.get(SEQUENCE, 1) if takes_package else 1)
        # embargo only applies to non-composite items on first publish
        if original[ITEM_TYPE] != CONTENT_TYPE.COMPOSITE and updates.get(EMBARGO):
            get_resource_service(ARCHIVE).validate_embargo(updated)
    if self.publish_type in ['correct', 'kill']:
        if updates.get(EMBARGO):
            raise SuperdeskApiError.badRequestError("Embargo can't be set after publishing")
        if updates.get('dateline'):
            raise SuperdeskApiError.badRequestError("Dateline can't be modified after publishing")
    validate_item = {'act': self.publish_type, 'type': original['type'], 'validate': updated}
    validation_errors = get_resource_service('validate').post([validate_item])
    if validation_errors[0]:
        raise ValidationError(validation_errors)
    # validate the package if it is one
    package_validation_errors = []
    self._validate_package_contents(original, takes_package, package_validation_errors)
    if len(package_validation_errors) > 0:
        raise ValidationError(package_validation_errors)
    self._set_updates(original, updates, updates.get(config.LAST_UPDATED, utcnow()))
    updates[ITEM_OPERATION] = ITEM_PUBLISH
    # normalise task user/desk/stage ids to ObjectId before persisting
    convert_task_attributes_to_objectId(updates)
def on_update(self, updates, original):
    """Publish-service on_update hook (minimal variant).

    Validates the requested publish action, applies the standard
    publish-time field updates, then runs publish post-processing.

    :param updates: dict of changed fields for this publish action
    :param original: the stored item being published
    """
    self._validate(original, updates)
    self._set_updates(original, updates, updates.get(config.LAST_UPDATED, utcnow()))
    # normalise task user/desk/stage ids to ObjectId before persisting
    convert_task_attributes_to_objectId(updates)
    self._process_publish_updates(original, updates)