def test_is_workflow_action_valid(self): superdesk.workflow_action( name="test_spike", exclude_states=["spiked", "published", "scheduled", "killed"], privileges=["spike"]) superdesk.workflow_action( name="test_on_hold", exclude_states=[ "spiked", "published", "scheduled", "killed", "on_hold" ], privileges=["on_hold"], ) self.assertTrue( superdesk.is_workflow_state_transition_valid( "test_spike", "in_progress")) self.assertFalse( superdesk.is_workflow_state_transition_valid( "test_spike", "spiked")) self.assertTrue( superdesk.is_workflow_state_transition_valid( "test_on_hold", "routed")) self.assertTrue( superdesk.is_workflow_state_transition_valid( "test_on_hold", "fetched")) self.assertFalse( superdesk.is_workflow_state_transition_valid( "test_on_hold", "published")) self.assertFalse( superdesk.is_workflow_state_transition_valid( "test_on_hold", "scheduled"))
def test_is_workflow_action_valid(self):
    superdesk.workflow_action(
        name='test_spike',
        exclude_states=['spiked', 'published', 'killed'],
        privileges=['spike'])
    superdesk.workflow_action(
        name='test_on_hold',
        exclude_states=['spiked', 'published', 'killed', 'on_hold'],
        privileges=['on_hold'])

    self.assertTrue(superdesk.is_workflow_state_transition_valid('test_spike', 'in_progress'))
    self.assertFalse(superdesk.is_workflow_state_transition_valid('test_spike', 'spiked'))
    self.assertTrue(superdesk.is_workflow_state_transition_valid('test_on_hold', 'routed'))
    self.assertTrue(superdesk.is_workflow_state_transition_valid('test_on_hold', 'fetched'))
    self.assertFalse(superdesk.is_workflow_state_transition_valid('test_on_hold', 'published'))
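# The tests above exercise the registry's include/exclude semantics. For
# orientation, a minimal sketch of how such a transition check can be evaluated
# against registered actions (a hypothetical standalone helper, not superdesk's
# actual implementation):
def is_transition_valid_sketch(actions, action_name, state):
    """Return True if `state` is an allowed source state for `action_name`."""
    for action in actions:
        if action.get('name') != action_name:
            continue
        if action.get('include_states'):
            # allow-list: only the listed states may transition
            return state in action['include_states']
        if action.get('exclude_states'):
            # deny-list: everything except the listed states may transition
            return state not in action['exclude_states']
        return True  # action registered without state constraints
    return False  # unknown actions never validate


# e.g. given the 'test_spike' registration above:
#   is_transition_valid_sketch(actions, 'test_spike', 'in_progress')  # True
#   is_transition_valid_sketch(actions, 'test_spike', 'spiked')       # False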
def test_action_registry(self):
    superdesk.workflow_action(
        name='include',
        include_states=['include'],
        privileges=['test-privilege']
    )
    actions = [action for action in superdesk.get_workflow_actions()
               if action['name'] == 'include']
    self.assertEqual(1, len(actions))
    self.assertIn('include', actions[0]['include_states'])
    self.assertIn('test-privilege', actions[0]['privileges'])
def test_action_registry(self): superdesk.workflow_action(name="include", include_states=["include"], privileges=["test-privilege"]) actions = [ action for action in superdesk.get_workflow_actions() if action["name"] == "include" ] self.assertEqual(1, len(actions)) self.assertIn("include", actions[0]["include_states"]) self.assertIn("test-privilege", actions[0]["privileges"])
def init_app(app):
    if app.config['WORKFLOW_ALLOW_COPY_TO_PERSONAL']:
        kwargs = dict(
            exclude_states=[
                CONTENT_STATE.SPIKED,
                CONTENT_STATE.KILLED,
                CONTENT_STATE.RECALLED,
            ],
        )
    else:
        kwargs = dict(include_states=[CONTENT_STATE.DRAFT])

    superdesk.workflow_action(
        name='copy',
        privileges=['archive'],
        **kwargs,
    )

    app.client_config['workflow_allow_copy_to_personal'] = app.config['WORKFLOW_ALLOW_COPY_TO_PERSONAL']
def init_app(app) -> None: if app.config["WORKFLOW_ALLOW_COPY_TO_PERSONAL"]: kwargs = dict(exclude_states=[ CONTENT_STATE.SPIKED, CONTENT_STATE.KILLED, CONTENT_STATE.RECALLED ], ) else: kwargs = dict(include_states=[CONTENT_STATE.DRAFT], ) superdesk.workflow_action( name="copy", privileges=["archive"], **kwargs, ) app.client_config["workflow_allow_copy_to_personal"] = app.config[ "WORKFLOW_ALLOW_COPY_TO_PERSONAL"]
def test_is_workflow_action_valid(self):
    superdesk.workflow_action(
        name='test_spike',
        exclude_states=['spiked', 'published', 'killed'],
        privileges=['spike']
    )
    superdesk.workflow_action(
        name='test_on_hold',
        exclude_states=['spiked', 'published', 'killed', 'on_hold'],
        privileges=['on_hold']
    )

    self.assertTrue(superdesk.is_workflow_state_transition_valid('test_spike', 'in_progress'))
    self.assertFalse(superdesk.is_workflow_state_transition_valid('test_spike', 'spiked'))
    self.assertTrue(superdesk.is_workflow_state_transition_valid('test_on_hold', 'routed'))
    self.assertTrue(superdesk.is_workflow_state_transition_valid('test_on_hold', 'fetched'))
    self.assertFalse(superdesk.is_workflow_state_transition_valid('test_on_hold', 'published'))
def test_action_registry(self):
    superdesk.workflow_action(name='include', include_states=['include'], privileges=['test-privilege'])
    actions = [
        action for action in superdesk.get_workflow_actions()
        if action['name'] == 'include'
    ]
    self.assertEqual(1, len(actions))
    self.assertIn('include', actions[0]['include_states'])
    self.assertIn('test-privilege', actions[0]['privileges'])
    self.assertIn('spike', names(superdesk.get_workflow_actions(state='draft')))
    self.assertNotIn('spike', names(superdesk.get_workflow_actions(state='spiked')))
    self.assertIn('unspike', names(superdesk.get_workflow_actions(state='spiked')))
    self.assertNotIn('unspike', names(superdesk.get_workflow_actions(state='draft')))
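# `names` above is presumably a small test helper, and get_workflow_actions(state=...)
# filters registered actions by their include/exclude states. A plausible sketch of
# both (assumptions, not superdesk's actual code):
def names(actions):
    """Extract action names from a list of action dicts."""
    return [action['name'] for action in actions]


def actions_for_state_sketch(actions, state):
    """Keep actions whose include/exclude states admit `state`."""
    return [
        action for action in actions
        if (state in action['include_states'] if action.get('include_states')
            else state not in action.get('exclude_states', []))
    ]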
    original_state = original[ITEM_STATE]
    if not is_workflow_state_transition_valid(ITEM_UNSPIKE, original_state):
        raise InvalidStateTransitionError()

    user = get_user(required=True)
    item = get_resource_service(ARCHIVE).find_one(req=None, _id=id)
    self.set_unspike_updates(item, updates)
    self.backend.update(self.datasource, id, updates, original)

    item = get_resource_service(ARCHIVE).find_one(req=None, _id=id)
    push_notification("item:unspike", item=str(id), user=str(user.get(config.ID_FIELD)))
    app.on_archive_item_updated(updates, original, ITEM_UNSPIKE)
    return item


superdesk.workflow_state("spiked")
superdesk.workflow_action(
    name="spike",
    exclude_states=["spiked", "published", "scheduled", "corrected", "killed"],
    privileges=["spike"],
)

superdesk.workflow_action(name="unspike", include_states=["spiked"], privileges=["unspike"])
UPDATE_SCHEDULE_DEFAULT = {'minutes': 5}
LAST_UPDATED = 'last_updated'
LAST_ITEM_UPDATE = 'last_item_update'
STATE_INGESTED = 'ingested'
IDLE_TIME_DEFAULT = {'hours': 0, 'minutes': 0}

logger = logging.getLogger(__name__)

superdesk.workflow_state(STATE_INGESTED)
superdesk.workflow_action(name='ingest')


def is_valid_type(provider, provider_type_filter=None):
    """Test if given provider has valid type and should be updated.

    :param provider: provider to be updated
    :param provider_type_filter: active provider type filter
    """
    provider_type = provider.get('type')
    if provider_type not in providers:
        return False
    if provider_type_filter and provider_type != provider_type_filter:
        return False
    return True
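# Quick usage sketch for is_valid_type, assuming `providers` is the module-level
# registry of known provider types (the mapping the function checks against); the
# provider values here are illustrative:
#
#   provider = {'type': 'rss', 'name': 'Sample feed'}
#   is_valid_type(provider)                              # True iff 'rss' is registered
#   is_valid_type(provider, provider_type_filter='ftp')  # False: filter mismatch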
            superdesk.get_resource_service(ARCHIVE).post([dest_doc])
            insert_into_versions(dest_doc.get('guid'))

            desk = doc.get('desk')
            refs = [{'guid': ref.get('residRef'), 'desk': desk, PACKAGE: dest_doc.get('_id')}
                    for group in dest_doc.get('groups', [])
                    for ref in group.get('refs', []) if 'residRef' in ref]
            if refs:
                self.create(refs)
        else:
            if doc.get(PACKAGE):
                links = archived_doc.get(LINKED_IN_PACKAGES, [])
                links.append({PACKAGE: doc.get(PACKAGE)})
                superdesk.get_resource_service(ARCHIVE).patch(archived_doc.get('_id'),
                                                              {LINKED_IN_PACKAGES: links})

    return [doc.get('guid') for doc in docs]


superdesk.workflow_state(STATE_FETCHED)
superdesk.workflow_action(
    name='fetch_as_from_ingest',
    include_states=['ingested'],
    privileges=['archive', 'ingest_move']
)

superdesk.workflow_state('routed')
superdesk.workflow_action(
    name='route',
    include_states=['ingested']
)
@celery.task
def publish_images(images, original, item):
    orig_file = get_file(original, item)
    for image in images:
        content_type = original['mimetype']
        ok, output = crop_image(orig_file, image['file_name'], image['crop'], image['spec'])
        if ok:
            app.media.put(output, image['file_name'], content_type, _id=image['media'])


superdesk.workflow_state('published')
superdesk.workflow_action(
    name='publish',
    include_states=['fetched', 'routed', 'submitted', 'in_progress', 'scheduled'],
    privileges=['publish']
)

superdesk.workflow_state('scheduled')
superdesk.workflow_action(
    name='schedule',
    include_states=['fetched', 'routed', 'submitted', 'in_progress'],
    privileges=['schedule']
)

superdesk.workflow_action(
    name='deschedule',
    include_states=['scheduled'],
    privileges=['deschedule']
)
    schema = ArchiveResource.schema
    datasource = {'source': 'archive', 'aggregations': aggregations}
    resource_methods = ['GET', 'POST']
    item_methods = ['GET', 'PATCH', 'DELETE']
    resource_title = endpoint_name


class UserContentService(BaseService):
    custom_hateoas = {'self': {'title': 'Archive', 'href': '/archive/{_id}'}}

    def get(self, req, lookup):
        docs = super().get(req, lookup)
        for doc in docs:
            build_custom_hateoas(self.custom_hateoas, doc)
        return docs


superdesk.workflow_state('draft')
superdesk.workflow_action(
    name='fetch_from_content',
    include_states=['fetched', 'routed', 'submitted', 'in-progress'],
    privileges=['archive']
)

superdesk.workflow_action(
    name='fetch_as_from_content',
    include_states=['fetched', 'routed', 'submitted', 'in-progress'],
    privileges=['archive']
)
    def create(self, docs, **kwargs):
        if not docs:
            raise SuperdeskApiError.notFoundError('Content is missing')
        req = parse_request(self.datasource)
        try:
            get_component(ItemAutosave).autosave(docs[0]['_id'], docs[0], get_user(required=True), req.if_match)
        except InvalidEtag:
            raise SuperdeskApiError.preconditionFailedError('Client and server etags don\'t match')
        except KeyError:
            raise SuperdeskApiError.badRequestError("Request for Auto-save must have _id")
        return [docs[0]['_id']]


superdesk.workflow_state('in_progress')
superdesk.workflow_action(
    name='save',
    include_states=['draft', 'fetched', 'routed', 'submitted', 'scheduled'],
    privileges=['archive'])

superdesk.workflow_state('submitted')
superdesk.workflow_action(
    name='move',
    exclude_states=['ingested', 'spiked', 'on-hold', 'published', 'scheduled', 'killed'],
    privileges=['archive'])
class ArchiveHoldService(BaseService):
    def create(self, docs, **kwargs):
        user = get_user(required=True)
        auth = get_auth()
        item_id = request.view_args['item_id']
        item = get_component(ItemHold).hold({'_id': item_id}, user['_id'], auth['_id'])
        build_custom_hateoas(custom_hateoas, item)
        return [item['_id']]

    def delete(self, lookup):
        user = get_user(required=True)
        item_id = request.view_args['item_id']
        get_component(ItemHold).restore({'_id': item_id}, user['_id'])


superdesk.workflow_state('on_hold')
superdesk.workflow_action(
    name='hold',
    exclude_states=['ingested', 'draft', 'spiked', 'published', 'killed'],
    privileges=['hold']
)

superdesk.workflow_action(
    name='restore',
    include_states=['on_hold'],
    privileges=['restore']
)
        insert_into_versions(id_=original[config.ID_FIELD])
        push_content_notification([archived_doc, original])
        return archived_doc

    def set_change_in_desk_type(self, updated, original):
        """Detect whether the desk change is between authoring and production (or vice versa)
        and set the fields 'last_production_desk' and 'last_authoring_desk' accordingly.

        :param dict updated: document to be saved
        :param dict original: original document
        """
        old_desk_id = str(original.get('task', {}).get('desk', ''))
        new_desk_id = str(updated.get('task', {}).get('desk', ''))
        if old_desk_id and old_desk_id != new_desk_id:
            old_desk = get_resource_service('desks').find_one(req=None, _id=old_desk_id)
            new_desk = get_resource_service('desks').find_one(req=None, _id=new_desk_id)
            if old_desk.get('desk_type', '') != new_desk.get('desk_type', ''):
                if new_desk.get('desk_type') == DeskTypes.production.value:
                    updated['task'][LAST_AUTHORING_DESK] = old_desk_id
                else:
                    updated['task'][LAST_PRODUCTION_DESK] = old_desk_id


superdesk.workflow_action(
    name='submit_to_desk',
    include_states=['draft', 'fetched', 'routed', 'submitted', 'in_progress', 'published', 'scheduled'],
    privileges=['archive', 'move']
)
        for doc in docs:
            archive_service = get_resource_service(ARCHIVE)
            archived_doc = archive_service.find_one(req=None, _id=guid_of_item_to_be_copied)

            if not archived_doc:
                raise SuperdeskApiError.notFoundError('Failed to find item with guid: %s' % guid_of_item_to_be_copied)

            current_desk_of_item = archived_doc.get('task', {}).get('desk')
            if current_desk_of_item:
                raise SuperdeskApiError.preconditionFailedError(message='Copy is not allowed on items in a desk.')

            if not is_workflow_state_transition_valid('copy', archived_doc[ITEM_STATE]):
                raise InvalidStateTransitionError()

            new_guid = archive_service.duplicate_content(archived_doc)
            guid_of_copied_items.append(new_guid)

        if kwargs.get('notify', True):
            push_notification('item:copy', copied=1)

        return guid_of_copied_items


superdesk.workflow_action(
    name='copy',
    include_states=[CONTENT_STATE.DRAFT],
    privileges=['archive']
)
                }
            }
        }  # eve-elastic specific filter
    }

    resource_methods = ['GET', 'POST']
    item_methods = ['GET', 'PATCH', 'DELETE']
    resource_title = endpoint_name


class UserContentService(BaseService):
    custom_hateoas = {'self': {'title': 'Archive', 'href': '/archive/{_id}'}}

    def get(self, req, lookup):
        docs = super().get(req, lookup)
        for doc in docs:
            build_custom_hateoas(self.custom_hateoas, doc)
        return docs


superdesk.workflow_state('draft')
superdesk.workflow_action(
    name='fetch_from_content',
    include_states=['fetched', 'routed', 'submitted', 'in-progress'],
    privileges=['archive'])

superdesk.workflow_action(
    name='fetch_as_from_content',
    include_states=['fetched', 'routed', 'submitted', 'in-progress'],
    privileges=['archive'])
        for doc in docs:
            archive_service = get_resource_service(ARCHIVE)
            archived_doc = archive_service.find_one(req=None, _id=guid_of_item_to_be_copied)

            if not archived_doc:
                raise SuperdeskApiError.notFoundError(
                    _('Failed to find item with guid: {guid}').format(guid=guid_of_item_to_be_copied))

            current_desk_of_item = archived_doc.get('task', {}).get('desk')
            if current_desk_of_item:
                raise SuperdeskApiError.preconditionFailedError(message=_('Copy is not allowed on items in a desk.'))

            if not is_workflow_state_transition_valid('copy', archived_doc[ITEM_STATE]):
                raise InvalidStateTransitionError()

            new_guid = archive_service.duplicate_content(archived_doc)
            guid_of_copied_items.append(new_guid)

        if kwargs.get('notify', True):
            user = get_user()
            push_notification('item:copy', copied=1, user=str(user.get(config.ID_FIELD, '')))

        return guid_of_copied_items


superdesk.workflow_action(
    name='copy',
    include_states=[CONTENT_STATE.DRAFT],
    privileges=['archive']
)
            old_desk = get_resource_service("desks").find_one(req=None, _id=old_desk_id)
            new_desk = get_resource_service("desks").find_one(req=None, _id=new_desk_id)
            if old_desk and new_desk and old_desk.get("desk_type", "") != new_desk.get("desk_type", ""):
                if new_desk.get("desk_type") == DeskTypes.production.value:
                    updated["task"][LAST_AUTHORING_DESK] = old_desk_id
                else:
                    updated["task"][LAST_PRODUCTION_DESK] = old_desk_id
            updated["task"][LAST_DESK] = old_desk_id
            updated["task"].setdefault(DESK_HISTORY, [])
            if old_desk_id not in updated["task"][DESK_HISTORY]:
                updated["task"][DESK_HISTORY].append(old_desk_id)


superdesk.workflow_action(
    name="submit_to_desk",
    include_states=[
        "draft",
        "fetched",
        "routed",
        "submitted",
        "in_progress",
        "published",
        "scheduled",
        "unpublished",
        "correction",
    ],
    privileges=["archive"],
)
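# Worked example (hypothetical desk ids): moving an item from an authoring desk to a
# production desk stamps the authoring desk and records desk history, assuming the
# desk lookups above resolve to desks of differing types:
#
#   original = {'task': {'desk': 'desk-auth-1'}}
#   updated = {'task': {'desk': 'desk-prod-1'}}
#   service.set_change_in_desk_type(updated, original)
#   # updated['task'][LAST_AUTHORING_DESK] == 'desk-auth-1'
#   # updated['task'][LAST_DESK] == 'desk-auth-1'
#   # updated['task'][DESK_HISTORY] == ['desk-auth-1']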
    resource_title = endpoint_name
    privileges = {'POST': 'hold', 'DELETE': 'restore'}


class ArchiveHoldService(BaseService):
    def create(self, docs, **kwargs):
        user = get_user(required=True)
        auth = get_auth()
        item_id = request.view_args['item_id']
        item = get_component(ItemHold).hold({'_id': item_id}, user['_id'], auth['_id'])
        build_custom_hateoas(custom_hateoas, item)
        return [item['_id']]

    def delete(self, lookup):
        user = get_user(required=True)
        item_id = request.view_args['item_id']
        get_component(ItemHold).restore({'_id': item_id}, user['_id'])


superdesk.workflow_state('on_hold')
superdesk.workflow_action(
    name='hold',
    exclude_states=['ingested', 'draft', 'spiked', 'published', 'killed'],
    privileges=['hold'])

superdesk.workflow_action(name='restore', include_states=['on_hold'], privileges=['restore'])
    original_state = original[ITEM_STATE]
    if not is_workflow_state_transition_valid(ITEM_UNSPIKE, original_state):
        raise InvalidStateTransitionError()

    user = get_user(required=True)
    item = get_resource_service(ARCHIVE).find_one(req=None, _id=id)
    self.set_unspike_updates(item, updates)
    self.backend.update(self.datasource, id, updates, original)

    item = get_resource_service(ARCHIVE).find_one(req=None, _id=id)
    push_notification('item:unspike', item=str(id), user=str(user.get(config.ID_FIELD)))
    app.on_archive_item_updated(updates, original, ITEM_UNSPIKE)
    return item


superdesk.workflow_state('spiked')
superdesk.workflow_action(
    name='spike',
    exclude_states=['spiked', 'published', 'scheduled', 'corrected', 'killed'],
    privileges=['spike'])

superdesk.workflow_action(name='unspike', include_states=['spiked'], privileges=['unspike'])
                              original)
            insert_into_versions(doc=doc)
        except KeyError:
            raise SuperdeskApiError.badRequestError("A non-existent content id is requested to publish")

    def __send_to_publish_stage(self, doc):
        desk = get_resource_service('desks').find_one(req=None, _id=doc['task']['desk'])
        if desk.get('published_stage') and doc['task']['stage'] != desk['published_stage']:
            doc['task']['stage'] = desk['published_stage']
            return get_resource_service('move').move_content(doc['_id'], doc)['task']


superdesk.workflow_state('published')
superdesk.workflow_action(
    name='publish',
    include_states=['fetched', 'routed', 'submitted', 'in_progress'],
    privileges=['publish']
)

superdesk.workflow_state('killed')
superdesk.workflow_action(
    name='kill',
    include_states=['published'],
    privileges=['kill']
)

superdesk.workflow_state('corrected')
superdesk.workflow_action(
    name='correct',
    include_states=['published'],
    privileges=['correction']
)
schema = item_schema({"_id": {"type": "string"}}) resource_methods = ["POST"] item_methods = ["GET", "PUT", "PATCH"] resource_title = endpoint_name privileges = {"POST": "archive", "PATCH": "archive", "PUT": "archive"} class ArchiveSaveService(BaseService): def create(self, docs, **kwargs): if not docs: raise SuperdeskApiError.notFoundError("Content is missing") req = parse_request(self.datasource) try: get_component(ItemAutosave).autosave(docs[0]["_id"], docs[0], get_user(required=True), req.if_match) except InvalidEtag: raise SuperdeskApiError.preconditionFailedError("Client and server etags don't match") return [docs[0]["_id"]] superdesk.workflow_state("in_progress") superdesk.workflow_action( name="save", include_states=["draft", "fetched", "routed", "submitted", "scheduled"], privileges=["archive"] ) superdesk.workflow_state("submitted") superdesk.workflow_action( name="move", exclude_states=["ingested", "spiked", "on-hold", "published", "scheduled", "killed"], privileges=["archive"], )
        for ref in [ref for group in dest_doc.get('groups', [])
                    for ref in group.get('refs', []) if 'residRef' in ref]:
            ref['location'] = ARCHIVE

        refs = [{'_id': ref.get('residRef'), 'desk': desk, 'stage': stage, 'state': state,
                 'destination_groups': destination_groups}
                for group in dest_doc.get('groups', [])
                for ref in group.get('refs', []) if 'residRef' in ref]

        if refs:
            new_ref_guids = self.fetch(refs, id=None, notify=False)
            count = 0
            for ref in [ref for group in dest_doc.get('groups', [])
                        for ref in group.get('refs', []) if 'residRef' in ref]:
                ref['residRef'] = ref['guid'] = new_ref_guids[count]
                count += 1


superdesk.workflow_state(STATE_FETCHED)
superdesk.workflow_action(
    name='fetch_from_ingest',
    include_states=['ingested'],
    privileges=['ingest', 'archive', 'fetch']
)

superdesk.workflow_state('routed')
superdesk.workflow_action(
    name='route',
    include_states=['ingested']
)
                links.append({PACKAGE: doc.get(PACKAGE)})
                dest_doc[LINKED_IN_PACKAGES] = links

            superdesk.get_resource_service(ARCHIVE).post([dest_doc])
            insert_into_versions(dest_doc.get("guid"))

            desk = doc.get("desk")
            refs = [
                {"guid": ref.get("residRef"), "desk": desk, PACKAGE: dest_doc.get("_id")}
                for group in dest_doc.get("groups", [])
                for ref in group.get("refs", [])
                if "residRef" in ref
            ]
            if refs:
                self.create(refs)
        else:
            if doc.get(PACKAGE):
                links = archived_doc.get(LINKED_IN_PACKAGES, [])
                links.append({PACKAGE: doc.get(PACKAGE)})
                superdesk.get_resource_service(ARCHIVE).patch(archived_doc.get("_id"), {LINKED_IN_PACKAGES: links})

    return [doc.get("guid") for doc in docs]


superdesk.workflow_state(STATE_FETCHED)
superdesk.workflow_action(
    name="fetch_as_from_ingest", include_states=["ingested"], privileges=["archive", "ingest_move"]
)

superdesk.workflow_state("routed")
superdesk.workflow_action(name="route", include_states=["ingested"])
    def on_delete(self, doc):
        """Overriding to prevent deletion of an Ingest Source that has already received items."""
        if doc.get("last_item_update"):
            raise SuperdeskApiError.forbiddenError("Deleting an Ingest Source after receiving items is prohibited.")

    def on_deleted(self, doc):
        """Overriding to send notification and record activity about channel deletion."""
        notify_and_add_activity(
            ACTIVITY_DELETE,
            "Deleted Ingest Channel {{name}}",
            self.datasource,
            item=None,
            user_list=self.user_service.get_users_by_user_type("administrator"),
            name=doc.get("name"),
            provider_id=doc.get(config.ID_FIELD),
        )
        push_notification("ingest_provider:delete", provider_id=str(doc.get(config.ID_FIELD)))
        get_resource_service("sequences").delete(
            lookup={"key": "ingest_providers_{_id}".format(_id=doc[config.ID_FIELD])}
        )
        logger.info("Deleted Ingest Channel. Data:{}".format(doc))


superdesk.workflow_state(CONTENT_STATE.INGESTED)
superdesk.workflow_action(name="ingest")
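# The on_delete guard in action (hypothetical docs; utcnow() stands in for any
# timestamp): a provider that has received items carries last_item_update, so
# deletion is refused, while a pristine provider passes:
#
#   service.on_delete({'_id': 'abc', 'last_item_update': utcnow()})  # raises forbiddenError
#   service.on_delete({'_id': 'abc'})                                # no error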
    resource_title = endpoint_name
    privileges = {'POST': 'publish', 'DELETE': 'kill', 'PATCH': 'correction'}


class ArchivePublishService(BaseService):
    pass


superdesk.workflow_state('published')
superdesk.workflow_state('killed')
superdesk.workflow_state('corrected')

superdesk.workflow_action(
    name='publish',
    include_states=['draft'],
    privileges=['publish']
)

superdesk.workflow_action(
    name='kill',
    include_states=['published'],
    privileges=['kill']
)

superdesk.workflow_action(
    name='correct',
    include_states=['published'],
    privileges=['correction']
)
    :param doc_in_archive: object representing the doc in archive collection
    :type doc_in_archive: dict
    :param doc: object received as part of request
    :type doc: dict
    :param guid_to_duplicate: GUID of the item to duplicate
    :type guid_to_duplicate: str
    :raises SuperdeskApiError.notFoundError: if doc_in_archive is None
        SuperdeskApiError.forbiddenError: if item is locked
        InvalidStateTransitionError: if workflow transition is invalid
    """
    if not doc_in_archive:
        raise SuperdeskApiError.notFoundError("Failed to find item with guid: %s" % guid_to_duplicate)

    if not is_workflow_state_transition_valid("duplicate", doc_in_archive[ITEM_STATE]):
        raise InvalidStateTransitionError()

    lock_user = doc_in_archive.get("lock_user", None)
    force_unlock = doc_in_archive.get("force_unlock", False)
    user = get_user()
    str_user_id = str(user.get(config.ID_FIELD)) if user else None
    if lock_user and str(lock_user) != str_user_id and not force_unlock:
        raise SuperdeskApiError.forbiddenError("The item was locked by another user")


superdesk.workflow_action(
    name="duplicate",
    exclude_states=[CONTENT_STATE.SPIKED, CONTENT_STATE.KILLED],
    privileges=["archive", "duplicate"],
)
    resource_methods = ['POST']
    item_methods = ['GET', 'PUT', 'PATCH']
    resource_title = endpoint_name
    privileges = {'POST': 'archive', 'PATCH': 'archive', 'PUT': 'archive'}


class ArchiveSaveService(BaseService):
    def create(self, docs, **kwargs):
        if not docs:
            raise SuperdeskApiError.notFoundError('Content is missing')
        req = parse_request(self.datasource)
        try:
            get_component(ItemAutosave).autosave(docs[0]['_id'], docs[0], get_user(required=True), req.if_match)
        except InvalidEtag:
            raise SuperdeskApiError.preconditionFailedError('Client and server etags don\'t match')
        return [docs[0]['_id']]


superdesk.workflow_state('in_progress')
superdesk.workflow_action(
    name='save',
    include_states=['draft', 'fetched', 'routed', 'submitted'],
    privileges=['archive']
)

superdesk.workflow_state('submitted')
superdesk.workflow_action(
    name='move',
    exclude_states=['ingested', 'spiked', 'on-hold', 'published', 'killed'],
    privileges=['archive']
)
        updates = {
            "translation_id": item["translation_id"],
            "translations": item["translations"],
        }

        archive_service.system_update(item["_id"], updates, item)
        published_service.update_published_items(item["_id"], "translation_id", item["_id"])
        published_service.update_published_items(item["_id"], "translations", item["translations"])

        if kwargs.get("notify", True):
            push_content_notification([item])

        return translation_guid

    def create(self, docs, **kwargs):
        ids = []
        for doc in docs:
            task = None
            if doc.get("desk"):
                desk = get_resource_service("desks").find_one(req=None, _id=doc["desk"]) or {}
                task = dict(desk=desk.get("_id"), stage=desk.get("working_stage"), user=get_user_id())
            ids.append(self._translate_item(doc["guid"], doc["language"], task, **kwargs))
        return ids


superdesk.workflow_action(
    name="translate",
    exclude_states=[CONTENT_STATE.SPIKED, CONTENT_STATE.KILLED, CONTENT_STATE.RECALLED],
    privileges=["archive", "translate"],
)
        return archived_doc

    def set_change_in_desk_type(self, updated, original):
        """Detect whether the desk change is between authoring and production (or vice versa)
        and set the fields 'last_production_desk' and 'last_authoring_desk' accordingly.

        :param dict updated: document to be saved
        :param dict original: original document
        """
        old_desk_id = str(original.get('task', {}).get('desk', ''))
        new_desk_id = str(updated.get('task', {}).get('desk', ''))
        if old_desk_id and old_desk_id != new_desk_id:
            old_desk = get_resource_service('desks').find_one(req=None, _id=old_desk_id)
            new_desk = get_resource_service('desks').find_one(req=None, _id=new_desk_id)
            if old_desk.get('desk_type', '') != new_desk.get('desk_type', ''):
                if new_desk.get('desk_type') == DeskTypes.production.value:
                    updated['task'][LAST_AUTHORING_DESK] = old_desk_id
                else:
                    updated['task'][LAST_PRODUCTION_DESK] = old_desk_id


superdesk.workflow_action(
    name='submit_to_desk',
    include_states=['draft', 'fetched', 'routed', 'submitted', 'in_progress', 'published', 'scheduled'],
    privileges=['archive', 'move'])
            # don't validate items that have already been published
            if doc[ITEM_STATE] not in [CONTENT_STATE.PUBLISHED, CONTENT_STATE.CORRECTED]:
                validate_item = {"act": self.publish_type, "type": doc[ITEM_TYPE], "validate": doc}
                errors = get_resource_service("validate").post([validate_item], headline=True)
                if errors[0]:
                    validation_errors.extend(errors[0])

            # check the locks on the items
            if doc.get("lock_session", None) and package["lock_session"] != doc["lock_session"]:
                validation_errors.extend(["{}: packaged item cannot be locked".format(doc["headline"])])


superdesk.workflow_state("published")
superdesk.workflow_action(
    name="publish",
    include_states=["fetched", "routed", "submitted", "in_progress", "scheduled"],
    privileges=["publish"],
)

superdesk.workflow_state("scheduled")
superdesk.workflow_action(
    name="schedule", include_states=["fetched", "routed", "submitted", "in_progress"], privileges=["schedule"]
)

superdesk.workflow_action(name="deschedule", include_states=["scheduled"], privileges=["deschedule"])

superdesk.workflow_state("killed")
superdesk.workflow_action(name="kill", include_states=["published", "scheduled", "corrected"], privileges=["kill"])

superdesk.workflow_state("corrected")
superdesk.workflow_action(name="correct", include_states=["published", "corrected"], privileges=["correct"])
updates["expiry"] = get_expiry(desk_id=desk_id) return updates def on_update(self, updates, original): updates[ITEM_OPERATION] = ITEM_UNSPIKE def update(self, id, updates, original): original_state = original[config.CONTENT_STATE] if not is_workflow_state_transition_valid("unspike", original_state): raise InvalidStateTransitionError() user = get_user(required=True) item = get_resource_service(ARCHIVE).find_one(req=None, _id=id) updates.update(self.get_unspike_updates(item)) self.backend.update(self.datasource, id, updates, original) item = get_resource_service(ARCHIVE).find_one(req=None, _id=id) push_notification("item:unspike", item=str(id), user=str(user)) return item superdesk.workflow_state("spiked") superdesk.workflow_action( name="spike", exclude_states=["spiked", "published", "scheduled", "killed"], privileges=["spike"] ) superdesk.workflow_action(name="unspike", include_states=["spiked"], privileges=["unspike"])
    :type doc_in_archive: dict
    :param doc: object received as part of request
    :type doc: dict
    :param guid_to_duplicate: GUID of the item to duplicate
    :type guid_to_duplicate: str
    :raises SuperdeskApiError.notFoundError: if doc_in_archive is None
        SuperdeskApiError.forbiddenError: if item is locked
        InvalidStateTransitionError: if workflow transition is invalid
    """
    if not doc_in_archive:
        raise SuperdeskApiError.notFoundError('Failed to find item with guid: %s' % guid_to_duplicate)

    if not is_workflow_state_transition_valid('duplicate', doc_in_archive[ITEM_STATE]):
        raise InvalidStateTransitionError()

    lock_user = doc_in_archive.get('lock_user', None)
    force_unlock = doc_in_archive.get('force_unlock', False)
    user = get_user()
    str_user_id = str(user.get(config.ID_FIELD)) if user else None
    if lock_user and str(lock_user) != str_user_id and not force_unlock:
        raise SuperdeskApiError.forbiddenError('The item was locked by another user')


superdesk.workflow_action(
    name='duplicate',
    exclude_states=[CONTENT_STATE.SPIKED, CONTENT_STATE.KILLED],
    privileges=['archive', 'duplicate']
)
        if doc.get('last_item_update'):
            raise SuperdeskApiError.forbiddenError(
                "Deleting an Ingest Source after receiving items is prohibited.")

    def on_deleted(self, doc):
        """Overriding to send notification and record activity about channel deletion."""
        notify_and_add_activity(
            ACTIVITY_DELETE,
            'Deleted Ingest Channel {{name}}',
            self.datasource,
            item=None,
            user_list=self.user_service.get_users_by_user_type('administrator'),
            name=doc.get('name'),
            provider_id=doc.get(config.ID_FIELD))
        push_notification('ingest_provider:delete', provider_id=str(doc.get(config.ID_FIELD)))
        get_resource_service('sequences').delete(
            lookup={'key': 'ingest_providers_{_id}'.format(_id=doc[config.ID_FIELD])})
        logger.info("Deleted Ingest Channel. Data:{}".format(doc))


superdesk.workflow_state(CONTENT_STATE.INGESTED)
superdesk.workflow_action(name='ingest')
class ArchivePublishResource(Resource):
    endpoint_name = 'archive_publish'
    url = 'archive/<{0}:item_id>/publish'.format(item_url)
    datasource = {'source': 'archive'}
    resource_methods = ['POST', 'DELETE', 'PATCH']
    resource_title = endpoint_name
    privileges = {'POST': 'publish', 'DELETE': 'kill', 'PATCH': 'correction'}


class ArchivePublishService(BaseService):
    pass


superdesk.workflow_state('published')
superdesk.workflow_state('killed')
superdesk.workflow_state('corrected')

superdesk.workflow_action(name='publish', include_states=['draft'], privileges=['publish'])
superdesk.workflow_action(name='kill', include_states=['published'], privileges=['kill'])
superdesk.workflow_action(name='correct', include_states=['published'], privileges=['correction'])
    fields = ('CropLeft', 'CropTop', 'CropRight', 'CropBottom')
    return {field: rendition[field] for field in fields if field in rendition}


def update_item_data(item, data, keys=DEFAULT_SCHEMA.keys()):
    """Update main item data, i.e. only keys from the default schema."""
    for key in keys:
        if data.get(key):
            item[key] = data[key]


superdesk.workflow_state('published')
superdesk.workflow_action(
    name='publish',
    include_states=['fetched', 'routed', 'submitted', 'in_progress', 'scheduled'],
    privileges=['publish'])

superdesk.workflow_state('scheduled')
superdesk.workflow_action(
    name='schedule',
    include_states=['fetched', 'routed', 'submitted', 'in_progress'],
    privileges=['schedule'])

superdesk.workflow_action(name='deschedule', include_states=['scheduled'], privileges=['deschedule'])

superdesk.workflow_state('killed')
superdesk.workflow_action(
    def update(self, id, updates, original):
        original_state = original[ITEM_STATE]
        if not is_workflow_state_transition_valid('unspike', original_state):
            raise InvalidStateTransitionError()

        user = get_user(required=True)
        item = get_resource_service(ARCHIVE).find_one(req=None, _id=id)
        updates.update(self.get_unspike_updates(item))
        self.backend.update(self.datasource, id, updates, original)

        item = get_resource_service(ARCHIVE).find_one(req=None, _id=id)
        push_notification('item:unspike', item=str(id), user=str(user.get(config.ID_FIELD)))
        return item


superdesk.workflow_state('spiked')
superdesk.workflow_action(
    name='spike',
    exclude_states=['spiked', 'published', 'scheduled', 'corrected', 'killed'],
    privileges=['spike']
)

superdesk.workflow_action(
    name='unspike',
    include_states=['spiked'],
    privileges=['unspike']
)
""" if keys is None: keys = DEFAULT_SCHEMA.keys() for key in keys: if data.get(key): if keep_existing: item.setdefault(key, data[key]) else: item[key] = data[key] superdesk.workflow_state("published") superdesk.workflow_action( name="publish", include_states=["fetched", "routed", "submitted", "in_progress", "scheduled", "unpublished", "correction"], privileges=["publish"], ) superdesk.workflow_state("scheduled") superdesk.workflow_action( name="schedule", include_states=["fetched", "routed", "submitted", "in_progress"], privileges=["schedule"] ) superdesk.workflow_action(name="deschedule", include_states=["scheduled"], privileges=["deschedule"]) superdesk.workflow_state("killed") superdesk.workflow_action( name="kill", include_states=["published", "scheduled", "corrected", "correction"], privileges=["kill"] )
            item[config.ID_FIELD] = new_ids[0]

    def __fetch_items_in_package(self, dest_doc, desk, stage, state):
        # Note: macro and target information is not used for package publishing.
        # Needs to be revisited once package ingest requirements are clear.
        for ref in [ref for group in dest_doc.get(GROUPS, [])
                    for ref in group.get(REFS, []) if ref.get(RESIDREF)]:
            ref["location"] = ARCHIVE

        refs = [
            {config.ID_FIELD: ref.get(RESIDREF), "desk": desk, "stage": stage, ITEM_STATE: state}
            for group in dest_doc.get(GROUPS, [])
            for ref in group.get(REFS, [])
            if ref.get(RESIDREF)
        ]

        if refs:
            new_ref_guids = self.fetch(refs, id=None, notify=False)
            count = 0
            for ref in [ref for group in dest_doc.get(GROUPS, [])
                        for ref in group.get(REFS, []) if ref.get(RESIDREF)]:
                ref[RESIDREF] = ref[GUID_FIELD] = new_ref_guids[count]
                count += 1


superdesk.workflow_state(CONTENT_STATE.FETCHED)
superdesk.workflow_action(
    name="fetch_from_ingest", include_states=["ingested"], privileges=["ingest", "archive", "fetch"]
)

superdesk.workflow_state("routed")
superdesk.workflow_action(name="route", include_states=["ingested"])
    :type doc: dict
    :param guid_to_duplicate: GUID of the item to duplicate
    :type guid_to_duplicate: str
    :raises SuperdeskApiError.notFoundError: if doc_in_archive is None
        SuperdeskApiError.forbiddenError: if item is locked
        InvalidStateTransitionError: if workflow transition is invalid
    """
    if not doc_in_archive:
        raise SuperdeskApiError.notFoundError(
            'Failed to find item with guid: %s' % guid_to_duplicate)

    if not is_workflow_state_transition_valid('duplicate', doc_in_archive[ITEM_STATE]):
        raise InvalidStateTransitionError()

    lock_user = doc_in_archive.get('lock_user', None)
    force_unlock = doc_in_archive.get('force_unlock', False)
    user = get_user()
    str_user_id = str(user.get(config.ID_FIELD)) if user else None
    if lock_user and str(lock_user) != str_user_id and not force_unlock:
        raise SuperdeskApiError.forbiddenError(
            'The item was locked by another user')


superdesk.workflow_action(
    name='duplicate',
    exclude_states=[CONTENT_STATE.SPIKED, CONTENT_STATE.KILLED],
    privileges=['archive', 'duplicate'])
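# Usage sketch for the duplicate validation above (the enclosing function name is
# not shown in the excerpt; `validate_duplicate` below is a hypothetical stand-in):
# a locked item duplicated by a different user should be rejected:
#
#   doc = {'_id': 'x1', ITEM_STATE: 'in_progress', 'lock_user': 'other-user'}
#   validate_duplicate(doc_in_archive=doc, doc={}, guid_to_duplicate='x1')
#   # -> SuperdeskApiError.forbiddenError('The item was locked by another user')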
            user_list=self.user_service.get_users_by_user_type("administrator"),
            name=doc.get("name"),
            provider_id=doc.get(config.ID_FIELD),
        )
        push_notification("ingest_provider:delete", provider_id=str(doc.get(config.ID_FIELD)))
        get_resource_service("sequences").delete(
            lookup={"key": "ingest_providers_{_id}".format(_id=doc[config.ID_FIELD])}
        )
        logger.info("Deleted Ingest Channel. Data:{}".format(doc))

    def _test_config(self, updates, original=None):
        provider = original.copy() if original else {}
        provider.update(updates)

        if provider.get("skip_config_test"):
            return

        try:
            service = get_feeding_service(provider["feeding_service"])
        except KeyError:
            return

        service.config_test(provider)


superdesk.workflow_state(CONTENT_STATE.INGESTED)
superdesk.workflow_action(name="ingest")