def create_scheduled_content(now=None):
    """Create archive items from all templates scheduled to run at *now*.

    Acquires a distributed lock so only one worker runs the task at a time.

    :param now: reference datetime; defaults to ``utcnow()`` when omitted
    :return: list of created items, or ``None`` when the lock is held or the
        task fails (the failure is logged, not re-raised)
    """
    lock_name = get_lock_id("Template", "Schedule")
    if not lock(lock_name, expire=130):
        logger.info("Task: {} is already running.".format(lock_name))
        return
    try:
        if now is None:
            now = utcnow()
        templates = get_scheduled_templates(now)
        production = superdesk.get_resource_service(ARCHIVE)
        items = []
        for template in templates:
            set_template_timestamps(template, now)
            item = get_item_from_template(template)
            item[config.VERSION] = 1
            production.post([item])
            insert_into_versions(doc=item)
            try:
                # best-effort: a failing stage macro must not abort the
                # whole scheduling run, so log and continue
                apply_onstage_rule(item, item.get(config.ID_FIELD))
            except Exception:
                logger.exception(
                    "Failed to apply on stage rule while scheduling template.")
            items.append(item)
        return items
    except Exception as e:
        logger.exception("Task: {} failed with error {}.".format(
            lock_name, str(e)))
    finally:
        # always release the lock, even on failure
        unlock(lock_name)
def create_scheduled_content(now=None):
    """Instantiate and archive an item for every template due at the given time.

    Guarded by a shared lock so concurrent workers skip the run; returns the
    list of created items, or nothing when locked out or on failure.
    """
    task_lock = get_lock_id("Template", "Schedule")
    if not lock(task_lock, expire=130):
        logger.info('Task: {} is already running.'.format(task_lock))
        return
    try:
        when = utcnow() if now is None else now
        archive_service = superdesk.get_resource_service(ARCHIVE)
        created = []
        for tmpl in get_scheduled_templates(when):
            set_template_timestamps(tmpl, when)
            new_item = get_item_from_template(tmpl)
            new_item[config.VERSION] = 1
            archive_service.post([new_item])
            insert_into_versions(doc=new_item)
            try:
                # stage macros are applied best-effort; failures are logged
                apply_onstage_rule(new_item, new_item.get(config.ID_FIELD))
            except Exception:
                logger.exception('Failed to apply on stage rule while scheduling template.')
            created.append(new_item)
        return created
    except Exception as err:
        logger.exception('Task: {} failed with error {}.'.format(task_lock, str(err)))
    finally:
        # release the lock no matter how the run ended
        unlock(task_lock)
def test_apply_onstage_rule_applies(self):
    """The stage's onstage macro runs and populates the item's abstract."""
    doc = {"id": "1", "body_html": "Test-1", "task": {"stage": 1}}
    stages = [{"_id": 1, "onstage_macro": "populate_abstract"}]
    self.app.data.insert("stages", stages)
    with self.app.app_context():
        apply_onstage_rule(doc, 1)
        # assertEquals is a deprecated alias (removed in Python 3.12);
        # use the canonical assertEqual
        self.assertEqual(doc["abstract"], "Test-1")
def test_apply_onstage_rule_applies(self):
    """The stage's onstage macro runs and populates the item's abstract."""
    doc = {'id': '1', 'body_html': 'Test-1', 'task': {'stage': 1}}
    stages = [{'_id': 1, 'onstage_macro': 'populate_abstract'}]
    self.app.data.insert('stages', stages)
    with self.app.app_context():
        apply_onstage_rule(doc, 1)
        # assertEquals is a deprecated alias (removed in Python 3.12);
        # use the canonical assertEqual
        self.assertEqual(doc['abstract'], 'Test-1')
def test_apply_onstage_rule(self):
    """A macro that rejects the item surfaces as a SuperdeskApiError."""
    self.app.data.insert("stages", [{"_id": 1, "onstage_macro": "take_key_validator"}])
    item = {"id": "1", "body_html": "Test-1", "task": {"stage": 1}}
    with self.app.app_context(), assert_raises(SuperdeskApiError):
        apply_onstage_rule(item, 1)
def test_apply_onstage_rule(self):
    """A macro that rejects the item surfaces as a SuperdeskApiError."""
    self.app.data.insert('stages', [{'_id': 1, 'onstage_macro': 'take_key_validator'}])
    item = {'id': '1', 'body_html': 'Test-1', 'task': {'stage': 1}}
    with self.app.app_context(), assert_raises(SuperdeskApiError):
        apply_onstage_rule(item, 1)
def move_content(self, id, doc):
    """Move an archive item to the desk/stage carried in ``doc['task']``.

    :param id: _id of the archive item to move
    :param doc: payload holding the target ``task`` (``desk``/``stage``)
    :return: the updated archive document
    :raises SuperdeskApiError: when the item is missing or the target stage
        equals the item's current stage
    :raises InvalidStateTransitionError: when the workflow forbids a
        submit_to_desk transition from the item's current state
    """
    archive_service = get_resource_service(ARCHIVE)
    archived_doc = archive_service.find_one(req=None, _id=id)
    if not archived_doc:
        raise SuperdeskApiError.notFoundError(
            'Fail to found item with guid: %s' % id)
    current_stage_of_item = archived_doc.get('task', {}).get('stage')
    # a move within the same stage is a no-op; reject it up front
    if current_stage_of_item and str(current_stage_of_item) == str(
            doc.get('task', {}).get('stage')):
        raise SuperdeskApiError.preconditionFailedError(
            message='Move is not allowed within the same stage.')
    if not is_workflow_state_transition_valid('submit_to_desk',
                                              archived_doc[ITEM_STATE]):
        raise InvalidStateTransitionError()
    # snapshot the pre-move document: used for version resolution,
    # sign-off recomputation and the update() call below
    original = deepcopy(archived_doc)
    user = get_user()
    send_to(doc=archived_doc, desk_id=doc.get('task', {}).get('desk'),
            stage_id=doc.get('task', {}).get('stage'),
            user_id=user.get(config.ID_FIELD))
    # published/scheduled/killed items keep their state; anything else
    # becomes 'submitted' on arrival at the new desk
    if archived_doc[ITEM_STATE] not in {
        CONTENT_STATE.PUBLISHED, CONTENT_STATE.SCHEDULED, CONTENT_STATE.KILLED
    }:
        archived_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
    archived_doc[ITEM_OPERATION] = ITEM_MOVE
    # set the change in desk type when content is moved.
    self.set_change_in_desk_type(archived_doc, original)
    archived_doc.pop(SIGN_OFF, None)
    set_sign_off(archived_doc, original=original)
    convert_task_attributes_to_objectId(archived_doc)
    resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)
    # _id must not be part of the PATCH payload passed to update()
    del archived_doc[config.ID_FIELD]
    archive_service.update(original[config.ID_FIELD], archived_doc, original)
    insert_into_versions(id_=original[config.ID_FIELD])
    push_content_notification([archived_doc, original])
    # finally apply any on stage rules/macros
    apply_onstage_rule(archived_doc, original[config.ID_FIELD])
    return archived_doc
def move_content(self, id, doc):
    """Move item ``id`` to the desk/stage given in ``doc`` and rerun stage macros.

    Delegates validation and the actual move to the private helpers, then
    returns the freshly re-read document.
    """
    archive_service = get_resource_service(ARCHIVE)
    item = archive_service.find_one(req=None, _id=id)
    if not item:
        raise SuperdeskApiError.notFoundError('Fail to found item with guid: %s' % id)

    self._validate(item, doc)
    self._move(item, doc)

    # re-read so the caller sees the post-move state of the item
    item = archive_service.find_one(req=None, _id=id)

    # finally apply any on stage rules/macros
    apply_onstage_rule(item, id)
    return item
def move_content(self, id, doc):
    """Move item ``id`` to the desk/stage given in ``doc`` and rerun stage macros.

    Validation and the move itself are delegated to the private helpers; the
    document is re-fetched afterwards so the returned value is current.
    """
    archive_service = get_resource_service(ARCHIVE)
    target = archive_service.find_one(req=None, _id=id)
    if not target:
        raise SuperdeskApiError.notFoundError(_('Fail to found item with guid: {guid}').format(guid=id))

    self._validate(target, doc)
    self._move(target, doc)

    # re-read to pick up everything the move just changed
    target = archive_service.find_one(req=None, _id=id)

    # finally apply any on stage rules/macros
    apply_onstage_rule(target, id)
    return target
def move_content(self, id, doc):
    """Move an archive item to the desk/stage carried in ``doc['task']``.

    :param id: _id of the archive item to move
    :param doc: payload holding the target ``task`` (``desk``/``stage``)
    :return: the updated archive document
    :raises SuperdeskApiError: when the item is missing or the target stage
        equals the item's current stage
    :raises InvalidStateTransitionError: when the workflow forbids a
        submit_to_desk transition from the item's current state
    """
    archive_service = get_resource_service(ARCHIVE)
    archived_doc = archive_service.find_one(req=None, _id=id)
    if not archived_doc:
        raise SuperdeskApiError.notFoundError('Fail to found item with guid: %s' % id)
    current_stage_of_item = archived_doc.get('task', {}).get('stage')
    # reject a move that targets the stage the item is already on
    if current_stage_of_item and str(current_stage_of_item) == str(doc.get('task', {}).get('stage')):
        raise SuperdeskApiError.preconditionFailedError(message='Move is not allowed within the same stage.')
    if not is_workflow_state_transition_valid('submit_to_desk', archived_doc[ITEM_STATE]):
        raise InvalidStateTransitionError()
    # keep a pre-move snapshot for versioning, sign-off and the update() call
    original = deepcopy(archived_doc)
    user = get_user()
    send_to(doc=archived_doc, desk_id=doc.get('task', {}).get('desk'),
            stage_id=doc.get('task', {}).get('stage'), user_id=user.get(config.ID_FIELD))
    # published/scheduled/killed items keep their state; others become submitted
    if archived_doc[ITEM_STATE] not in {CONTENT_STATE.PUBLISHED, CONTENT_STATE.SCHEDULED, CONTENT_STATE.KILLED}:
        archived_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
    archived_doc[ITEM_OPERATION] = ITEM_MOVE
    # set the change in desk type when content is moved.
    self.set_change_in_desk_type(archived_doc, original)
    archived_doc.pop(SIGN_OFF, None)
    set_sign_off(archived_doc, original=original)
    convert_task_attributes_to_objectId(archived_doc)
    resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)
    # the PATCH payload handed to update() must not carry _id
    del archived_doc[config.ID_FIELD]
    archive_service.update(original[config.ID_FIELD], archived_doc, original)
    insert_into_versions(id_=original[config.ID_FIELD])
    push_content_notification([archived_doc, original])
    # finally apply any on stage rules/macros
    apply_onstage_rule(archived_doc, original[config.ID_FIELD])
    return archived_doc
def move_content(self, id, doc):
    """Move item ``id`` to the desk/stage in ``doc``; if the item is the first
    take of a takes package, the package is moved along with it. Stage
    rules/macros are applied to the freshly re-read document, which is returned.
    """
    archive_service = get_resource_service(ARCHIVE)
    target = archive_service.find_one(req=None, _id=id)
    if not target:
        raise SuperdeskApiError.notFoundError('Fail to found item with guid: %s' % id)

    self._validate(target, doc)
    self._move(target, doc)

    # move the takes package where the first take is located.
    takes_service = TakesPackageService()
    package = takes_service.get_take_package(target)
    is_first_take = bool(package) and takes_service.get_take_by_take_no(target, package=package) == id
    if is_first_take:
        self._move(package, doc)

    # get the recent updates again
    target = archive_service.find_one(req=None, _id=id)

    # finally apply any on stage rules/macros
    apply_onstage_rule(target, id)
    return target