Example #1
0
    def move_content(self, id, doc):
        """Move an archived item to the desk/stage given in ``doc['task']``.

        :param id: guid of the item to move
        :param doc: payload whose ``task`` dict holds the target desk/stage
        :return: the updated archive document
        :raises SuperdeskApiError: when the item is missing or the move
            targets the stage the item is already on
        :raises InvalidStateTransitionError: when the workflow forbids the move
        """
        archive_service = get_resource_service(ARCHIVE)
        archived_doc = archive_service.find_one(req=None, _id=id)

        if not archived_doc:
            raise SuperdeskApiError.notFoundError('Fail to found item with guid: %s' % id)

        current_stage_of_item = archived_doc.get('task', {}).get('stage')
        if current_stage_of_item and str(current_stage_of_item) == str(doc.get('task', {}).get('stage')):
            raise SuperdeskApiError.preconditionFailedError(message='Move is not allowed within the same stage.')

        if not is_workflow_state_transition_valid('submit_to_desk', archived_doc[ITEM_STATE]):
            raise InvalidStateTransitionError()

        original = dict(archived_doc)
        user = get_user()

        # Fix: destination desk id lives under 'task.desk' — the previous
        # 'desc' key was a typo that always passed desk_id=None to send_to().
        send_to(doc=archived_doc, desk_id=doc.get('task', {}).get('desk'), stage_id=doc.get('task', {}).get('stage'),
                user_id=user.get(config.ID_FIELD))

        # Published/scheduled/killed items keep their state; anything else
        # becomes "submitted" when moved.
        if archived_doc[ITEM_STATE] not in {CONTENT_STATE.PUBLISHED, CONTENT_STATE.SCHEDULED, CONTENT_STATE.KILLED}:
            archived_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
        archived_doc[ITEM_OPERATION] = ITEM_MOVE

        set_sign_off(archived_doc, original=original)
        resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)

        # _id must not be part of the patch payload.
        del archived_doc[config.ID_FIELD]
        archive_service.update(original[config.ID_FIELD], archived_doc, original)

        insert_into_versions(id_=original[config.ID_FIELD])

        return archived_doc
 def on_update(self, updates, original):
     """Stamp publish metadata on *updates* before the save is applied."""
     # First-publish time is recorded once and never overwritten afterwards.
     already_published = bool(original.get('firstpublished'))
     if not already_published:
         updates.setdefault('firstpublished', utcnow())
     updates[ITEM_OPERATION] = self.item_operation
     super().on_update(updates, original)
     set_sign_off(updates, original)
     update_word_count(updates)
Example #3
0
 def on_update(self, updates, original):
     """Mark the item as published and refresh sign-off and word count."""
     updates[ITEM_OPERATION] = ITEM_PUBLISH
     # Membership test (not truthiness): an existing-but-falsy value counts
     # as already published here.
     if 'firstpublished' not in original:
         updates.setdefault('firstpublished', utcnow())
     super().on_update(updates, original)
     set_sign_off(updates, original)
     update_word_count(updates)
Example #4
0
    def _publish_package_items(self, package, last_updated):
        """Publish all items referenced by a package, recursing into nested packages.

        :param package: package document whose group refs get published
        :param last_updated: timestamp applied to every published item
        :raises SuperdeskApiError: when a referenced content id does not exist
        """

        items = [ref.get('residRef') for group in package.get('groups', [])
                 for ref in group.get('refs', []) if 'residRef' in ref]

        if items:
            for guid in items:
                doc = super().find_one(req=None, _id=guid)
                try:
                    if doc['type'] == 'composite':
                        # Fix: propagate last_updated — the recursive call was
                        # missing its second required argument (TypeError on
                        # any nested package).
                        self._publish_package_items(doc, last_updated)

                    original = copy(doc)

                    set_sign_off(doc, original)

                    self._set_version_last_modified_and_state(original, doc, last_updated)
                    self._update_archive(original, {config.CONTENT_STATE: doc[config.CONTENT_STATE],
                                                    config.ETAG: doc[config.ETAG],
                                                    config.VERSION: doc[config.VERSION],
                                                    config.LAST_UPDATED: doc[config.LAST_UPDATED],
                                                    'sign_off': doc['sign_off']},
                                         versioned_doc=doc)
                except KeyError:
                    raise SuperdeskApiError.badRequestError("A non-existent content id is requested to publish")
Example #5
0
 def on_update(self, updates, original):
     """Apply publish bookkeeping prior to persisting *updates*."""
     updates[ITEM_OPERATION] = self.item_operation
     # Only stamp the first-publish time the first time the item publishes.
     if not original.get('firstpublished'):
         updates.setdefault('firstpublished', utcnow())
     super().on_update(updates, original)
     set_sign_off(updates, original)
     update_word_count(updates)
Example #6
0
 def _move(self, archived_doc, doc):
     """Move *archived_doc* to the desk/stage in ``doc['task']`` and persist it."""
     archive_svc = get_resource_service(ARCHIVE)
     prior = deepcopy(archived_doc)
     target_task = doc.get('task', {})
     send_to(doc=archived_doc, desk_id=target_task.get('desk'),
             stage_id=target_task.get('stage'),
             user_id=get_user().get(config.ID_FIELD))
     # Published/scheduled/killed items keep their state; anything else
     # becomes "submitted" when moved.
     terminal_states = {CONTENT_STATE.PUBLISHED, CONTENT_STATE.SCHEDULED,
                        CONTENT_STATE.KILLED}
     if archived_doc[ITEM_STATE] not in terminal_states:
         archived_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
     archived_doc[ITEM_OPERATION] = ITEM_MOVE
     # set the change in desk type when content is moved.
     self.set_change_in_desk_type(archived_doc, prior)
     # Clear the existing sign-off before set_sign_off recomputes it.
     archived_doc.pop(SIGN_OFF, None)
     set_sign_off(archived_doc, original=prior)
     convert_task_attributes_to_objectId(archived_doc)
     resolve_document_version(archived_doc, ARCHIVE, 'PATCH', prior)
     # _id must not appear in the patch payload.
     del archived_doc[config.ID_FIELD]
     archive_svc.update(prior[config.ID_FIELD], archived_doc, prior)
     insert_into_versions(id_=prior[config.ID_FIELD])
     push_item_move_notification(prior, archived_doc)
     app.on_archive_item_updated(archived_doc, prior, ITEM_MOVE)
Example #7
0
 def on_update(self, updates, original):
     """Validate crops and stamp correction metadata on *updates*."""
     CropService().validate_multiple_crops(updates, original)
     super().on_update(updates, original)
     updates.update({
         ITEM_OPERATION: ITEM_CORRECT,
         'versioncreated': utcnow(),
         'correction_sequence': original.get('correction_sequence', 1) + 1,
     })
     set_sign_off(updates, original)
Example #8
0
 def on_update(self, updates, original):
     """Prepare a correction: validate crops, bump the sequence, re-sign-off."""
     CropService().validate_multiple_crops(updates, original)
     super().on_update(updates, original)
     next_sequence = original.get('correction_sequence', 1) + 1
     updates[ITEM_OPERATION] = ITEM_CORRECT
     updates['versioncreated'] = utcnow()
     updates['correction_sequence'] = next_sequence
     set_sign_off(updates, original)
Example #9
0
 def on_update(self, updates, original):
     """Run correction-time hooks and bump the correction sequence."""
     update_associations(updates)
     CropService().validate_multiple_crops(updates, original)
     super().on_update(updates, original)
     updates.update({
         ITEM_OPERATION: self.item_operation,
         'versioncreated': utcnow(),
         'correction_sequence': original.get('correction_sequence', 1) + 1,
     })
     set_sign_off(updates, original)
     update_word_count(updates, original)
Example #10
0
 def on_update(self, updates, original):
     """Correction hook: associations, crops, sequence bump and sign-off."""
     update_associations(updates)
     CropService().validate_multiple_crops(updates, original)
     super().on_update(updates, original)
     bumped = original.get('correction_sequence', 1) + 1
     updates[ITEM_OPERATION] = self.item_operation
     updates['versioncreated'] = utcnow()
     updates['correction_sequence'] = bumped
     set_sign_off(updates, original)
     update_word_count(updates, original)
Example #11
0
    def update(self, id, updates, original):
        """
        Handles workflow of each Publish, Corrected and Killed.

        :param id: item id being published
        :param updates: changes to persist on the item
        :param original: the item as currently stored
        :raises SuperdeskApiError: on missing keys, queue failures or any
            unexpected error while publishing
        """
        try:
            user = get_user()
            last_updated = updates.get(config.LAST_UPDATED, utcnow())

            if original[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE:
                self._publish_package_items(original, last_updated)

            set_sign_off(updates, original)
            queued_digital = False
            package_id = None

            if original[ITEM_TYPE] != CONTENT_TYPE.COMPOSITE:
                # if targeted_for is set we don't send to digital clients
                if not updates.get('targeted_for', original.get('targeted_for')):
                    # check if item is in a digital package
                    package_id = TakesPackageService().get_take_package_id(original)

                    if package_id:
                        queued_digital, takes_package = self._publish_takes_package(package_id, updates,
                                                                                    original, last_updated)
                    else:
                        # if item is going to be sent to digital subscribers, package it as a take
                        if self.sending_to_digital_subscribers(updates):
                            updated = copy(original)
                            updated.update(updates)
                            # create a takes package
                            package_id = TakesPackageService().package_story_as_a_take(updated, {}, None)
                            original = get_resource_service('archive').find_one(req=None, _id=original['_id'])
                            queued_digital, takes_package = self._publish_takes_package(package_id, updates,
                                                                                        original, last_updated)

                # queue only text items
                queued_wire = \
                    self.publish(doc=original, updates=updates, target_media_type=WIRE if package_id else None)

                queued = queued_digital or queued_wire
                if not queued:
                    raise PublishQueueError.item_not_queued_error(Exception('Nothing is saved to publish queue'), None)

            self._set_version_last_modified_and_state(original, updates, last_updated)
            self._update_archive(original=original, updates=updates, should_insert_into_versions=False)
            push_notification('item:publish', item=str(id), unique_name=original['unique_name'],
                              desk=str(original.get('task', {}).get('desk', '')), user=str(user.get('_id', '')))
        except SuperdeskApiError as e:
            raise e
        except KeyError as e:
            raise SuperdeskApiError.badRequestError(
                message="Key is missing on article to be published: {}".format(str(e)))
        except Exception as e:
            # Fix: '"%s".format(id)' never interpolates (.format is a no-op on
            # a %-style template); pass the id as a lazy logger argument.
            logger.exception("Something bad happened while publishing %s", id)
            raise SuperdeskApiError.internalError(message="Failed to publish the item: {}".format(str(e)))
Example #12
0
 def on_update(self, updates, original):
     """Full correction pipeline: crops, sequence, renditions, desk routing."""
     CropService().validate_multiple_crops(updates, original)
     super().on_update(updates, original)
     sequence = original.get("correction_sequence", 1) + 1
     updates[ITEM_OPERATION] = self.item_operation
     updates["versioncreated"] = utcnow()
     updates["correction_sequence"] = sequence
     set_sign_off(updates, original)
     update_word_count(updates, original)
     flush_renditions(updates, original)
     self.change_being_corrected_to_published(updates, original)
     self.send_to_original_desk(updates, original)
Example #13
0
    def move_content(self, id, doc):
        """Move an archived item to the desk/stage given in ``doc['task']``.

        :param id: guid of the item to move
        :param doc: payload whose ``task`` dict holds the target desk/stage
        :return: the updated archive document
        :raises SuperdeskApiError: item not found, or move targets the
            stage the item is already on
        :raises InvalidStateTransitionError: workflow forbids 'submit_to_desk'
        """
        archive_service = get_resource_service(ARCHIVE)
        archived_doc = archive_service.find_one(req=None, _id=id)

        if not archived_doc:
            raise SuperdeskApiError.notFoundError(
                'Fail to found item with guid: %s' % id)

        # Moving within the same stage is a no-op and is rejected.
        current_stage_of_item = archived_doc.get('task', {}).get('stage')
        if current_stage_of_item and str(current_stage_of_item) == str(
                doc.get('task', {}).get('stage')):
            raise SuperdeskApiError.preconditionFailedError(
                message='Move is not allowed within the same stage.')

        if not is_workflow_state_transition_valid('submit_to_desk',
                                                  archived_doc[ITEM_STATE]):
            raise InvalidStateTransitionError()

        original = deepcopy(archived_doc)
        user = get_user()

        send_to(doc=archived_doc,
                desk_id=doc.get('task', {}).get('desk'),
                stage_id=doc.get('task', {}).get('stage'),
                user_id=user.get(config.ID_FIELD))

        # Published/scheduled/killed items keep their state; anything else
        # becomes "submitted" when moved.
        if archived_doc[ITEM_STATE] not in {
                CONTENT_STATE.PUBLISHED, CONTENT_STATE.SCHEDULED,
                CONTENT_STATE.KILLED
        }:
            archived_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
        archived_doc[ITEM_OPERATION] = ITEM_MOVE

        # set the change in desk type when content is moved.
        self.set_change_in_desk_type(archived_doc, original)
        # Clear the existing sign-off before set_sign_off recomputes it.
        archived_doc.pop(SIGN_OFF, None)
        set_sign_off(archived_doc, original=original)
        convert_task_attributes_to_objectId(archived_doc)
        resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)

        # _id must not be part of the patch payload.
        del archived_doc[config.ID_FIELD]
        archive_service.update(original[config.ID_FIELD], archived_doc,
                               original)

        insert_into_versions(id_=original[config.ID_FIELD])

        push_content_notification([archived_doc, original])

        # finally apply any on stage rules/macros
        apply_onstage_rule(archived_doc, original[config.ID_FIELD])

        return archived_doc
Example #14
0
    def on_update(self, updates, original):
        """Stamp publish metadata and clear any per-user mark on *updates*."""
        if not original.get('firstpublished'):
            updates.setdefault('firstpublished', utcnow())

        marked_user = original.get('marked_for_user')
        if marked_user:
            # Publishing clears the mark; keep the old value for history.
            updates['previous_marked_user'] = marked_user
            updates['marked_for_user'] = None

        updates[ITEM_OPERATION] = self.item_operation
        super().on_update(updates, original)
        set_sign_off(updates, original)
        update_word_count(updates)
Example #15
0
    def update(self, id, updates, original):
        """Publish the item: persist changes, queue transmission and notify.

        :param id: item id being published
        :param updates: changes to persist
        :param original: the item as currently stored
        :raises SuperdeskApiError: on missing keys or any publish failure
        """
        archived_item = super().find_one(req=None, _id=id)
        try:
            any_channel_closed = False

            if archived_item['type'] == 'composite':
                self.__publish_package_items(archived_item, updates[config.LAST_UPDATED])

            # document is saved to keep the initial changes
            set_sign_off(updates, original)
            self.backend.update(self.datasource, id, updates, original)

            # document is saved to change the status
            if (original.get('publish_schedule') or updates.get('publish_schedule')) \
                    and original[config.CONTENT_STATE] not in ['published', 'killed', 'scheduled']:
                updates[config.CONTENT_STATE] = 'scheduled'
            else:
                updates[config.CONTENT_STATE] = self.published_state

            original.update(updates)
            get_component(ItemAutosave).clear(original['_id'])

            if archived_item['type'] != 'composite':
                # queue only text items
                any_channel_closed = self.queue_transmission(original)
                task = self.__send_to_publish_stage(original)
                if task:
                    updates['task'] = task

            self.backend.update(self.datasource, id, updates, original)
            user = get_user()
            push_notification('item:publish:closed:channels' if any_channel_closed else 'item:publish',
                              item=str(id), unique_name=archived_item['unique_name'],
                              desk=str(archived_item['task']['desk']), user=str(user.get('_id', '')))
            original.update(super().find_one(req=None, _id=id))
        except SuperdeskApiError as e:
            raise e
        except KeyError as e:
            raise SuperdeskApiError.badRequestError(
                message="Key is missing on article to be published: {}"
                .format(str(e)))
        except Exception as e:
            # Fix: '"%s".format(id)' is a no-op on a %-style template, so the
            # exception object (not the id) landed in the placeholder. Log the
            # id lazily and capture the traceback with logger.exception.
            logger.exception("Something bad happened while publishing %s", id)
            raise SuperdeskApiError.internalError(message="Failed to publish the item: {}"
                                                  .format(str(e)))
Example #16
0
 def _move(self, archived_doc, doc):
     """Move the item to the desk/stage in ``doc['task']`` and notify listeners."""
     service = get_resource_service(ARCHIVE)
     previous = deepcopy(archived_doc)
     current_user = get_user()
     destination = doc.get('task', {})
     send_to(doc=archived_doc, desk_id=destination.get('desk'),
             stage_id=destination.get('stage'),
             user_id=current_user.get(config.ID_FIELD))
     if archived_doc[ITEM_STATE] not in {CONTENT_STATE.PUBLISHED,
                                         CONTENT_STATE.SCHEDULED,
                                         CONTENT_STATE.KILLED}:
         archived_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
     archived_doc[ITEM_OPERATION] = ITEM_MOVE
     # Record the change in desk type caused by the move.
     self.set_change_in_desk_type(archived_doc, previous)
     # Clear the existing sign-off before set_sign_off recomputes it.
     archived_doc.pop(SIGN_OFF, None)
     set_sign_off(archived_doc, original=previous)
     convert_task_attributes_to_objectId(archived_doc)
     resolve_document_version(archived_doc, ARCHIVE, 'PATCH', previous)
     del archived_doc[config.ID_FIELD]
     service.update(previous[config.ID_FIELD], archived_doc, previous)
     insert_into_versions(id_=previous[config.ID_FIELD])
     push_item_move_notification(previous, archived_doc)
Example #17
0
    def on_update(self, updates, original):
        """Stamp publish metadata, resolve first-publish time, clear user marks."""
        updates[ITEM_OPERATION] = self.item_operation
        super().on_update(updates, original)

        if not original.get("firstpublished"):
            # Prefer the scheduled publish time when one is set.
            schedule = updates.get(SCHEDULE_SETTINGS) or {}
            scheduled_time = schedule.get("utc_publish_schedule")
            updates["firstpublished"] = scheduled_time if scheduled_time else utcnow()

        if original.get("marked_for_user"):
            # remove marked_for_user on publish and keep it as previous_marked_user for history
            updates["previous_marked_user"] = original["marked_for_user"]
            updates["marked_for_user"] = None
            updates["marked_for_sign_off"] = None

        set_sign_off(updates, original)
        update_word_count(updates)
        self.set_desk(updates, original)
Example #18
0
    def _move(self, archived_doc, doc):
        """Move *archived_doc* to the desk/stage carried in ``doc['task']``.

        Persists the change, records a new version and emits move
        signals/notifications. Mutates *archived_doc* in place.
        """
        archive_service = get_resource_service(ARCHIVE)
        original = deepcopy(archived_doc)
        user = get_user()
        send_to(
            doc=archived_doc,
            desk_id=doc.get("task", {}).get("desk"),
            stage_id=doc.get("task", {}).get("stage"),
            user_id=user.get(config.ID_FIELD),
        )
        # Items in a published-type state keep it; everything else becomes
        # "submitted" after a move.
        if archived_doc[ITEM_STATE] not in ({
                CONTENT_STATE.PUBLISHED,
                CONTENT_STATE.SCHEDULED,
                CONTENT_STATE.KILLED,
                CONTENT_STATE.RECALLED,
                CONTENT_STATE.CORRECTION,
        }):
            archived_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
        archived_doc[ITEM_OPERATION] = ITEM_MOVE
        # set the change in desk type when content is moved.
        self.set_change_in_desk_type(archived_doc, original)
        # Clear the existing sign-off before set_sign_off recomputes it.
        archived_doc.pop(SIGN_OFF, None)
        set_sign_off(archived_doc, original=original)
        convert_task_attributes_to_objectId(archived_doc)
        resolve_document_version(archived_doc, ARCHIVE, "PATCH", original)

        # _id must not be part of the patch payload.
        del archived_doc[config.ID_FIELD]
        del archived_doc[config.ETAG]  # force etag update
        archived_doc["versioncreated"] = utcnow()

        # Notify subscribers before and after the persisted move.
        signals.item_move.send(self, item=archived_doc, original=original)
        archive_service.update(original[config.ID_FIELD], archived_doc,
                               original)

        insert_into_versions(id_=original[config.ID_FIELD])
        push_item_move_notification(original, archived_doc)
        app.on_archive_item_updated(archived_doc, original, ITEM_MOVE)

        # make sure `item._id` is there in signal
        moved_item = archived_doc.copy()
        moved_item[config.ID_FIELD] = original[config.ID_FIELD]
        signals.item_moved.send(self, item=moved_item, original=original)
Example #19
0
    def move_content(self, id, doc):
        """Move the item identified by *id* to the desk/stage in ``doc['task']``.

        Returns the updated document. Raises when the item is missing, the
        target stage equals the current one, or the workflow forbids the move.
        """
        archive_service = get_resource_service(ARCHIVE)
        archived_doc = archive_service.find_one(req=None, _id=id)
        if not archived_doc:
            raise SuperdeskApiError.notFoundError('Fail to found item with guid: %s' % id)

        target_stage = doc.get('task', {}).get('stage')
        current_stage = archived_doc.get('task', {}).get('stage')
        if current_stage and str(current_stage) == str(target_stage):
            raise SuperdeskApiError.preconditionFailedError(message='Move is not allowed within the same stage.')

        if not is_workflow_state_transition_valid('submit_to_desk', archived_doc[ITEM_STATE]):
            raise InvalidStateTransitionError()

        previous = deepcopy(archived_doc)
        send_to(doc=archived_doc, desk_id=doc.get('task', {}).get('desk'), stage_id=doc.get('task', {}).get('stage'),
                user_id=get_user().get(config.ID_FIELD))

        # Published/scheduled/killed items keep their state on a move.
        if archived_doc[ITEM_STATE] not in {CONTENT_STATE.PUBLISHED, CONTENT_STATE.SCHEDULED, CONTENT_STATE.KILLED}:
            archived_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
        archived_doc[ITEM_OPERATION] = ITEM_MOVE

        # Record the desk-type change implied by the move.
        self.set_change_in_desk_type(archived_doc, previous)
        archived_doc.pop(SIGN_OFF, None)
        set_sign_off(archived_doc, original=previous)
        convert_task_attributes_to_objectId(archived_doc)
        resolve_document_version(archived_doc, ARCHIVE, 'PATCH', previous)

        del archived_doc[config.ID_FIELD]
        archive_service.update(previous[config.ID_FIELD], archived_doc, previous)
        insert_into_versions(id_=previous[config.ID_FIELD])
        push_content_notification([archived_doc, previous])

        # Finally apply any on-stage rules/macros.
        apply_onstage_rule(archived_doc, previous[config.ID_FIELD])

        return archived_doc
Example #20
0
    def _set_metadata(self, doc):
        """Populate a new archive item with its initial metadata in place."""
        update_dates_for(doc)
        generate_unique_id_and_name(doc)
        doc['guid'] = generate_guid(type=GUID_TAG)
        doc.setdefault(config.ID_FIELD, doc['guid'])
        doc[config.VERSION] = 1
        set_item_expiry({}, doc)

        # Imported content keeps whatever creator info it arrived with.
        if not doc.get('_import'):
            set_original_creator(doc)

        doc.setdefault(ITEM_STATE, CONTENT_STATE.DRAFT)

        # Manually created articles get the default source.
        if not doc.get('ingest_provider'):
            doc['source'] = DEFAULT_SOURCE_VALUE_FOR_MANUAL_ARTICLES

        if BYLINE not in doc:
            set_byline(doc)

        set_sign_off(doc)
Example #21
0
    def _set_metadata(self, doc):
        """Fill in the initial metadata for a freshly created item."""
        update_dates_for(doc)
        generate_unique_id_and_name(doc)
        doc['guid'] = generate_guid(type=GUID_TAG)
        doc.setdefault(config.ID_FIELD, doc['guid'])
        doc[config.VERSION] = 1
        set_item_expiry({}, doc)

        # Skip creator stamping for imported content.
        if not doc.get('_import', None):
            set_original_creator(doc)

        if ITEM_STATE not in doc:
            doc[ITEM_STATE] = CONTENT_STATE.DRAFT

        # Default the source for anything not coming from an ingest provider.
        if not doc.get('ingest_provider'):
            doc['source'] = DEFAULT_SOURCE_VALUE_FOR_MANUAL_ARTICLES

        if BYLINE not in doc:
            set_byline(doc)

        set_sign_off(doc)
Example #22
0
 def on_update(self, updates, original):
     """Delegate to the base hook, then refresh the sign-off on *updates*."""
     super().on_update(updates, original)
     set_sign_off(updates, original)
Example #23
0
    def update(self, id, updates, original):
        """Publish the item, handling takes packages and digital/wire queues.

        :param id: item id being published
        :param updates: changes to persist
        :param original: the item as currently stored
        :raises SuperdeskApiError: when required keys are missing or nothing
            could be queued for publishing
        """
        archived_item = super().find_one(req=None, _id=id)

        try:
            any_channel_closed = False

            if archived_item['type'] == 'composite':
                self.__publish_package_items(archived_item, updates[config.LAST_UPDATED])

            # document is saved to keep the initial changes
            set_sign_off(updates, original)
            self.backend.update(self.datasource, id, updates, original)

            # document is saved to change the status
            if (original.get('publish_schedule') or updates.get('publish_schedule')) \
                    and original[config.CONTENT_STATE] not in PUBLISH_STATES:
                updates[config.CONTENT_STATE] = 'scheduled'
            else:
                updates['publish_schedule'] = None
                updates[config.CONTENT_STATE] = self.published_state

            original.update(updates)
            get_component(ItemAutosave).clear(original['_id'])

            if archived_item['type'] != 'composite':
                # check if item is in a digital package
                package_id = TakesPackageService().get_take_package_id(original)
                if package_id:
                    # process the takes to form digital master file content
                    package, package_updates = self.process_takes(take=original, package_id=package_id)
                    package_updates[config.CONTENT_STATE] = self.published_state
                    resolve_document_version(document=package_updates,
                                             resource=ARCHIVE, method='PATCH',
                                             latest_doc=package)
                    self.backend.update(self.datasource, package['_id'], package_updates, package)
                    package.update(package_updates)
                    insert_into_versions(doc=package)

                    # send it to the digital channels
                    any_channel_closed_digital, queued_digital = \
                        self.publish(doc=package, updates=None, target_output_channels=DIGITAL)

                    self.update_published_collection(published_item=package)
                else:
                    any_channel_closed_digital = False
                    queued_digital = False

                # queue only text items
                any_channel_closed_wire, queued_wire = \
                    self.publish(doc=original, updates=updates, target_output_channels=WIRE if package_id else None)

                any_channel_closed = any_channel_closed_digital or any_channel_closed_wire
                queued = queued_digital or queued_wire

                if not queued:
                    raise PublishQueueError.item_not_queued_error(Exception('Nothing is saved to publish queue'), None)

            self.backend.update(self.datasource, id, updates, original)
            user = get_user()
            push_notification('item:publish:closed:channels' if any_channel_closed else 'item:publish',
                              item=str(id), unique_name=archived_item['unique_name'],
                              desk=str(archived_item.get('task', {}).get('desk', '')),
                              user=str(user.get('_id', '')))
            original.update(super().find_one(req=None, _id=id))
        except SuperdeskApiError as e:
            raise e
        except KeyError as e:
            raise SuperdeskApiError.badRequestError(
                message="Key is missing on article to be published: {}"
                .format(str(e)))
        except Exception as e:
            # Fix: '"%s".format(id)' is a no-op on a %-style template, so the
            # id never reached the log line; pass it lazily and keep the
            # traceback via logger.exception.
            logger.exception("Something bad happened while publishing %s", id)
            raise SuperdeskApiError.internalError(message="Failed to publish the item: {}"
                                                  .format(str(e)))
Example #24
0
 def on_update(self, updates, original):
     """Validate crops and mark the update as a correction."""
     updates[ITEM_OPERATION] = ITEM_CORRECT
     ArchiveCropService().validate_multiple_crops(updates, original)
     super().on_update(updates, original)
     set_sign_off(updates, original)
Example #25
0
 def on_update(self, updates, original):
     """Mark the update as a publish operation and refresh the sign-off."""
     updates[ITEM_OPERATION] = ITEM_PUBLISH
     super().on_update(updates, original)
     set_sign_off(updates, original)
Example #26
0
 def on_update(self, updates, original):
     """Validate the item and stamp spike metadata before spiking."""
     updates[ITEM_OPERATION] = ITEM_SPIKE
     self._validate_item(original)
     self._validate_take(original)
     self._update_rewrite(original)
     set_sign_off(updates, original=original)
Example #27
0
 def on_update(self, updates, original):
     """Stamp unspike metadata and refresh the sign-off."""
     updates[ITEM_OPERATION] = ITEM_UNSPIKE
     set_sign_off(updates, original=original)
Example #28
0
 def on_update(self, updates, original):
     """Mark as publish, run base hooks, refresh sign-off and word count."""
     updates[ITEM_OPERATION] = ITEM_PUBLISH
     super().on_update(updates, original)
     set_sign_off(updates, original)
     update_word_count(updates)
Example #29
0
 def on_update(self, updates, original):
     """Validate crops, run base hooks, then mark as a correction."""
     CropService().validate_multiple_crops(updates, original)
     super().on_update(updates, original)
     updates[ITEM_OPERATION] = ITEM_CORRECT
     set_sign_off(updates, original)
Example #30
0
 def on_update(self, updates, original):
     """Run the base update hook, then refresh the sign-off."""
     super().on_update(updates, original)
     set_sign_off(updates, original)
 def on_update(self, updates, original):
     """Record the unspike operation and refresh the sign-off."""
     updates[ITEM_OPERATION] = ITEM_UNSPIKE
     set_sign_off(updates, original=original)
 def on_update(self, updates, original):
     """Validate the item for spiking and record the spike operation."""
     updates[ITEM_OPERATION] = ITEM_SPIKE
     self._validate_item(original)
     self._validate_take(original)
     self._update_rewrite(original)
     set_sign_off(updates, original=original)