Example #1
    def _set_updates(self,
                     original,
                     updates,
                     last_updated,
                     preserve_state=False):
        """Sets config.VERSION, config.LAST_UPDATED, ITEM_STATE in updates document.

        If the item is being published and an embargo is set, append 'Embargoed' to the Editorial Note.

        :param dict original: original document
        :param dict updates: updates related to the original document
        :param datetime last_updated: datetime of the updates.
        """
        if not preserve_state:
            self.set_state(original, updates)
        updates.setdefault(config.LAST_UPDATED, last_updated)

        if original[config.VERSION] == updates.get(config.VERSION,
                                                   original[config.VERSION]):
            resolve_document_version(document=updates,
                                     resource=ARCHIVE,
                                     method='PATCH',
                                     latest_doc=original)

        user = get_user()
        if user and user.get(config.ID_FIELD):
            updates['version_creator'] = user[config.ID_FIELD]
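The example above stamps version and creator metadata onto the updates dict. Below is a minimal, self-contained sketch of that pattern using plain dicts; fake_get_user() and the literal keys are hypothetical stand-ins for superdesk's get_user(), the config constants and resolve_document_version(), not the real API.

from datetime import datetime, timezone

def fake_get_user():
    # hypothetical stand-in for superdesk's get_user(); returns the session user
    return {'_id': 'user-1', 'username': 'editor'}

def set_updates(original, updates, last_updated):
    # config.LAST_UPDATED / config.VERSION are replaced by literal keys here
    updates.setdefault('_updated', last_updated)
    if original.get('_current_version') == updates.get('_current_version',
                                                       original.get('_current_version')):
        # resolve_document_version() would bump the document version here
        updates['_current_version'] = original.get('_current_version', 0) + 1
    user = fake_get_user()
    if user and user.get('_id'):
        updates['version_creator'] = user['_id']

original = {'_current_version': 3}
updates = {}
set_updates(original, updates, datetime.now(timezone.utc))
print(updates['version_creator'], updates['_current_version'])  # user-1 4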
 def on_updated(self, updates, original):
     user = get_user()
     push_notification(
         'events-template:updated',
         item=str(original[config.ID_FIELD]),
         user=str(user.get(config.ID_FIELD))
     )
    def on_updated(self, updates, original):
        user = get_user(required=True).get(config.ID_FIELD, '')
        session = get_auth().get(config.ID_FIELD, '')

        # Save history
        get_resource_service(
            'assignments_history').on_item_revert_availability(
                updates, original)

        push_notification('assignments:reverted',
                          item=str(original[config.ID_FIELD]),
                          planning=original.get('planning_item'),
                          assigned_user=(original.get('assigned_to')
                                         or {}).get('user'),
                          assigned_desk=(original.get('assigned_to')
                                         or {}).get('desk'),
                          assignment_state=updates.get('assigned_to',
                                                       {})['state'],
                          user=str(user),
                          session=str(session),
                          coverage=original.get('coverage_item'))

        # publish the planning item
        get_resource_service('assignments').publish_planning(
            original.get('planning_item'))

        # External (slack/browser pop-up) notifications
        assignments_service = get_resource_service('assignments')
        assignments_service.send_assignment_notification(
            updates, original, True)
 def on_deleted(self, doc):
     user = get_user()
     push_notification(
         'events-template:deleted',
         item=str(doc[config.ID_FIELD]),
         user=str(user.get(config.ID_FIELD))
     )
Example #5
    def _set_assignment_information(self, doc):
        if doc.get('planning') and doc['planning'].get('assigned_to'):
            planning = doc['planning']
            if planning['assigned_to'].get('user') and not planning['assigned_to'].get('desk'):
                raise SuperdeskApiError.badRequestError(message="Assignment should have a desk.")

            # In case user was removed
            if not planning['assigned_to'].get('user'):
                planning['assigned_to']['user'] = None

            user = get_user()
            if user and user.get(config.ID_FIELD):
                planning['assigned_to']['assigned_by'] = user[config.ID_FIELD]

            planning['assigned_to']['assigned_date'] = utcnow()
            if planning['assigned_to'].get('user'):
                # Avoid fetching user data for every assignment: the assignee can
                # also be a provider whose qcode may not be a valid GUID, so only
                # treat the assignee as a user if the value is a valid ObjectId.
                # In the rare case where a provider's qcode happens to be a valid
                # GUID, this will still (inappropriately) create activity records.
                if ObjectId.is_valid(planning['assigned_to'].get('user')):
                    add_activity(ACTIVITY_UPDATE,
                                 '{{assignor}} assigned a coverage to you',
                                 self.datasource,
                                 notify=[planning['assigned_to'].get('user')],
                                 assignor=user.get('username'))
    def create(self, docs, **kwargs):
        user_id = get_user(required=True)['_id']
        session_id = get_auth()['_id']
        lock_service = LockService()

        # If the event is a recurrent event, unlock all other events in this series
        item_id = request.view_args['item_id']
        resource_service = get_resource_service('events')
        item = resource_service.find_one(req=None, _id=item_id)
        updated_item = None
        if item.get('recurrence_id') and not item.get(LOCK_USER):
            # Find the actual event that is locked
            historic, past, future = resource_service.get_recurring_timeline(
                item)
            series = historic + past + future

            for event in series:
                if event.get(LOCK_USER):
                    updated_item = lock_service.unlock(event, user_id,
                                                       session_id, 'events')
                    break
        else:
            updated_item = lock_service.unlock(item, user_id, session_id,
                                               'events')

        if updated_item is None:
            # version 1 item must have been deleted by now
            return [0]

        return _update_returned_document(docs[0], updated_item)
    def set_assignment(self, updates, original=None):
        """Set the assignment information"""
        if not original:
            original = {}

        self.set_type(updates, original)

        if not updates.get('assigned_to'):
            if updates.get('priority'):
                # Priority was edited - nothing to set here
                return
            else:
                updates['assigned_to'] = {}

        assigned_to = updates.get('assigned_to')
        if assigned_to.get('user') and not assigned_to.get('desk'):
            raise SuperdeskApiError.badRequestError(
                message="Assignment should have a desk.")

        # set the assignment information
        user = get_user()
        if original.get('assigned_to',
                        {}).get('desk') != assigned_to.get('desk'):
            if original.get('assigned_to', {}).get('state') in \
                    [ASSIGNMENT_WORKFLOW_STATE.IN_PROGRESS, ASSIGNMENT_WORKFLOW_STATE.SUBMITTED]:
                raise SuperdeskApiError.forbiddenError(
                    message=
                    "Assignment linked to content. Desk reassignment not allowed."
                )

            assigned_to['assigned_date_desk'] = utcnow()

            if user and user.get(config.ID_FIELD):
                assigned_to['assignor_desk'] = user.get(config.ID_FIELD)

        if assigned_to.get('user') and original.get(
                'assigned_to', {}).get('user') != assigned_to.get('user'):
            assigned_to['assigned_date_user'] = utcnow()

            if user and user.get(config.ID_FIELD):
                assigned_to['assignor_user'] = user.get(config.ID_FIELD)

        if not original.get(config.ID_FIELD):
            updates['original_creator'] = str(user.get(
                config.ID_FIELD)) if user else None
            updates['assigned_to'][ITEM_STATE] = updates['assigned_to'].get(ITEM_STATE) or \
                ASSIGNMENT_WORKFLOW_STATE.ASSIGNED
        else:
            # In case user was removed
            if not assigned_to.get('user'):
                assigned_to['user'] = None
            else:
                # Move from 'submitted' to 'in_progress' once a user is assigned after a desk submission
                if original.get('assigned_to')[
                        'state'] == ASSIGNMENT_WORKFLOW_STATE.SUBMITTED:
                    updates['assigned_to'][
                        'state'] = ASSIGNMENT_WORKFLOW_STATE.IN_PROGRESS

            updates['version_creator'] = str(user.get(
                config.ID_FIELD)) if user else None
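A compact sketch of the two rules enforced in set_assignment above: an assignee requires a desk, and assignment dates are stamped only when the desk or user actually changes. ValueError stands in for SuperdeskApiError.badRequestError and the helper names are illustrative, not the production API.

from datetime import datetime, timezone

def validate_assigned_to(assigned_to):
    # a coverage may not be assigned to a user without also naming a desk
    if assigned_to.get('user') and not assigned_to.get('desk'):
        raise ValueError("Assignment should have a desk.")

def stamp_assignment_dates(original, assigned_to):
    now = datetime.now(timezone.utc)
    if original.get('assigned_to', {}).get('desk') != assigned_to.get('desk'):
        assigned_to['assigned_date_desk'] = now
    if assigned_to.get('user') and \
            original.get('assigned_to', {}).get('user') != assigned_to.get('user'):
        assigned_to['assigned_date_user'] = now

assigned = {'desk': 'sports', 'user': 'u1'}
validate_assigned_to(assigned)
stamp_assignment_dates({}, assigned)
print(sorted(assigned))  # ['assigned_date_desk', 'assigned_date_user', 'desk', 'user']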
Example #8
    def update(self, id, updates, original):
        archived_item = super().find_one(req=None, _id=id)
        try:
            if archived_item['type'] == 'composite':
                self.__publish_package_items(archived_item, updates[config.LAST_UPDATED])

            # document is saved to keep the initial changes
            self.backend.update(self.datasource, id, updates, original)
            original.update(updates)

            if archived_item['type'] != 'composite':
                # queue only text items
                self.queue_transmission(original)
                task = self.__send_to_publish_stage(original)
                if task:
                    updates['task'] = task

            # document is saved to change the status
            updates[config.CONTENT_STATE] = 'published'
            item = self.backend.update(self.datasource, id, updates, original)
            original.update(updates)
            user = get_user()
            push_notification('item:publish', item=str(item.get('_id')), user=str(user))
            original.update(super().find_one(req=None, _id=id))
        except KeyError as e:
            raise SuperdeskApiError.badRequestError(
                message="Key is missing on article to be published: {}"
                .format(str(e)))
        except Exception as e:
            logger.error("Something bad happened while publishing %s".format(id), e)
            raise SuperdeskApiError.internalError(message="Failed to publish the item: {}"
                                                  .format(str(e)))
Example #9
    def move_content(self, id, doc):
        archive_service = get_resource_service(ARCHIVE)
        archived_doc = archive_service.find_one(req=None, _id=id)

        if not archived_doc:
            raise SuperdeskApiError.notFoundError('Failed to find item with guid: %s' % id)

        current_stage_of_item = archived_doc.get('task', {}).get('stage')
        if current_stage_of_item and str(current_stage_of_item) == str(doc.get('task', {}).get('stage')):
            raise SuperdeskApiError.preconditionFailedError(message='Move is not allowed within the same stage.')

        if not is_workflow_state_transition_valid('submit_to_desk', archived_doc[ITEM_STATE]):
            raise InvalidStateTransitionError()

        original = dict(archived_doc)
        user = get_user()

        send_to(doc=archived_doc, desk_id=doc.get('task', {}).get('desk'), stage_id=doc.get('task', {}).get('stage'),
                user_id=user.get(config.ID_FIELD))

        if archived_doc[ITEM_STATE] not in {CONTENT_STATE.PUBLISHED, CONTENT_STATE.SCHEDULED, CONTENT_STATE.KILLED}:
            archived_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
        archived_doc[ITEM_OPERATION] = ITEM_MOVE

        set_sign_off(archived_doc, original=original)
        resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)

        del archived_doc[config.ID_FIELD]
        archive_service.update(original[config.ID_FIELD], archived_doc, original)

        insert_into_versions(id_=original[config.ID_FIELD])

        return archived_doc
Example #10
 def _move(self, archived_doc, doc):
     archive_service = get_resource_service(ARCHIVE)
     original = deepcopy(archived_doc)
     user = get_user()
     send_to(doc=archived_doc,
             desk_id=doc.get('task', {}).get('desk'),
             stage_id=doc.get('task', {}).get('stage'),
             user_id=user.get(config.ID_FIELD))
     if archived_doc[ITEM_STATE] not in {
             CONTENT_STATE.PUBLISHED, CONTENT_STATE.SCHEDULED,
             CONTENT_STATE.KILLED
     }:
         archived_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
     archived_doc[ITEM_OPERATION] = ITEM_MOVE
     # set the change in desk type when content is moved.
     self.set_change_in_desk_type(archived_doc, original)
     archived_doc.pop(SIGN_OFF, None)
     set_sign_off(archived_doc, original=original)
     convert_task_attributes_to_objectId(archived_doc)
     resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)
     del archived_doc[config.ID_FIELD]
     archive_service.update(original[config.ID_FIELD], archived_doc,
                            original)
     insert_into_versions(id_=original[config.ID_FIELD])
     push_item_move_notification(original, archived_doc)
     app.on_archive_item_updated(archived_doc, original, ITEM_MOVE)
Example #11
    def on_create(self, docs, **kwargs):
        user_id = get_user(required=True)['_id']
        session_id = get_auth()['_id']

        existing_locks = list(self.find(where={}))
        for existing_lock in existing_locks:
            if str(existing_lock.get(LOCK_USER)) != str(user_id):
                raise SuperdeskApiError.forbiddenError(
                    message=
                    "Featured stories already being managed by another user.")
            elif str(existing_lock.get(LOCK_SESSION)) != str(session_id):
                raise SuperdeskApiError.forbiddenError(
                    message=
                    "Featured stories already being managed by you in another session."
                )

        # try to acquire the lock; if it cannot be obtained, raise a forbidden error
        if not lock(LOCK_ID, expire=5):
            raise SuperdeskApiError.forbiddenError(
                message="Unable to obtain lock on Featured stories.")

        for doc in docs:
            doc['_id'] = generate_guid(type=GUID_NEWSML)
            lock_updates = {
                LOCK_USER: user_id,
                LOCK_SESSION: session_id,
                LOCK_TIME: utcnow()
            }
            doc.update(lock_updates)

        return docs
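The on_create hook above enforces a single manager for featured stories. Here is a hedged sketch of that check with plain dicts; RuntimeError and the literal lock keys replace SuperdeskApiError and the LOCK_USER / LOCK_SESSION constants, which are assumptions for illustration.

def check_existing_locks(existing_locks, user_id, session_id):
    for lock_doc in existing_locks:
        if str(lock_doc.get('lock_user')) != str(user_id):
            raise RuntimeError("Featured stories already being managed by another user.")
        if str(lock_doc.get('lock_session')) != str(session_id):
            raise RuntimeError("Featured stories already being managed by you in another session.")

check_existing_locks([{'lock_user': 'u1', 'lock_session': 's1'}], 'u1', 's1')  # passes silently
try:
    check_existing_locks([{'lock_user': 'u2', 'lock_session': 's1'}], 'u1', 's1')
except RuntimeError as exc:
    print(exc)  # Featured stories already being managed by another user.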
Example #12
 def _validate_user(self, doc_user_id, doc_is_global):
     session_user = get_user(required=True)
     if str(session_user['_id']) != str(doc_user_id):
         if not doc_is_global:
             raise SuperdeskApiError.forbiddenError('Unauthorized to modify other user\'s local search.')
         elif not current_user_has_privilege('global_saved_searches'):
             raise SuperdeskApiError.forbiddenError('Unauthorized to modify global search.')
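_validate_user above distinguishes local from global saved searches. The decision table reduces to a small pure function, shown here with hypothetical arguments in place of get_user() and current_user_has_privilege().

def can_modify(session_user_id, doc_user_id, doc_is_global, has_global_privilege):
    if str(session_user_id) == str(doc_user_id):
        return True                   # owners may always edit their own search
    if not doc_is_global:
        return False                  # someone else's local search: never
    return has_global_privilege       # someone else's global search: privilege required

assert can_modify('u1', 'u1', False, False)
assert not can_modify('u1', 'u2', False, True)
assert can_modify('u1', 'u2', True, True)
assert not can_modify('u1', 'u2', True, False)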
Example #13
 def on_created(self, docs):
     user_id = get_user(required=True)['_id']
     session_id = get_auth()['_id']
     unlock(LOCK_ID, remove=True)
     push_notification('planning_featured_lock:lock',
                       user=str(user_id),
                       lock_session=str(session_id))
    def publish(self, doc, updates, target_output_channels=None):
        any_channel_closed, wrong_formatted_channels, queued = \
            self.queue_transmission(doc=doc, target_output_channels=target_output_channels)

        if updates:
            desk = None

            if doc.get('task', {}).get('desk'):
                desk = get_resource_service('desks').find_one(req=None, _id=doc['task']['desk'])

            if not doc.get('ingest_provider'):
                updates['source'] = desk['source'] if desk and desk.get('source', '') \
                    else DEFAULT_SOURCE_VALUE_FOR_MANUAL_ARTICLES

        user = get_user()

        if wrong_formatted_channels and len(wrong_formatted_channels) > 0:
            push_notification('item:publish:wrong:format',
                              item=str(doc['_id']), unique_name=doc['unique_name'],
                              desk=str(doc['task']['desk']),
                              user=str(user.get('_id', '')),
                              output_channels=[c['name'] for c in wrong_formatted_channels])

        if not target_output_channels and not queued:
            raise PublishQueueError.item_not_queued_error(Exception('Nothing is saved to publish queue'), None)

        return any_channel_closed, queued
 def on_replaced(self, document, original):
     user = get_user()
     push_notification(
         'events-template:replaced',
         item=str(original[config.ID_FIELD]),
         user=str(user.get(config.ID_FIELD))
     )
Example #16
    def update(self, id, updates, original):
        """
        Handles workflow of each Publish, Corrected and Killed.
        """
        try:
            user = get_user()
            last_updated = updates.get(config.LAST_UPDATED, utcnow())

            if original[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE:
                self._publish_package_items(original, updates)

            queued_digital = False
            package = None

            if original[ITEM_TYPE] != CONTENT_TYPE.COMPOSITE:
                # if targeted_for is set then we don't send to digital clients.
                if not updates.get('targeted_for', original.get('targeted_for')):
                    # check if item is in a digital package
                    package = self.takes_package_service.get_take_package(original)

                    if package:
                        queued_digital = self._publish_takes_package(package, updates, original, last_updated)
                    else:
                        '''
                        If the type of the item is text or preformatted, the item needs to be sent to digital subscribers.
                        So, package the item as a take.
                        '''
                        updated = copy(original)
                        updated.update(updates)

                        if original[ITEM_TYPE] in [CONTENT_TYPE.TEXT, CONTENT_TYPE.PREFORMATTED] and \
                                self.sending_to_digital_subscribers(updated):
                            # create a takes package
                            package_id = self.takes_package_service.package_story_as_a_take(updated, {}, None)
                            updates[LINKED_IN_PACKAGES] = updated[LINKED_IN_PACKAGES]
                            package = get_resource_service(ARCHIVE).find_one(req=None, _id=package_id)
                            queued_digital = self._publish_takes_package(package, updates, original, last_updated)

                # queue only text items
                queued_wire = \
                    self.publish(doc=original, updates=updates, target_media_type=WIRE if package else None)

                queued = queued_digital or queued_wire
                if not queued:
                    logger.exception('Nothing is saved to publish queue for story: {} for action: {}'.
                                     format(original[config.ID_FIELD], self.publish_type))

            self._update_archive(original=original, updates=updates, should_insert_into_versions=False)
            push_notification('item:publish', item=str(id), unique_name=original['unique_name'],
                              desk=str(original.get('task', {}).get('desk', '')),
                              user=str(user.get(config.ID_FIELD, '')))
        except SuperdeskApiError as e:
            raise e
        except KeyError as e:
            raise SuperdeskApiError.badRequestError(
                message="Key is missing on article to be published: {}".format(str(e)))
        except Exception as e:
            logger.exception("Something bad happened while publishing %s".format(id))
            raise SuperdeskApiError.internalError(message="Failed to publish the item: {}".format(str(e)))
Example #17
    def create(self, docs):
        ids = []
        production = get_resource_service('archive')
        assignments_service = get_resource_service('assignments')
        items = []

        for doc in docs:
            assignment = assignments_service.find_one(
                req=None, _id=doc.pop('assignment_id'))
            item = production.find_one(req=None, _id=doc.pop('item_id'))
            # Boolean set to true if the unlink is the result of spiking the content item
            spike = doc.pop('spike', False)

            # Set the state to 'assigned' if the item is 'submitted'
            updates = {'assigned_to': deepcopy(assignment.get('assigned_to'))}
            updates['assigned_to'][
                'state'] = ASSIGNMENT_WORKFLOW_STATE.ASSIGNED
            assignments_service.patch(assignment[config.ID_FIELD], updates)

            production.system_update(item[config.ID_FIELD],
                                     {'assignment_id': None}, item)

            get_resource_service('delivery').delete_action(
                lookup={
                    'assignment_id': assignment[config.ID_FIELD],
                    'item_id': item[config.ID_FIELD]
                })

            doc.update(item)
            ids.append(doc[config.ID_FIELD])
            items.append(item)

            user = get_user()
            PlanningNotifications().notify_assignment(
                target_desk=item.get('task').get('desk'),
                message='{{actioning_user}} has {{action}} '
                'a {{coverage_type}} coverage for \"{{slugline}}\"',
                actioning_user=user.get('display_name',
                                        user.get('username', 'Unknown')),
                action='unlinked' if not spike else 'spiked',
                coverage_type=get_coverage_type_name(item.get('type', '')),
                slugline=item.get('slugline'),
                omit_user=True)

            push_content_notification(items)
            push_notification('content:unlink',
                              item=str(item[config.ID_FIELD]),
                              assignment=str(assignment[config.ID_FIELD]))

        assignment_history_service = get_resource_service(
            'assignments_history')
        if spike:
            assignment_history_service.on_item_content_unlink(
                updates, assignment, ASSIGNMENT_HISTORY_ACTIONS.SPIKE_UNLINK)
        else:
            assignment_history_service.on_item_content_unlink(
                updates, assignment)

        return ids
Example #18
    def on_updated(self, updates, original):
        # Spike associated planning
        planning_spike_service = get_resource_service('planning_spike')
        query = {
            'query': {
                'bool': {
                    'must': {
                        'term': {
                            'event_item': str(original[config.ID_FIELD])
                        }
                    }
                }
            }
        }
        results = get_resource_service('planning').search(query)
        spiked_items = []
        if len(results.docs) > 0:
            for planning in results.docs:
                if planning['state'] == WORKFLOW_STATE.DRAFT:
                    planning_spike_service.patch(planning[config.ID_FIELD],
                                                 {'state': 'spiked'})
                    spiked_items.append(str(planning[config.ID_FIELD]))

            # When planning items associated with this event are spiked,
            # notify here about any failures in removing their assignments
            if len(spiked_items) > 0:
                query = {
                    'query': {
                        'filtered': {
                            'filter': {
                                'bool': {
                                    'must': {
                                        'terms': {
                                            'planning_item': spiked_items
                                        }
                                    }
                                }
                            }
                        }
                    }
                }

                req = ParsedRequest()
                req.args = {'source': json.dumps(query)}

                assignments = get_resource_service('assignments').get(
                    req=req, lookup=None)
                if assignments.count() > 0:
                    session_id = get_auth().get('_id')
                    user_id = get_user().get(config.ID_FIELD)
                    push_notification(
                        'assignments:delete:fail',
                        items=[{
                            'slugline': a.get('planning').get('slugline'),
                            'type': a.get('planning').get('g2_content_type')
                        } for a in assignments],
                        session=session_id,
                        user=user_id)
Example #19
def set_original_creator(doc):
    """Set the original creator"""
    usr = get_user()
    user = str(usr.get('_id', doc.get('original_creator', ''))) or None
    if not user:
        doc.pop('original_creator', None)
        return
    doc['original_creator'] = user
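A self-contained illustration of set_original_creator's fallback behaviour; the usr argument is a hypothetical stand-in for the result of get_user().

def set_original_creator(doc, usr):
    user = str(usr.get('_id', doc.get('original_creator', ''))) or None
    if not user:
        doc.pop('original_creator', None)
        return
    doc['original_creator'] = user

doc = {'original_creator': 'old-id'}
set_original_creator(doc, {})             # no session user: the existing value is kept
print(doc['original_creator'])            # old-id
set_original_creator(doc, {'_id': 'u2'})  # a session user overrides it
print(doc['original_creator'])            # u2
set_original_creator({}, {})              # neither present: the key simply stays absent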
Example #20
 def _push_formatter_notification(self, doc, no_formatters=[]):
     if len(no_formatters) > 0:
         user = get_user()
         push_notification('item:publish:wrong:format',
                           item=str(doc[config.ID_FIELD]), unique_name=doc.get('unique_name'),
                           desk=str(doc.get('task', {}).get('desk', '')),
                           user=str(user.get(config.ID_FIELD, '')),
                           formats=no_formatters)
    def find_one(self, req, **lookup):
        item = super().find_one(req, **lookup)

        if item and str(item.get('task', {}).get('stage', '')) in \
                get_resource_service('users').get_invisible_stages_ids(get_user().get('_id')):
            raise SuperdeskApiError.forbiddenError("User does not have permissions to read the item.")

        return item
Example #22
    def find_one(self, req, **lookup):
        item = super().find_one(req, **lookup)

        if item and str(item.get('task', {}).get('stage', '')) in \
                get_resource_service('users').get_invisible_stages_ids(get_user().get('_id')):
            raise SuperdeskApiError.forbiddenError(_("User does not have permissions to read the item."))

        return item
 def on_created(self, docs):
     user = get_user()
     for doc in docs:
         push_notification(
             'events-template:created',
             item=str(doc.get(config.ID_FIELD)),
             user=str(user.get(config.ID_FIELD))
         )
 def create(self, docs, **kwargs):
     user = get_user(required=True)
     auth = get_auth()
     item_id = request.view_args['item_id']
     lock_service = LockService()
     item = lock_service.unlock(item_id, user['_id'], auth['_id'],
                                'planning')
     return _update_returned_document(docs[0], item)
    def on_updated(self, updates, original):
        user = get_user(required=True).get(config.ID_FIELD, '')
        session = get_auth().get(config.ID_FIELD, '')

        push_notification('planning:rescheduled',
                          item=str(original[config.ID_FIELD]),
                          user=str(user),
                          session=str(session))
    def update(self, id, updates, original):
        user = get_user(required=True).get(config.ID_FIELD, '')
        session = get_auth().get(config.ID_FIELD, '')
        coverage_states = get_resource_service('vocabularies').find_one(
            req=None, _id='newscoveragestatus')

        event_cancellation = updates.pop('event_cancellation', False)
        cancel_all_coverage = updates.pop('cancel_all_coverage', False)

        coverage_cancel_state = None
        if coverage_states:
            coverage_cancel_state = next(
                (x for x in coverage_states.get('items', [])
                 if x['qcode'] == 'ncostat:notint'), None)
            if coverage_cancel_state:
                coverage_cancel_state.pop('is_active', None)

        ids = []
        updates['coverages'] = deepcopy(original.get('coverages'))
        coverages = updates.get('coverages') or []
        reason = updates.pop('reason', None)

        planning_service = get_resource_service('planning')
        for coverage in coverages:
            if coverage['workflow_status'] != WORKFLOW_STATE.CANCELLED:
                ids.append(coverage.get('coverage_id'))
                planning_service.cancel_coverage(
                    coverage, coverage_cancel_state,
                    coverage.get('workflow_status'), None, reason,
                    event_cancellation)

        if cancel_all_coverage:
            item = None
            if len(ids) > 0:
                item = self.backend.update(self.datasource, id, updates,
                                           original)
                push_notification('coverage:cancelled',
                                  planning_item=str(original[config.ID_FIELD]),
                                  user=str(user),
                                  session=str(session),
                                  reason=reason,
                                  coverage_state=coverage_cancel_state,
                                  etag=item.get('_etag'),
                                  ids=ids)
            return item if item else self.find_one(req=None, _id=id)

        self._cancel_plan(updates, reason)

        item = self.backend.update(self.datasource, id, updates, original)

        push_notification('planning:cancelled',
                          item=str(original[config.ID_FIELD]),
                          user=str(user),
                          session=str(session),
                          reason=reason,
                          coverage_state=coverage_cancel_state,
                          event_cancellation=event_cancellation)

        return item
    def update(self, id, updates, original):
        user = get_user(required=True).get(config.ID_FIELD, '')
        session = get_auth().get(config.ID_FIELD, '')

        updates['assigned_to'] = deepcopy(original).get('assigned_to')

        # If we are confirming availability, save the revert state for revert action
        coverage_type = original.get('planning', {}).get('g2_content_type')
        if coverage_type != 'text':
            updates['assigned_to']['revert_state'] = updates['assigned_to'][
                'state']

        updates['assigned_to']['state'] = ASSIGNMENT_WORKFLOW_STATE.COMPLETED

        remove_lock_information(updates)

        item = self.backend.update(self.datasource, id, updates, original)

        # Save history if user initiates complete
        if coverage_type == 'text':
            get_resource_service('assignments_history').on_item_complete(
                updates, original)
        else:
            get_resource_service(
                'assignments_history').on_item_confirm_availability(
                    updates, original)

        push_notification('assignments:completed',
                          item=str(original[config.ID_FIELD]),
                          planning=original.get('planning_item'),
                          assigned_user=(original.get('assigned_to')
                                         or {}).get('user'),
                          assigned_desk=(original.get('assigned_to')
                                         or {}).get('desk'),
                          assignment_state=ASSIGNMENT_WORKFLOW_STATE.COMPLETED,
                          user=str(user),
                          session=str(session),
                          coverage=original.get('coverage_item'))

        # Send notification that the work has been completed
        # Determine the display name of the assignee
        assigned_to_user = get_resource_service('users').find_one(req=None,
                                                                  _id=user)
        assignee = assigned_to_user.get(
            'display_name') if assigned_to_user else 'Unknown'
        PlanningNotifications().notify_assignment(
            target_user=str(
                original.get('assigned_to', {}).get('assignor_user')),
            message='{{coverage_type}} coverage \"{{slugline}}\" has been '
            'completed by {{assignee}}',
            assignee=assignee,
            coverage_type=get_coverage_type_name(
                original.get('planning', {}).get('g2_content_type', '')),
            slugline=original.get('planning', {}).get('slugline'),
            omit_user=True)

        return item
Example #29
 def create(self, docs, **kwargs):
     user_id = get_user(required=True)['_id']
     session_id = get_auth()['_id']
     item_id = request.view_args['item_id']
     lock_service = get_component(LockService)
     resource_service = get_resource_service('planning')
     item = resource_service.find_one(req=None, _id=item_id)
     updated_item = lock_service.unlock(item, user_id, session_id, 'planning')
     return update_returned_document(docs[0], updated_item, CUSTOM_HATEOAS_PLANNING)
Example #30
 def unlock_item(self, item_id, doc):
     user_id = get_user(required=True)['_id']
     session_id = get_auth()['_id']
     lock_service = get_component(LockService)
     resource_service = get_resource_service('events')
     item = resource_service.find_one(req=None, _id=item_id)
     updated_item = lock_service.unlock(item, user_id, session_id, 'events')
     return update_returned_document(doc, updated_item,
                                     CUSTOM_HATEOAS_EVENTS)
Example #31
    def on_update(self, updates, original):
        user = get_user()
        self.validate_on_update(updates, original, user)

        if user and user.get(config.ID_FIELD):
            updates['version_creator'] = user[config.ID_FIELD]

        self._set_coverage(updates, original)
        self.set_planning_schedule(updates, original)
Example #32
    def update(self, id, updates, original):
        """
        Handles workflow of each Publish, Corrected, Killed and TakeDown.
        """
        try:
            user = get_user()
            auto_publish = updates.get("auto_publish", False)

            # unlock the item
            set_unlock_updates(updates)

            if original[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE:
                self._publish_package_items(original, updates)
                self._update_archive(original, updates, should_insert_into_versions=auto_publish)
            else:
                self._publish_associated_items(original, updates)
                updated = deepcopy(original)
                updated.update(deepcopy(updates))

                if updates.get(ASSOCIATIONS):
                    self._refresh_associated_items(updated, skip_related=True)  # updates got lost with update

                if updated.get(ASSOCIATIONS):
                    self._fix_related_references(updated, updates)

                signals.item_publish.send(self, item=updated)
                self._update_archive(original, updates, should_insert_into_versions=auto_publish)
                self.update_published_collection(published_item_id=original[config.ID_FIELD], updated=updated)

            from apps.publish.enqueue import enqueue_published

            enqueue_published.apply_async()

            push_notification(
                "item:publish",
                item=str(id),
                unique_name=original["unique_name"],
                desk=str(original.get("task", {}).get("desk", "")),
                user=str(user.get(config.ID_FIELD, "")),
            )

            if updates.get("previous_marked_user") and not updates.get("marked_for_user"):
                # send notification so that marked for me list can be updated
                get_resource_service("archive").handle_mark_user_notifications(updates, original, False)

        except SuperdeskApiError:
            raise
        except KeyError as e:
            logger.exception(e)
            raise SuperdeskApiError.badRequestError(
                message=_("Key is missing on article to be published: {exception}").format(exception=str(e))
            )
        except Exception as e:
            logger.exception(e)
            raise SuperdeskApiError.internalError(
                message=_("Failed to publish the item: {id}").format(id=str(id)), exception=e
            )
Example #33
    def update(self, id, updates, original):
        """
        Handles workflow of each Publish, Corrected and Killed.
        """
        try:
            user = get_user()
            auto_publish = updates.get('auto_publish', False)

            if original[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE:
                self._publish_package_items(original, updates)
                self._update_archive(original,
                                     updates,
                                     should_insert_into_versions=auto_publish)
            else:
                self._refresh_associated_items(original)
                updated = deepcopy(original)
                updated.update(deepcopy(updates))

                if updates.get(ASSOCIATIONS):
                    self._refresh_associated_items(
                        updated)  # updates got lost with update

                # process the takes package for published or corrected items;
                # even if NO_TAKES is true, process it when a takes package already exists.
                if self.published_state != CONTENT_STATE.KILLED and \
                        (not app.config.get('NO_TAKES', False) or
                         self.takes_package_service.get_take_package_id(updated)):
                    self._process_takes_package(original, updated, updates)

                self._update_archive(original,
                                     updates,
                                     should_insert_into_versions=auto_publish)
                self.update_published_collection(
                    published_item_id=original[config.ID_FIELD],
                    updated=updated)

            from apps.publish.enqueue import enqueue_published
            enqueue_published.apply_async()

            push_notification('item:publish',
                              item=str(id),
                              unique_name=original['unique_name'],
                              desk=str(
                                  original.get('task', {}).get('desk', '')),
                              user=str(user.get(config.ID_FIELD, '')))
        except SuperdeskApiError as e:
            raise
        except KeyError as e:
            logger.exception(e)
            raise SuperdeskApiError.badRequestError(
                message="Key is missing on article to be published: {}".format(
                    str(e)))
        except Exception as e:
            raise SuperdeskApiError.internalError(
                message="Failed to publish the item: {}".format(str(id)),
                exception=e)
Example #34
def is_locked_in_this_session(item, user_id=None, session_id=None):
    if user_id is None:
        user = get_user(required=True)
        user_id = user.get(config.ID_FIELD)

    if session_id is None:
        session = get_auth()
        session_id = session.get(config.ID_FIELD)

    return item.get(LOCK_USER) == user_id and item.get(LOCK_SESSION) == session_id
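is_locked_in_this_session above reduces to comparing two pairs of ids. A plain-dict sketch follows, with the LOCK_USER / LOCK_SESSION constants written out as literal keys for illustration only.

def is_locked_in_this_session(item, user_id, session_id):
    return item.get('lock_user') == user_id and item.get('lock_session') == session_id

item = {'lock_user': 'u1', 'lock_session': 's1'}
print(is_locked_in_this_session(item, 'u1', 's1'))   # True
print(is_locked_in_this_session(item, 'u1', 's2'))   # False: locked in another session
print(is_locked_in_this_session({}, 'u1', 's1'))     # False: item is not locked at all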
    def __is_authorized_to_update_seq_num_settings(self, output_channel):
        """
        Checks if the user requested is authorized to modify sequence number settings.
        If unauthorized then exception will be raised.
        """

        user = get_user()
        if 'sequence_num_settings' in output_channel and not is_admin(user) \
                and (user['active_privileges'].get('output_channel_seq_num_settings', 0) == 0):
            raise SuperdeskApiError.forbiddenError("Unauthorized to modify Sequence Number Settings")
Example #36
 def on_updated(self, updates, original):
     self.update_published_collection(
         published_item_id=original[config.ID_FIELD])
     original = get_resource_service(ARCHIVE).find_one(
         req=None, _id=original[config.ID_FIELD])
     updates.update(original)
     user = get_user()
     push_notification('item:updated',
                       item=str(original[config.ID_FIELD]),
                       user=str(user.get(config.ID_FIELD)))
Example #37
    def update(self, id, updates, original):
        """
        Handles workflow of each Publish, Corrected and Killed.
        """
        try:
            user = get_user()
            last_updated = updates.get(config.LAST_UPDATED, utcnow())

            if original[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE:
                self._publish_package_items(original, last_updated)

            set_sign_off(updates, original)
            queued_digital = False
            package_id = None

            if original[ITEM_TYPE] != CONTENT_TYPE.COMPOSITE:
                # if targeted_for is set then we don't send to digital clients.
                if not updates.get('targeted_for', original.get('targeted_for')):
                    # check if item is in a digital package
                    package_id = TakesPackageService().get_take_package_id(original)

                    if package_id:
                        queued_digital, takes_package = self._publish_takes_package(package_id, updates,
                                                                                    original, last_updated)
                    else:
                        # if item is going to be sent to digital subscribers, package it as a take
                        if self.sending_to_digital_subscribers(updates):
                            updated = copy(original)
                            updated.update(updates)
                            # create a takes package
                            package_id = TakesPackageService().package_story_as_a_take(updated, {}, None)
                            original = get_resource_service('archive').find_one(req=None, _id=original['_id'])
                            queued_digital, takes_package = self._publish_takes_package(package_id, updates,
                                                                                        original, last_updated)

                # queue only text items
                queued_wire = \
                    self.publish(doc=original, updates=updates, target_media_type=WIRE if package_id else None)

                queued = queued_digital or queued_wire
                if not queued:
                    raise PublishQueueError.item_not_queued_error(Exception('Nothing is saved to publish queue'), None)

            self._set_version_last_modified_and_state(original, updates, last_updated)
            self._update_archive(original=original, updates=updates, should_insert_into_versions=False)
            push_notification('item:publish', item=str(id), unique_name=original['unique_name'],
                              desk=str(original.get('task', {}).get('desk', '')), user=str(user.get('_id', '')))
        except SuperdeskApiError as e:
            raise e
        except KeyError as e:
            raise SuperdeskApiError.badRequestError(
                message="Key is missing on article to be published: {}".format(str(e)))
        except Exception as e:
            logger.exception("Something bad happened while publishing %s".format(id))
            raise SuperdeskApiError.internalError(message="Failed to publish the item: {}".format(str(e)))
    def create(self, docs, **kwargs):
        """Toggle marked desk status for given desk and item."""

        service = get_resource_service('archive')
        published_service = get_resource_service('published')
        ids = []
        for doc in docs:
            item = service.find_one(req=None, guid=doc['marked_item'])
            if not item:
                ids.append(None)
                continue
            ids.append(item['_id'])
            marked_desks = item.get('marked_desks', [])
            if not marked_desks:
                marked_desks = []

            existing_mark = next((m for m in marked_desks if m['desk_id'] == doc['marked_desk']), None)

            if existing_mark:
                # there is an existing mark, so this is an un-mark action
                marked_desks = [m for m in marked_desks if m['desk_id'] != doc['marked_desk']]
                marked_desks_on = False  # desk mark toggled off
            else:
                # there is no existing mark, so this is a mark action
                user = get_user() or {}
                new_mark = {}
                new_mark['desk_id'] = doc['marked_desk']
                new_mark['user_marked'] = str(user.get(config.ID_FIELD, ''))
                new_mark['date_marked'] = utcnow()
                marked_desks.append(new_mark)
                marked_desks_on = True

            updates = {'marked_desks': marked_desks}
            service.system_update(item['_id'], updates, item)

            publishedItems = published_service.find({'item_id': item['_id']})
            for publishedItem in publishedItems:
                if publishedItem['_current_version'] == item['_current_version'] or not marked_desks_on:
                    updates = {'marked_desks': marked_desks}
                    published_service.system_update(publishedItem['_id'], updates, publishedItem)

            push_notification(
                'item:marked_desks',
                marked=int(marked_desks_on),
                item_id=item['_id'],
                mark_id=str(doc['marked_desk']))

            if marked_desks_on:
                app.on_archive_item_updated({'desk_id': doc['marked_desk']}, item, ITEM_MARK)
            else:
                app.on_archive_item_updated({'desk_id': doc['marked_desk']}, item, ITEM_UNMARK)

        return ids
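The create() above toggles a desk mark on an item. Below is a minimal sketch of just the toggle decision, with timestamps and ids as illustrative stand-ins rather than the production data model.

from datetime import datetime, timezone

def toggle_desk_mark(marked_desks, desk_id, user_id):
    existing = next((m for m in marked_desks if m['desk_id'] == desk_id), None)
    if existing:
        # un-mark: drop the existing entry
        return [m for m in marked_desks if m['desk_id'] != desk_id], False
    new_mark = {'desk_id': desk_id, 'user_marked': user_id,
                'date_marked': datetime.now(timezone.utc)}
    return marked_desks + [new_mark], True

marks, marked_on = toggle_desk_mark([], 'desk-1', 'u1')
print(marked_on, len(marks))    # True 1: mark added
marks, marked_on = toggle_desk_mark(marks, 'desk-1', 'u1')
print(marked_on, len(marks))    # False 0: mark removed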
Example #39
 def on_update(self, updates, original):
     """
     Checks if the request owner and the saved search owner are the same person
     If not then the request owner should have global saved search privilege
     """
     request_user = request.view_args['user']
     user = get_user(required=True)
     if str(user['_id']) == request_user or user['active_privileges'].get('global_saved_search', 0) == 0:
         if 'filter' in updates:
             self.process(updates)
         super().on_update(updates, original)
     else:
         raise SuperdeskApiError.forbiddenError("Unauthorized to modify global search")
Example #40
    def on_updated(self, updates, original):
        self.update_published_collection(published_item_id=original[config.ID_FIELD])
        original = get_resource_service(ARCHIVE).find_one(req=None, _id=original[config.ID_FIELD])
        updates.update(original)
        user = get_user()

        if updates[ITEM_OPERATION] != ITEM_KILL and \
                original.get(ITEM_TYPE) in [CONTENT_TYPE.TEXT, CONTENT_TYPE.PREFORMATTED]:
            get_resource_service('archive_broadcast').on_broadcast_master_updated(updates[ITEM_OPERATION], original)

        get_resource_service('archive_broadcast').reset_broadcast_status(updates, original)
        push_notification('item:updated', item=str(original[config.ID_FIELD]), user=str(user.get(config.ID_FIELD)))
        self._import_into_legal_archive(updates)
Example #41
 def update(self, id, updates, original):
     archived_item = super().find_one(req=None, _id=id)
     try:
         if archived_item['type'] == 'composite':
             self.__publish_package_items(archived_item, updates[config.LAST_UPDATED])
         user = get_user()
         updates[config.CONTENT_STATE] = 'published'
         item = self.backend.update(self.datasource, id, updates, original)
         push_notification('item:publish', item=str(item.get('_id')), user=str(user))
         return item
     except KeyError:
         raise SuperdeskApiError.badRequestError(message="A non-existent content id is requested to publish")
     except Exception as e:
         logger.error("Something bad happened while publishing %s".format(id), e)
         raise SuperdeskApiError.internalError(message="Failed to publish the item")
Example #42
    def create(self, docs):
        service = get_resource_service(SOURCE)
        item_id = request.view_args['item_id']
        item = service.find_one(req=None, _id=item_id)
        doc = docs[0]

        self._valid_broadcast_item(item)

        desk_id = doc.get('desk')
        desk = None

        if desk_id:
            desk = get_resource_service('desks').find_one(req=None, _id=desk_id)

        doc.pop('desk', None)
        doc['task'] = {}
        if desk:
            doc['task']['desk'] = desk.get(config.ID_FIELD)
            doc['task']['stage'] = desk.get('working_stage')

        doc['task']['user'] = get_user().get('_id')
        genre_list = get_resource_service('vocabularies').find_one(req=None, _id='genre') or {}
        broadcast_genre = [{'qcode': genre.get('qcode'), 'name': genre.get('name')}
                           for genre in genre_list.get('items', [])
                           if genre.get('qcode') == BROADCAST_GENRE and genre.get('is_active')]

        if not broadcast_genre:
            raise SuperdeskApiError.badRequestError(message="Cannot find the {} genre.".format(BROADCAST_GENRE))

        doc['broadcast'] = {
            'status': '',
            'master_id': item_id,
            'takes_package_id': self.takesService.get_take_package_id(item),
            'rewrite_id': item.get('rewritten_by')
        }

        doc['genre'] = broadcast_genre
        doc['family_id'] = item.get('family_id')

        for key in FIELDS_TO_COPY:
            doc[key] = item.get(key)

        resolve_document_version(document=doc, resource=SOURCE, method='POST')
        service.post(docs)
        insert_into_versions(id_=doc[config.ID_FIELD])
        build_custom_hateoas(CUSTOM_HATEOAS, doc)
        return [doc[config.ID_FIELD]]
Example #43
    def update(self, id, updates, original):
        archived_item = super().find_one(req=None, _id=id)
        try:
            any_channel_closed = False

            if archived_item['type'] == 'composite':
                self.__publish_package_items(archived_item, updates[config.LAST_UPDATED])

            # document is saved to keep the initial changes
            set_sign_off(updates, original)
            self.backend.update(self.datasource, id, updates, original)

            # document is saved to change the status
            if (original.get('publish_schedule') or updates.get('publish_schedule')) \
                    and original[config.CONTENT_STATE] not in ['published', 'killed', 'scheduled']:
                updates[config.CONTENT_STATE] = 'scheduled'
            else:
                updates[config.CONTENT_STATE] = self.published_state

            original.update(updates)
            get_component(ItemAutosave).clear(original['_id'])

            if archived_item['type'] != 'composite':
                # queue only text items
                any_channel_closed = self.queue_transmission(original)
                task = self.__send_to_publish_stage(original)
                if task:
                    updates['task'] = task

            self.backend.update(self.datasource, id, updates, original)
            user = get_user()
            push_notification('item:publish:closed:channels' if any_channel_closed else 'item:publish',
                              item=str(id), unique_name=archived_item['unique_name'],
                              desk=str(archived_item['task']['desk']), user=str(user.get('_id', '')))
            original.update(super().find_one(req=None, _id=id))
        except SuperdeskApiError as e:
            raise e
        except KeyError as e:
            raise SuperdeskApiError.badRequestError(
                message="Key is missing on article to be published: {}"
                .format(str(e)))
        except Exception as e:
            logger.error("Something bad happened while publishing %s".format(id), e)
            raise SuperdeskApiError.internalError(message="Failed to publish the item: {}"
                                                  .format(str(e)))
Example #44
 def _move(self, archived_doc, doc):
     archive_service = get_resource_service(ARCHIVE)
     original = deepcopy(archived_doc)
     user = get_user()
     send_to(doc=archived_doc, desk_id=doc.get('task', {}).get('desk'), stage_id=doc.get('task', {}).get('stage'),
             user_id=user.get(config.ID_FIELD))
     if archived_doc[ITEM_STATE] not in {CONTENT_STATE.PUBLISHED, CONTENT_STATE.SCHEDULED, CONTENT_STATE.KILLED}:
         archived_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
     archived_doc[ITEM_OPERATION] = ITEM_MOVE
     # set the change in desk type when content is moved.
     self.set_change_in_desk_type(archived_doc, original)
     archived_doc.pop(SIGN_OFF, None)
     set_sign_off(archived_doc, original=original)
     convert_task_attributes_to_objectId(archived_doc)
     resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)
     del archived_doc[config.ID_FIELD]
     archive_service.update(original[config.ID_FIELD], archived_doc, original)
     insert_into_versions(id_=original[config.ID_FIELD])
     push_item_move_notification(original, archived_doc)
Example #45
    def _set_updates(self, original, updates, last_updated, preserve_state=False):
        """Sets config.VERSION, config.LAST_UPDATED, ITEM_STATE in updates document.

        If the item is being published and an embargo is set, append 'Embargoed' to the Editorial Note.

        :param dict original: original document
        :param dict updates: updates related to the original document
        :param datetime last_updated: datetime of the updates.
        """
        if not preserve_state:
            self.set_state(original, updates)
        updates.setdefault(config.LAST_UPDATED, last_updated)

        if original[config.VERSION] == updates.get(config.VERSION, original[config.VERSION]):
            resolve_document_version(document=updates, resource=ARCHIVE, method='PATCH', latest_doc=original)

        user = get_user()
        if user and user.get(config.ID_FIELD):
            updates['version_creator'] = user[config.ID_FIELD]
Example #46
    def move_content(self, id, doc):
        archive_service = get_resource_service(ARCHIVE)
        archived_doc = archive_service.find_one(req=None, _id=id)

        if not archived_doc:
            raise SuperdeskApiError.notFoundError('Failed to find item with guid: %s' % id)

        current_stage_of_item = archived_doc.get('task', {}).get('stage')
        if current_stage_of_item and str(current_stage_of_item) == str(doc.get('task', {}).get('stage')):
            raise SuperdeskApiError.preconditionFailedError(message='Move is not allowed within the same stage.')

        if not is_workflow_state_transition_valid('submit_to_desk', archived_doc[ITEM_STATE]):
            raise InvalidStateTransitionError()

        original = deepcopy(archived_doc)
        user = get_user()

        send_to(doc=archived_doc, desk_id=doc.get('task', {}).get('desk'), stage_id=doc.get('task', {}).get('stage'),
                user_id=user.get(config.ID_FIELD))

        if archived_doc[ITEM_STATE] not in {CONTENT_STATE.PUBLISHED, CONTENT_STATE.SCHEDULED, CONTENT_STATE.KILLED}:
            archived_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
        archived_doc[ITEM_OPERATION] = ITEM_MOVE

        # set the change in desk type when content is moved.
        self.set_change_in_desk_type(archived_doc, original)
        archived_doc.pop(SIGN_OFF, None)
        set_sign_off(archived_doc, original=original)
        convert_task_attributes_to_objectId(archived_doc)
        resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)

        del archived_doc[config.ID_FIELD]
        archive_service.update(original[config.ID_FIELD], archived_doc, original)

        insert_into_versions(id_=original[config.ID_FIELD])

        push_content_notification([archived_doc, original])

        # finally apply any on stage rules/macros
        apply_onstage_rule(archived_doc, original[config.ID_FIELD])

        return archived_doc
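The move above only downgrades the workflow state to 'submitted' when the item is not already in a terminal published state. A one-function sketch of that guard, with the state names spelled as plain strings instead of the CONTENT_STATE constants.

TERMINAL_STATES = {'published', 'scheduled', 'killed'}

def state_after_move(current_state):
    return current_state if current_state in TERMINAL_STATES else 'submitted'

print(state_after_move('in_progress'))  # submitted
print(state_after_move('published'))    # published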
Example #47
    def update(self, id, updates, original):
        """
        Handles workflow of each Publish, Corrected and Killed.
        """
        try:
            user = get_user()
            auto_publish = updates.get('auto_publish', False)

            if original[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE:
                self._publish_package_items(original, updates)
                self._update_archive(original, updates, should_insert_into_versions=auto_publish)
            else:
                self._refresh_associated_items(original)
                self._publish_associations(original, id)
                updated = deepcopy(original)
                updated.update(updates)

                if updates.get('associations'):
                    self._refresh_associated_items(updated)  # updates got lost with update

                if self.published_state != CONTENT_STATE.KILLED and not app.config.get('NO_TAKES', False):
                    self._process_takes_package(original, updated, updates)

                self._update_archive(original, updates, should_insert_into_versions=auto_publish)
                self.update_published_collection(published_item_id=original[config.ID_FIELD], updated=updated)

            from apps.publish.enqueue import enqueue_published
            enqueue_published.apply_async()

            push_notification('item:publish', item=str(id),
                              unique_name=original['unique_name'],
                              desk=str(original.get('task', {}).get('desk', '')),
                              user=str(user.get(config.ID_FIELD, '')))
        except SuperdeskApiError:
            raise
        except KeyError as e:
            raise SuperdeskApiError.badRequestError(
                message="Key is missing on article to be published: {}".format(str(e)))
        except Exception as e:
            logger.exception("Something bad happened while publishing %s".format(id))
            raise SuperdeskApiError.internalError(message="Failed to publish the item: {}".format(str(e)))
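The try/except ladder above maps low-level failures onto API errors: known API errors pass through, a missing key becomes a 400, and anything else becomes a 500. A small standalone sketch of that pattern, with a generic ApiError class standing in for SuperdeskApiError (both the class and publish_safely are illustrative):

class ApiError(Exception):
    """Stand-in for SuperdeskApiError in this sketch."""
    def __init__(self, message, status=500):
        super().__init__(message)
        self.status = status

def publish_safely(publish, item):
    try:
        return publish(item)
    except ApiError:
        raise  # already a well-formed API error, let it propagate
    except KeyError as e:
        raise ApiError('Key is missing on article to be published: {}'.format(e), status=400)
    except Exception as e:
        raise ApiError('Failed to publish the item: {}'.format(e), status=500)

# usage: publish_safely(lambda item: item['unique_name'], {}) raises ApiError with status 400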
Example #48
0
    def _remove_and_set_kill_properties(self, article, articles_to_kill, updates):
        """
        Removes the irrelevant properties from the given article and sets the properties for the kill operation.

        :param article: article from the archived repo
        :type article: dict
        :param articles_to_kill: list of articles that are about to be killed from the dusty archive
        :type articles_to_kill: list
        :param updates: updates to be applied on the article before saving
        :type updates: dict
        """

        article.pop('archived_id', None)
        article.pop('_type', None)
        article.pop('_links', None)
        article.pop('queue_state', None)
        article.pop(config.ETAG, None)

        for field in ['headline', 'abstract', 'body_html']:
            article[field] = updates.get(field, article.get(field, ''))

        article[ITEM_STATE] = CONTENT_STATE.KILLED
        article[ITEM_OPERATION] = ITEM_KILL
        article['pubstatus'] = PUB_STATUS.CANCELED
        article[config.LAST_UPDATED] = utcnow()

        user = get_user()
        article['version_creator'] = str(user[config.ID_FIELD])

        resolve_document_version(article, ARCHIVE, 'PATCH', article)

        if article[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE:
            package_service = PackageService()
            item_refs = package_service.get_item_refs(article)
            for ref in item_refs:
                item_in_package = [item for item in articles_to_kill
                                   if item.get('item_id', item.get(config.ID_FIELD)) == ref[RESIDREF]]
                ref['location'] = ARCHIVE
                ref[config.VERSION] = item_in_package[0][config.VERSION]
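A simplified, self-contained version of the same cleanup: strip transport and bookkeeping fields, keep the kill text supplied in the updates, and stamp the kill metadata onto a plain dict. The field names mirror the snippet above; the apply_kill helper and the TRANSIENT_FIELDS constant are illustrative assumptions.

from datetime import datetime, timezone

TRANSIENT_FIELDS = ('archived_id', '_type', '_links', 'queue_state', '_etag')

def apply_kill(article, updates=None):
    """Illustrative: remove transient fields and mark the article as killed."""
    updates = updates or {}
    for field in TRANSIENT_FIELDS:
        article.pop(field, None)

    # keep the killed text the desk supplied, fall back to the stored copy
    for field in ('headline', 'abstract', 'body_html'):
        article[field] = updates.get(field, article.get(field, ''))

    article.update({
        'state': 'killed',
        'operation': 'kill',
        'pubstatus': 'canceled',
        '_updated': datetime.now(timezone.utc),
    })
    return article

print(apply_kill({'_etag': 'x', 'headline': 'original'}, {'body_html': 'Kill notice'}))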
Example #49
0
    def update(self, id, updates, original):
        """
        Handles workflow of each Publish, Corrected, Killed and TakeDown.
        """
        try:
            user = get_user()
            auto_publish = updates.get('auto_publish', False)

            if original[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE:
                self._publish_package_items(original, updates)
                self._update_archive(original, updates, should_insert_into_versions=auto_publish)
            else:
                self._publish_associated_items(original, updates, publish=True)
                updated = deepcopy(original)
                updated.update(deepcopy(updates))

                self._publish_associated_items(updated, updates)  # updates got lost with update

                self._update_archive(original, updates, should_insert_into_versions=auto_publish)
                self.update_published_collection(published_item_id=original[config.ID_FIELD], updated=updated)

            from apps.publish.enqueue import enqueue_published
            enqueue_published.apply_async()

            push_notification('item:publish', item=str(id),
                              unique_name=original['unique_name'],
                              desk=str(original.get('task', {}).get('desk', '')),
                              user=str(user.get(config.ID_FIELD, '')))
        except SuperdeskApiError:
            raise
        except KeyError as e:
            logger.exception(e)
            raise SuperdeskApiError.badRequestError(
                message="Key is missing on article to be published: {}".format(str(e))
            )
        except Exception as e:
            raise SuperdeskApiError.internalError(message="Failed to publish the item: {}".format(str(id)), exception=e)
Example #50
0
    def _remove_and_set_kill_properties(self, article, articles_to_kill):
        """
        Removes the irrelevant properties from the given article and sets the properties for the kill operation.

        :param article: article from the archived repo
        :type article: dict
        :param articles_to_kill: list of articles that are about to be killed from the dusty archive
        :type articles_to_kill: list
        """

        article[config.ID_FIELD] = article.pop('item_id')

        article.pop('allow_post_publish_actions', None)
        article.pop('can_be_removed', None)
        article.pop('archived_id', None)
        article.pop('_type', None)
        article.pop('_links', None)
        article.pop(config.ETAG, None)

        article[ITEM_STATE] = CONTENT_STATE.KILLED
        article[ITEM_OPERATION] = ITEM_KILL
        article['pubstatus'] = PUB_STATUS.CANCELED
        article[config.LAST_UPDATED] = utcnow()

        user = get_user()
        article['version_creator'] = str(user[config.ID_FIELD])

        resolve_document_version(article, ARCHIVE, 'PATCH', article)

        if article[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE:
            for group in article.get(GROUPS, []):
                for ref in group.get(REFS, []):
                    if RESIDREF in ref:
                        item_in_package = [item for item in articles_to_kill if item.get('item_id') == ref[RESIDREF]]
                        ref['location'] = ARCHIVE
                        ref[config.VERSION] = item_in_package[0][config.VERSION]
Example #51
0
    def publish(self, doc, updates, target_media_type=None):
        """
        1. Sets the Metadata Properties - source and pubstatus
        2. Formats and queues the article to subscribers based on the state of the article:
            a. If the state of the article is killed then:
                i.  An email should be sent to all Subscribers irrespective of their status.
                ii. The article should be formatted as per the type of the format and then queue article to the
                    Subscribers who received the article previously.
            b. If the state of the article is corrected then:
                i.      The article should be formatted as per the type of the format and then queue article to the
                        Subscribers who received the article previously.
                ii.     Fetch Active Subscribers and exclude those who received the article previously.
                iii.    If article has 'targeted_for' property then exclude subscribers of type Internet from
                        Subscribers list.
                iv.     For each subscriber in the list, check if the article matches against publish filters and
                        global filters if configured for the subscriber. If matches then the article should be formatted
                        as per the type of the format and then queue article to the subscribers.
            c. If the state of the article is published then:
                i.     Fetch Active Subscribers.
                ii.    If article has 'targeted_for' property then exclude subscribers of type Internet from
                       Subscribers list.
                iii.    For each subscriber in the list, check if the article matches against publish filters and global
                        filters if configured for the subscriber. If matches then the article should be formatted
                        as per the type of the format and then queue article.
        3. Sends a notification if no formatter is found for any of the formats configured in the Subscriber.
        """

        queued = True
        no_formatters = []
        updated = doc.copy()

        # Step 1
        if updates:
            desk = None

            if doc.get('task', {}).get('desk'):
                desk = get_resource_service('desks').find_one(req=None, _id=doc['task']['desk'])

            if not doc.get('ingest_provider'):
                updates['source'] = desk['source'] if desk and desk.get('source', '') \
                    else DEFAULT_SOURCE_VALUE_FOR_MANUAL_ARTICLES

            updates['pubstatus'] = PUB_STATUS.CANCELED if self.publish_type == 'killed' else PUB_STATUS.USABLE
            updated.update(updates)

        # Step 2(a)
        if self.published_state == 'killed':
            req = ParsedRequest()
            req.sort = '[("completed_at", 1)]'
            queued_items = get_resource_service('publish_queue').get(
                req=req, lookup={'item_id': updated[config.ID_FIELD]})

            if queued_items.count():
                queued_items = list(queued_items)

                # Step 2(a)(i)
                subscribers = list(get_resource_service('subscribers').get(req=None, lookup=None))
                recipients = [s.get('email') for s in subscribers if s.get('email')]
                send_article_killed_email(doc, recipients, queued_items[0].get('completed_at'))

                # Step 2(a)(ii)
                no_formatters, queued = self.queue_transmission(updated, subscribers, None)
        elif self.published_state == 'corrected':  # Step 2(b)
            subscribers, subscribers_yet_to_receive = self.get_subscribers(updated)
            if subscribers:
                no_formatters, queued = self.queue_transmission(updated, subscribers)

                if subscribers_yet_to_receive:
                    # Step 2(b)(iv)
                    formatters_not_found, queued_new_subscribers = \
                        self.queue_transmission(updated, subscribers_yet_to_receive, target_media_type)
                    no_formatters.extend(formatters_not_found)
        elif self.published_state == 'published':  # Step 2(c)
            subscribers, subscribers_yet_to_receive = self.get_subscribers(updated)

            # Step 2(c)(iii)
            no_formatters, queued = self.queue_transmission(updated, subscribers, target_media_type)

        # Step 3
        user = get_user()
        if len(no_formatters) > 0:
            push_notification('item:publish:wrong:format',
                              item=str(doc[config.ID_FIELD]), unique_name=doc['unique_name'],
                              desk=str(doc.get('task', {}).get('desk', '')),
                              user=str(user.get(config.ID_FIELD, '')),
                              formats=no_formatters)

        if not target_media_type and not queued:
            raise PublishQueueError.item_not_queued_error(Exception('Nothing is saved to publish queue'), None)

        return queued
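The docstring above boils down to picking a subscriber set per published state. A toy, self-contained sketch of that dispatch follows; the state names match the snippet, while route_publish itself, the set-based subscribers, and the print standing in for the kill email are simplifications, not the real service.

def route_publish(state, active_subscribers, previous_recipients):
    """Return the subscriber ids an article in ``state`` should be queued for (illustrative)."""
    if state == 'killed':
        # kill notices are emailed to all subscribers; the story is re-queued to previous recipients
        print('kill email to all subscribers')
        return set(previous_recipients)
    if state == 'corrected':
        # previous recipients first, then active subscribers that have not received it yet
        return set(previous_recipients) | set(active_subscribers)
    # published: all active subscribers
    return set(active_subscribers)

print(route_publish('corrected', {'sub-1', 'sub-2', 'sub-3'}, {'sub-1'}))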
Example #52
0
 def on_updated(self, updates, original):
     self.update_published_collection(published_item_id=original['_id'])
     original = get_resource_service('archive').find_one(req=None, _id=original['_id'])
     updates.update(original)
     user = get_user()
     push_notification('item:updated', item=str(original['_id']), user=str(user.get('_id')))
    def update(self, id, updates, original):
        archived_item = super().find_one(req=None, _id=id)

        try:
            any_channel_closed = False

            if archived_item['type'] == 'composite':
                self.__publish_package_items(archived_item, updates[config.LAST_UPDATED])

            # document is saved to keep the initial changes
            set_sign_off(updates, original)
            self.backend.update(self.datasource, id, updates, original)

            # document is saved to change the status
            if (original.get('publish_schedule') or updates.get('publish_schedule')) \
                    and original[config.CONTENT_STATE] not in PUBLISH_STATES:
                updates[config.CONTENT_STATE] = 'scheduled'
            else:
                updates['publish_schedule'] = None
                updates[config.CONTENT_STATE] = self.published_state

            original.update(updates)
            get_component(ItemAutosave).clear(original['_id'])

            if archived_item['type'] != 'composite':
                # check if item is in a digital package
                package_id = TakesPackageService().get_take_package_id(original)
                if package_id:
                    # process the takes to form digital master file content
                    package, package_updates = self.process_takes(take=original, package_id=package_id)
                    package_updates[config.CONTENT_STATE] = self.published_state
                    resolve_document_version(document=package_updates,
                                             resource=ARCHIVE, method='PATCH',
                                             latest_doc=package)
                    self.backend.update(self.datasource, package['_id'], package_updates, package)
                    package.update(package_updates)
                    insert_into_versions(doc=package)

                    # send it to the digital channels
                    any_channel_closed_digital, queued_digital = \
                        self.publish(doc=package, updates=None, target_output_channels=DIGITAL)

                    self.update_published_collection(published_item=package)
                else:
                    any_channel_closed_digital = False
                    queued_digital = False

                # queue only text items
                any_channel_closed_wire, queued_wire = \
                    self.publish(doc=original, updates=updates, target_output_channels=WIRE if package_id else None)

                any_channel_closed = any_channel_closed_digital or any_channel_closed_wire
                queued = queued_digital or queued_wire

                if not queued:
                    raise PublishQueueError.item_not_queued_error(Exception('Nothing is saved to publish queue'), None)

            self.backend.update(self.datasource, id, updates, original)
            user = get_user()
            push_notification('item:publish:closed:channels' if any_channel_closed else 'item:publish',
                              item=str(id), unique_name=archived_item['unique_name'],
                              desk=str(archived_item.get('task', {}).get('desk', '')),
                              user=str(user.get('_id', '')))
            original.update(super().find_one(req=None, _id=id))
        except SuperdeskApiError:
            raise
        except KeyError as e:
            raise SuperdeskApiError.badRequestError(
                message="Key is missing on article to be published: {}"
                .format(str(e)))
        except Exception as e:
            logger.error("Something bad happened while publishing %s".format(id), e)
            raise SuperdeskApiError.internalError(message="Failed to publish the item: {}"
                                                  .format(str(e)))
 def on_updated(self, updates, original):
     self.update_published_collection(published_item=original)
     user = get_user()
     push_notification('item:updated', item=str(original['_id']), user=str(user.get('_id')))
Example #55
0
 def on_deleted(self, doc):
     user = get_user()
     push_notification('item:deleted:archive:text', item=str(doc['_id']), user=str(user.get('_id')))
Example #56
0
    def publish(self, doc, updates, target_media_type=None):
        """
        Queue the content for publishing.
        1. Sets the metadata properties - source and pubstatus.
        2. Gets the subscribers.
        3. Updates the headline of wire stories with the sequence.
        4. Queues the content for subscribers.
        5. Queues the content for subscribers who have not yet received the article, if any.
        6. Sends a notification if no formatter is found for any of the formats configured in the Subscriber.
        7. If nothing is queued and no formatters are missing, raises an exception.
        :param dict doc: document to publish
        :param dict updates: updates for the document
        :param str target_media_type: dictates whether the doc being queued is a Takes Package or an Individual
                Article. Valid values are Wire and Digital. If Digital, the doc being queued is a Takes Package;
                if Wire, it is an Individual Article.
        :return bool: True if the content is queued, else False
        :raises PublishQueueError.item_not_queued_error:
                if nothing is queued
        """

        queued = True
        no_formatters = []
        updated = doc.copy()

        # Step 1
        if updates:
            self._process_publish_updates(doc, updates)
            updated.update(updates)

        # Step 2
        subscribers, subscribers_yet_to_receive = self.get_subscribers(doc, target_media_type)

        # Step 3
        if target_media_type == SUBSCRIBER_TYPES.WIRE:
            self._update_headline_sequence(updated)

        # Step 4
        no_formatters, queued = self.queue_transmission(updated, subscribers)

        # Step 5
        if subscribers_yet_to_receive:
            formatters_not_found, queued_new_subscribers = self.queue_transmission(updated, subscribers_yet_to_receive)
            no_formatters.extend(formatters_not_found)
            queued = queued or queued_new_subscribers

        # Step 6
        user = get_user()
        if len(no_formatters) > 0:
            push_notification('item:publish:wrong:format',
                              item=str(doc[config.ID_FIELD]), unique_name=doc['unique_name'],
                              desk=str(doc.get('task', {}).get('desk', '')),
                              user=str(user.get(config.ID_FIELD, '')),
                              formats=no_formatters)

        # Step 7
        if not target_media_type and not queued:
            logger.error('Nothing is saved to publish queue for story: {} for action: {}'.
                         format(doc[config.ID_FIELD], self.publish_type))

        return queued
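Steps 4 to 7 above reduce to: queue for both subscriber groups, aggregate any formats that had no formatter, and flag the case where nothing was queued at all. A self-contained sketch of that flow; fan_out and the transmit() callable are illustrative stand-ins for the service and queue_transmission(), and the print stands in for the push notification.

def fan_out(updated, subscribers, late_subscribers, transmit):
    """Illustrative: queue to both subscriber groups and collect missing formats."""
    missing_formats, queued = transmit(updated, subscribers)

    if late_subscribers:
        more_missing, queued_late = transmit(updated, late_subscribers)
        missing_formats.extend(more_missing)
        queued = queued or queued_late

    if missing_formats:
        print('no formatter for:', sorted(set(missing_formats)))

    if not queued:
        raise RuntimeError('Nothing is saved to publish queue')
    return queued

fake_transmit = lambda doc, subs: ([], bool(subs))
print(fan_out({'headline': 'x'}, ['sub-1'], [], fake_transmit))  # True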
 def _validate_user(self, doc_user_id, doc_is_global):
     session_user = get_user(required=True)
     if str(session_user['_id']) != doc_user_id and \
             not (current_user_has_privilege('global_saved_searches') and doc_is_global):
         raise SuperdeskApiError.forbiddenError('Unauthorized to modify global search.')
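A standalone version of the same guard: a user may modify a saved search only if they own it or, for global searches, hold the global privilege. The validate_search_access helper, the boolean privilege flag, and the PermissionError are assumptions for illustration; the real check uses the session user and privilege service shown above.

def validate_search_access(session_user_id, owner_id, is_global, has_global_privilege):
    """Illustrative ownership/privilege check mirroring _validate_user above."""
    if str(session_user_id) != str(owner_id) and not (has_global_privilege and is_global):
        raise PermissionError('Unauthorized to modify global search.')

validate_search_access('u1', 'u1', False, False)   # owner: allowed
validate_search_access('u2', 'u1', True, True)     # global privilege on a global search: allowed
# validate_search_access('u2', 'u1', True, False)  # would raise PermissionError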
Example #58
0
 def get_user_id(self, item):
     user = get_user()
     if user:
         return user.get(config.ID_FIELD)