        original_state = original[ITEM_STATE]
        if not is_workflow_state_transition_valid(ITEM_UNSPIKE,
                                                  original_state):
            raise InvalidStateTransitionError()

        user = get_user(required=True)
        item = get_resource_service(ARCHIVE).find_one(req=None, _id=id)

        self.set_unspike_updates(item, updates)
        self.backend.update(self.datasource, id, updates, original)

        item = get_resource_service(ARCHIVE).find_one(req=None, _id=id)
        push_notification('item:unspike',
                          item=str(id),
                          user=str(user.get(config.ID_FIELD)))
        app.on_archive_item_updated(updates, original, ITEM_UNSPIKE)

        return item


superdesk.workflow_state('spiked')

superdesk.workflow_action(
    name='spike',
    exclude_states=['spiked', 'published', 'scheduled', 'corrected', 'killed'],
    privileges=['spike'])

superdesk.workflow_action(name='unspike',
                          include_states=['spiked'],
                          privileges=['unspike'])
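The registrations above are exactly what is_workflow_state_transition_valid() consults in the update() method: an action is valid from a state when that state appears in its include_states, or does not appear in its exclude_states. A minimal sketch of that relationship (the import path is assumed to be superdesk.workflow, where the registry helpers used throughout these examples live):

from superdesk.workflow import is_workflow_state_transition_valid

# 'unspike' was registered with include_states=['spiked'], so it is valid
# only from 'spiked'; 'spike' lists 'spiked' in exclude_states, so it is
# not valid from there.
assert is_workflow_state_transition_valid('unspike', 'spiked')
assert not is_workflow_state_transition_valid('spike', 'spiked')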
Example #2
from superdesk.resource import Resource
from superdesk.services import BaseService
from superdesk.metadata.utils import aggregations
from .common import CUSTOM_HATEOAS, build_custom_hateoas
from .archive import ArchiveResource
import superdesk


class UserContentResource(Resource):
    endpoint_name = "user_content"
    item_url = ArchiveResource.item_url
    url = 'users/<regex("[a-f0-9]{24}"):original_creator>/content'
    schema = ArchiveResource.schema
    datasource = {
        "source": "archive",
        "aggregations": aggregations,
        "elastic_filter": {"and": [{"not": {"exists": {"field": "task.desk"}}}, {"not": {"term": {"version": 0}}}]},
    }
    resource_methods = ["GET", "POST"]
    item_methods = ["GET", "PATCH", "DELETE"]
    resource_title = endpoint_name


class UserContentService(BaseService):
    def get(self, req, lookup):
        docs = super().get(req, lookup)
        for doc in docs:
            build_custom_hateoas(CUSTOM_HATEOAS, doc)
        return docs


superdesk.workflow_state("draft")
Example #3
        original_state = original[ITEM_STATE]
        if not is_workflow_state_transition_valid(ITEM_UNSPIKE,
                                                  original_state):
            raise InvalidStateTransitionError()

        user = get_user(required=True)
        item = get_resource_service(ARCHIVE).find_one(req=None, _id=id)

        self.set_unspike_updates(item, updates)
        self.backend.update(self.datasource, id, updates, original)

        item = get_resource_service(ARCHIVE).find_one(req=None, _id=id)
        push_notification("item:unspike",
                          item=str(id),
                          user=str(user.get(config.ID_FIELD)))
        app.on_archive_item_updated(updates, original, ITEM_UNSPIKE)

        return item


superdesk.workflow_state("spiked")

superdesk.workflow_action(
    name="spike",
    exclude_states=["spiked", "published", "scheduled", "corrected", "killed"],
    privileges=["spike"])

superdesk.workflow_action(name="unspike",
                          include_states=["spiked"],
                          privileges=["unspike"])
Example #4
import logging

import superdesk
from superdesk.metadata.item import GUID_NEWSML, GUID_FIELD, FAMILY_ID, ITEM_TYPE, CONTENT_TYPE
from superdesk.metadata.utils import generate_guid
from superdesk.celery_task_utils import mark_task_as_not_running, is_task_running


UPDATE_SCHEDULE_DEFAULT = {'minutes': 5}
LAST_UPDATED = 'last_updated'
LAST_ITEM_UPDATE = 'last_item_update'
STATE_INGESTED = 'ingested'
IDLE_TIME_DEFAULT = {'hours': 0, 'minutes': 0}


logger = logging.getLogger(__name__)


superdesk.workflow_state(STATE_INGESTED)

superdesk.workflow_action(
    name='ingest'
)


def is_valid_type(provider, provider_type_filter=None):
    """Test if given provider has valid type and should be updated.

    :param provider: provider to be updated
    :param provider_type_filter: active provider type filter
    """
    provider_type = provider.get('type')
    if provider_type not in providers:
        return False
    if provider_type_filter and provider_type != provider_type_filter:
        return False
    return True
Example #5
    resource_methods = ['POST']
    item_methods = ['GET', 'PUT', 'PATCH']
    resource_title = endpoint_name
    privileges = {'POST': 'archive', 'PATCH': 'archive', 'PUT': 'archive'}


class ArchiveSaveService(BaseService):
    def create(self, docs, **kwargs):
        if not docs:
            raise SuperdeskApiError.notFoundError('Content is missing')
        req = parse_request(self.datasource)
        try:
            get_component(ItemAutosave).autosave(docs[0]['_id'], docs[0], get_user(required=True), req.if_match)
        except InvalidEtag:
            raise SuperdeskApiError.preconditionFailedError('Client and server etags don\'t match')
        return [docs[0]['_id']]

superdesk.workflow_state('in_progress')
superdesk.workflow_action(
    name='save',
    include_states=['draft', 'fetched', 'routed', 'submitted'],
    privileges=['archive']
)

superdesk.workflow_state('submitted')
superdesk.workflow_action(
    name='move',
    exclude_states=['ingested', 'spiked', 'on-hold', 'published', 'killed'],
    privileges=['archive']
)
Example #6
def get_crop(rendition):
    fields = ('CropLeft', 'CropTop', 'CropRight', 'CropBottom')
    return {field: rendition[field] for field in fields if field in rendition}
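
# Illustrative call (hypothetical rendition values): membership, not
# truthiness, decides which keys survive, so a 0 coordinate is kept.
#     get_crop({'CropLeft': 0, 'CropTop': 10, 'width': 800})
#     -> {'CropLeft': 0, 'CropTop': 10}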


@celery.task
def publish_images(images, original, item):
    orig_file = get_file(original, item)
    for image in images:
        content_type = original['mimetype']
        ok, output = crop_image(orig_file, image['file_name'], image['crop'], image['spec'])
        if ok:
            app.media.put(output, image['file_name'], content_type, _id=image['media'])


superdesk.workflow_state('published')
superdesk.workflow_action(
    name='publish',
    include_states=['fetched', 'routed', 'submitted', 'in_progress', 'scheduled'],
    privileges=['publish']
)

superdesk.workflow_state('scheduled')
superdesk.workflow_action(
    name='schedule',
    include_states=['fetched', 'routed', 'submitted', 'in_progress'],
    privileges=['schedule']
)

superdesk.workflow_action(
    name='deschedule',
    include_states=['scheduled'],
    privileges=['deschedule']
)
Example #7
class UserContentResource(Resource):
    endpoint_name = 'user_content'
    item_url = ArchiveResource.item_url
    url = 'users/<regex("[a-f0-9]{24}"):original_creator>/content'
    schema = ArchiveResource.schema
    datasource = {
        'source': 'archive',
        'aggregations': aggregations,
        'elastic_filter': {
            'and': [
                {'not': {'exists': {'field': 'task.desk'}}},
                {'not': {'term': {'version': 0}}},
            ]
        }
    }
    resource_methods = ['GET', 'POST']
    item_methods = ['GET', 'PATCH', 'DELETE']
    resource_title = endpoint_name


class UserContentService(BaseService):

    def get(self, req, lookup):
        docs = super().get(req, lookup)
        for doc in docs:
            build_custom_hateoas(CUSTOM_HATEOAS, doc)
        return docs


superdesk.workflow_state('draft')
Example #8
        if doc.get('last_item_update'):
            raise SuperdeskApiError.forbiddenError("Deleting an Ingest Source after receiving items is prohibited.")

    def on_deleted(self, doc):
        """
        Overriding to send notification and record activity about channel deletion.
        """
        notify_and_add_activity(ACTIVITY_DELETE, 'Deleted Ingest Channel {{name}}',
                                self.datasource, item=None,
                                user_list=self.user_service.get_users_by_user_type('administrator'),
                                name=doc.get('name'), provider_id=doc.get(config.ID_FIELD))
        push_notification('ingest_provider:delete', provider_id=str(doc.get(config.ID_FIELD)))
        get_resource_service('sequences').delete(lookup={
            'key': 'ingest_providers_{_id}'.format(_id=doc[config.ID_FIELD])
        })
        logger.info("Deleted Ingest Channel. Data:{}".format(doc))

    def _test_config(self, updates, original=None):
        provider = original.copy() if original else {}
        provider.update(updates)

        try:
            service = registered_feeding_services[provider['feeding_service']].__class__()
        except KeyError:
            return
        service.config_test(provider)


superdesk.workflow_state(CONTENT_STATE.INGESTED)
superdesk.workflow_action(name='ingest')
Example #9
                    "not": {
                        "term": {
                            "version": 0
                        }
                    }
                },
            ]
        },
    }
    resource_methods = ["GET", "POST"]
    item_methods = ["GET", "PATCH", "DELETE"]
    resource_title = endpoint_name


class UserContentService(BaseService):
    def on_fetched(self, docs):
        """
        Overriding this to handle existing data in Mongo & Elastic
        """
        self.enhance_items(docs["_items"])

    def on_fetched_item(self, doc):
        self.enhance_items([doc])

    def enhance_items(self, items):
        for item in items:
            build_custom_hateoas(CUSTOM_HATEOAS, item)


superdesk.workflow_state("draft")
Example #10
class ArchiveHoldService(BaseService):

    def create(self, docs, **kwargs):
        user = get_user(required=True)
        auth = get_auth()
        item_id = request.view_args['item_id']
        item = get_component(ItemHold).hold({'_id': item_id}, user['_id'], auth['_id'])
        build_custom_hateoas(custom_hateoas, item)
        return [item['_id']]

    def delete(self, lookup):
        user = get_user(required=True)
        item_id = request.view_args['item_id']
        get_component(ItemHold).restore({'_id': item_id}, user['_id'])


superdesk.workflow_state('on_hold')

superdesk.workflow_action(
    name='hold',
    exclude_states=['ingested', 'draft', 'spiked', 'published', 'killed'],
    privileges=['hold']
)

superdesk.workflow_action(
    name='restore',
    include_states=['on_hold'],
    privileges=['restore']
)
Example #11
    def test_status_registry(self):
        superdesk.workflow_state(name="test")
        self.assertIn({"name": "test"}, superdesk.get_workflow_states())
        self.assertIn("test", superdesk.allowed_workflow_states)
Example #12
    schema = item_schema({"_id": {"type": "string"}})
    resource_methods = ["POST"]
    item_methods = ["GET", "PUT", "PATCH"]
    resource_title = endpoint_name
    privileges = {"POST": "archive", "PATCH": "archive", "PUT": "archive"}


class ArchiveSaveService(BaseService):
    def create(self, docs, **kwargs):
        if not docs:
            raise SuperdeskApiError.notFoundError("Content is missing")
        req = parse_request(self.datasource)
        try:
            get_component(ItemAutosave).autosave(docs[0]["_id"], docs[0], get_user(required=True), req.if_match)
        except InvalidEtag:
            raise SuperdeskApiError.preconditionFailedError("Client and server etags don't match")
        return [docs[0]["_id"]]


superdesk.workflow_state("in_progress")
superdesk.workflow_action(
    name="save", include_states=["draft", "fetched", "routed", "submitted", "scheduled"], privileges=["archive"]
)

superdesk.workflow_state("submitted")
superdesk.workflow_action(
    name="move",
    exclude_states=["ingested", "spiked", "on-hold", "published", "scheduled", "killed"],
    privileges=["archive"],
)
Example #13
    resource_title = endpoint_name
    privileges = {'POST': 'hold', 'DELETE': 'restore'}


class ArchiveHoldService(BaseService):
    def create(self, docs, **kwargs):
        user = get_user(required=True)
        auth = get_auth()
        item_id = request.view_args['item_id']
        item = get_component(ItemHold).hold({'_id': item_id}, user['_id'],
                                            auth['_id'])
        build_custom_hateoas(custom_hateoas, item)
        return [item['_id']]

    def delete(self, lookup):
        user = get_user(required=True)
        item_id = request.view_args['item_id']
        get_component(ItemHold).restore({'_id': item_id}, user['_id'])


superdesk.workflow_state('on_hold')

superdesk.workflow_action(
    name='hold',
    exclude_states=['ingested', 'draft', 'spiked', 'published', 'killed'],
    privileges=['hold'])

superdesk.workflow_action(name='restore',
                          include_states=['on_hold'],
                          privileges=['restore'])
Example #14
                    links.append({PACKAGE: doc.get(PACKAGE)})
                    dest_doc[LINKED_IN_PACKAGES] = links
                superdesk.get_resource_service(ARCHIVE).post([dest_doc])
                insert_into_versions(dest_doc.get("guid"))
                desk = doc.get("desk")
                refs = [
                    {"guid": ref.get("residRef"), "desk": desk, PACKAGE: dest_doc.get("_id")}
                    for group in dest_doc.get("groups", [])
                    for ref in group.get("refs", [])
                    if "residRef" in ref
                ]
                if refs:
                    self.create(refs)
            else:
                if doc.get(PACKAGE):
                    links = archived_doc.get(LINKED_IN_PACKAGES, [])
                    links.append({PACKAGE: doc.get(PACKAGE)})
                    superdesk.get_resource_service(ARCHIVE).patch(archived_doc.get("_id"), {LINKED_IN_PACKAGES: links})

        return [doc.get("guid") for doc in docs]


superdesk.workflow_state(STATE_FETCHED)

superdesk.workflow_action(
    name="fetch_as_from_ingest", include_states=["ingested"], privileges=["archive", "ingest_move"]
)

superdesk.workflow_state("routed")
superdesk.workflow_action(name="route", include_states=["ingested"])
Example #15
        for ref in [ref for group in dest_doc.get('groups', [])
                    for ref in group.get('refs', []) if 'residRef' in ref]:
            ref['location'] = ARCHIVE

        refs = [{'_id': ref.get('residRef'), 'desk': desk,
                 'stage': stage, 'state': state, 'destination_groups': destination_groups}
                for group in dest_doc.get('groups', [])
                for ref in group.get('refs', []) if 'residRef' in ref]

        if refs:
            new_ref_guids = self.fetch(refs, id=None, notify=False)
            count = 0
            for ref in [ref for group in dest_doc.get('groups', [])
                        for ref in group.get('refs', []) if 'residRef' in ref]:
                ref['residRef'] = ref['guid'] = new_ref_guids[count]
                count += 1


superdesk.workflow_state(STATE_FETCHED)
superdesk.workflow_action(
    name='fetch_from_ingest',
    include_states=['ingested'],
    privileges=['ingest', 'archive', 'fetch']
)

superdesk.workflow_state('routed')
superdesk.workflow_action(
    name='route',
    include_states=['ingested']
)
Example #16
    schema = ArchiveResource.schema
    datasource = {'source': 'archive', 'aggregations': aggregations}
    resource_methods = ['GET', 'POST']
    item_methods = ['GET', 'PATCH', 'DELETE']
    resource_title = endpoint_name


class UserContentService(BaseService):
    custom_hateoas = {'self': {'title': 'Archive', 'href': '/archive/{_id}'}}

    def get(self, req, lookup):
        docs = super().get(req, lookup)
        for doc in docs:
            build_custom_hateoas(self.custom_hateoas, doc)
        return docs


superdesk.workflow_state('draft')

superdesk.workflow_action(
    name='fetch_from_content',
    include_states=['fetched', 'routed', 'submitted', 'in-progress'],
    privileges=['archive']
)

superdesk.workflow_action(
    name='fetch_as_from_content',
    include_states=['fetched', 'routed', 'submitted', 'in-progress'],
    privileges=['archive']
)
Example #17
                for ref in group.get(REFS, []) if ref.get(RESIDREF)
        ]:
            ref['location'] = ARCHIVE

        refs = [{
            config.ID_FIELD: ref.get(RESIDREF),
            'desk': desk,
            'stage': stage,
            ITEM_STATE: state
        } for group in dest_doc.get(GROUPS, []) for ref in group.get(REFS, [])
                if ref.get(RESIDREF)]

        if refs:
            new_ref_guids = self.fetch(refs, id=None, notify=False)
            count = 0
            for ref in [
                    ref for group in dest_doc.get(GROUPS, [])
                    for ref in group.get(REFS, []) if ref.get(RESIDREF)
            ]:
                ref[RESIDREF] = ref[GUID_FIELD] = new_ref_guids[count]
                count += 1


superdesk.workflow_state(CONTENT_STATE.FETCHED)
superdesk.workflow_action(name='fetch_from_ingest',
                          include_states=['ingested'],
                          privileges=['ingest', 'archive', 'fetch'])

superdesk.workflow_state('routed')
superdesk.workflow_action(name='route', include_states=['ingested'])
Example #18
    def create(self, docs, **kwargs):
        if not docs:
            raise SuperdeskApiError.notFoundError('Content is missing')
        req = parse_request(self.datasource)
        try:
            get_component(ItemAutosave).autosave(docs[0]['_id'], docs[0],
                                                 get_user(required=True),
                                                 req.if_match)
        except InvalidEtag:
            raise SuperdeskApiError.preconditionFailedError(
                'Client and server etags don\'t match')
        except KeyError:
            raise SuperdeskApiError.badRequestError(
                "Request for Auto-save must have _id")
        return [docs[0]['_id']]


superdesk.workflow_state('in_progress')
superdesk.workflow_action(
    name='save',
    include_states=['draft', 'fetched', 'routed', 'submitted', 'scheduled'],
    privileges=['archive'])

superdesk.workflow_state('submitted')
superdesk.workflow_action(name='move',
                          exclude_states=[
                              'ingested', 'spiked', 'on-hold', 'published',
                              'scheduled', 'killed'
                          ],
                          privileges=['archive'])
Example #19
    def test_status_registry(self):
        superdesk.workflow_state(name='test')
        self.assertIn({'name': 'test'}, superdesk.get_workflow_states())
        self.assertIn('test', superdesk.allowed_workflow_states)
Example #20
                superdesk.get_resource_service(ARCHIVE).post([dest_doc])
                insert_into_versions(dest_doc.get('guid'))
                desk = doc.get('desk')
                refs = [{'guid': ref.get('residRef'), 'desk': desk, PACKAGE: dest_doc.get('_id')}
                        for group in dest_doc.get('groups', [])
                        for ref in group.get('refs', []) if 'residRef' in ref]
                if refs:
                    self.create(refs)
            else:
                if doc.get(PACKAGE):
                    links = archived_doc.get(LINKED_IN_PACKAGES, [])
                    links.append({PACKAGE: doc.get(PACKAGE)})
                    superdesk.get_resource_service(ARCHIVE).patch(archived_doc.get('_id'), {LINKED_IN_PACKAGES: links})

        return [doc.get('guid') for doc in docs]


superdesk.workflow_state(STATE_FETCHED)

superdesk.workflow_action(
    name='fetch_as_from_ingest',
    include_states=['ingested'],
    privileges=['archive', 'ingest_move']
)

superdesk.workflow_state('routed')
superdesk.workflow_action(
    name='route',
    include_states=['ingested']
)
Example #21
        return id_of_fetched_items

    def __fetch_items_in_package(self, dest_doc, desk, stage, state):
        # Note: macro and target information is not used for package publishing.
        # This needs to be revisited once the package ingest requirements are clear.
        for ref in [ref for group in dest_doc.get(GROUPS, []) for ref in group.get(REFS, []) if ref.get(RESIDREF)]:
            ref["location"] = ARCHIVE

        refs = [
            {config.ID_FIELD: ref.get(RESIDREF), "desk": desk, "stage": stage, ITEM_STATE: state}
            for group in dest_doc.get(GROUPS, [])
            for ref in group.get(REFS, [])
            if ref.get(RESIDREF)
        ]

        if refs:
            new_ref_guids = self.fetch(refs, id=None, notify=False)
            count = 0
            for ref in [ref for group in dest_doc.get(GROUPS, []) for ref in group.get(REFS, []) if ref.get(RESIDREF)]:
                ref[RESIDREF] = ref[GUID_FIELD] = new_ref_guids[count]
                count += 1


superdesk.workflow_state(CONTENT_STATE.FETCHED)
superdesk.workflow_action(
    name="fetch_from_ingest", include_states=["ingested"], privileges=["ingest", "archive", "fetch"]
)

superdesk.workflow_state("routed")
superdesk.workflow_action(name="route", include_states=["ingested"])
Example #22
        updates["expiry"] = get_expiry(desk_id=desk_id)
        return updates

    def on_update(self, updates, original):
        updates[ITEM_OPERATION] = ITEM_UNSPIKE

    def update(self, id, updates, original):
        original_state = original[config.CONTENT_STATE]
        if not is_workflow_state_transition_valid("unspike", original_state):
            raise InvalidStateTransitionError()
        user = get_user(required=True)

        item = get_resource_service(ARCHIVE).find_one(req=None, _id=id)
        updates.update(self.get_unspike_updates(item))

        self.backend.update(self.datasource, id, updates, original)
        item = get_resource_service(ARCHIVE).find_one(req=None, _id=id)

        push_notification("item:unspike", item=str(id), user=str(user))
        return item


superdesk.workflow_state("spiked")

superdesk.workflow_action(
    name="spike", exclude_states=["spiked", "published", "scheduled", "killed"], privileges=["spike"]
)

superdesk.workflow_action(name="unspike", include_states=["spiked"], privileges=["unspike"])
Example #23
    def test_status_registry(self):
        superdesk.workflow_state(name='test')
        self.assertIn({'name': 'test'}, superdesk.get_workflow_states())
        self.assertIn('test', superdesk.allowed_workflow_states)
Example #24
        if doc.get('last_item_update'):
            raise SuperdeskApiError.forbiddenError(
                "Deleting an Ingest Source after receiving items is prohibited."
            )

    def on_deleted(self, doc):
        """
        Overriding to send notification and record activity about channel deletion.
        """
        notify_and_add_activity(
            ACTIVITY_DELETE,
            'Deleted Ingest Channel {{name}}',
            self.datasource,
            item=None,
            user_list=self.user_service.get_users_by_user_type(
                'administrator'),
            name=doc.get('name'),
            provider_id=doc.get(config.ID_FIELD))
        push_notification('ingest_provider:delete',
                          provider_id=str(doc.get(config.ID_FIELD)))
        get_resource_service('sequences').delete(
            lookup={
                'key': 'ingest_providers_{_id}'.format(
                    _id=doc[config.ID_FIELD])
            })
        logger.info("Deleted Ingest Channel. Data:{}".format(doc))


superdesk.workflow_state(CONTENT_STATE.INGESTED)
superdesk.workflow_action(name='ingest')
Example #25
    def update(self, id, updates, original):
        original_state = original[ITEM_STATE]
        if not is_workflow_state_transition_valid('unspike', original_state):
            raise InvalidStateTransitionError()
        user = get_user(required=True)

        item = get_resource_service(ARCHIVE).find_one(req=None, _id=id)
        updates.update(self.get_unspike_updates(item))

        self.backend.update(self.datasource, id, updates, original)
        item = get_resource_service(ARCHIVE).find_one(req=None, _id=id)

        push_notification('item:unspike', item=str(id), user=str(user.get(config.ID_FIELD)))
        return item


superdesk.workflow_state('spiked')

superdesk.workflow_action(
    name='spike',
    exclude_states=['spiked', 'published', 'scheduled', 'corrected', 'killed'],
    privileges=['spike']
)

superdesk.workflow_action(
    name='unspike',
    include_states=['spiked'],
    privileges=['unspike']
)
Example #26

def get_crop(rendition):
    fields = ('CropLeft', 'CropTop', 'CropRight', 'CropBottom')
    return {field: rendition[field] for field in fields if field in rendition}


def update_item_data(item, data, keys=DEFAULT_SCHEMA.keys()):
    """Update main item data, i.e. only keys from the default schema.
    """
    for key in keys:
        if data.get(key):
            item[key] = data[key]
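
# Illustrative: only whitelisted keys with truthy values are copied over.
#     item = {'slugline': 'old'}
#     update_item_data(item, {'slugline': 'new', 'foo': 1}, keys=['slugline'])
#     item -> {'slugline': 'new'}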


superdesk.workflow_state('published')
superdesk.workflow_action(name='publish',
                          include_states=[
                              'fetched', 'routed', 'submitted', 'in_progress',
                              'scheduled'
                          ],
                          privileges=['publish'])

superdesk.workflow_state('scheduled')
superdesk.workflow_action(
    name='schedule',
    include_states=['fetched', 'routed', 'submitted', 'in_progress'],
    privileges=['schedule'])

superdesk.workflow_action(name='deschedule',
                          include_states=['scheduled'],
                          privileges=['deschedule'])
Example #27
import logging

import superdesk
from superdesk.media.media_operations import download_file_from_url, process_file
from superdesk.media.renditions import generate_renditions
from superdesk.io.iptc import subject_codes
from superdesk.metadata.item import GUID_NEWSML, GUID_FIELD, FAMILY_ID, ITEM_TYPE, CONTENT_TYPE
from superdesk.metadata.utils import generate_guid
from superdesk.lock import lock, unlock

UPDATE_SCHEDULE_DEFAULT = {'minutes': 5}
LAST_UPDATED = 'last_updated'
LAST_ITEM_UPDATE = 'last_item_update'
STATE_INGESTED = 'ingested'
IDLE_TIME_DEFAULT = {'hours': 0, 'minutes': 0}

logger = logging.getLogger(__name__)

superdesk.workflow_state(STATE_INGESTED)

superdesk.workflow_action(name='ingest')


def is_valid_type(provider, provider_type_filter=None):
    """Test if given provider has valid type and should be updated.

    :param provider: provider to be updated
    :param provider_type_filter: active provider type filter
    """
    provider_type = provider.get('type')
    if provider_type not in providers:
        return False
    if provider_type_filter and provider_type != provider_type_filter:
        return False
    return True
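
# For reference, a hypothetical call; the provider document is illustrative
# and assumes its type appears in the module-level `providers` registry:
#     is_valid_type({'type': 'reuters'}, provider_type_filter='reuters')
#     -> True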
Example #28
    :param dict data: update data
    :param list keys: keys of item to update
    :param bool keep_existing: if True, will only set non existing values
    """
    if keys is None:
        keys = DEFAULT_SCHEMA.keys()

    for key in keys:
        if data.get(key):
            if keep_existing:
                item.setdefault(key, data[key])
            else:
                item[key] = data[key]


superdesk.workflow_state("published")
superdesk.workflow_action(
    name="publish",
    include_states=["fetched", "routed", "submitted", "in_progress", "scheduled", "unpublished", "correction"],
    privileges=["publish"],
)

superdesk.workflow_state("scheduled")
superdesk.workflow_action(
    name="schedule", include_states=["fetched", "routed", "submitted", "in_progress"], privileges=["schedule"]
)

superdesk.workflow_action(name="deschedule", include_states=["scheduled"], privileges=["deschedule"])

superdesk.workflow_state("killed")
superdesk.workflow_action(name="kill", include_states=["published", "scheduled", "corrected"], privileges=["kill"])
Example #29
                    if doc.get(EMBARGO):
                        validation_errors.append("Package cannot have Items with Embargo")

                    # don't validate items that already have published
                    if doc[ITEM_STATE] not in [CONTENT_STATE.PUBLISHED, CONTENT_STATE.CORRECTED]:
                        validate_item = {"act": self.publish_type, "type": doc[ITEM_TYPE], "validate": doc}
                        errors = get_resource_service("validate").post([validate_item], headline=True)
                        if errors[0]:
                            validation_errors.extend(errors[0])

                    # check the locks on the items
                    if doc.get("lock_session", None) and package["lock_session"] != doc["lock_session"]:
                        validation_errors.extend(["{}: packaged item cannot be locked".format(doc["headline"])])


superdesk.workflow_state("published")
superdesk.workflow_action(
    name="publish",
    include_states=["fetched", "routed", "submitted", "in_progress", "scheduled"],
    privileges=["publish"],
)

superdesk.workflow_state("scheduled")
superdesk.workflow_action(
    name="schedule", include_states=["fetched", "routed", "submitted", "in_progress"], privileges=["schedule"]
)

superdesk.workflow_action(name="deschedule", include_states=["scheduled"], privileges=["deschedule"])

superdesk.workflow_state("killed")
superdesk.workflow_action(name="kill", include_states=["published", "scheduled", "corrected"], privileges=["kill"])