Example #1
import logging

from superdesk.vocabularies import is_related_content
from superdesk.default_settings import strtobool
from apps.archive.common import item_operations
from apps.item_lock.components.item_lock import set_unlock_updates

from flask_babel import _
from flask import request, json


logger = logging.getLogger(__name__)

ITEM_PUBLISH = "publish"
ITEM_CORRECT = "correct"
ITEM_KILL = "kill"
ITEM_TAKEDOWN = "takedown"
ITEM_UNPUBLISH = "unpublish"
item_operations.extend([ITEM_PUBLISH, ITEM_CORRECT, ITEM_KILL, ITEM_TAKEDOWN, ITEM_UNPUBLISH])
publish_services = {
    ITEM_PUBLISH: "archive_publish",
    ITEM_CORRECT: "archive_correct",
    ITEM_KILL: "archive_kill",
    ITEM_TAKEDOWN: "archive_takedown",
    ITEM_UNPUBLISH: "archive_unpublish",
}

PRESERVED_FIELDS = [
    "headline",
    "byline",
    "usageterms",
    "alt_text",
    "description_text",
    "copyrightholder",
    "copyrightnotice",
]
Example #2
import logging

from eve.utils import config
from superdesk.metadata.utils import item_url
from superdesk.workflow import is_workflow_state_transition_valid
from apps.archive.archive import ArchiveResource, SOURCE as ARCHIVE
from apps.tasks import get_expiry
from apps.packages import PackageService, TakesPackageService
from apps.archive.archive_rewrite import ArchiveRewriteService
from apps.archive.common import item_operations, ITEM_OPERATION


logger = logging.getLogger(__name__)

EXPIRY = "expiry"
REVERT_STATE = "revert_state"
ITEM_SPIKE = "spike"
ITEM_UNSPIKE = "unspike"
item_operations.extend([ITEM_SPIKE, ITEM_UNSPIKE])


class ArchiveSpikeResource(ArchiveResource):
    endpoint_name = "archive_spike"
    resource_title = endpoint_name
    datasource = {"source": ARCHIVE}

    url = "archive/spike"
    item_url = item_url

    resource_methods = []
    item_methods = ["PATCH"]

    privileges = {"PATCH": "spike"}
Example #3
import logging

from superdesk.metadata.utils import item_url
from apps.archive.archive import ArchiveResource, SOURCE as ARCHIVE
from apps.archive.common import (
    item_operations,
    convert_task_attributes_to_objectId,
    get_expiry,
    get_utc_schedule,
    get_dateline_city,
    get_expiry_date,
)
from apps.common.components.utils import get_component
from apps.item_autosave.components.item_autosave import ItemAutosave
from apps.legal_archive.commands import import_into_legal_archive
from apps.packages.package_service import PackageService
from apps.publish.published_item import LAST_PUBLISHED_VERSION, PUBLISHED, PUBLISHED_IN_PACKAGE
from superdesk.media.crop import CropService

logger = logging.getLogger(__name__)

ITEM_PUBLISH = 'publish'
ITEM_CORRECT = 'correct'
ITEM_KILL = 'kill'
item_operations.extend([ITEM_PUBLISH, ITEM_CORRECT, ITEM_KILL])


class BasePublishResource(ArchiveResource):
    """
    Base resource class for "publish" endpoint.
    """
    def __init__(self, endpoint_name, app, service, publish_type):
        self.endpoint_name = 'archive_%s' % publish_type
        self.resource_title = endpoint_name
        self.schema[PUBLISHED_IN_PACKAGE] = {'type': 'string'}

        self.datasource = {'source': ARCHIVE}

        self.url = 'archive/{}'.format(publish_type)
        self.item_url = item_url
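
A usage sketch (hypothetical subclass, not in the excerpt): concrete publish resources pass their operation as publish_type, which derives both the endpoint name ('archive_kill') and the URL ('archive/kill').

class KillPublishResource(BasePublishResource):
    def __init__(self, endpoint_name, app, service):
        # registers the "archive_kill" endpoint at archive/kill
        super().__init__(endpoint_name, app, service, ITEM_KILL)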
Example #4
import logging

from apps.archive.archive import ArchiveResource, SOURCE as ARCHIVE
from apps.archive.common import item_operations
from apps.packages import TakesPackageService
from apps.packages.package_service import PackageService
from apps.publish.published_item import LAST_PUBLISHED_VERSION, PUBLISHED, PUBLISHED_IN_PACKAGE
from apps.picture_crop import get_file
from superdesk.media.crop import CropService
from superdesk.media.media_operations import crop_image
from superdesk.celery_app import celery


logger = logging.getLogger(__name__)

ITEM_PUBLISH = 'publish'
ITEM_CORRECT = 'correct'
ITEM_KILL = 'kill'
item_operations.extend([ITEM_PUBLISH, ITEM_CORRECT, ITEM_KILL])


class BasePublishResource(ArchiveResource):
    """
    Base resource class for "publish" endpoint.
    """

    def __init__(self, endpoint_name, app, service, publish_type):
        self.endpoint_name = 'archive_%s' % publish_type
        self.resource_title = endpoint_name
        self.schema[PUBLISHED_IN_PACKAGE] = {'type': 'string'}

        self.datasource = {'source': ARCHIVE}

        self.url = 'archive/{}'.format(publish_type)
Example #5
import logging

from superdesk.utc import get_expiry_date
from superdesk.metadata.utils import item_url
from .common import get_user, is_assigned_to_a_desk, get_expiry
from superdesk.workflow import is_workflow_state_transition_valid
from apps.archive.archive import ArchiveResource, SOURCE as ARCHIVE
from apps.packages import PackageService, TakesPackageService
from apps.archive.archive_rewrite import ArchiveRewriteService
from apps.archive.common import item_operations, ITEM_OPERATION, is_item_in_package, set_sign_off

logger = logging.getLogger(__name__)

EXPIRY = 'expiry'
REVERT_STATE = 'revert_state'
ITEM_SPIKE = 'spike'
ITEM_UNSPIKE = 'unspike'
item_operations.extend([ITEM_SPIKE, ITEM_UNSPIKE])


class ArchiveSpikeResource(ArchiveResource):
    endpoint_name = 'archive_spike'
    resource_title = endpoint_name
    datasource = {'source': ARCHIVE}

    url = "archive/spike"
    item_url = item_url

    resource_methods = []
    item_methods = ['PATCH']

    privileges = {'PATCH': 'spike'}
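
A minimal guard sketch (an assumption about how the imported helper is used; the spike service itself is not shown): the workflow check takes the transition name and the item's current state.

from superdesk.metadata.item import ITEM_STATE  # as imported in Examples #6 and #7

def can_spike(item):
    # True when the item's current state allows the "spike" transition
    return is_workflow_state_transition_valid(ITEM_SPIKE, item[ITEM_STATE])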
Example #6
from apps.archive.common import (
    item_operations,
)
from superdesk.metadata.utils import generate_guid, item_url
from superdesk.metadata.item import GUID_TAG, INGEST_ID, FAMILY_ID, ITEM_STATE, CONTENT_STATE, GUID_FIELD
from superdesk.errors import SuperdeskApiError, InvalidStateTransitionError
from superdesk.resource import Resource, build_custom_hateoas
from superdesk.services import BaseService
from superdesk.utc import utcnow
from superdesk.workflow import is_workflow_state_transition_valid
from superdesk import get_resource_service
from superdesk.metadata.packages import RESIDREF, REFS, GROUPS
from superdesk.metadata.item import MEDIA_TYPES

custom_hateoas = {"self": {"title": "Archive", "href": "/archive/{_id}"}}
ITEM_FETCH = "fetch"
item_operations.extend([ITEM_FETCH])


class FetchResource(Resource):
    endpoint_name = "fetch"
    resource_title = endpoint_name

    schema = {
        "desk": Resource.rel("desks", False, required=True),
        "stage": Resource.rel("stages", False, nullable=True),
        "macro": {"type": "string"},
    }

    url = "ingest/<{0}:id>/fetch".format(item_url)

    resource_methods = ["POST"]
Example #7
from apps.archive.common import (
    set_original_creator,
    insert_into_versions,
    ITEM_OPERATION,
    item_operations,
)
from superdesk.metadata.utils import generate_guid, item_url
from superdesk.metadata.item import GUID_TAG, INGEST_ID, FAMILY_ID, ITEM_STATE, CONTENT_STATE, GUID_FIELD
from superdesk.errors import SuperdeskApiError, InvalidStateTransitionError
from superdesk.notification import push_notification
from superdesk.resource import Resource, build_custom_hateoas
from superdesk.services import BaseService
from superdesk.utc import utcnow
from superdesk.workflow import is_workflow_state_transition_valid
from superdesk import get_resource_service
from superdesk.metadata.packages import RESIDREF, REFS, GROUPS

custom_hateoas = {'self': {'title': 'Archive', 'href': '/archive/{_id}'}}
ITEM_FETCH = 'fetch'
item_operations.extend([ITEM_FETCH])


class FetchResource(Resource):
    endpoint_name = 'fetch'
    resource_title = endpoint_name

    schema = {
        'desk': Resource.rel('desks', False, required=True),
        'stage': Resource.rel('stages', False, nullable=True),
        'macro': {'type': 'string'}
    }

    url = 'ingest/<{0}:id>/fetch'.format(item_url)

    resource_methods = ['POST']
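
A small sketch of the hateoas helper (an assumption about usage): build_custom_hateoas fills the href template from the document's fields.

doc = {'_id': 'some-item-id'}  # hypothetical fetched item
build_custom_hateoas(custom_hateoas, doc)
# doc now carries a _links entry roughly like:
# {'self': {'title': 'Archive', 'href': '/archive/some-item-id'}}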
Example #8
import logging

from apps.archive.archive import ArchiveResource
from apps.archive.common import item_operations
from apps.common.components.utils import get_component
from apps.item_autosave.components.item_autosave import ItemAutosave
from apps.legal_archive.commands import import_into_legal_archive
from apps.packages.package_service import PackageService
from apps.publish.published_item import LAST_PUBLISHED_VERSION, PUBLISHED, PUBLISHED_IN_PACKAGE
from superdesk.media.crop import CropService


logger = logging.getLogger(__name__)

ITEM_PUBLISH = 'publish'
ITEM_CORRECT = 'correct'
ITEM_KILL = 'kill'
ITEM_TAKEDOWN = 'takedown'
item_operations.extend([ITEM_PUBLISH, ITEM_CORRECT, ITEM_KILL, ITEM_TAKEDOWN])
publish_services = {
    ITEM_PUBLISH: 'archive_publish',
    ITEM_CORRECT: 'archive_correct',
    ITEM_KILL: 'archive_kill',
    ITEM_TAKEDOWN: 'archive_takedown'
}

PRESERVED_FIELDS = ['headline', 'byline', 'usageterms', 'alt_text',
                    'description_text', 'copyrightholder', 'copyrightnotice']


class BasePublishResource(ArchiveResource):
    """
    Base resource class for "publish" endpoint.
    """