Example #1
def get_upload_as_data_uri(media_id):
    if not request.args.get('resource'):
        media_id = app.media.getFilename(media_id)
        media_file = app.media.get(media_id, 'upload')
    else:
        media_file = app.media.get(media_id, request.args['resource'])
    if media_file:
        data = wrap_file(request.environ, media_file, buffer_size=1024 * 256)
        response = app.response_class(data,
                                      mimetype=media_file.content_type,
                                      direct_passthrough=True)
        response.content_length = media_file.length
        response.last_modified = media_file.upload_date
        response.set_etag(media_file.md5)
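        # NOTE: cache_for is not defined in this window; in the source it is a
        # module-level constant (see the 30d default in the next example)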
        response.cache_control.max_age = cache_for
        response.cache_control.s_max_age = cache_for
        response.cache_control.public = True
        response.make_conditional(request)

        if strtobool(request.args.get('download', 'False')):
            response.headers['Content-Disposition'] = 'attachment'
        else:
            response.headers['Content-Disposition'] = 'inline'
        return response

    raise SuperdeskApiError.notFoundError('File not found on media storage.')
Example #2
def generate_response_for_file(
    file: SuperdeskFile,
    cache_for: int = 3600 * 24 * 30,  # 30d cache
    buffer_size: int = 1024 * 256,
    content_disposition: str = None,
):
    data = wrap_file(request.environ, file, buffer_size=buffer_size)
    response = app.response_class(data,
                                  mimetype=file.content_type,
                                  direct_passthrough=True)
    response.content_length = file.length
    response.last_modified = file.upload_date
    response.set_etag(file.md5)
    response.cache_control.max_age = cache_for
    response.cache_control.s_max_age = cache_for
    response.cache_control.public = True

    # Add ``accept_ranges`` & ``complete_length`` so video seeking is supported
    response.make_conditional(request,
                              accept_ranges=True,
                              complete_length=file.length)

    if content_disposition:
        response.headers["Content-Disposition"] = content_disposition
    else:
        filename = "; filename={}".format(
            file.filename or file.name) if file.filename or file.name else ""
        if strtobool(request.args.get("download", "False")):
            response.headers["Content-Disposition"] = "Attachment" + filename
        else:
            response.headers["Content-Disposition"] = "Inline" + filename

    return response
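
All of these handlers run request.args.get('download', 'False') through strtobool because query-string values arrive as strings. A standalone sketch of what strtobool accepts (note that distutils, its original home, was removed in Python 3.12):

from distutils.util import strtobool

# Truthy spellings map to 1, falsy spellings map to 0 (matching is case-insensitive)
for raw in ('y', 'yes', 't', 'true', 'on', '1'):
    assert strtobool(raw) == 1
for raw in ('n', 'no', 'f', 'false', 'off', '0'):
    assert strtobool(raw) == 0

# Anything else raises ValueError, hence the explicit 'False' default above
try:
    strtobool('maybe')
except ValueError:
    print('unrecognised boolean string')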
Example #3
    def _raise_if_unpublished_related_items(self, original):
        if not request:
            return

        if (
            config.PUBLISH_ASSOCIATED_ITEMS
            or not original.get(ASSOCIATIONS)
            or self.publish_type not in [ITEM_PUBLISH, ITEM_CORRECT]
        ):
            return

        archive_service = get_resource_service("archive")
        publishing_warnings_confirmed = strtobool(request.args.get("publishing_warnings_confirmed") or "False")

        if not publishing_warnings_confirmed:
            for key, associated_item in original.get(ASSOCIATIONS).items():
                if associated_item and is_related_content(key):
                    item = archive_service.find_one(req=None, _id=associated_item.get("_id"))
                    item = item if item else associated_item

                    if item.get("state") not in PUBLISH_STATES:
                        error_msg = json.dumps(
                            {
                                "warnings": [
                                    _(
                                        "There are unpublished related "
                                        + "items that won't be sent out as "
                                        + "related items. Do you want to publish the article anyway?"
                                    )
                                ]
                            }
                        )
                        raise ValidationError(error_msg)
Example #4
    def _get_value(field: str):
        value_type = (schema.get(field) or {}).get("type")
        sams_mapping = (schema.get(field) or {}).get("sams") or {}
        data_value = data[field]

        if value_type == "boolean" and isinstance(data_value, str):
            data_value = strtobool(data_value)

        return (sams_mapping.get("map_value")
                or {}).get(data_value) or data_value
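
_get_value above is a closure over schema and data from its enclosing function. A self-contained re-creation of the same logic, with made-up field names and mapping values, shows the two steps: coerce boolean strings with strtobool, then optionally translate the result through the sams map_value table:

from distutils.util import strtobool

def get_value(schema, data, field):
    # Standalone version of the _get_value closure above; inputs are hypothetical
    value_type = (schema.get(field) or {}).get('type')
    sams_mapping = (schema.get(field) or {}).get('sams') or {}
    data_value = data[field]
    if value_type == 'boolean' and isinstance(data_value, str):
        data_value = strtobool(data_value)  # 'true' -> 1, 'false' -> 0
    # map_value may translate the coerced value; otherwise it is returned as-is
    return (sams_mapping.get('map_value') or {}).get(data_value) or data_value

schema = {'is_public': {'type': 'boolean',
                        'sams': {'map_value': {1: 'public', 0: 'internal'}}}}
assert get_value(schema, {'is_public': 'yes'}, 'is_public') == 'public'
assert get_value(schema, {'is_public': 'no'}, 'is_public') == 'internal'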
Example #5
def download_image(item_id: str):
    """Downloads an image from SAMS and sends back to the requestee"""

    width = int(request.args["width"]) if request.args.get("width") else None
    height = int(
        request.args["height"]) if request.args.get("height") else None
    keep_proportions = strtobool(request.args.get("keep_proportions", "True"))
    file = get_image_from_sams(get_sams_client(), ObjectId(item_id), width,
                               height, keep_proportions)

    if not file:
        raise SuperdeskApiError.notFoundError(_("SAMS Image Asset not found"))

    return generate_response_for_file(file)
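
The handler above also shows the usual pattern for optional query parameters: integers are converted by hand, booleans go through strtobool with a string default. A quick illustration outside Flask, using a plain dict as a stand-in for request.args:

from distutils.util import strtobool

args = {'width': '640', 'keep_proportions': 'false'}  # stand-in for request.args

width = int(args['width']) if args.get('width') else None
height = int(args['height']) if args.get('height') else None
keep_proportions = strtobool(args.get('keep_proportions', 'True'))

assert (width, height, keep_proportions) == (640, None, 0)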
Example #6
PRODAPI_INSTALLED_APPS = (
    'prod_api.items',
    'prod_api.assets',
    'prod_api.desks',
    'prod_api.planning',
    'prod_api.contacts',
    'prod_api.users',
)

# NOTE: no trailing slash for the PRODAPI_URL setting!
PRODAPI_URL = env('PRODAPI_URL', 'http://localhost:5500')
URL_PREFIX = env('PRODAPI_URL_PREFIX', 'prodapi')
API_VERSION = 'v1'
MEDIA_PREFIX = env(
    'MEDIA_PREFIX', '{}/{}/{}/assets'.format(PRODAPI_URL.rstrip('/'),
                                             URL_PREFIX, API_VERSION))

# date formats
DATE_FORMAT = '%Y-%m-%dT%H:%M:%S+0000'
ELASTIC_DATE_FORMAT = '%Y-%m-%d'

# response in json
XML = False

# authorisation server
AUTH_SERVER_SHARED_SECRET = env('AUTH_SERVER_SHARED_SECRET', '')

# authentication
PRODAPI_AUTH_ENABLED = strtobool(env('PRODAPI_AUTH_ENABLED', 'true'))
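
The settings modules in this and the following examples read feature flags through an env(key, default) helper and coerce them with strtobool. A minimal sketch of that pattern, assuming env is a thin wrapper over os.environ (the real helper in these projects may behave differently, e.g. around empty values):

import os
from distutils.util import strtobool

def env(variable, fallback_value=None):
    # Minimal stand-in for the env() helper used in these settings files
    return os.environ.get(variable, fallback_value)

PRODAPI_AUTH_ENABLED = strtobool(env('PRODAPI_AUTH_ENABLED', 'true'))
print(bool(PRODAPI_AUTH_ENABLED))  # True unless the environment variable overrides it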
Example #7
PRODAPI_INSTALLED_APPS = (
    "prod_api.items",
    "prod_api.assets",
    "prod_api.desks",
    "prod_api.planning",
    "prod_api.contacts",
    "prod_api.users",
)

# NOTE: no trailing slash for the PRODAPI_URL setting!
PRODAPI_URL = env("PRODAPI_URL", "http://localhost:5500")
URL_PREFIX = env("PRODAPI_URL_PREFIX", "prodapi")
API_VERSION = "v1"
MEDIA_PREFIX = env(
    "MEDIA_PREFIX", "{}/{}/{}/assets".format(PRODAPI_URL.rstrip("/"),
                                             URL_PREFIX, API_VERSION))

# date formats
DATE_FORMAT = "%Y-%m-%dT%H:%M:%S+0000"
ELASTIC_DATE_FORMAT = "%Y-%m-%d"

# response in json
XML = False

# authorisation server
AUTH_SERVER_SHARED_SECRET = env("AUTH_SERVER_SHARED_SECRET", "")

# authentication
PRODAPI_AUTH_ENABLED = strtobool(env("PRODAPI_AUTH_ENABLED", "true"))
Example #8
WS_HOST = env('WSHOST', '0.0.0.0')
WS_PORT = env('WSPORT', '5100')

LOG_CONFIG_FILE = env('LOG_CONFIG_FILE', 'logging_config.yml')

REDIS_URL = env('REDIS_URL', 'redis://localhost:6379')
if env('REDIS_PORT'):
    REDIS_URL = env('REDIS_PORT').replace('tcp:', 'redis:')
BROKER_URL = env('CELERY_BROKER_URL', REDIS_URL)

SECRET_KEY = env('SECRET_KEY', '')

PUBLISH_ASSOCIATED_ITEMS = True

# Highcharts Export Server - default settings
ANALYTICS_ENABLE_SCHEDULED_REPORTS = strtobool(
    env('ANALYTICS_ENABLE_SCHEDULED_REPORTS', 'true'))
HIGHCHARTS_SERVER_HOST = env('HIGHCHARTS_SERVER_HOST', 'localhost')
HIGHCHARTS_SERVER_PORT = env('HIGHCHARTS_SERVER_PORT', '6060')

# schema for images, video, audio
SCHEMA = {
    'picture': {
        'headline': {
            'required': False
        },
        'description_text': {
            'required': True
        },
        'byline': {
            'required': False
        },
Example #9
RENDITIONS = {
    'picture': {
        'thumbnail': {'width': 220, 'height': 120},
        'viewImage': {'width': 640, 'height': 640},
        'baseImage': {'width': 1400, 'height': 1400},
    },
    'avatar': {
        'thumbnail': {'width': 60, 'height': 60},
        'viewImage': {'width': 200, 'height': 200},
    }
}

WS_HOST = env('WSHOST', '0.0.0.0')
WS_PORT = env('WSPORT', '5100')

LOG_CONFIG_FILE = env('LOG_CONFIG_FILE', 'logging_config.yml')

REDIS_URL = env('REDIS_URL', 'redis://localhost:6379')
if env('REDIS_PORT'):
    REDIS_URL = env('REDIS_PORT').replace('tcp:', 'redis:')
BROKER_URL = env('CELERY_BROKER_URL', REDIS_URL)

SECRET_KEY = env('SECRET_KEY', '')

# Highcharts Export Server - default settings
ANALYTICS_ENABLE_SCHEDULED_REPORTS = strtobool(
    env('ANALYTICS_ENABLE_SCHEDULED_REPORTS', 'true')
)
HIGHCHARTS_SERVER_HOST = env('HIGHCHARTS_SERVER_HOST', 'localhost')
HIGHCHARTS_SERVER_PORT = env('HIGHCHARTS_SERVER_PORT', '6060')
Example #10
}

WS_HOST = env("WSHOST", "0.0.0.0")
WS_PORT = env("WSPORT", "5100")

LOG_CONFIG_FILE = env("LOG_CONFIG_FILE", "logging_config.yml")

REDIS_URL = env("REDIS_URL", "redis://localhost:6379")
if env("REDIS_PORT"):
    REDIS_URL = env("REDIS_PORT").replace("tcp:", "redis:")
BROKER_URL = env("CELERY_BROKER_URL", REDIS_URL)

SECRET_KEY = env("SECRET_KEY", os.urandom(32))

# Highcharts Export Server - default settings
ANALYTICS_ENABLE_SCHEDULED_REPORTS = strtobool(
    env("ANALYTICS_ENABLE_SCHEDULED_REPORTS", "true"))
HIGHCHARTS_SERVER_HOST = env("HIGHCHARTS_SERVER_HOST", "localhost")
HIGHCHARTS_SERVER_PORT = env("HIGHCHARTS_SERVER_PORT", "6060")
ANALYTICS_ENABLE_ARCHIVE_STATS = strtobool(
    env("ANALYTICS_ENABLE_ARCHIVE_STATS", "false"))

LANGUAGES = [
    {
        "language": "en-CA",
        "label": "English",
        "source": True,
        "destination": True
    },
    {
        "language": "fr-CA",
        "label": "Français",
Example #11
    def _save_coverage_history(self, updates, original):
        """Save the coverage history for the planning item"""
        item = deepcopy(original)
        original_coverages = {
            c.get('coverage_id'): c
            for c in (original or {}).get('coverages') or []
        }
        updates_coverages = {
            c.get('coverage_id'): c
            for c in (updates or {}).get('coverages') or []
        }
        added, deleted, updated = [], [], []
        planning_service = get_resource_service('planning')
        add_to_planning = strtobool(
            request.args.get('add_to_planning', 'false'))

        for coverage_id, coverage in updates_coverages.items():
            original_coverage = original_coverages.get(coverage_id)
            if not original_coverage:
                added.append(coverage)
            elif planning_service.is_coverage_planning_modified(coverage, original_coverage) or \
                    planning_service.is_coverage_assignment_modified(coverage, original_coverage):
                updated.append(coverage)

        deleted = [
            coverage for cid, coverage in original_coverages.items()
            if cid not in updates_coverages
        ]

        for cov in added:
            if cov.get('assigned_to',
                       {}).get('state') == ASSIGNMENT_WORKFLOW_STATE.ASSIGNED:
                diff = {'coverage_id': cov.get('coverage_id')}
                diff.update(cov)
                self._save_history(
                    item, diff, 'coverage_created_content'
                    if add_to_planning else 'coverage_created')
                self._save_history(item, diff, 'reassigned')
                self._save_history(item, diff, 'add_to_workflow')
            else:
                self._save_history(item, cov, 'coverage_created')

        for cov in updated:
            original_coverage = original_coverages.get(cov.get('coverage_id'))
            diff = self._get_coverage_diff(cov, original_coverage)
            if len(diff.keys()) > 1:
                self._save_history(item, diff, 'coverage_edited')

            if cov.get('workflow_status') == WORKFLOW_STATE.CANCELLED and \
                    original_coverage.get('workflow_status') != WORKFLOW_STATE.CANCELLED:
                operation = 'coverage_cancelled'
                diff = {
                    'coverage_id': cov.get('coverage_id'),
                    'workflow_status': cov['workflow_status']
                }
                if not original.get(LOCK_ACTION):
                    operation = 'events_cancel'
                elif original.get(LOCK_ACTION) == ITEM_ACTIONS.PLANNING_CANCEL or \
                        updates.get('state') == WORKFLOW_STATE.CANCELLED:
                    # If cancelled through item action or through editor
                    operation = 'planning_cancel'

                self._save_history(item, diff, operation)

            # If assignment was added in an update
            if cov.get('assigned_to', {}).get('assignment_id') and\
                    not (original_coverage.get('assigned_to') or {}).get('assignment_id'):
                diff = {
                    'coverage_id': cov.get('coverage_id'),
                    'assigned_to': cov['assigned_to']
                }
                self._save_history(item, diff, 'coverage_assigned')

        for cov in deleted:
            self._save_history(item, {'coverage_id': cov.get('coverage_id')},
                               'coverage_deleted')
Example #12
    def on_item_created(self, items):
        add_to_planning = strtobool(
            request.args.get('add_to_planning', 'false'))
        super().on_item_created(items,
                                'add_to_planning' if add_to_planning else None)
Example #13
NEWSML_PROVIDER_ID = 'aap.com.au'
ORGANIZATION_NAME = 'Australian Associated Press'
ORGANIZATION_NAME_ABBREVIATION = 'AAP'

AMAZON_CONTAINER_NAME = env('AMAZON_CONTAINER_NAME', '')
AMAZON_ACCESS_KEY_ID = env('AMAZON_ACCESS_KEY_ID', '')
AMAZON_SECRET_ACCESS_KEY = env('AMAZON_SECRET_ACCESS_KEY', '')
AMAZON_REGION = env('AMAZON_REGION', 'us-east-1')
AMAZON_SERVE_DIRECT_LINKS = env('AMAZON_SERVE_DIRECT_LINKS', False)
AMAZON_S3_USE_HTTPS = env('AMAZON_S3_USE_HTTPS', False)

is_testing = os.environ.get('SUPERDESK_TESTING', '').lower() == 'true'
ELASTICSEARCH_FORCE_REFRESH = is_testing
ELASTICSEARCH_AUTO_AGGREGATIONS = True

# URL of the Highcharts Export Server (used in analytics for generating charts on the server)
HIGHCHARTS_SERVER_HOST = env('HIGHCHARTS_SERVER_HOST', 'localhost')
HIGHCHARTS_SERVER_PORT = env('HIGHCHARTS_SERVER_PORT', '6060')
HIGHCHARTS_SERVER_WORKERS = env('HIGHCHARTS_SERVER_WORKERS', '1')
HIGHCHARTS_SERVER_WORK_LIMIT = env('HIGHCHARTS_SERVER_WORK_LIMIT', '10')
HIGHCHARTS_SERVER_LOG_LEVEL = env('HIGHCHARTS_SERVER_LOG_LEVEL', '4')
HIGHCHARTS_SERVER_QUEUE_SIZE = env('HIGHCHARTS_SERVER_QUEUE_SIZE', '10')
HIGHCHARTS_SERVER_RATE_LIMIT = env('HIGHCHARTS_SERVER_RATE_LIMIT', False)
ANALYTICS_ENABLE_SCHEDULED_REPORTS = strtobool(
    env('ANALYTICS_ENABLE_SCHEDULED_REPORTS', 'false'))

# Archive Statistics
STATISTICS_MONGO_DBNAME = 'sptests'
ANALYTICS_ENABLE_ARCHIVE_STATS = strtobool(
    env('ANALYTICS_ENABLE_ARCHIVE_STATS', 'true'))
Example #14
}

WS_HOST = env('WSHOST', '0.0.0.0')
WS_PORT = env('WSPORT', '5100')

LOG_CONFIG_FILE = env('LOG_CONFIG_FILE', 'logging_config.yml')

REDIS_URL = env('REDIS_URL', 'redis://localhost:6379')
if env('REDIS_PORT'):
    REDIS_URL = env('REDIS_PORT').replace('tcp:', 'redis:')
BROKER_URL = env('CELERY_BROKER_URL', REDIS_URL)

SECRET_KEY = env('SECRET_KEY', os.urandom(32))

# Highcharts Export Server - default settings
ANALYTICS_ENABLE_SCHEDULED_REPORTS = strtobool(
    env('ANALYTICS_ENABLE_SCHEDULED_REPORTS', 'true'))
HIGHCHARTS_SERVER_HOST = env('HIGHCHARTS_SERVER_HOST', 'localhost')
HIGHCHARTS_SERVER_PORT = env('HIGHCHARTS_SERVER_PORT', '6060')

LANGUAGES = [{
    'language': 'en-CA',
    'label': 'English',
    'source': True,
    'destination': True
}, {
    'language': 'fr-CA',
    'label': 'Français',
    'source': True,
    'destination': True
}]
Example #15
        "maxlength": 100
    },
    "archive_description": {
        "required": False,
    },
    "byline": {
        "required": False,
    },
}

# max multi day event duration in days
MAX_MULTI_DAY_EVENT_DURATION = int(env('MAX_MULTI_DAY_EVENT_DURATION', 7))

# Highcharts Export Server - default settings
ANALYTICS_ENABLE_SCHEDULED_REPORTS = strtobool(
    env('ANALYTICS_ENABLE_SCHEDULED_REPORTS', 'true')
)
ANALYTICS_ENABLE_ARCHIVE_STATS = strtobool(
    env('ANALYTICS_ENABLE_ARCHIVE_STATS', 'true')
)
HIGHCHARTS_SERVER_HOST = env('HIGHCHARTS_SERVER_HOST', 'localhost')
HIGHCHARTS_SERVER_PORT = env('HIGHCHARTS_SERVER_PORT', '6060')
HIGHCHARTS_SERVER_WORKERS = env('HIGHCHARTS_SERVER_WORKERS', None)
HIGHCHARTS_SERVER_WORK_LIMIT = env('HIGHCHARTS_SERVER_WORK_LIMIT', None)
HIGHCHARTS_SERVER_LOG_LEVEL = env('HIGHCHARTS_SERVER_LOG_LEVEL', None)
HIGHCHARTS_SERVER_QUEUE_SIZE = env('HIGHCHARTS_SERVER_QUEUE_SIZE', None)
HIGHCHARTS_SERVER_RATE_LIMIT = env('HIGHCHARTS_SERVER_RATE_LIMIT', None)

MIN_BROADCAST_TEXT_WORD_COUNT = int(env('MIN_BROADCAST_TEXT_WORD_COUNT', 120))

# Intelematics Weather API Key
Example #16
LDAP_BASE_FILTER = env('LDAP_BASE_FILTER', '')

# change the user depending on the LDAP directory structure
LDAP_USER_FILTER = env(
    'LDAP_USER_FILTER',
    "(&(objectCategory=user)(objectClass=user)(sAMAccountName={}))")

# LDAP User Attributes to fetch. Keys would be LDAP Attribute Name and Value would be Superdesk Model Attribute Name
LDAP_USER_ATTRIBUTES = json.loads(
    env(
        'LDAP_USER_ATTRIBUTES',
        '{"givenName": "first_name", "sn": "last_name", '
        '"displayName": "display_name", "mail": "email"}'))

#: Enable the ability for the display name of the user to be overridden with Superdesk user attributes
LDAP_SET_DISPLAY_NAME = strtobool(env('LDAP_SET_DISPLAY_NAME', 'True'))

if LDAP_SERVER:
    INSTALLED_APPS.append('apps.ldap')
else:
    INSTALLED_APPS.append('superdesk.users')
    INSTALLED_APPS.append('apps.auth.db')

INSTALLED_APPS.extend([
    'superdesk.upload',
    'superdesk.download',
    'superdesk.sequences',
    'superdesk.notification',
    'superdesk.data_updates',
    'superdesk.activity',
    'superdesk.vocabularies',
Example #17
                            routing_key='newsroom.#'), )

CELERY_TASK_ROUTES = {
    'newsroom.*': {
        'queue': celery_queue('newsroom'),
        'routing_key': 'newsroom.task',
    }
}

#: celery beat config
CELERY_BEAT_SCHEDULE = {
    'newsroom:company_expiry': {
        'task': 'newsroom.company_expiry_alerts.company_expiry',
        'schedule': crontab(hour=local_to_utc_hour(0),
                            minute=0),  # Runs every day at midnight
    },
    'newsroom:monitoring_schedule_alerts': {
        'task': 'newsroom.monitoring.email_alerts.monitoring_schedule_alerts',
        'schedule': timedelta(seconds=60),
    },
    'newsroom:monitoring_immediate_alerts': {
        'task': 'newsroom.monitoring.email_alerts.monitoring_immediate_alerts',
        'schedule': timedelta(seconds=60),
    }
}

MAX_EXPIRY_QUERY_LIMIT = os.environ.get('MAX_EXPIRY_QUERY_LIMIT', 100)
CONTENT_API_EXPIRY_DAYS = os.environ.get('CONTENT_API_EXPIRY_DAYS', 180)

NEWS_API_ENABLED = strtobool(env('NEWS_API_ENABLED', 'false'))
Example #18
        "required": True,
        "maxlength": 100
    },
    "archive_description": {
        "required": False,
    },
    "byline": {
        "required": False,
    },
}

# max multi day event duration in days
MAX_MULTI_DAY_EVENT_DURATION = int(env('MAX_MULTI_DAY_EVENT_DURATION', 7))

# Highcharts Export Server - default settings
ANALYTICS_ENABLE_SCHEDULED_REPORTS = strtobool(
    env('ANALYTICS_ENABLE_SCHEDULED_REPORTS', 'true'))
ANALYTICS_ENABLE_ARCHIVE_STATS = strtobool(
    env('ANALYTICS_ENABLE_ARCHIVE_STATS', 'true'))
HIGHCHARTS_SERVER_HOST = env('HIGHCHARTS_SERVER_HOST', 'localhost')
HIGHCHARTS_SERVER_PORT = env('HIGHCHARTS_SERVER_PORT', '6060')
HIGHCHARTS_SERVER_WORKERS = env('HIGHCHARTS_SERVER_WORKERS', None)
HIGHCHARTS_SERVER_WORK_LIMIT = env('HIGHCHARTS_SERVER_WORK_LIMIT', None)
HIGHCHARTS_SERVER_LOG_LEVEL = env('HIGHCHARTS_SERVER_LOG_LEVEL', None)
HIGHCHARTS_SERVER_QUEUE_SIZE = env('HIGHCHARTS_SERVER_QUEUE_SIZE', None)
HIGHCHARTS_SERVER_RATE_LIMIT = env('HIGHCHARTS_SERVER_RATE_LIMIT', None)

MIN_BROADCAST_TEXT_WORD_COUNT = int(env('MIN_BROADCAST_TEXT_WORD_COUNT', 120))

# Intelematics Weather API Key
INTELEMATICS_WEATHER_API_KEY = env('INTELEMATICS_WEATHER_API_KEY', None)
Example #19
        'queue': celery_queue('newsroom'),
        'routing_key': 'newsroom.task',
    }
}

#: celery beat config
CELERY_BEAT_SCHEDULE = {
    'newsroom:company_expiry': {
        'task': 'newsroom.company_expiry_alerts.company_expiry',
        'schedule': crontab(hour=local_to_utc_hour(0),
                            minute=0),  # Runs every day at midnight
    },
    'newsroom:monitoring_schedule_alerts': {
        'task': 'newsroom.monitoring.email_alerts.monitoring_schedule_alerts',
        'schedule': timedelta(seconds=60),
    },
    'newsroom:monitoring_immediate_alerts': {
        'task': 'newsroom.monitoring.email_alerts.monitoring_immediate_alerts',
        'schedule': timedelta(seconds=60),
    }
}

MAX_EXPIRY_QUERY_LIMIT = os.environ.get('MAX_EXPIRY_QUERY_LIMIT', 100)
CONTENT_API_EXPIRY_DAYS = os.environ.get('CONTENT_API_EXPIRY_DAYS', 180)

NEWS_API_ENABLED = strtobool(env('NEWS_API_ENABLED', 'false'))

# Enables the application of product filtering to image references in the API and ATOM responses
NEWS_API_IMAGE_PERMISSIONS_ENABLED = strtobool(
    env('NEWS_API_IMAGE_PERMISSIONS_ENABLED', 'false'))