Exemplo n.º 1
0
    def get_keywords(self, text):
        """Fetch ranked named entities for *text* from the AlchemyAPI service.

        :param text: text to analyse
        :return: list of entity dicts (empty when the service finds none)
        :raises SuperdeskApiError.notFoundError: when no API key is configured
        :raises SuperdeskApiError.internalError: on connection/parse failure
        """
        api_key = app.config['KEYWORDS_KEY_API']
        if not api_key:
            raise SuperdeskApiError.notFoundError(
                _('AlchemyAPI key is not set'))

        query = urllib.parse.urlencode({
            'apikey': api_key,
            'outputMode': 'json',
        })
        url = '{}{}?{}'.format(
            app.config['KEYWORDS_BASE_URL'],
            '/text/TextGetRankedNamedEntities',
            query)

        try:
            response = self._http.post(url, data={'text': text})
        except Exception as ex:
            raise SuperdeskApiError.internalError(
                _('Fail to connect to Alchemy service'), exception=ex)

        try:
            return response.json().get('entities', [])
        except Exception as ex:
            raise SuperdeskApiError.internalError(
                _('Fail to parse the response from Alchemy service'),
                exception=ex)
Exemplo n.º 2
0
    def find_one_raw(self, resource, _id):
        """Fetch one Scanpix item by reference and rehost its image locally.

        Searches the Scanpix API for ``_id``, downloads the item's base
        image, saves it through the app media storage and regenerates the
        renditions so the returned doc points at local media.

        :param resource: resource name (not used by this lookup)
        :param _id: Scanpix reference pointer of the item
        :return dict: the parsed item document with local renditions
        :raises SuperdeskApiError.internalError: when saving the media or
            generating renditions fails
        """
        # XXX: preview is used here instead of paid download
        #      see SDNTB-15
        data = {}
        url = self._app.config['SCANPIX_SEARCH_URL'] + '/search'
        data['refPtrs'] = [_id]
        r = self._request(url, data)
        doc = r.json()['data'][0]
        self._parse_doc(doc)

        url = doc['renditions']['baseImage']['href']
        # if MIME type can't be guessed, we default to jpeg
        mime_type = mimetypes.guess_type(url)[0] or 'image/jpeg'

        # NOTE(review): the search payload is re-sent with the image
        # request; presumably ignored by the image endpoint — confirm
        r = self._request(url, data)
        out = BytesIO(r.content)
        file_name, content_type, metadata = process_file_from_stream(
            out, mime_type)

        logger.debug('Going to save media file with %s ' % file_name)
        out.seek(0)
        try:
            file_id = self._app.media.put(out,
                                          filename=file_name,
                                          content_type=content_type,
                                          metadata=None)
        except Exception as e:
            logger.exception(e)
            raise SuperdeskApiError.internalError('Media saving failed')
        else:
            try:
                inserted = [file_id]
                doc['mimetype'] = content_type
                doc['filemeta'] = decode_metadata(metadata)
                # set the version created to now to bring it to the top of the desk, images can be quite old
                doc['versioncreated'] = utcnow()
                file_type = content_type.split('/')[0]
                rendition_spec = get_renditions_spec()
                renditions = generate_renditions(out,
                                                 file_id,
                                                 inserted,
                                                 file_type,
                                                 content_type,
                                                 rendition_spec,
                                                 url_for_media,
                                                 insert_metadata=False)
                doc['renditions'] = renditions
            except (IndexError, KeyError, json.JSONDecodeError) as e:
                logger.exception("Internal error: {}".format(e))
                # remove the stored file so we don't leak orphaned media
                delete_file_on_error(doc, file_id)

                raise SuperdeskApiError.internalError(
                    'Generating renditions failed')
        return doc
Exemplo n.º 3
0
    def create(self, docs, **kwargs):
        """Attach provider keywords to each doc.

        :param docs: documents with an optional ``text`` field; each gets a
            ``keywords`` list added in place
        :return: list of sequential indexes, one per processed doc
        :raises SuperdeskApiError.internalError: if no provider is
            configured or the provider call fails
        """
        if not self.provider:
            raise SuperdeskApiError.internalError('Not set a keywords provider')

        try:
            ids = []
            for doc in docs:
                doc['keywords'] = self.provider.get_keywords(doc.get('text', ''))
                ids.append(len(ids))
            return ids
        except Exception as ex:
            # chain the original exception for debugging, consistent with
            # the other keyword services in this codebase
            raise SuperdeskApiError.internalError(str(ex), exception=ex)
Exemplo n.º 4
0
    def create(self, docs, **kwargs):
        """Compute keywords for every doc and return their sequence ids.

        :param docs: documents with an optional ``text`` field
        :return: list of indexes, one per doc
        :raises SuperdeskApiError.internalError: on missing provider or failure
        """
        if not self.provider:
            raise SuperdeskApiError.internalError(
                _("Not set a keywords provider"))

        try:
            ids = []
            for index, doc in enumerate(docs):
                text = doc.get("text", "")
                doc["keywords"] = self.provider.get_keywords(text)
                ids.append(index)
            return ids
        except Exception as ex:
            raise SuperdeskApiError.internalError(str(ex), exception=ex)
Exemplo n.º 5
0
    def _save_cropped_image(self, file_stream, original, doc):
        """Saves the cropped image and returns the crop dictionary

        :param file_stream: cropped image stream
        :param original: original rendition
        :param doc: crop data
        :return dict: Crop values
        :raises SuperdeskApiError.internalError
        """
        crop = {}
        # track whether media was actually stored, so the cleanup below
        # never references an unbound name when the failure happens earlier
        file_id = None
        try:
            file_name, content_type, metadata = process_file_from_stream(
                file_stream, content_type=original.get("mimetype")
            )
            file_stream.seek(0)
            file_id = app.media.put(
                file_stream, filename=file_name, content_type=content_type, resource="upload", metadata=metadata
            )
            crop["media"] = file_id
            crop["mimetype"] = content_type
            crop["href"] = url_for_media(file_id, content_type)
            crop["CropTop"] = doc.get("CropTop", None)
            crop["CropLeft"] = doc.get("CropLeft", None)
            crop["CropRight"] = doc.get("CropRight", None)
            crop["CropBottom"] = doc.get("CropBottom", None)
            return crop
        except Exception as ex:
            if file_id is not None:
                # best-effort cleanup of the stored file; ignore failures
                try:
                    app.media.delete(file_id)
                except Exception:
                    pass
            raise SuperdeskApiError.internalError("Generating crop failed: {}".format(str(ex)), exception=ex)
Exemplo n.º 6
0
 def _check_server(self, text):
     """POST *text* to the Grammalecte server and return mapped results.

     :raises SuperdeskApiError.internalError: on a non-200 response
     """
     url = urljoin(self.base_url, PATH_CHECK)
     payload = {
         "text": text,
         "options": json.dumps(self.grammalecte_config),
     }
     response = requests.post(url, data=payload)
     if response.status_code != 200:
         raise SuperdeskApiError.internalError("Unexpected return code from Grammalecte")
     return self.grammalecte2superdesk(text, response.json())
Exemplo n.º 7
0
    def check(self, text, language=None):
        """Spell-check *text* via the remote spellingchecker service.

        The service echoes the text with every mistake wrapped between
        START_MARKER and END_MARKER; the offsets of the mistakes in the
        original text are recovered from the marker positions.

        :param text: text to check
        :param language: accepted but not used here — presumably handled by
            the service configuration; TODO confirm
        :return dict: {"errors": [{"startOffset", "text", "type"}, ...]}
        :raises SuperdeskApiError.internalError: on a non-200 response
        """
        check_url = API_URL.format(method="spellingchecker")
        data = {
            "key": self.api_key,
            "input": text,
            "startmarker": START_MARKER,
            "endmarker": END_MARKER,
        }
        r = requests.post(check_url, data=data, timeout=self.CHECK_TIMEOUT)
        if r.status_code != 200:
            raise SuperdeskApiError.internalError(
                "Unexpected return code from {}".format(self.name))

        data = r.json()

        err_list = []
        check_data = {"errors": err_list}
        len_end_marker = len(END_MARKER)
        output = data["spellingchecker"]["output"]
        marked = output["marked"].split(START_MARKER)
        # the first item in "marked" is unmarked text, we start our index there
        text_idx = len(marked.pop(0))

        for marked_part in marked:
            # the mistake spans from the start marker up to the end marker
            mistake = marked_part[:marked_part.find(END_MARKER)]
            ercorr_data = {
                "startOffset": text_idx,
                "text": mistake,
                "type": "spelling",
            }
            err_list.append(ercorr_data)
            # advance past this part, discounting the consumed end marker
            text_idx += len(marked_part) - len_end_marker

        return check_data
Exemplo n.º 8
0
 def _save_cropped_image(self, file_stream, original, doc):
     """
     Saves the cropped image and returns the crop dictionary
     :param file_stream: cropped image stream
     :param original: original rendition
     :param doc: crop data
     :return dict: Crop values
     :raises SuperdeskApiError.internalError
     """
     crop = {}
     # set once media is stored, so cleanup never hits an unbound name
     file_id = None
     try:
         file_name, content_type, metadata = process_file_from_stream(file_stream,
                                                                      content_type=original.get('mimetype'))
         file_stream.seek(0)
         file_id = superdesk.app.media.put(file_stream, filename=file_name,
                                           content_type=content_type,
                                           resource='upload',
                                           metadata=metadata)
         crop['media'] = file_id
         crop['mimetype'] = content_type
         crop['href'] = url_for_media(file_id, content_type)
         crop['CropTop'] = doc.get('CropTop', None)
         crop['CropLeft'] = doc.get('CropLeft', None)
         crop['CropRight'] = doc.get('CropRight', None)
         crop['CropBottom'] = doc.get('CropBottom', None)
         return crop
     except Exception as ex:
         if file_id is not None:
             # best-effort cleanup; the former bare except is narrowed
             try:
                 superdesk.app.media.delete(file_id)
             except Exception:
                 pass
         raise SuperdeskApiError.internalError(
             'Generating crop failed: {}'.format(str(ex)), exception=ex)
Exemplo n.º 9
0
    def update(self, provider, update):
        """Fetch new items from an ingest provider.

        :param provider: ingest provider details dict
        :param update: update to be applied on the provider
        :return: list of articles for the Ingest collection (possibly empty)
        :raises SuperdeskApiError.internalError: if the provider is closed
        :raises SuperdeskIngestError: if fetching items from the provider failed
        """
        is_closed = provider.get('is_closed', False)

        # legacy records may carry a datetime here; treat that as "open"
        if isinstance(is_closed, datetime):
            is_closed = False

        if is_closed:
            raise SuperdeskApiError.internalError('Ingest Provider is closed')

        try:
            items = self._update(provider, update)
        except SuperdeskIngestError as error:
            # ingest-level failures close the provider before re-raising
            self.close_provider(provider, error)
            raise error
        return items or []
Exemplo n.º 10
0
    def update(self, provider, update):
        """
        Clients consuming Ingest Services should invoke this to get items from the provider.

        :param provider: Ingest Provider Details.
        :type provider: dict :py:class: `superdesk.io.ingest_provider_model.IngestProviderResource`
        :param update: Any update that is required on provider.
        :type update: dict
        :return: a list of articles which can be saved in Ingest Collection.
        :raises SuperdeskApiError.internalError if Provider is closed
        :raises SuperdeskIngestError if failed to get items from provider
        """
        if self._is_closed(provider):
            raise SuperdeskApiError.internalError('Ingest Provider is closed')
        else:
            try:
                # remember the provider for the logging helpers, then time the run
                self._provider = provider
                self._log_msg("Start update execution.")
                self._timer.start('update')

                return self._update(provider, update) or []
            except SuperdeskIngestError as error:
                # ingest-level failures close the provider before re-raising
                self.close_provider(provider, error)
                raise error
            finally:
                # always log elapsed time, even on error or early return
                self._log_msg(
                    "Stop update execution. Exec time: {:.4f} secs.".format(
                        self._timer.stop('update')))
                # just in case stop all timers
                self._timer.stop_all()
 def factory(field):
     """Build the filter-condition field object matching *field*.

     Known enum members map to their specialised field classes; a field
     that is not an enum member is resolved as a custom vocabulary —
     text vocabularies get a custom text field, others a controlled
     vocabulary field.

     :param field: field name to resolve
     :raises SuperdeskApiError.internalError: for an unknown field that
         is not a vocabulary either
     """
     if field not in FilterConditionFieldsEnum.__members__:
         vocabulary = get_resource_service('vocabularies').find_one(req=None, _id=field)
         if vocabulary:
             if vocabulary['field_type'] == 'text':
                 return FilterConditionCustomTextField(field)
             return FilterConditionControlledVocabularyField(field)
         raise SuperdeskApiError.internalError('Invalid filter conditions field %s' % field)

     # dispatch table replaces the long elif chain; anything not listed
     # falls back to the generic FilterConditionField
     special_fields = {
         FilterConditionFieldsEnum.desk: FilterConditionDeskField,
         FilterConditionFieldsEnum.stage: FilterConditionStageField,
         FilterConditionFieldsEnum.anpa_category: FilterConditionCategoryField,
         FilterConditionFieldsEnum.genre: FilterConditionGenreField,
         FilterConditionFieldsEnum.sms: FilterConditionSmsField,
         FilterConditionFieldsEnum.subject: FilterConditionSubjectField,
         FilterConditionFieldsEnum.urgency: FilterConditionUrgencyField,
         FilterConditionFieldsEnum.priority: FilterConditionPriorityField,
         FilterConditionFieldsEnum.place: FilterConditionPlaceField,
         FilterConditionFieldsEnum.ingest_provider: FilterConditionIngestProviderField,
         FilterConditionFieldsEnum.embargo: FilterConditionEmbargoField,
     }
     field_class = special_fields.get(FilterConditionFieldsEnum[field], FilterConditionField)
     return field_class(field)
Exemplo n.º 12
0
 def server_error_handler(error):
     """Log server errors and return a generic internal-error response.

     Reports to Sentry when the app has it configured, logs the traceback
     and converts the error through the common client error handler.

     :param error: the unhandled exception/error being processed
     """
     # default of None prevents AttributeError when sentry isn't configured
     if getattr(app, 'sentry', None):
         app.sentry.captureException()
     logger.exception(error)
     return_error = SuperdeskApiError.internalError()
     return client_error_handler(return_error)
Exemplo n.º 13
0
    def update(self, id, updates, original):
        """Publish the archived item identified by *id*.

        Composite items get their package items published first.  The item
        is saved twice: once to persist the incoming changes, then again to
        record the published state; non-composite items are also queued for
        transmission and moved to the publish stage in between.

        :param id: item id
        :param updates: changes being applied
        :param original: stored item the updates apply to
        :raises SuperdeskApiError.badRequestError: when a required key is
            missing on the article
        :raises SuperdeskApiError.internalError: on any other failure
        """
        archived_item = super().find_one(req=None, _id=id)
        try:
            if archived_item['type'] == 'composite':
                self.__publish_package_items(archived_item, updates[config.LAST_UPDATED])

            # document is saved to keep the initial changes
            self.backend.update(self.datasource, id, updates, original)
            original.update(updates)

            if archived_item['type'] != 'composite':
                # queue only text items
                self.queue_transmission(original)
                task = self.__send_to_publish_stage(original)
                if task:
                    updates['task'] = task

            # document is saved to change the status
            updates[config.CONTENT_STATE] = 'published'
            item = self.backend.update(self.datasource, id, updates, original)
            original.update(updates)
            user = get_user()
            push_notification('item:publish', item=str(item.get('_id')), user=str(user))
            original.update(super().find_one(req=None, _id=id))
        except KeyError as e:
            raise SuperdeskApiError.badRequestError(
                message="Key is missing on article to be published: {}"
                .format(str(e)))
        except Exception as e:
            # was logger.error("...%s".format(id), e): str.format without a
            # placeholder never inserted the id; use lazy %-args and log the
            # traceback as well
            logger.exception("Something bad happened while publishing %s", id)
            raise SuperdeskApiError.internalError(message="Failed to publish the item: {}"
                                                  .format(str(e)))
Exemplo n.º 14
0
 def crop_and_store_file(self, doc, content, filename, content_type):
     """Crop *content* if requested, store it and generate renditions.

     :param doc: document updated in place with media/mimetype/filemeta/renditions
     :param content: image file stream
     :param filename: original file name
     :param content_type: MIME type of the stream
     :raises SuperdeskApiError.internalError: when storing or renditions fail
     """
     # retrieve file name and metadata from file
     file_name, content_type, metadata = process_file_from_stream(content, content_type=content_type)
     # crop the file if needed, can change the image size
     was_cropped, out = crop_image(content, filename, doc)
     # the length in metadata could be updated if it was cropped
     if was_cropped:
         file_name, content_type, metadata_after_cropped = process_file_from_stream(out, content_type=content_type)
         # when cropped, metadata are reseted. Then we update the previous metadata variable
         metadata['length'] = metadata_after_cropped['length']
     # ids stored so far; previously unbound in the handler if put() failed,
     # which turned the original error into a NameError
     inserted = []
     try:
         logger.debug('Going to save media file with %s ' % file_name)
         out.seek(0)
         file_id = app.media.put(out, filename=file_name, content_type=content_type,
                                 resource=self.datasource, metadata=metadata)
         doc['media'] = file_id
         doc['mimetype'] = content_type
         doc['filemeta'] = decode_metadata(metadata)
         inserted = [doc['media']]
         file_type = content_type.split('/')[0]
         rendition_spec = config.RENDITIONS['avatar']
         renditions = generate_renditions(out, file_id, inserted, file_type,
                                          content_type, rendition_spec, url_for_media)
         doc['renditions'] = renditions
     except Exception as io:
         logger.exception(io)
         for file_id in inserted:
             delete_file_on_error(doc, file_id)
         raise SuperdeskApiError.internalError('Generating renditions failed')
Exemplo n.º 15
0
 def crop_and_store_file(self, doc, content, filename, content_type):
     """Crop *content* when needed, store it and build renditions.

     :param doc: document updated in place with media/mimetype/filemeta/renditions
     :param content: image file stream
     :param filename: original file name
     :param content_type: MIME type of the stream
     :raises SuperdeskApiError.internalError: when storing or renditions fail
     """
     # retrieve file name and metadata from file
     file_name, content_type, metadata = process_file_from_stream(
         content, content_type=content_type)
     # crop the file if needed, can change the image size
     was_cropped, out = crop_image(content, filename, doc)
     # the length in metadata could be updated if it was cropped
     if was_cropped:
         file_name, content_type, metadata_after_cropped = process_file_from_stream(
             out, content_type=content_type)
         # when cropped, metadata are reseted. Then we update the previous metadata variable
         metadata['length'] = metadata_after_cropped['length']
     # ids stored so far; previously unbound in the handler if put() failed
     inserted = []
     try:
         logger.debug('Going to save media file with %s ' % file_name)
         out.seek(0)
         file_id = app.media.put(out,
                                 filename=file_name,
                                 content_type=content_type,
                                 resource=self.datasource,
                                 metadata=metadata)
         doc['media'] = file_id
         doc['mimetype'] = content_type
         set_filemeta(doc, decode_metadata(metadata))
         inserted = [doc['media']]
         file_type = content_type.split('/')[0]
         rendition_spec = config.RENDITIONS['avatar']
         renditions = generate_renditions(out, file_id, inserted, file_type,
                                          content_type, rendition_spec,
                                          url_for_media)
         doc['renditions'] = renditions
     except Exception as io:
         for file_id in inserted:
             delete_file_on_error(doc, file_id)
         raise SuperdeskApiError.internalError(
             'Generating renditions failed', exception=io)
Exemplo n.º 16
0
 def _save_cropped_image(self, file_stream, original, doc):
     """
     Saves the cropped image and returns the crop dictionary
     :param file_stream: cropped image stream
     :param original: original rendition
     :param doc: crop data
     :return dict: Crop values
     :raises SuperdeskApiError.internalError
     """
     crop = {}
     # set once media is stored, so cleanup never hits an unbound name
     file_id = None
     try:
         file_name, content_type, metadata = process_file_from_stream(
             file_stream, content_type=original.get('mimetype'))
         file_stream.seek(0)
         file_id = superdesk.app.media.put(file_stream,
                                           filename=file_name,
                                           content_type=content_type,
                                           resource='upload',
                                           metadata=metadata)
         crop['media'] = file_id
         crop['mimetype'] = content_type
         crop['href'] = url_for_media(file_id, content_type)
         crop['CropTop'] = doc.get('CropTop', None)
         crop['CropLeft'] = doc.get('CropLeft', None)
         crop['CropRight'] = doc.get('CropRight', None)
         crop['CropBottom'] = doc.get('CropBottom', None)
         return crop
     except Exception as ex:
         if file_id is not None:
             # best-effort cleanup; the former bare except is narrowed
             try:
                 superdesk.app.media.delete(file_id)
             except Exception:
                 pass
         raise SuperdeskApiError.internalError(
             'Generating crop failed: {}'.format(str(ex)), exception=ex)
    def put(self, content, filename=None, content_type=None, metadata=None):
        """Store *content* under *filename* in the Amazon S3 container.

        If an object with that name already exists the name is returned
        unchanged.  The content type is recorded so the file can be
        identified correctly when retrieved later.

        :param content: file content/stream to upload
        :param filename: object name, also the id returned to the caller
        :param content_type: MIME type stored with the object
        :param metadata: metadata dict, converted to the Amazon format
        :return: the name under which the content is stored
        :raises SuperdeskApiError.internalError: when the upload fails
        """
        logger.debug('Going to save media file with %s ' % filename)
        found, _ = self._check_exists(filename, raise_error=False)
        if found:
            return filename

        try:
            amazon_metadata = self.transform_metadata_to_amazon_format(metadata)
            response = self.conn.upload(filename,
                                        content,
                                        self.container_name,
                                        content_type=content_type,
                                        headers=amazon_metadata)
            if response.status_code not in (200, 201):
                raise SuperdeskApiError.internalError(
                    'Uploading file to amazon S3 failed')
            return filename
        except Exception as ex:
            logger.exception(ex)
            raise
Exemplo n.º 18
0
def download_file_from_url(url, request_kwargs=None):
    """Download file from given url.

    In case url is relative it will prefix it with current host.

    :param url: file url
    :param request_kwargs: extra keyword args forwarded to ``requests.get``
    :return: tuple of (BytesIO content, generated name, content type)
    :raises SuperdeskApiError.internalError: on a non-2xx response
    """

    if not request_kwargs:
        request_kwargs = {}

    try:
        rv = requests.get(url,
                          headers={"User-Agent": "Superdesk-1.0"},
                          timeout=(5, 25),
                          **request_kwargs)
    except requests.exceptions.MissingSchema:  # any route will do here, we only need host
        rv = requests.get(urljoin(
            url_for("static", filename="x", _external=True), url),
                          timeout=15,
                          **request_kwargs)
    if rv.status_code not in (200, 201):
        raise SuperdeskApiError.internalError(
            "Failed to retrieve file from URL: %s" % url)
    content = BytesIO(rv.content)
    content_type = rv.headers.get("content-type", "image/jpeg").split(";")[0]
    content_type = fix_content_type(content_type, content)
    ext = str(content_type).split("/")[1]
    # join id and extension with a dot so the name has a usable suffix
    # (previously produced names like "<id>jpeg" with no separator)
    name = "{}.{}".format(ObjectId(), ext)
    return content, name, content_type
Exemplo n.º 19
0
    def update(self, provider, update):
        """
        Pull new items from an ingest provider unless it is closed.

        :param provider: ingest provider details dict
        :param update: update to be applied on the provider
        :return: list of articles to save in the Ingest collection
        :raises SuperdeskApiError.internalError: if the provider is closed
        :raises SuperdeskIngestError: if fetching items from the provider failed
        """
        closed = provider.get('is_closed', False)
        if isinstance(closed, datetime):
            # legacy data: a datetime in is_closed means the provider is open
            closed = False

        if closed:
            raise SuperdeskApiError.internalError('Ingest Provider is closed')

        try:
            return self._update(provider, update) or []
        except SuperdeskIngestError as error:
            # close the provider on ingest failure, then propagate
            self.close_provider(provider, error)
            raise error
Exemplo n.º 20
0
    def update(self, id, updates, original):
        """
        Handles workflow of each Publish, Corrected and Killed.

        Composite items get their package items published first.  For
        text-like items a digital (takes) package is queued when digital
        subscribers apply, then the wire version is queued.  Finally the
        archive is updated and a publish notification is pushed.

        :param id: item id
        :param updates: changes being applied
        :param original: stored item the updates apply to
        :raises SuperdeskApiError.badRequestError: when a required key is
            missing on the article
        :raises SuperdeskApiError.internalError: on any other failure
        """
        try:
            user = get_user()
            last_updated = updates.get(config.LAST_UPDATED, utcnow())

            if original[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE:
                self._publish_package_items(original, updates)

            queued_digital = False
            package = None

            if original[ITEM_TYPE] != CONTENT_TYPE.COMPOSITE:
                # if target_for is set the we don't to digital client.
                if not updates.get('targeted_for', original.get('targeted_for')):
                    # check if item is in a digital package
                    package = self.takes_package_service.get_take_package(original)

                    if package:
                        queued_digital = self._publish_takes_package(package, updates, original, last_updated)
                    else:
                        '''
                        If type of the item is text or preformatted then item need to be sent to digital subscribers.
                        So, package the item as a take.
                        '''
                        updated = copy(original)
                        updated.update(updates)

                        if original[ITEM_TYPE] in [CONTENT_TYPE.TEXT, CONTENT_TYPE.PREFORMATTED] and \
                                self.sending_to_digital_subscribers(updated):
                            # create a takes package
                            package_id = self.takes_package_service.package_story_as_a_take(updated, {}, None)
                            updates[LINKED_IN_PACKAGES] = updated[LINKED_IN_PACKAGES]
                            package = get_resource_service(ARCHIVE).find_one(req=None, _id=package_id)
                            queued_digital = self._publish_takes_package(package, updates, original, last_updated)

                # queue only text items
                queued_wire = \
                    self.publish(doc=original, updates=updates, target_media_type=WIRE if package else None)

                queued = queued_digital or queued_wire
                if not queued:
                    # logger.error, not .exception: there is no active
                    # exception here, so no traceback exists to attach
                    logger.error('Nothing is saved to publish queue for story: {} for action: {}'.
                                 format(original[config.ID_FIELD], self.publish_type))

            self._update_archive(original=original, updates=updates, should_insert_into_versions=False)
            push_notification('item:publish', item=str(id), unique_name=original['unique_name'],
                              desk=str(original.get('task', {}).get('desk', '')),
                              user=str(user.get(config.ID_FIELD, '')))
        except SuperdeskApiError as e:
            raise e
        except KeyError as e:
            raise SuperdeskApiError.badRequestError(
                message="Key is missing on article to be published: {}".format(str(e)))
        except Exception as e:
            # was "%s".format(id): str.format without a placeholder dropped
            # the id entirely; pass it as a lazy %-style logging argument
            logger.exception("Something bad happened while publishing %s", id)
            raise SuperdeskApiError.internalError(message="Failed to publish the item: {}".format(str(e)))
 def update_metadata(self, key, metadata):
     """Push updated metadata for object *key* to the Amazon bucket.

     A falsy *metadata* is a no-op.

     :raises SuperdeskApiError.internalError: when the update fails
     """
     if not metadata:
         return
     amazon_metadata = self.transform_metadata_to_amazon_format(metadata)
     response = self.conn.update_metadata(key, amazon_metadata, bucket=self.container_name)
     if response.status_code not in (200, 201):
         payload = "Updating metadata for file %s failed" % key
         raise SuperdeskApiError.internalError(payload=payload)
Exemplo n.º 22
0
 def update(self, provider):
     """Return new ingest items from *provider*, unless it is closed.

     :raises SuperdeskApiError.internalError: if the provider is closed
     """
     is_closed = provider.get('is_closed', False)
     # a datetime here is legacy data; treat the provider as open
     if isinstance(is_closed, datetime):
         is_closed = False
     if is_closed:
         raise SuperdeskApiError.internalError('Ingest Provider is closed')
     return self._update(provider) or []
Exemplo n.º 23
0
 def update(self, provider):
     """Fetch items from an open ingest provider.

     :raises SuperdeskApiError.internalError: if the provider is closed
     """
     closed_flag = provider.get('is_closed', False)
     # legacy records may store a datetime in is_closed; that means "open"
     closed = False if isinstance(closed_flag, datetime) else closed_flag
     if closed:
         raise SuperdeskApiError.internalError('Ingest Provider is closed')
     items = self._update(provider)
     return items or []
Exemplo n.º 24
0
def download_file_from_url(url):
    """Download *url* and return (stream, generated name, mime type).

    :param url: file url
    :return: tuple of (BytesIO content, generated file name, mime type)
    :raises SuperdeskApiError.internalError: on a non-2xx response
    """
    rv = requests.get(url, timeout=15)
    if rv.status_code not in (200, 201):
        raise SuperdeskApiError.internalError('Failed to retrieve file from URL: %s' % url)

    # detect the real type from the content rather than trusting headers
    mime = magic.from_buffer(rv.content, mime=True).decode('UTF-8')
    ext = mime.split('/')[1]
    # join id and extension with a dot so the name has a usable suffix
    # (previously produced names like "<id>jpeg" with no separator)
    name = str(ObjectId()) + '.' + ext
    return BytesIO(rv.content), name, mime
Exemplo n.º 25
0
def download_file_from_url(url):
    """Download *url* and return (stream, stub file name, mime type).

    :param url: file url
    :return: tuple of (BytesIO content, "stub.<ext>" name, mime type)
    :raises SuperdeskApiError.internalError: on a non-2xx response
    """
    # a timeout keeps a stalled server from hanging the request forever,
    # consistent with the other download helpers in this codebase
    rv = requests.get(url, timeout=15)
    if rv.status_code not in (200, 201):
        raise SuperdeskApiError.internalError('Failed to retrieve file from URL: %s' % url)

    # detect the real type from the content rather than trusting headers
    mime = magic.from_buffer(rv.content, mime=True).decode('UTF-8')
    ext = mime.split('/')[1]
    name = 'stub.' + ext
    return BytesIO(rv.content), name, mime
Exemplo n.º 26
0
def download_file_from_url(url):
    """Download *url* and return (stream, generated name, mime type).

    :param url: file url
    :return: tuple of (BytesIO content, generated file name, mime type)
    :raises SuperdeskApiError.internalError: on a non-2xx response
    """
    rv = requests.get(url, timeout=15)
    if rv.status_code not in (200, 201):
        raise SuperdeskApiError.internalError('Failed to retrieve file from URL: %s' % url)

    # detect the real type from the content rather than trusting headers
    mime = magic.from_buffer(rv.content, mime=True)
    ext = str(mime).split('/')[1]
    # dot separator so consumers see a proper file extension
    # (previously produced names like "<id>jpeg" with no separator)
    name = str(ObjectId()) + '.' + ext
    return BytesIO(rv.content), name, str(mime)
Exemplo n.º 27
0
    def update(self, id, updates, original):
        """
        Handles workflow of each Publish, Corrected, Killed and TakeDown.

        :param id: item id
        :param updates: changes being applied as part of the publish action
        :param original: the stored item the updates apply to
        :raises SuperdeskApiError.badRequestError: when a required key is
            missing on the article
        :raises SuperdeskApiError.internalError: on any other failure
        """
        try:
            user = get_user()
            auto_publish = updates.get("auto_publish", False)

            # unlock the item
            set_unlock_updates(updates)

            if original[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE:
                # packages: publish the contained items first
                self._publish_package_items(original, updates)
                self._update_archive(original, updates, should_insert_into_versions=auto_publish)
            else:
                self._publish_associated_items(original, updates)
                # work on a merged copy so original/updates stay untouched
                updated = deepcopy(original)
                updated.update(deepcopy(updates))

                if updates.get(ASSOCIATIONS):
                    self._refresh_associated_items(updated, skip_related=True)  # updates got lost with update

                if updated.get(ASSOCIATIONS):
                    self._fix_related_references(updated, updates)

                # let subscribers react before the archive is updated
                signals.item_publish.send(self, item=updated)
                self._update_archive(original, updates, should_insert_into_versions=auto_publish)
                self.update_published_collection(published_item_id=original[config.ID_FIELD], updated=updated)

            # imported lazily — presumably to avoid a circular import at
            # module load; confirm before moving to the top of the file
            from apps.publish.enqueue import enqueue_published

            enqueue_published.apply_async()

            push_notification(
                "item:publish",
                item=str(id),
                unique_name=original["unique_name"],
                desk=str(original.get("task", {}).get("desk", "")),
                user=str(user.get(config.ID_FIELD, "")),
            )

            if updates.get("previous_marked_user") and not updates.get("marked_for_user"):
                # send notification so that marked for me list can be updated
                get_resource_service("archive").handle_mark_user_notifications(updates, original, False)

        except SuperdeskApiError:
            raise
        except KeyError as e:
            logger.exception(e)
            raise SuperdeskApiError.badRequestError(
                message=_("Key is missing on article to be published: {exception}").format(exception=str(e))
            )
        except Exception as e:
            logger.exception(e)
            raise SuperdeskApiError.internalError(
                message=_("Failed to publish the item: {id}").format(id=str(id)), exception=e
            )
Exemplo n.º 28
0
def generate_from_highcharts(options,
                             mimetype=MIME_TYPES.PNG,
                             base64=True,
                             scale=1,
                             width=None,
                             no_download=True):
    """Render a chart image via the Highcharts export server.

    :param options: Highcharts chart configuration dict
    :param mimetype: requested output MIME type
    :param base64: ask the server for a base64-encoded body if True
    :param scale: scale factor for the rendered image
    :param width: source width, recorded in options['exporting']
    :param no_download: ask the server not to force a download response
    :return: raw body of the export server response
    :raises SuperdeskApiError: when the server is unreachable or errors
    """
    # Outside of a Flask app context app.config raises RuntimeError,
    # in which case we fall back to the default host/port.
    try:
        host = app.config.get('HIGHCHARTS_SERVER_HOST', 'localhost')
        port = app.config.get('HIGHCHARTS_SERVER_PORT', '6060')
    except RuntimeError:
        host, port = 'localhost', '6060'

    # Record the desired width of the generated image on the chart config
    options.setdefault('exporting', {})['sourceWidth'] = width

    try:
        response = requests.post(
            'http://{}:{}'.format(host, port),
            headers={'Content-Type': 'application/json'},
            json={
                'options': options,
                'type': mimetype,
                'b64': base64,
                'scale': scale,
                'noDownload': no_download,
            },
        )
    except requests.exceptions.ConnectionError as e:
        raise SuperdeskApiError.internalError(
            'Socket connection error: {}'.format(e))

    try:
        response.raise_for_status()
    except Exception as e:
        logger.exception(e)
        raise SuperdeskApiError.internalError(e)

    return response.content
Exemplo n.º 29
0
    def update(self, id, updates, original):
        """Handle the workflow for each of Publish, Corrected and Killed.

        :param id: id of the archive item being published
        :param updates: publishing changes to apply
        :param original: item as currently stored
        :raises SuperdeskApiError: when a required key is missing or
            publishing fails for any other reason
        """
        try:
            user = get_user()
            auto_publish = updates.get('auto_publish', False)

            if original[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE:
                # a package is published through its items
                self._publish_package_items(original, updates)
                self._update_archive(original,
                                     updates,
                                     should_insert_into_versions=auto_publish)
            else:
                self._refresh_associated_items(original)
                updated = deepcopy(original)
                updated.update(deepcopy(updates))

                if updates.get(ASSOCIATIONS):
                    self._refresh_associated_items(
                        updated)  # updates got lost with update

                # process takes package for published or corrected items;
                # if NO_TAKES is true but a takes package exists then still
                # process the takes package.
                if self.published_state != CONTENT_STATE.KILLED and \
                        (not app.config.get('NO_TAKES', False) or
                         self.takes_package_service.get_take_package_id(updated)):
                    self._process_takes_package(original, updated, updates)

                self._update_archive(original,
                                     updates,
                                     should_insert_into_versions=auto_publish)
                self.update_published_collection(
                    published_item_id=original[config.ID_FIELD],
                    updated=updated)

            from apps.publish.enqueue import enqueue_published
            enqueue_published.apply_async()

            push_notification('item:publish',
                              item=str(id),
                              unique_name=original['unique_name'],
                              desk=str(
                                  original.get('task', {}).get('desk', '')),
                              user=str(user.get(config.ID_FIELD, '')))
        except SuperdeskApiError:
            raise
        except KeyError as e:
            logger.exception(e)
            raise SuperdeskApiError.badRequestError(
                message="Key is missing on article to be published: {}".format(
                    str(e)))
        except Exception as e:
            # log the stack trace before wrapping in an opaque API error,
            # consistent with the KeyError branch above
            logger.exception(e)
            raise SuperdeskApiError.internalError(
                message="Failed to publish the item: {}".format(str(id)),
                exception=e)
 def update_metadata(self, key, metadata):
     """Update the stored metadata of an existing amazon object.

     :param key: object key in the bucket
     :param metadata: metadata mapping; a falsy value is a no-op
     :raises SuperdeskApiError: when amazon rejects the update
     """
     if not metadata:
         return
     # amazon expects metadata in its own key format
     amazon_metadata = self.transform_metadata_to_amazon_format(metadata)
     response = self.conn.update_metadata(key,
                                          amazon_metadata,
                                          bucket=self.container_name)
     if response.status_code not in (200, 201):
         raise SuperdeskApiError.internalError(
             payload='Updating metadata for file %s failed' % key)
Exemplo n.º 31
0
 def suggest(self, text, language=None):
     """Ask the remote spelling service for suggestions for *text*.

     :param text: input text to get suggestions for
     :param language: unused, kept for interface compatibility
     :return: dict with a 'suggestions' list
     :raises SuperdeskApiError: on a non-200 response from the service
     """
     payload = {
         "key": self.api_key,
         "input": text,
     }
     response = requests.post(API_URL.format(method="suggesties"), data=payload)
     if response.status_code != 200:
         raise SuperdeskApiError.internalError("Unexpected return code from {}".format(self.name))
     raw = response.json()['suggesties']['output'].get('suggesties', [])
     return {'suggestions': self.list2suggestions(raw)}
Exemplo n.º 32
0
    def update(self, id, updates, original):
        """Handle the workflow for each of Publish, Corrected and Killed.

        Publishes a single item (packaging it as a take for digital
        subscribers when needed) or a composite package, then updates
        the archive and notifies clients.

        :param id: id of the item being published
        :param updates: publishing changes to apply
        :param original: item as currently stored
        :raises SuperdeskApiError: when a key is missing or publishing fails
        """
        try:
            user = get_user()
            last_updated = updates.get(config.LAST_UPDATED, utcnow())

            if original[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE:
                self._publish_package_items(original, last_updated)

            set_sign_off(updates, original)
            queued_digital = False
            package_id = None

            if original[ITEM_TYPE] != CONTENT_TYPE.COMPOSITE:
                # if targeted_for is set then we don't send to the digital client
                if not updates.get('targeted_for', original.get('targeted_for')):
                    # check if item is in a digital package
                    package_id = TakesPackageService().get_take_package_id(original)

                    if package_id:
                        queued_digital, takes_package = self._publish_takes_package(package_id, updates,
                                                                                    original, last_updated)
                    else:
                        # if item is going to be sent to digital subscribers, package it as a take
                        if self.sending_to_digital_subscribers(updates):
                            updated = copy(original)
                            updated.update(updates)
                            # create a takes package
                            package_id = TakesPackageService().package_story_as_a_take(updated, {}, None)
                            original = get_resource_service('archive').find_one(req=None, _id=original['_id'])
                            queued_digital, takes_package = self._publish_takes_package(package_id, updates,
                                                                                        original, last_updated)

                # queue only text items
                queued_wire = \
                    self.publish(doc=original, updates=updates, target_media_type=WIRE if package_id else None)

                queued = queued_digital or queued_wire
                if not queued:
                    raise PublishQueueError.item_not_queued_error(Exception('Nothing is saved to publish queue'), None)

            self._set_version_last_modified_and_state(original, updates, last_updated)
            self._update_archive(original=original, updates=updates, should_insert_into_versions=False)
            push_notification('item:publish', item=str(id), unique_name=original['unique_name'],
                              desk=str(original.get('task', {}).get('desk', '')), user=str(user.get('_id', '')))
        except SuperdeskApiError:
            raise
        except KeyError as e:
            raise SuperdeskApiError.badRequestError(
                message="Key is missing on article to be published: {}".format(str(e)))
        except Exception:
            # BUGFIX: the original logged "...%s".format(id) — str.format does
            # not fill a %s placeholder, so the id never made it into the log.
            # Lazy %-args record the id and logger.exception adds the traceback.
            logger.exception("Something bad happened while publishing %s", id)
            raise SuperdeskApiError.internalError(message="Failed to publish the item: {}".format(str(id)))
Exemplo n.º 33
0
    def _suggest_server(self, text):
        """Query the Grammalecte server for suggestions on *text*.

        :param text: token to get suggestions for
        :return: dict with a 'suggestions' list (empty when unsupported)
        :raises SuperdeskApiError: on a non-200 response
        """
        # the suggestion endpoint only exists from server version 1.2 on
        if self.version_tuple < (1, 2):
            logger.warning("Suggestions not available with this server version")
            return {'suggestions': []}
        response = requests.post(urljoin(self.base_url, PATH_SUGGEST), data={"token": text})
        if response.status_code != 200:
            raise SuperdeskApiError.internalError("Unexpected return code from Grammalecte")
        return {'suggestions': self.list2suggestions(response.json().get('suggestions', []))}
Exemplo n.º 34
0
    def update(self, id, updates, original):
        """Handle the workflow for each of Publish, Corrected and Killed.

        :param id: id of the archive item being published
        :param updates: publishing changes to apply (``auto_publish``
            is consumed from this dict)
        :param original: item as currently stored
        :raises SuperdeskApiError: when a key is missing or publishing fails
        """
        try:
            user = get_user()
            auto_publish = updates.pop('auto_publish', False)

            if original[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE:
                self._publish_package_items(original, updates)
                self._update_archive(original,
                                     updates,
                                     should_insert_into_versions=auto_publish)
            else:
                self._refresh_associated_items(original)
                self._publish_associations(original, id)
                updated = deepcopy(original)
                updated.update(updates)

                if updates.get('associations'):
                    self._refresh_associated_items(
                        updated)  # updates got lost with update

                if self.published_state != CONTENT_STATE.KILLED:
                    self._process_takes_package(original, updated, updates)

                self._update_archive(original,
                                     updates,
                                     should_insert_into_versions=auto_publish)
                self.update_published_collection(
                    published_item_id=original[config.ID_FIELD],
                    updated=updated)

            from apps.publish.enqueue import enqueue_published
            enqueue_published.apply_async()

            push_notification('item:publish',
                              item=str(id),
                              unique_name=original['unique_name'],
                              desk=str(
                                  original.get('task', {}).get('desk', '')),
                              user=str(user.get(config.ID_FIELD, '')))
        except SuperdeskApiError:
            raise
        except KeyError as e:
            raise SuperdeskApiError.badRequestError(
                message="Key is missing on article to be published: {}".format(
                    str(e)))
        except Exception:
            # BUGFIX: the original logged "...%s".format(id), which left the
            # %s placeholder unfilled; lazy %-args record the actual id.
            logger.exception("Something bad happened while publishing %s", id)
            raise SuperdeskApiError.internalError(
                message="Failed to publish the item: {}".format(str(id)))
Exemplo n.º 35
0
    def update(self, id, updates, original):
        """Handle the workflow for each of Publish, Corrected, Killed and TakeDown.

        :param id: id of the archive item being published
        :param updates: publishing changes to apply
        :param original: item as currently stored
        :raises SuperdeskApiError: when a key is missing or publishing fails
        """
        try:
            user = get_user()
            auto_publish = updates.get('auto_publish', False)

            if original[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE:
                # packages are published through their items
                self._publish_package_items(original, updates)
                self._update_archive(
                    original, updates,
                    should_insert_into_versions=auto_publish)
            else:
                self._publish_associated_items(original, updates)
                merged = deepcopy(original)
                merged.update(deepcopy(updates))

                # association details get lost in the merge above,
                # so refresh them when updates carry associations
                if updates.get(ASSOCIATIONS):
                    self._refresh_associated_items(merged, skip_related=True)

                signals.item_publish.send(self, item=merged)
                self._update_archive(
                    original, updates,
                    should_insert_into_versions=auto_publish)
                self.update_published_collection(
                    published_item_id=original[config.ID_FIELD],
                    updated=merged)

            from apps.publish.enqueue import enqueue_published
            enqueue_published.apply_async()

            push_notification(
                'item:publish',
                item=str(id),
                unique_name=original['unique_name'],
                desk=str(original.get('task', {}).get('desk', '')),
                user=str(user.get(config.ID_FIELD, '')))
        except SuperdeskApiError:
            raise
        except KeyError as e:
            logger.exception(e)
            raise SuperdeskApiError.badRequestError(message=_(
                "Key is missing on article to be published: {exception}").format(
                    exception=str(e)))
        except Exception as e:
            logger.exception(e)
            raise SuperdeskApiError.internalError(
                message=_("Failed to publish the item: {id}").format(id=str(id)),
                exception=e)
Exemplo n.º 36
0
 def update(self, provider):
     """Run an ingest update for *provider*.

     :param provider: ingest provider configuration dict
     :return: ingested items, or an empty list when nothing was ingested
     :raises SuperdeskApiError: when the provider is closed
     :raises SuperdeskIngestError: when ingestion fails (provider is closed first)
     """
     is_closed = provider.get('is_closed', False)
     # a datetime value here is treated as "not closed" — presumably it
     # records a closing timestamp rather than a flag; confirm semantics
     if isinstance(is_closed, datetime):
         is_closed = False
     if is_closed:
         raise SuperdeskApiError.internalError('Ingest Provider is closed')
     try:
         return self._update(provider) or []
     except SuperdeskIngestError as error:
         self.close_provider(provider, error)
         raise error
 def _check_exists(self, id_or_filename, raise_error=True):
     """Check whether an object exists in the amazon container.

     :param id_or_filename: object id or filename to look up
     :param raise_error: when True, log lookup exceptions and raise on a
         failed (non-2xx) response instead of reporting success
     :return: tuple (exists, response-or-None)
     :raises SuperdeskApiError: when the response is not 200/201 and
         raise_error is True
     """
     try:
         obj = self.conn.get(id_or_filename, self.container_name)
     except Exception as ex:
         if raise_error:
             logger.exception(ex)
         # File not found
         return (False, None)
     # BUGFIX: this raise used to live inside the try block above, so the
     # generic except immediately swallowed it and callers never saw the
     # error even with raise_error=True.
     if obj.status_code not in (200, 201) and raise_error:
         message = "Retrieving file %s from amazon failed" % id_or_filename
         raise SuperdeskApiError.internalError(message)
     return (True, obj)
 def get_bucket_objects(self, marker=None, bucket=None):
     """Fetch the objects available in the specified bucket.

     :param marker: pagination marker to start listing from
     :param bucket: bucket name, defaults to the configured container
     :return: decoded response body listing the objects
     :raises SuperdeskApiError: when the listing request fails
     """
     target_bucket = bucket or self.container_name
     response = self.conn.get_bucket_objects(bucket=target_bucket,
                                             extra_params={"marker": marker})
     if response.status_code not in (200, 201):
         raise SuperdeskApiError.internalError(
             "Retrieving the list of files from bucket %s failed" % target_bucket)
     return response.content.decode("UTF-8")
Exemplo n.º 39
0
 def _get_response(self, resp, *status):
     """Decode *resp* as JSON, mapping unexpected status codes to API errors.

     :param resp: HTTP response object with .text and .status_code
     :param status: acceptable status codes
     :return: decoded JSON payload
     :raises SuperdeskApiError: for 400/409/404/500 outside *status*;
         any other unexpected code falls through and the payload is returned
     """
     payload = json_util.loads(resp.text)
     if resp.status_code not in status:
         error_map = {
             400: SuperdeskApiError.badRequestError,
             409: SuperdeskApiError.conflictError,
             404: SuperdeskApiError.notFoundError,
             500: SuperdeskApiError.internalError,
         }
         make_error = error_map.get(resp.status_code)
         if make_error is not None:
             raise make_error(message=payload)
     return payload
 def _check_exists(self, id_or_filename, raise_error=True):
     """Check whether an object exists in the amazon container.

     :param id_or_filename: object id or filename to look up
     :param raise_error: when True, log lookup exceptions and raise on a
         failed (non-2xx) response instead of reporting success
     :return: tuple (exists, response-or-None)
     :raises SuperdeskApiError: when the response is not 200/201 and
         raise_error is True
     """
     try:
         obj = self.conn.get(id_or_filename, self.container_name)
     except Exception as ex:
         if raise_error:
             logger.exception(ex)
         # File not found
         return (False, None)
     # BUGFIX: this raise used to live inside the try block above, so the
     # generic except immediately swallowed it and callers never saw the
     # error even with raise_error=True.
     if obj.status_code not in (200, 201) and raise_error:
         message = 'Retrieving file %s from amazon failed' % id_or_filename
         raise SuperdeskApiError.internalError(message)
     return (True, obj)
 def get_bucket_objects(self, marker=None, bucket=None):
     '''Fetch the objects available in the specified bucket.

     :param marker: pagination marker to start listing from
     :param bucket: bucket name, defaults to the configured container
     :return: decoded response body listing the objects
     :raises SuperdeskApiError: when the listing request fails
     '''
     target_bucket = bucket or self.container_name
     response = self.conn.get_bucket_objects(bucket=target_bucket,
                                             extra_params={'marker': marker})
     if response.status_code not in (200, 201):
         raise SuperdeskApiError.internalError(
             'Retrieving the list of files from bucket %s failed' % target_bucket)
     return response.content.decode('UTF-8')
Exemplo n.º 42
0
 def update(self, provider):
     """Run an ingest update for *provider*.

     :param provider: ingest provider configuration dict
     :return: ingested items, or an empty list when nothing was ingested
     :raises SuperdeskApiError: when the provider is closed
     :raises SuperdeskIngestError: when ingestion fails (provider is closed first)
     """
     is_closed = provider.get('is_closed', False)
     # a datetime value here is treated as "not closed" — presumably it
     # records a closing timestamp rather than a flag; confirm semantics
     if isinstance(is_closed, datetime):
         is_closed = False
     if is_closed:
         raise SuperdeskApiError.internalError('Ingest Provider is closed')
     try:
         return self._update(provider) or []
     except SuperdeskIngestError as error:
         self.close_provider(provider, error)
         raise error
Exemplo n.º 43
0
 def on_created(self, docs):
     """Send email to user with reset password token.

     For each newly created user that still awaits activation, stores a
     reset-password token and mails an activation link.

     :param docs: created user documents
     :raises SuperdeskApiError: when a token could not be stored
     """
     super().on_created(docs)
     reset_service = get_resource_service('reset_user_password')
     activate_ttl = app.config['ACTIVATE_ACCOUNT_TOKEN_TIME_TO_LIVE']
     for doc in docs:
         if not self.user_is_waiting_activation(doc):
             continue
         token_doc = {'user': doc['_id'], 'email': doc['email']}
         token_id = reset_service.store_reset_password_token(
             token_doc, doc['email'], activate_ttl, doc['_id'])
         if not token_id:
             raise SuperdeskApiError.internalError('Failed to send account activation email.')
         token_doc.update({'username': doc['username']})
         send_activate_account_email(token_doc, activate_ttl)
Exemplo n.º 44
0
 def on_created(self, docs):
     """Send email to user with reset password token.

     For each newly created user that still awaits activation, stores a
     reset-password token and mails an activation link.

     :param docs: created user documents
     :raises SuperdeskApiError: when a token could not be stored
     """
     super().on_created(docs)
     reset_service = get_resource_service('reset_user_password')
     activate_ttl = app.config['ACTIVATE_ACCOUNT_TOKEN_TIME_TO_LIVE']
     for doc in docs:
         if not self.user_is_waiting_activation(doc):
             continue
         token_doc = {'user': doc['_id'], 'email': doc['email']}
         token_id = reset_service.store_reset_password_token(
             token_doc, doc['email'], activate_ttl, doc['_id'])
         if not token_id:
             raise SuperdeskApiError.internalError('Failed to send account activation email.')
         token_doc.update({'username': doc['username']})
         send_activate_account_email(token_doc)
Exemplo n.º 45
0
 def enqueue_item(self, item):
     """Create the corresponding entries in the publish queue for *item*.

     :param item: item to enqueue
     :raises SuperdeskApiError: when a key is missing on the article or
         enqueueing fails for any other reason
     """
     try:
         self._enqueue_item(item)
     except SuperdeskApiError:
         raise
     except KeyError as e:
         raise SuperdeskApiError.badRequestError(
             message="Key is missing on article to be published: {}".format(str(e)))
     except Exception as e:
         # BUGFIX: the original logged "...%s".format(id) — the placeholder
         # was never filled and `id` referenced the *builtin*, not the item.
         logger.exception("Something bad happened while publishing %s",
                          item.get('_id'))
         raise SuperdeskApiError.internalError(message="Failed to publish the item: {}".format(str(e)))
Exemplo n.º 46
0
 def create(self, docs, **kwargs):
     """Run the requested macro on each doc, optionally committing the result.

     :param docs: dicts carrying 'item', 'macro' and an optional 'commit' flag
     :return: list of the executed macro names
     :raises SuperdeskApiError: when macro execution or the archive update fails
     """
     try:
         executed = []
         for doc in docs:
             doc['item'] = self.execute_macro(doc['item'], doc['macro'])
             if doc.get('commit'):
                 archive_service = superdesk.get_resource_service('archive')
                 stored = archive_service.find_one(req=None, _id=doc['item']['_id'])
                 changes = doc['item'].copy()
                 changes.pop('_id')
                 archive_service.update(stored['_id'], changes, stored)
             executed.append(doc['macro'])
         return executed
     except Exception as ex:
         raise SuperdeskApiError.internalError(str(ex))
Exemplo n.º 47
0
    def get_keywords(self, text):
        """Fetch ranked named entities for *text* from the Alchemy service.

        :param text: text to extract keywords from
        :return: list of entities (possibly empty)
        :raises SuperdeskApiError: when the API key is missing, the service
            is unreachable, or its response cannot be parsed
        """
        if not app.config['KEYWORDS_KEY_API']:
            raise SuperdeskApiError.notFoundError('AlchemyAPI key is not set')

        query = urllib.parse.urlencode({
            'apikey': app.config['KEYWORDS_KEY_API'],
            'outputMode': 'json',
        })
        url = '{}/text/TextGetRankedNamedEntities?{}'.format(
            app.config['KEYWORDS_BASE_URL'], query)

        try:
            result = self._http.post(url, data={'text': text})
        except Exception as ex:
            raise SuperdeskApiError.internalError('Fail to connect to Alchemy service', exception=ex)

        try:
            return result.json().get('entities', [])
        except Exception as ex:
            raise SuperdeskApiError.internalError('Fail to parse the response from Alchemy service', exception=ex)
Exemplo n.º 48
0
 def store_file(self, doc, content, filename, content_type):
     """Store *content* in the media storage and record it on *doc*.

     :param doc: document receiving the media id, mime type and metadata
     :param content: file-like object with the data to store
     :param filename: declared file name (metadata extraction may rename)
     :param content_type: declared content type of the stream
     :raises SuperdeskApiError: when saving the file fails
     """
     # retrieve file name and metadata from file
     file_name, content_type, metadata = process_file_from_stream(content, content_type=content_type)
     try:
         content.seek(0)
         file_id = doc['media_id']
         # only upload when there is nothing stored under this media id yet
         if not app.media.get(doc['media_id'], self.datasource):
             file_id = app.media.put(content, filename=file_name, content_type=content_type,
                                     resource=self.datasource, metadata=metadata, _id=ObjectId(doc['media_id']))
         doc['media'] = file_id
         doc['mime_type'] = content_type
         doc['filemeta'] = decode_metadata(metadata)
     except Exception as io:
         raise SuperdeskApiError.internalError('Saving file failed', exception=io)
Exemplo n.º 49
0
 def update(self, id, updates, original):
     """Publish the archived item with the given id.

     :param id: id of the archived item
     :param updates: changes to apply while publishing
     :param original: item as currently stored
     :return: the updated item
     :raises SuperdeskApiError: when the content id is unknown or
         publishing fails
     """
     archived_item = super().find_one(req=None, _id=id)
     try:
         if archived_item['type'] == 'composite':
             self.__publish_package_items(archived_item, updates[config.LAST_UPDATED])
         user = get_user()
         updates[config.CONTENT_STATE] = 'published'
         item = self.backend.update(self.datasource, id, updates, original)
         push_notification('item:publish', item=str(item.get('_id')), user=str(user))
         return item
     except KeyError:
         raise SuperdeskApiError.badRequestError(message="A non-existent content id is requested to publish")
     except Exception:
         # BUGFIX: logger.error("...%s".format(id), e) never interpolated the
         # id; logger.exception fills it lazily and records the traceback.
         logger.exception("Something bad happened while publishing %s", id)
         raise SuperdeskApiError.internalError(message="Failed to publish the item")
Exemplo n.º 50
0
def process_file_from_stream(content, content_type=None):
    """Read a stream, detect its real type, and extract file metadata.

    :param content: file-like object (may carry a content_type attribute)
    :param content_type: declared content type; sniffed when generic
    :return: tuple (file_name, content_type, encoded metadata dict)
    :raises SuperdeskApiError: when the file cannot be processed
    """
    declared = content_type or content.content_type
    stream = BytesIO(content.read())
    if 'application/' in declared:
        # generic type declared: sniff the real one from the bytes
        declared = magic.from_buffer(stream.getvalue(), mime=True).decode('UTF-8')
        stream.seek(0)
    file_type, _ext = declared.split('/')
    try:
        metadata = process_file(stream, file_type)
    except OSError:
        # PIL raises OSError when a supposed image is not a valid image
        raise SuperdeskApiError.internalError('Failed to process file')
    file_name = get_file_name(stream)
    stream.seek(0)
    metadata = encode_metadata(metadata)
    metadata['length'] = json.dumps(len(stream.getvalue()))
    return file_name, declared, metadata
Exemplo n.º 51
0
    def enqueue_item(self, item, content_type=None):
        """Creates the corresponding entries in the publish queue for the given item

        :param item: Item to enqueue
        :param content_type: item content type
        :return bool: True if item is queued else false.
        :raises SuperdeskApiError: when a key is missing or enqueueing fails
        """
        try:
            return self._enqueue_item(item, content_type)
        except SuperdeskApiError:
            raise
        except KeyError as e:
            raise SuperdeskApiError.badRequestError(
                message="Key is missing on article to be published: {}".format(str(e)))
        except Exception as e:
            # BUGFIX: the original logged "...%s".format(id) — the placeholder
            # was never filled and `id` referenced the *builtin*, not the item.
            logger.exception("Something bad happened while publishing %s",
                             item.get('_id'))
            raise SuperdeskApiError.internalError(message="Failed to publish the item: {}".format(str(e)), exception=e)
Exemplo n.º 52
0
    def update(self, id, updates, original):
        """Publish the archived item and queue it for transmission.

        :param id: id of the archived item
        :param updates: changes to apply while publishing (mutated with
            state and task info)
        :param original: item as currently stored (mutated with the result)
        :raises SuperdeskApiError: when a key is missing or publishing fails
        """
        archived_item = super().find_one(req=None, _id=id)
        try:
            any_channel_closed = False

            if archived_item['type'] == 'composite':
                self.__publish_package_items(archived_item, updates[config.LAST_UPDATED])

            # document is saved to keep the initial changes
            set_sign_off(updates, original)
            self.backend.update(self.datasource, id, updates, original)

            # document is saved to change the status
            if (original.get('publish_schedule') or updates.get('publish_schedule')) \
                    and original[config.CONTENT_STATE] not in ['published', 'killed', 'scheduled']:
                updates[config.CONTENT_STATE] = 'scheduled'
            else:
                updates[config.CONTENT_STATE] = self.published_state

            original.update(updates)
            get_component(ItemAutosave).clear(original['_id'])

            if archived_item['type'] != 'composite':
                # queue only text items
                any_channel_closed = self.queue_transmission(original)
                task = self.__send_to_publish_stage(original)
                if task:
                    updates['task'] = task

            self.backend.update(self.datasource, id, updates, original)
            user = get_user()
            push_notification('item:publish:closed:channels' if any_channel_closed else 'item:publish',
                              item=str(id), unique_name=archived_item['unique_name'],
                              desk=str(archived_item['task']['desk']), user=str(user.get('_id', '')))
            original.update(super().find_one(req=None, _id=id))
        except SuperdeskApiError:
            raise
        except KeyError as e:
            raise SuperdeskApiError.badRequestError(
                message="Key is missing on article to be published: {}"
                .format(str(e)))
        except Exception as e:
            # BUGFIX: logger.error("...%s".format(id), e) left the placeholder
            # unfilled; logger.exception records the id and the traceback.
            logger.exception("Something bad happened while publishing %s", id)
            raise SuperdeskApiError.internalError(message="Failed to publish the item: {}"
                                                  .format(str(e)))
Exemplo n.º 53
0
def download_file_from_url(url):
    """Download file from given url.

    In case url is relative it will prefix it with current host.

    :param url: file url
    :return: tuple of (BytesIO with the file content, generated file name, MIME type string)
    :raises SuperdeskApiError.internalError: when the file can't be retrieved
    """
    try:
        rv = requests.get(url, timeout=15)
    except requests.exceptions.MissingSchema:  # relative url — any route will do here, we only need host
        rv = requests.get(urljoin(url_for('static', filename='x', _external=True), url), timeout=15)

    if rv.status_code not in (200, 201):
        raise SuperdeskApiError.internalError('Failed to retrieve file from URL: %s' % url)

    # detect the real type from the content, not from the url
    mime = magic.from_buffer(rv.content, mime=True)
    ext = str(mime).split('/')[1]
    # separate name and extension with a dot; the original concatenated them
    # directly, yielding names like "abc123jpeg" with no usable extension
    name = '%s.%s' % (ObjectId(), ext)
    return BytesIO(rv.content), name, str(mime)
Exemplo n.º 54
0
    def update(self, id, updates, original):
        """
        Handles workflow of each Publish, Corrected and Killed.

        :param id: id of the archive item being published
        :param updates: changes being applied on top of the original item
        :param original: the stored item as it was before publishing
        :raises SuperdeskApiError: badRequestError when a required key is missing,
            internalError for any other unexpected failure
        """
        try:
            user = get_user()
            auto_publish = updates.get('auto_publish', False)

            if original[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE:
                # packages publish their items first, then the package itself
                self._publish_package_items(original, updates)
                self._update_archive(original, updates, should_insert_into_versions=auto_publish)
            else:
                self._refresh_associated_items(original)
                self._publish_associations(original, id)
                updated = deepcopy(original)
                updated.update(updates)

                if updates.get('associations'):
                    self._refresh_associated_items(updated)  # updates got lost with update

                if self.published_state != CONTENT_STATE.KILLED and not app.config.get('NO_TAKES', False):
                    self._process_takes_package(original, updated, updates)

                self._update_archive(original, updates, should_insert_into_versions=auto_publish)
                self.update_published_collection(published_item_id=original[config.ID_FIELD], updated=updated)

            # local import to avoid a circular dependency at module load time
            from apps.publish.enqueue import enqueue_published
            enqueue_published.apply_async()

            push_notification('item:publish', item=str(id),
                              unique_name=original['unique_name'],
                              desk=str(original.get('task', {}).get('desk', '')),
                              user=str(user.get(config.ID_FIELD, '')))
        except SuperdeskApiError:
            raise
        except KeyError as e:
            raise SuperdeskApiError.badRequestError(
                message="Key is missing on article to be published: {}".format(str(e)))
        except Exception as e:
            # lazy %-style logging args; the original called str.format() on a
            # '%s' template, so the item id was never interpolated into the log
            logger.exception("Something bad happened while publishing %s", id)
            raise SuperdeskApiError.internalError(message="Failed to publish the item: {}".format(str(e)))
Exemplo n.º 55
0
    def put(self, content, filename=None, content_type=None, metadata=None):
        """Save a new file using the storage system, preferably with the name specified.

        If there already exists a file with this name, the storage system may
        modify the filename as necessary to get a unique name. Depending on
        the storage system, a unique id or the actual name of the stored file
        will be returned. The content type argument is used to appropriately
        identify the file when it is retrieved.

        :param content: file content to store
        :param filename: desired name for the stored file
        :param content_type: MIME type recorded for later retrieval
        :param metadata: optional metadata stored as object headers
        :return: the filename under which the file is stored
        :raises SuperdeskApiError.internalError: when the S3 upload is rejected
        """
        logger.debug('Going to save media file with %s ' % filename)
        # only existence matters here; the existing file object itself is unused
        found, _ = self._check_exists(filename, raise_error=False)
        if found:
            return filename

        try:
            file_metadata = self.transform_metadata_to_amazon_format(metadata)
            res = self.conn.upload(filename, content, self.container_name, content_type=content_type,
                                   headers=file_metadata)
            if res.status_code not in (200, 201):
                raise SuperdeskApiError.internalError('Uploading file to amazon S3 failed')
            return filename
        except Exception as ex:
            logger.exception(ex)
            raise
Exemplo n.º 56
0
    def update(self, id, updates, original):
        """
        Handles workflow of each Publish, Corrected, Killed and TakeDown.

        :param id: id of the archive item being published
        :param updates: changes being applied on top of the original item
        :param original: the stored item as it was before publishing
        :raises SuperdeskApiError: badRequestError when a required key is missing,
            internalError for any other unexpected failure
        """
        try:
            user = get_user()
            auto_publish = updates.get('auto_publish', False)

            if original[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE:
                # packages publish their contained items first, then the package itself
                self._publish_package_items(original, updates)
                self._update_archive(original, updates, should_insert_into_versions=auto_publish)
            else:
                self._publish_associated_items(original, updates, publish=True)
                # work on a merged copy so `original` is left untouched for _update_archive
                updated = deepcopy(original)
                updated.update(deepcopy(updates))

                self._publish_associated_items(updated, updates)  # updates got lost with update

                self._update_archive(original, updates, should_insert_into_versions=auto_publish)
                self.update_published_collection(published_item_id=original[config.ID_FIELD], updated=updated)

            # local import to avoid a circular dependency at module load time
            from apps.publish.enqueue import enqueue_published
            enqueue_published.apply_async()

            push_notification('item:publish', item=str(id),
                              unique_name=original['unique_name'],
                              desk=str(original.get('task', {}).get('desk', '')),
                              user=str(user.get(config.ID_FIELD, '')))
        except SuperdeskApiError:
            raise
        except KeyError as e:
            logger.exception(e)
            raise SuperdeskApiError.badRequestError(
                message="Key is missing on article to be published: {}".format(str(e))
            )
        except Exception as e:
            # NOTE(review): the message formats the item id, not the error; the
            # error travels via exception= — confirm that is intended
            raise SuperdeskApiError.internalError(message="Failed to publish the item: {}".format(str(id)), exception=e)
Exemplo n.º 57
0
    def update(self, id, updates, original):
        """Publish an archive item, handling scheduling and takes packages.

        Saves the initial changes, moves the item to its published/scheduled
        state, publishes any digital takes package the item belongs to, queues
        transmission, and notifies clients.

        :param id: id of the archive item being published
        :param updates: changes being applied on top of the original item
        :param original: the stored item as it was before publishing
        :raises SuperdeskApiError: badRequestError when a required key is missing,
            internalError for any other unexpected failure
        """
        archived_item = super().find_one(req=None, _id=id)

        try:
            any_channel_closed = False

            if archived_item['type'] == 'composite':
                self.__publish_package_items(archived_item, updates[config.LAST_UPDATED])

            # document is saved to keep the initial changes
            set_sign_off(updates, original)
            self.backend.update(self.datasource, id, updates, original)

            # document is saved to change the status
            if (original.get('publish_schedule') or updates.get('publish_schedule')) \
                    and original[config.CONTENT_STATE] not in PUBLISH_STATES:
                updates[config.CONTENT_STATE] = 'scheduled'
            else:
                updates['publish_schedule'] = None
                updates[config.CONTENT_STATE] = self.published_state

            original.update(updates)
            get_component(ItemAutosave).clear(original['_id'])

            if archived_item['type'] != 'composite':
                # check if item is in a digital package
                package_id = TakesPackageService().get_take_package_id(original)
                if package_id:
                    # process the takes to form digital master file content
                    package, package_updates = self.process_takes(take=original, package_id=package_id)
                    package_updates[config.CONTENT_STATE] = self.published_state
                    resolve_document_version(document=package_updates,
                                             resource=ARCHIVE, method='PATCH',
                                             latest_doc=package)
                    self.backend.update(self.datasource, package['_id'], package_updates, package)
                    package.update(package_updates)
                    insert_into_versions(doc=package)

                    # send it to the digital channels
                    any_channel_closed_digital, queued_digital = \
                        self.publish(doc=package, updates=None, target_output_channels=DIGITAL)

                    self.update_published_collection(published_item=package)
                else:
                    any_channel_closed_digital = False
                    queued_digital = False

                # queue only text items
                any_channel_closed_wire, queued_wire = \
                    self.publish(doc=original, updates=updates, target_output_channels=WIRE if package_id else None)

                any_channel_closed = any_channel_closed_digital or any_channel_closed_wire
                queued = queued_digital or queued_wire

                if not queued:
                    raise PublishQueueError.item_not_queued_error(Exception('Nothing is saved to publish queue'), None)

            self.backend.update(self.datasource, id, updates, original)
            user = get_user()
            push_notification('item:publish:closed:channels' if any_channel_closed else 'item:publish',
                              item=str(id), unique_name=archived_item['unique_name'],
                              desk=str(archived_item.get('task', {}).get('desk', '')),
                              user=str(user.get('_id', '')))
            original.update(super().find_one(req=None, _id=id))
        except SuperdeskApiError:
            raise
        except KeyError as e:
            raise SuperdeskApiError.badRequestError(
                message="Key is missing on article to be published: {}"
                .format(str(e)))
        except Exception as e:
            # lazy %-style logging args; the original called str.format() on a
            # '%s' template (a no-op) and then passed the exception as the %s
            # argument, logging the error where the item id was intended
            logger.exception("Something bad happened while publishing %s", id)
            raise SuperdeskApiError.internalError(message="Failed to publish the item: {}"
                                                  .format(str(e)))