Example #1
    def on_create(self, docs):
        """Create corresponding item on file upload."""

        for doc in docs:
            if 'media' not in doc or doc['media'] is None:
                abort(400, description="No media found")

            file, content_type, metadata = self.get_file_from_document(doc)
            inserted = [doc['media']]
            file_type = content_type.split('/')[0]

            self._set_metadata(doc)

            try:
                doc[ITEM_TYPE] = self.type_av.get(file_type)
                doc[ITEM_STATE] = CONTENT_STATE.PROGRESS
                rendition_spec = get_renditions_spec(no_custom_crops=True)
                with timer('archive:renditions'):
                    renditions = generate_renditions(file, doc['media'], inserted, file_type,
                                                     content_type, rendition_spec, url_for_media)
                doc['renditions'] = renditions
                doc['mimetype'] = content_type
                set_filemeta(doc, metadata)

                add_activity('upload', 'uploaded media {{ name }}',
                             'archive', item=doc,
                             name=doc.get('headline', doc.get('mimetype')),
                             renditions=doc.get('renditions'))
            except Exception as io:
                logger.exception(io)
                for file_id in inserted:
                    delete_file_on_error(doc, file_id)
                abort(500)
Example #2
 def create(self, docs, **kwargs):
     with multiprocessing.Lock() as lock:
         with timer("prepopulate"):
             self._create(docs)
         if app.config.get("SUPERDESK_TESTING"):
             for provider in ["paimg", "aapmm"]:
                 if provider not in allowed_search_providers:
                     register_search_provider(provider, provider)
         return ["OK"]
Example #3
 def create(self, docs, **kwargs):
     with multiprocessing.Lock() as lock:
         with timer('prepopulate'):
             self._create(docs)
         if app.config.get('SUPERDESK_TESTING'):
             for provider in ['paimg', 'aapmm']:
                 if provider not in allowed_search_providers:
                     register_search_provider(provider, provider)
         return ['OK']
Example #4
    def find(self, query, params=None):
        if params is None:
            params = {}

        size = 25  # int(query.get('size', 25))
        page = math.ceil((int(query.get('from', 0)) + 1) / size)
        try:
            sort = query.get('sort')[0]
        except (IndexError, AttributeError, TypeError):
            sort = {'versioncreated': 'desc'}

        kwargs = {
            'pagenumber': page,
            'countperpage': size,
            'fields': ','.join(self.FIELDS),
            'Sort': get_api_sort(sort),
            'format': 'json',
            'DateFormat': 'u',
        }

        query_components = {}

        try:
            query_components['Text'] = query['query']['filtered']['query'][
                'query_string']['query']
        except (KeyError, TypeError):
            pass

        if params:
            if params.get('mediaTypes'):
                selected = [k for k, v in params['mediaTypes'].items() if v]
                if selected:
                    query_components['MediaType'] = '({})'.format(
                        ' OR '.join(selected))

        kwargs['query'] = ' '.join([
            '{}:{}'.format(key, val) for key, val in query_components.items()
            if val
        ])

        if params:
            for param, op in (('from', '>:'), ('to', '<:')):
                if params.get(param):
                    kwargs['query'] = '{} MediaDate{}{}'.format(
                        kwargs['query'], op, params[param]).strip()

        with timer('orange'):
            resp = self._auth_request(SEARCH_API, **kwargs)
            data = resp.json()

        with open('/tmp/resp.json', mode='w') as out:
            out.write(json.dumps(data, indent=2))

        items = self._parse_items(data)
        return OrangelogicListCursor(
            items, data['APIResponse']['GlobalInfo']['TotalCount'])
Example #5
 def _login(self):
     with timer('orange.login'):
         resp = self._request(
             AUTH_API,
             method='POST',
             Login=self.config.get('username'),
             Password=self.config.get('password'),
             format='json',
         )
     tokens[self.config['username']] = resp.json()['APIResponse']['Token']
Example #6
 def _login(self):
     with timer("orange.login"):
         resp = self._request(
             AUTH_API,
             method="POST",
             Login=self.config.get("username"),
             Password=self.config.get("password"),
             format="json",
         )
     tokens[self.config["username"]] = resp.json()["APIResponse"]["Token"]
Example #7
    def on_create(self, docs):
        """Create corresponding item on file upload."""

        for doc in docs:
            if 'media' not in doc or doc['media'] is None:
                abort(400, description="No media found")
            # initialized up front so the cleanup in the except block below
            # can reference them regardless of which branch was taken
            res = None
            inserted = []
            # check content type of video by python-magic
            content_type = magic.from_buffer(doc['media'].read(1024),
                                             mime=True)
            doc['media'].seek(0)
            file_type = content_type.split('/')[0]
            if file_type == 'video' and app.config.get("VIDEO_SERVER_ENABLE"):
                if not self.videoEditor.check_video_server():
                    raise SuperdeskApiError(
                        message="Cannot connect to videoserver",
                        status_code=500)
                # upload media to video server
                res, renditions, metadata = self.upload_file_to_video_server(
                    doc)
                # get thumbnails for timeline bar
                self.videoEditor.get_timeline_thumbnails(doc.get('media'), 40)
            else:
                file, content_type, metadata = self.get_file_from_document(doc)
                inserted = [doc['media']]
                # if no_custom_crops is set to False the custom crops are generated automatically on media upload
                # see (SDESK-4742)
                rendition_spec = get_renditions_spec(
                    no_custom_crops=app.config.get("NO_CUSTOM_CROPS"))
                with timer('archive:renditions'):
                    renditions = generate_renditions(file, doc['media'],
                                                     inserted, file_type,
                                                     content_type,
                                                     rendition_spec,
                                                     url_for_media)
            try:
                self._set_metadata(doc)
                doc[ITEM_TYPE] = self.type_av.get(file_type)
                doc[ITEM_STATE] = CONTENT_STATE.PROGRESS
                doc['renditions'] = renditions
                doc['mimetype'] = content_type
                set_filemeta(doc, metadata)
                add_activity('upload',
                             'uploaded media {{ name }}',
                             'archive',
                             item=doc,
                             name=doc.get('headline', doc.get('mimetype')),
                             renditions=doc.get('renditions'))
            except Exception as io:
                logger.exception(io)
                for file_id in inserted:
                    delete_file_on_error(doc, file_id)
                if res:
                    self.videoEditor.delete(res.get('_id'))
                abort(500)
Example #8
    def on_create(self, docs):
        """Create corresponding item on file upload."""

        for doc in docs:
            if "media" not in doc or doc["media"] is None:
                abort(400, description="No media found")
            # initialized up front so the cleanup in the except block below
            # can reference them regardless of which branch was taken
            res = None
            inserted = []
            # check content type of video by python-magic
            content_type = app.media._get_mimetype(doc["media"])
            doc["media"].seek(0)
            file_type = content_type.split("/")[0]
            if file_type == "video" and app.config.get("VIDEO_SERVER_ENABLED"):
                # upload media to video server
                res, renditions, metadata = self.upload_file_to_video_server(
                    doc)
                # get thumbnails for timeline bar
                self.video_editor.create_timeline_thumbnails(
                    doc.get("media"), 60)
            else:
                file, content_type, metadata = self.get_file_from_document(doc)
                inserted = [doc["media"]]
                # if no_custom_crops is set to False the custom crops are generated automatically on media upload
                # see (SDESK-4742)
                rendition_spec = get_renditions_spec(
                    no_custom_crops=app.config.get("NO_CUSTOM_CROPS"))
                with timer("archive:renditions"):
                    renditions = generate_renditions(file, doc["media"],
                                                     inserted, file_type,
                                                     content_type,
                                                     rendition_spec,
                                                     url_for_media)
            try:
                self._set_metadata(doc)
                doc[ITEM_TYPE] = self.type_av.get(file_type)
                doc[ITEM_STATE] = CONTENT_STATE.PROGRESS
                doc["renditions"] = renditions
                doc["mimetype"] = content_type
                set_filemeta(doc, metadata)
                add_activity(
                    "upload",
                    "uploaded media {{ name }}",
                    "archive",
                    item=doc,
                    name=doc.get("headline", doc.get("mimetype")),
                    renditions=doc.get("renditions"),
                )
            except Exception as io:
                logger.exception(io)
                for file_id in inserted:
                    delete_file_on_error(doc, file_id)
                if res:
                    self.video_editor.delete(res.get("_id"))
                abort(500)
Example #9
 def _auth_request(self, api, **kwargs):
     repeats = 2
     while repeats > 0:
         if not self.token:
             self._login()
         try:
             kwargs['token'] = self.token
             with timer('orange.request'):
                 return self._request(api, **kwargs)
         except HTTPError as err:
             logger.error(err)
             self._login()  # auth error
             repeats -= 1
             if repeats == 0:
                 raise
Example #10
    def get_file_from_document(self, doc):
        file = doc.get("media_fetched")
        if file:
            del doc["media_fetched"]
        else:
            content = doc["media"]
            res = process_file_from_stream(content, content_type=content.mimetype)
            file_name, content_type, metadata = res
            logger.debug("Going to save media file with %s " % file_name)
            content.seek(0)
            with timer("media:put.original"):
                doc["media"] = app.media.put(content, filename=file_name, content_type=content_type, metadata=metadata)
            return content, content_type, decode_metadata(metadata)

        return file, file.content_type, file.metadata
Example #11
    def _do_request(self, doc):
        desk_id = request.view_args["desk_id"]
        agg_type = request.view_args["agg_type"]
        timer_label = f"{agg_type} overview aggregation {desk_id!r}"
        if agg_type == "users":
            with timer(timer_label):
                doc["_items"] = self._users_aggregation(desk_id)
            return

        if agg_type == "stages":
            collection = "archive"
            desk_field = "task.desk"
            key = "stage"
            field = f"task.{key}"
        elif agg_type == "assignments":
            collection = "assignments"
            desk_field = "assigned_to.desk"
            key = "state"
            field = f"assigned_to.{key}"
        else:
            raise ValueError(f"Invalid overview aggregation type: {agg_type}")

        agg_query = {
            "filter": {
                "bool": {
                    "must_not": [{
                        "terms": {
                            ITEM_STATE: [
                                CONTENT_STATE.PUBLISHED,
                                CONTENT_STATE.SPIKED,
                                CONTENT_STATE.KILLED,
                                CONTENT_STATE.CORRECTED,
                            ]
                        }
                    }]
                }
            }
        }
        filter_bool = agg_query["filter"]["bool"]

        if desk_id != "all":
            filter_bool["must"] = [{"term": {desk_field: desk_id}}]

        agg_query["aggs"] = {"overview": {"terms": {"field": field}}}

        filters = doc.get("filters")
        if filters:
            should = []
            filter_bool.setdefault("must",
                                   []).append({"bool": {
                                       "should": should
                                   }})
            for f_name, f_data in filters.items():
                for text in f_data:
                    should.append({"match": {f_name: text}})

            # with filters we need whole documents, we get them with top_hits
            agg_query["aggs"]["overview"]["aggs"] = {
                "top_docs": {
                    "top_hits": {
                        "size": 100
                    }
                }
            }

        with timer(timer_label):
            response = app.data.elastic.search(agg_query,
                                               collection,
                                               params={"size": 0})

        doc["_items"] = [{
            "count": b["doc_count"],
            key: b["key"]
        } for b in response.hits["aggregations"]["overview"]["buckets"]]

        if filters:
            for idx, bucket in enumerate(
                    response.hits["aggregations"]["overview"]["buckets"]):
                docs = doc["_items"][idx]["docs"] = []
                for hit_doc in bucket["top_docs"]["hits"]["hits"]:
                    docs.append(hit_doc["_source"])
Example #12
 def test_timer(self, logger):
     with timer('foo'):
         time.sleep(0.1)
     logger.info.assert_called_once_with('%s: %.3fms', 'foo', ANY)
     self.assertAlmostEqual(0.1 * 1000, logger.info.call_args[0][2], 0)
Example #13
 def test_timer(self, logger):
     with timer("foo"):
         time.sleep(0.1)
     logger.info.assert_called_once_with("%s: %.3fms", "foo", ANY)
     self.assertEqual(100, int(logger.info.call_args[0][2]))
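Every example above uses timer as a context manager, and the two tests (Example #12 and Example #13) pin down its observable behaviour: after the block exits it calls logger.info('%s: %.3fms', name, elapsed_ms) with the elapsed time in milliseconds. A minimal sketch of a context manager with that behaviour, written here as an illustration (the actual Superdesk implementation may differ in details), could look like this:

import logging
import time
from contextlib import contextmanager

logger = logging.getLogger(__name__)


@contextmanager
def timer(name):
    """Log how long the wrapped block took, in milliseconds."""
    start = time.monotonic()
    try:
        yield
    finally:
        elapsed_ms = (time.monotonic() - start) * 1000
        # same format the tests above assert on: '%s: %.3fms'
        logger.info('%s: %.3fms', name, elapsed_ms)

It would be used exactly as in the snippets, e.g. with timer('archive:renditions'): around the work to be measured.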