def get_payload(self, obj):
    """Build the keyword arguments for ``es.index(**kwargs)`` for *obj*.

    Serializes the object via the ``ISerializeToJson`` multi-adapter,
    reduces/expands the resulting data and returns a dict ready to be
    passed to the ElasticSearch client.

    Returns ``None`` (after releasing the query blocker) when the adapter
    lookup or the serialization fails with ``ComponentLookupError``.
    """
    # Adapter lookup and serialization shared two byte-identical
    # except-blocks in the original; they are sequential with the same
    # abort handling, so a single try covers both.
    try:
        serializer = getMultiAdapter((obj, getRequest()), ISerializeToJson)
        data = serializer()
    except ComponentLookupError:
        logger.exception(
            'Abort ElasticSearch Indexing for {0}'.format(
                obj.absolute_url(),
            ),
        )
        # The caller blocked catalog queries before calling us;
        # release the block when aborting early.
        query_blocker.unblock()
        return
    self._reduce_data(data)
    if HAS_ARCHETYPES:
        # Archetypes-based content needs field-level fixups of the payload.
        self._fix_at_fields(obj, data)
    self._expand_rid(obj, data)
    self._expand_binary_data(obj, data)
    uid = api.content.get_uuid(obj)
    es_kwargs = dict(
        index=index_name(),
        doc_type='content',
        id=uid,
        pipeline=self._es_pipeline_name,
        body=data,
        request_timeout=es_config.request_timeout,
    )
    return es_kwargs
def index(self, obj, attributes=None):
    """Serialize *obj* and index it in ElasticSearch via the ingest pipeline.

    ``attributes`` is accepted for catalog-API compatibility but unused here.

    Blocks catalog queries for the duration of the operation; the block is
    always released in ``finally`` (the original leaked it on every early
    return and on unexpected exceptions).
    """
    start = time.time()
    query_blocker.block()
    try:
        es = get_ingest_client()
        if es is None:
            logger.warning('No ElasticSearch client available.')
            return
        self._check_for_ingest_pipeline(es)
        self._check_for_mapping(es)  # will also create the index
        # Adapter lookup and serialization share identical abort handling,
        # so one try block covers both steps.
        try:
            serializer = getMultiAdapter((obj, getRequest()), ISerializeToJson)
            data = serializer()
        except ComponentLookupError:
            logger.exception(
                'Abort ElasticSearch Indexing for {0}'.format(
                    obj.absolute_url(),
                ),
            )
            return
        # Use the module logger (lazy %-args), not the root `logging` module.
        logger.info('TOOK after serializing: %2.3f', time.time() - start)
        self._reduce_data(data)
        self._expand_rid(obj, data)
        self._expand_binary_data(obj, data)
        self._auto_mapping(es, obj, data)
        uid = api.content.get_uuid(obj)
        es_kwargs = dict(
            index=index_name(),
            doc_type='content',
            id=uid,
            pipeline=self._es_pipeline_name,
            body=data,
        )
        parent = aq_parent(obj)
        portal = api.portal.get()
        if aq_base(portal) is aq_base(parent):
            # Direct child of the portal: make sure the portal object
            # itself is present in the index first.
            self._check_and_add_portal_to_index(portal)
        logger.info('TOOK after preprocessing: %2.3f', time.time() - start)
        try:
            es.index(**es_kwargs)
        except Exception:
            logger.exception(
                'indexing of {0} failed.\n{1}'.format(
                    uid,
                    pformat(es_kwargs, indent=2),
                ),
            )
    finally:
        # Always release the block, whatever path we took above.
        query_blocker.unblock()
        logger.info('TOOK overall: %2.3f', time.time() - start)
def index(self, obj, attributes=None):
    """Index *obj* in ElasticSearch, either inline or via a Celery task.

    ``attributes`` is accepted for catalog-API compatibility but unused here.

    Blocks catalog queries while preparing/sending the document; the block
    is released in ``finally`` so an exception from the portal check or the
    Celery dispatch cannot leave queries blocked.
    """
    query_blocker.block()
    es = get_ingest_client()
    if es is None:
        logger.warning('No ElasticSearch client available.')
        query_blocker.unblock()
        return
    try:
        self._check_for_ingest_pipeline(es)
        self._check_for_mapping(es)  # will also create the index
    except TransportError:
        logger.exception(
            'ElasticSearch connection failed for {0}'.format(
                obj.absolute_url(),
            ),
        )
        query_blocker.unblock()
        return
    try:
        parent = aq_parent(obj)
        portal = api.portal.get()
        if aq_base(portal) is aq_base(parent):
            # Direct child of the portal: make sure the portal object
            # itself is present in the index first.
            self._check_and_add_portal_to_index(portal)
        if es_config.use_celery:
            # Hand the work off to a Celery task identified by the
            # object's physical path.
            path = '/'.join([p for p in obj.getPhysicalPath() if p != ''])
            index_content.delay(path)
        else:
            es_kwargs = self.get_payload(obj)
            if es_kwargs is None:
                # get_payload already logged the abort; without this guard
                # ``es.index(**None)`` would raise a misleading TypeError.
                return
            try:
                es.index(**es_kwargs)
            except Exception:
                uid = api.content.get_uuid(obj)
                logger.exception('indexing of {0} failed.'.format(uid, ), )
                import Globals
                if Globals.DevelopmentMode:
                    # Full payload dump only in development mode.
                    logger.debug(pformat(es_kwargs, indent=2))
    finally:
        query_blocker.unblock()