def _get_parsed_data(self):
    """Execute the configured Elasticsearch queries and parse the responses.

    Runs all queries in ``self.queries`` (a JSON-encoded list of query
    dicts) as a single multi-search against the source's index.

    Returns:
        dict with keys:
            'raw': list of raw response dicts, one per executed query.
            'data': flattened list of parsed aggregation results.
            'error': True if executing the queries raised; in that case
                'error_code' holds the exception class name and
                'error_message' holds its text.
    """
    # 'error' will be set to True if we encounter an error.
    parsed_data = dict(raw=[], error=False, data=[])
    source = ElasticsearchSource.objects.get(name=self.source.name)
    multisearch = MultiSearch()
    if source.max_concurrent_searches is not None:
        # BUG FIX: MultiSearch.params() returns a modified clone rather
        # than mutating in place, so the result must be reassigned or the
        # max_concurrent_searches setting is silently dropped.
        multisearch = multisearch.params(
            max_concurrent_searches=source.max_concurrent_searches)
    for query in json.loads(self.queries):
        multisearch = multisearch.add(
            Search.from_dict(query).params(ignore_unavailable=True,
                                           allow_no_indices=True))
    try:
        responses = multisearch.using(source.client).index(
            source.index).execute()
        for response in responses:
            raw_data = response.to_dict()
            parsed_data['raw'].append(raw_data)
            # Skip responses that matched no documents at all.
            if not raw_data['hits']['hits']:
                continue
            self._check_response_size(raw_data)
            data = self._parse_es_response([raw_data['aggregations']])
            if not data:
                continue
            parsed_data['data'].extend(data)
    except Exception as e:
        # Boundary handler: record the failure in the returned payload
        # instead of propagating, so callers always get a dict back.
        logger.exception(
            'Error executing Elasticsearch queries: {}'.format(
                self.queries))
        parsed_data['error_code'] = type(e).__name__
        parsed_data['error_message'] = six.text_type(e)
        parsed_data['error'] = True
    return parsed_data