Example #1
    def get_permission(self, user_id, file_ids):
        # match files the user owns, files explicitly shared with the user
        # (share_mode 1), or files with share_mode 2
        query_conditions = query.Bool(must=[
            query.Terms(file_id=file_ids),
            query.Bool(should=[
                query.Term(owner={
                    'value': user_id,
                    'boost': 100
                }),
                query.Bool(must=[
                    query.Term(share_mode={
                        'value': 1,
                        'boost': 5
                    }),
                    query.Term(users_shared={
                        'value': user_id,
                        'boost': 5
                    })
                ]),
                query.Term(share_mode=2)
            ])
        ])
        file_es = Search() \
            .query(query_conditions) \
            .source(['owner', 'share_mode', 'editable'])
        # only the first matching document is needed
        file_es = file_es[0:1]
        print(json.dumps(file_es.to_dict()))
        responses = file_es.using(self.es).index(self._index).execute()
        return responses
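A minimal caller sketch for the method above; the file_service object, the IDs, and the hit handling are assumptions for illustration, not from the original:

    # hypothetical caller: inspect the best-matching permission document for one file
    responses = file_service.get_permission(user_id=42, file_ids=[1001])
    if responses.hits.total.value > 0:   # on older client versions hits.total is a plain int
        hit = responses[0]
        print(hit.owner, hit.share_mode, hit.editable)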
Example #2
    def search(self, doc_type, query=""):
        """
        Execute a search query and retrieve the results

        :param doc_type: Type in ElasticSearch
        :param query: search query
        :return: list with results
        """
        results = []
        if isinstance(query, str) and type(doc_type) == DocTypeMeta:
            q = Q("multi_match",
                  query=query.lower(),
                  fields=["title"])

            s = Search()
            s = s.using(self.client)
            s = s.index(self.index_name)
            s = s.doc_type(doc_type)
            s = s.query(q)
            print("search query: " + str(s.to_dict()))

            response = s.execute()

            for resp in response:
                results.append(resp)
        return results
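The multi_match query built above can be combined with other query objects using the DSL's & and | operators; a hedged sketch (the field names and values are illustrative only):

    from elasticsearch_dsl import Q

    q = Q("multi_match", query="error handling", fields=["title", "body"])
    q = q & Q("term", published=True)   # both clauses must match (bool must)
    q = q | Q("term", pinned=True)      # either side may match (bool should)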
Example #3
    def _search(self, query, first_doc=0, num_docs=10):
        '''
        Sends the request to Elasticsearch.
        :param query: The query to run (an elasticsearch_dsl query object that is
        passed to Search.query())
        :param first_doc: Index of the first document to return (used to paginate
        the results)
        :param num_docs: Number of documents to return
        :return: If the request succeeds, a list of the documents matched by the
        query (document data and metadata as dictionaries).
        If the request fails, an exception is raised.

        Only documents of type "posts" in the "shokesu" index are searched.
        '''

        request = Search(index='shokesu', doc_type='posts')
        request = request[first_doc:first_doc + num_docs]
        # retry indefinitely on connection timeouts; any other failure propagates
        while True:
            try:
                result = request.using(client=self.client).query(query).execute(ignore_cache=False)
                if not result.success():
                    raise Exception()
                break
            except ConnectionTimeout:
                pass

        data = result.to_dict()
        docs = data['hits']['hits']
        return docs
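The while True loop above retries forever on timeouts; a hedged alternative with a bounded retry count and backoff (the helper name and limits are assumptions):

    import time
    from elasticsearch.exceptions import ConnectionTimeout

    def execute_with_retry(request, retries=3, backoff=1.0):
        # retry the same Search a limited number of times, sleeping between attempts
        for attempt in range(retries):
            try:
                return request.execute(ignore_cache=False)
            except ConnectionTimeout:
                if attempt == retries - 1:
                    raise
                time.sleep(backoff * (attempt + 1))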
Example #4
    def query_to_check_duplicate_when_upload_or_create(self, folder_id, name):
        # exact, filter-context matches on the parent folder and on the
        # file_title.raw subfield
        conditions = query.Bool(filter=[
            query.Term(parent_id={'value': folder_id}),
            query.Term(file_title__raw={'value': name})
        ])
        file_es = Search() \
            .query(conditions)
        file_es = file_es[0:1]
        responses = file_es.using(self.es).index(self._index).execute()
        return responses
Example #5
def elasticsearch_topic_search_content(keyword):
    from elasticsearch import Elasticsearch
    from elasticsearch_dsl import Search
    client = Elasticsearch()
    s = Search(using=client).query("match", title=keyword)
    response = s.execute()
    for hit in response:
        print(hit.title)
    return
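A hedged variant of the same lookup that returns the matching titles instead of printing them (the function name and size limit are assumptions):

    def elasticsearch_topic_titles(keyword, size=10):
        from elasticsearch import Elasticsearch
        from elasticsearch_dsl import Search
        client = Elasticsearch()
        s = Search(using=client).query("match", title=keyword)[:size]
        response = s.execute()
        return [hit.title for hit in response]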
Example #6
    def get_index_version_counts(self, index, search=None):
        """
        Given an index, return a list of dicts each containing a version and a count of the number
        of records that were changed in that version. Each dict is structured like so:

            {
                "version": <version>,
                "changes": <number of changes>
            }

        The returned list is sorted in ascending order by version. If the search argument is
        provided then the versions and counts returned will be limited to the versions covered by
        the search.

        :param index: the prefixed index
        :param search: a Search object, optional
        :return: a list of dicts of version and changes count data
        """
        versions = []
        # if there is no search passed in, make our own
        if search is None:
            search = Search()
        # [0:0] ensures we don't waste time by getting hits back
        search = search.using(self.client).index(index)[0:0]
        # create an aggregation to count the number of records in the index at each version
        search.aggs.bucket(u'versions', u'composite', size=1000,
                           sources={u'version': A(u'terms', field=u'meta.version', order=u'asc')})
        while True:
            # run the search and get the result, ignore_cache makes sure that calling execute gives
            # us back new data from the backend. We need this because we just sneakily change the
            # after value in the aggregation without generating a new search object
            result = search.execute(ignore_cache=True).aggs.to_dict()[u'versions']

            # iterate over the results
            for bucket in result[u'buckets']:
                versions.append({
                    u'version': bucket[u'key'][u'version'],
                    u'changes': bucket[u'doc_count']
                })

            # retrieve the after key for pagination if there is one
            after_key = result.get(u'after_key', None)
            if after_key is None:
                # if there isn't then we're done
                break
            else:
                # otherwise apply it to the aggregation
                search.aggs[u'versions'].after = after_key

        return versions
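A brief usage sketch; the indexer object and the index name are assumptions, not from the original:

    for entry in indexer.get_index_version_counts('some-prefixed-index'):
        print(entry['version'], entry['changes'])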
Example #7
def search_log(params):
    level = params.get('level', None)
    page = params.get('currentPage', None)
    keywords = params.get('keywords', None)
    limit_days = params.get('limit_days', None)

    size = 10
    time_start = get_start_time(limit_days)
    time_end = get_time_now()
    index = get_index()

    client = get_es_client()
    s = Search(using=client, index=index)
    if level and level != 'all':
        s = s.query("match", levelname=level)
    s = s.query("range", timestamp={"gte": time_start, "lte": time_end})
    if page and size:
        s = s[(page - 1) * size:page * size]
    if keywords:
        s = s.query("match", message=keywords)
    s = s.sort({"timestamp": {"order": "desc"}})
    print(s.to_dict())
    response = s.execute()
    items = []
    for hit in response:
        items.append(
            {
                "name": hit.name,
                "level": hit.levelname,
                "timestamp": format_tz_time(hit.timestamp),
                "filename": hit.filename,
                "funcName": hit.funcName,
                "lineno": hit.lineno,
                "message": hit.message,
                "host_ip": hit.host_ip,
                "exc_text": hit.exc_text
            }
        )
    print(len(items))
    return {
        "items": items,
        "total": s.count()
    }
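Each chained .query() call above is ANDed into a single bool query; a hedged sketch of the equivalent explicit form (the level and keyword values are placeholders):

    from elasticsearch_dsl import Q

    combined = Q("bool", must=[
        Q("match", levelname="ERROR"),
        Q("range", timestamp={"gte": time_start, "lte": time_end}),
        Q("match", message="timeout"),
    ])
    s = Search(using=client, index=index).query(combined)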
Example #8
def get_hooks():
    client = MongoClient("mongodb://localhost:27017")
    database = client["api"]
    collection = database["notifications"]
    payload = json.dumps(
        {"text": "Some problems occurred! sorry, try again soon"})
    cursor = collection.find({})
    try:
        for doc in cursor:
            text = (doc['text'])
            apis = [
                x.strip() for x in text.split(',')
            ]  # list of API names entered via the notify-me command
            res_url = (doc['response_url'])
            #ELK Request
            # note: len(apis) is never negative, so this branch always runs
            if len(apis) >= 0:
                #         req_url = "http://35.244.98.50:9200/question/so/_search?q=api:"+apis[0]
                #elk_data = json.dumps({"query":{"bool":{"must":[ {"match":{"api":apis[0]}}]}}})
                #        elk = requests.get(req_url,{})
                #       print(elk, elk.text)
                from elasticsearch import Elasticsearch
                from elasticsearch_dsl import Search
                client = Elasticsearch()
                s = Search(using=client).query("match", api=apis[0])
                response = s.execute()
                msg = "*here are few posts*:\n"
                for hit in response:
                    msg = msg + hit.title + '\n' + "<https://stackoverflow.com/questions/" + str(
                        hit.question_id) + "/> \n"
            else:
                msg = "nothing found"

            payload = json.dumps({"text": msg})
            # payload is a JSON string; decode it back to a dict before posting
            data = json.loads(payload)
            r = requests.post('http://35.244.98.50:9200/question/so/_search',
                              json=data)
            print(r.text, res_url, payload)
    except KeyError as error:
        print(error)
    return
Example #9
def audit_cget(request,
               date_from=None,
               date_to=None,
               user=None,
               order='desc',
               limit=None):
    s = Search(using=request.env.audit.es,
               index="%s-*" % (request.env.audit.audit_es_index_prefix, ))
    s = s.sort('%s%s' % ({'asc': '', 'desc': '-'}[order], '@timestamp'))

    if user is not None and user != '__all':
        if user == '__non_empty':
            s = s.query(Q('exists', **{'field': 'user.keyname'}))
        else:
            s = s.query(Q('term', **{'user.keyname': user}))

    if date_from is None and date_to is None:
        return []

    if date_from is not None and date_from != '':
        s = s.query(Q('range', **{'@timestamp': {'gte': date_from}}))

    if date_to is not None and date_to != '':
        s = s.query(Q('range', **{'@timestamp': {'lte': date_to}}))

    def hits(chunk_size=100, limit=None):
        response = s.execute()
        total = response.hits.total.value

        if limit is not None:
            total = min(total, limit)
            chunk_size = min(chunk_size, limit)

        nchunks = int(ceil(total / chunk_size))
        for nchunk in range(nchunks):
            for hit in s[nchunk * chunk_size:(nchunk + 1) * chunk_size]:
                yield hit

    return hits(limit=limit)
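For very large result sets, the manual chunked slicing in hits() could be replaced by the DSL's scroll-backed scan() helper; a hedged sketch:

    def hits_via_scan(s, limit=None):
        # scan() streams every matching document via the scroll API;
        # preserve_order keeps the sort defined on the Search (at a performance cost)
        for i, hit in enumerate(s.params(preserve_order=True).scan()):
            if limit is not None and i >= limit:
                break
            yield hit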
Example #10
def _es_search(index: str = None,
               body: dict = None,
               search: Search = None,
               timeout: str = "1m") -> ElasticsearchResponse:
    error_template = "[ERROR] ({type}) with ElasticSearch cluster: {e}"
    result = None
    try:
        if search is not None:
            result = search.using(CLIENT).params(timeout=timeout).execute()
        else:
            result = CLIENT.search(index=index, body=body, timeout=timeout)
    except NameError as e:
        logger.error(error_template.format(type="Hostname", e=str(e)))
    except (ConnectionError, ConnectionTimeout) as e:
        logger.error(error_template.format(type="Connection", e=str(e)))
    except NotFoundError as e:
        logger.error(error_template.format(type="404 Not Found", e=str(e)))
    except TransportError as e:
        logger.error(error_template.format(type="Transport", e=str(e)))
    except Exception as e:
        logger.error(error_template.format(type="Generic", e=str(e)))
    return result
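A brief usage sketch covering both call styles accepted by _es_search; the index pattern and the queries are assumptions:

    # DSL style: pass a Search object
    response = _es_search(search=Search(index="logs-*").query("match", message="timeout"))

    # raw style: pass an index and a request body
    raw = _es_search(index="logs-*", body={"query": {"match_all": {}}})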
Example #11
def get_hooks():
    p = {
        "channel": "C1H9RESGL",
        "blocks": [
            {
                "type": "section",
                "text": {
                    "type": "mrkdwn",
                    "text": "Danny Torrence left the following review for your property:"
                }
            },
            {
                "type": "section",
                "block_id": "section567",
                "text": {
                    "type": "mrkdwn",
                    "text": "<https://google.com|Overlook Hotel> \n :star: \n Doors had too many axe holes, guest in room 237 was far too rowdy, whole place felt stuck in the 1920s."
                }
            }
        ]
    }
    client = MongoClient("mongodb://localhost:27017")
    database = client["api"]
    collection = database["notifications"]
    payload = json.dumps(
        {"text": "Some problems occurred! sorry, try again soon"})
    cursor = collection.find({})
    try:
        for doc in cursor:
            text = doc['text']
            # list of API names entered via the notify-me command
            apis = [x.strip() for x in text.split(',')]
            res_url = doc['response_url']
            # ELK request
            # note: len(apis) is never negative, so this branch always runs
            if len(apis) >= 0:
                # req_url = "http://35.244.98.50:9200/question/so/_search?q=api:" + apis[0]
                # elk_data = json.dumps({"query": {"bool": {"must": [{"match": {"api": apis[0]}}]}}})
                # elk = requests.get(req_url, {})
                # print(elk, elk.text)
                from elasticsearch import Elasticsearch
                from elasticsearch_dsl import Search
                client = Elasticsearch()
                s = Search(using=client).query("match", api=apis[0])
                response = s.execute()
                msg = "*here are few posts*:\n"
                for hit in response:
                    msg = msg + hit.title + '\n' + "<https://stackoverflow.com/questions/" + str(hit.question_id) + "/> \n"
            else:
                msg = "nothing found"

            payload = json.dumps(p)
            r = requests.post(url=res_url, data=payload)
            print(r.text, res_url, payload)
    except KeyError as error:
        print(error)
    return