Code example #1
0
     def put(self, data, item_id=None, type="doc", source=None):
         """ Add an object to storage.

         The "link" field inside the "data" object is used to generate a
         unique identifier for the object. This field is mandatory for all
         objects of any type put in the index.

         :params dic data: data for object creation
         :params string item_id: unique identifier for the object
         :params string type: type of object, **source** or **doc**
         :params string source: parent unique identifier (mandatory for type doc, it's the source id)
         :returns: elasticsearch results object
         """
         # Label the Results with "<caller file>.<caller function>" for tracing.
         results = Results(self.logger, 1,
                           str(inspect.stack()[0][1]) + "." + str(inspect.stack()[0][3]))

         # Derive an id from the data when the caller did not supply one.
         if item_id is None:
             result_uuid = self.generate_uuid(data)
             item_id = result_uuid[0]
             results.add_success(result_uuid[1])

         try:
             if source is not None:
                 # Record the parent id on the document itself before serializing.
                 data['origin'] = source
             # Only the parent routing differs between the two cases, so build
             # the optional kwargs once instead of duplicating the index call.
             routing = {'parent': source} if source is not None else {}
             # Serialize dates with the project serializer, otherwise ES indexing fails.
             result = self.es.index(index=self.index, doc_type=type, id=item_id,
                                    body=json.dumps(data, default=self.serializer.to_json),
                                    ignore=[400, 404, 409], **routing)
             results.add_success(result["_id"])
         except (TransportError, ConnectionError, ConnectionTimeout, RequestError) as e:
             results.add_error(e)

         results.finish()
         return results.results
Code example #2
0
File: storage.py  Project: geonux/cybersecurity-dfm
    def bulk(self, doc_list):
        """ Simple elasticsearch bulk wrapper.

        Prepares each doc (routing/parent, index, type, id, serialized
        source) and submits the whole list via ``helpers.parallel_bulk``.

        :params doc doc_list: list of elasticsearch docs to update
        :returns: elasticsearch bulk result
        """
        # Label the Results with "<caller file>.<caller function>" for tracing.
        results = Results(
            self.logger, len(doc_list),
            str(inspect.stack()[0][1]) + "." + str(inspect.stack()[0][3]))
        ready_doc_list = []
        for doc in doc_list:
            # Route child docs to the same shard as their parent.
            if "origin" in doc:
                doc['_routing'] = doc['origin']
                doc['_parent'] = doc['origin']
            if "_index" not in doc:
                doc['_index'] = self.index
            if "_type" not in doc:
                doc['_type'] = 'doc'
            if "_id" not in doc:
                # The uuid is generated from the doc carrying the "link"
                # field: the doc itself, or its nested "doc" payload.
                uuid_input = doc if "link" in doc else doc["doc"]
                result_uuid = self.generate_uuid(uuid_input)
                doc['_id'] = result_uuid[0]
                results.add_success(result_uuid[0])
            if "doc" in doc:
                # json serialize dates with the special date parser,
                # otherwise ES indexing fails.
                doc["_source"] = json.loads(
                    json.dumps(doc["doc"], default=self.serializer.to_json))
                # remove the raw source from the doc
                doc.pop("doc")

            ready_doc_list.append(doc)
        try:
            for result in helpers.parallel_bulk(self.es, ready_doc_list):
                # Compute the status once; any 2xx counts as success.
                status = int(result[1]["index"]["status"])
                if 200 <= status < 300:
                    results.add_success(result)
                else:
                    results.add_fail(result)
        except Exception as e:
            # Boundary catch: record the failure instead of aborting the batch.
            results.add_error(e)
        # Drop the (potentially large) doc lists before collecting.
        del ready_doc_list, doc_list
        results.finish()
        gc.collect()
        return results.results