def upsert_doc_to_elastic(elastic_object, message_type, upsert_body, request_id, index_name):
    """Upsert a single document into an Elasticsearch index.

    Registers/refreshes the index mapping for ``message_type`` via
    ``log_mapping.get_log_metadata``, then issues an ``update`` with
    ``doc_as_upsert`` so the document is created if missing or merged
    if present.

    Args:
        elastic_object: Elasticsearch client instance.
        message_type: Logical message kind being stored (e.g. request/response);
            used only for mapping setup and logging here.
        upsert_body: Dict merged into (or used to create) the document.
        request_id: Document id to upsert under.
        index_name: Target index name.

    Returns:
        str: String form of the Elasticsearch update response.
    """
    log_mapping.get_log_metadata(
        elastic_object, message_type, upsert_body, request_id, index_name
    )
    upsert_doc = {
        "doc_as_upsert": True,
        "doc": upsert_body,
    }
    new_content = elastic_object.update(
        index=index_name,
        doc_type=log_helper.DOC_TYPE_NAME,
        id=request_id,
        body=upsert_doc,
        retry_on_conflict=3,  # retry on concurrent-modification conflicts
        refresh=True,         # make the change searchable immediately
        timeout="60s",
    )
    # Fall back to the ES default "_doc" type purely for the log line.
    doc_type = log_helper.DOC_TYPE_NAME if log_helper.DOC_TYPE_NAME is not None else "_doc"
    print(f"upserted to doc {index_name}/{doc_type}/{request_id} adding {message_type}")
    sys.stdout.flush()
    return str(new_content)
def bulk_upsert_doc_to_elastic(
    elastic_object: Elasticsearch,
    message_type,
    doc_body,
    new_content_part,
    request_id,
    index_name,
):
    """Bulk-upsert one document per batch item into an Elasticsearch index.

    For each element of ``new_content_part["instance"]`` a copy of
    ``doc_body`` is taken, its ``[message_type]["instance"]`` (and, when
    available, ``[message_type]["elements"]``) slot is filled with that
    item's data, and the result is upserted under a per-item id derived
    from ``request_id`` via ``build_request_id_batched``.

    Args:
        elastic_object: Elasticsearch client instance.
        message_type: Key inside ``doc_body`` whose dict receives the
            per-item ``instance``/``elements`` payload.
        doc_body: Template document; must contain a dict at ``doc_body[message_type]``.
        new_content_part: Dict with an ``"instance"`` list (one entry per batch
            item) and optionally a parallel ``"elements"`` list.
        request_id: Base request id the per-item ids are derived from.
        index_name: Target index name.
    """
    log_mapping.get_log_metadata(
        elastic_object, message_type, doc_body, request_id, index_name
    )
    no_items_in_batch = len(new_content_part["instance"])
    elements = new_content_part.get("elements")
    # Hoisted out of the per-item loop; "_doc" fallback is for logging only.
    doc_type = log_helper.DOC_TYPE_NAME if log_helper.DOC_TYPE_NAME is not None else "_doc"

    def gen_data():
        # Lazily build one bulk-update action per batch item.
        for num, item in enumerate(new_content_part["instance"]):
            item_body = doc_body.copy()
            # Bug fix: the plain shallow copy aliased doc_body[message_type],
            # so every yielded doc — and the caller's doc_body — shared one
            # inner dict that each iteration then mutated. Copy it per item.
            item_body[message_type] = dict(doc_body[message_type])
            item_body[message_type]["instance"] = item
            if isinstance(elements, list) and len(elements) > num:
                item_body[message_type]["elements"] = elements[num]
            item_request_id = build_request_id_batched(
                request_id, no_items_in_batch, num
            )
            # NOTE(review): logs the base request_id, not item_request_id —
            # preserved as-is; confirm whether the per-item id was intended.
            print(
                f"bulk upserting to doc {index_name}/{doc_type}/{request_id} adding {message_type}"
            )
            yield {
                "_index": index_name,
                "_type": log_helper.DOC_TYPE_NAME,
                "_op_type": "update",
                "_id": item_request_id,
                "_source": {"doc_as_upsert": True, "doc": item_body},
            }

    helpers.bulk(elastic_object, gen_data(), refresh=True)