import hashlib
import json
import os

from elasticsearch import Elasticsearch

from shared.utils import read_data_from_json
from shared.LoggerFactory import LoggerFactory

# Project root path; expected to be placed in the environment by the
# launcher script before this module is imported — TODO confirm all entry
# points set ROOT_DIR (a KeyError here means it was not).
ROOT_DIR = os.environ['ROOT_DIR']
# Client for a local, single-node Elasticsearch instance (7.x-style
# host/port dict transport config).
client = Elasticsearch([{'host': '127.0.0.1', 'port': 9200}])
# Dedicated logger for the Elasticsearch upload pipeline.
logger = LoggerFactory.get_elastic_logger()


def run_elastic_upload():
    """Recreate the 'posts' Elasticsearch index with a geo_point mapping.

    Drops any existing 'posts' index, then creates it fresh so that the
    ``geo_location`` field is mapped as ``geo_point`` (required for
    geo-distance / bounding-box queries). Uses the module-level ``client``
    and ``logger``. Raises whatever the elasticsearch client raises on
    connection failure.
    """
    logger.debug("run_elastic_upload")
    logger.info("Starting Index Process!")

    index = 'posts'
    # ignore=[400, 404] already tolerates a missing index, so the separate
    # indices.exists() round-trip the original performed was redundant —
    # a single unconditional delete has the same effect.
    client.indices.delete(index=index, ignore=[400, 404])

    request_body = {
        'mappings': {
            'properties': {
                # Explicit geo_point mapping; without it Elasticsearch would
                # infer a plain object/float mapping and geo queries would fail.
                'geo_location': {'type': 'geo_point'},
            }
        }
    }

    client.indices.create(index=index, body=request_body)
    logger.info("Finished Indexing!")
# ---- Example #2: entry-point script (second snippet from the paste) ----
import os

# Entry-point script: publishes the project root via the environment, then
# kicks off the data-compose and Elasticsearch upload steps.
#
# NOTE: ROOT_DIR must be written into os.environ BEFORE the import of
# upload_to_elasticsearch.elastic below, because that module reads
# os.environ['ROOT_DIR'] at import time. Do not reorder these statements.
ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
os.environ['ROOT_DIR'] = ROOT_DIR

from upload_to_elasticsearch.elastic import run_elastic_upload
from shared.LoggerFactory import LoggerFactory
from data_management.DataManager import DataManager

LoggerFactory.get_elastic_logger().info("running elastic upload")
# starts the process of selecting the files to upload to elastic search
DataManager.run_compose_upload_process()
# execute the upload to elastic search
run_elastic_upload()