Example #1
async def main(config):
    log.info("Starting up")
    client = ElasticSearchClient(hosts=config["elastic_uri"])
    # Note: datetime.now() is naive local time, while the snowflake timestamps
    # below are naive UTC; on a non-UTC host the cutoff can drift by the offset.
    now = datetime.now()
    # We keep data for up to 30 days, so delete anything older than 29 days
    delete_time = timedelta(days=29)

    async with client as db:
        logs_ids = []
        async for model in db._scroll(index="logs_str",
                                      body={"_source": "time"}):
            time_created = datetime.fromisoformat(model["_source"]["time"])
            if (now - time_created) > delete_time:
                logs_ids.append(model["_id"])
        await db.bulk_delete(index="logs_str", ids=logs_ids)
        del logs_ids

        message_ids = []
        async for model in db._scroll(index="guild_message_str",
                                      body={"_source": False}):
            message_id = model["_id"]
            # Discord snowflake IDs encode their creation time: the top 42
            # bits hold milliseconds since the Discord epoch
            time_created = datetime.utcfromtimestamp(
                ((int(message_id) >> 22) + DISCORD_EPOCH) / 1000)
            if (now - time_created) > delete_time:
                message_ids.append(message_id)
        await db.bulk_delete(index="guild_message_str", ids=message_ids)
        del message_ids
    log.info("Stopping")
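Note: the snowflake decoding above relies on Discord's documented ID layout, where the top 42 bits of an ID hold milliseconds since the Discord epoch (2015-01-01T00:00:00 UTC). A minimal standalone sketch of that conversion, using only the standard library:

from datetime import datetime, timezone

DISCORD_EPOCH = 1_420_070_400_000  # ms between the Unix epoch and 2015-01-01T00:00:00 UTC

def snowflake_to_datetime(snowflake: int) -> datetime:
    """Extract the creation time encoded in a Discord snowflake ID."""
    ms_since_unix_epoch = (snowflake >> 22) + DISCORD_EPOCH
    return datetime.fromtimestamp(ms_since_unix_epoch / 1000, tz=timezone.utc)

# The example ID from Discord's API docs decodes to 2016-04-30 11:18:25.796 UTC:
# snowflake_to_datetime(175928847299117063)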
Example #2
            doc['Value'] = value

            doc['Unit'] = data['unit']
            doc['Description'] = data['description']
            doc['location'] = {'lat': CORK_LAT, 'lon': CORK_LON}
            # Build the document id from the variable and its timestamp
            id_ = variable + time
            print(doc)
            res = es.insert_doc(doc, id_=id_)
            print(res)


load_dotenv()
es = ElasticSearchClient(
    os.getenv('ES_HOST'),
    os.getenv('ES_PORT'),
    use_ssl=os.getenv('ES_USE_SSL', False),
    verify_certs=os.getenv('ES_VERIFY_CERTS', False),
    http_auth=(os.getenv('ES_USER'),
               os.getenv('ES_PASSWORD')) if os.getenv('ES_USER') else None,
    ca_certs=os.getenv('ES_CA_CERTS', None))

geo_point_mapping = es.define_geo_point_mapping()

es.create_index(ELASTICSEARCH_INDEX, geo_point_mapping)

kafka_consumer = KafkaConsumer(
    KAFKA_TOPIC,
    bootstrap_servers=[
        "{}:{}".format(os.getenv('KAFKA_HOST'), os.getenv('KAFKA_PORT'))
    ],
    # auto_offset_reset='earliest',
    security_protocol=os.getenv('KAFKA_SECURITY_PROTOCOL', 'PLAINTEXT'),
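A caveat shared by all of these snippets: os.getenv returns strings, so use_ssl=os.getenv('ES_USE_SSL', False) evaluates to the truthy string 'False' whenever the variable is set at all. A small helper, sketched here with only the standard library (env_bool is not part of the original code), makes the intent explicit:

import os

def env_bool(name, default=False):
    """Parse a boolean-valued environment variable; unset falls back to default."""
    value = os.getenv(name)
    if value is None:
        return default
    return value.strip().lower() in ('1', 'true', 'yes', 'on')

# e.g. use_ssl=env_bool('ES_USE_SSL') rather than os.getenv('ES_USE_SSL', False)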
Example #3
"""

import os
import json
from kafka import KafkaConsumer
from elastic import ElasticSearchClient, RequestError
from dotenv import load_dotenv
from constants import *

load_dotenv()

es = ElasticSearchClient(
    os.getenv('ES_HOST'),
    os.getenv('ES_PORT'),
    use_ssl=os.getenv('ES_USE_SSL', False),
    verify_certs=os.getenv('ES_VERIFY_CERTS', False),
    http_auth=(os.getenv('ES_USER'),
               os.getenv('ES_PASSWORD')) if os.getenv('ES_USER') else None,
    ca_certs=os.getenv('ES_CA_CERTS', None),
    timeout=60)

geo_point_mapping = es.define_custom_geo_shape_mapping("geometry")

es.create_index(ELASTICSEARCH_INDEX, geo_point_mapping)

kafka_consumer = KafkaConsumer(
    KAFKA_TOPIC,
    bootstrap_servers=[
        "{}:{}".format(os.getenv('KAFKA_HOST'), os.getenv('KAFKA_PORT'))
    ],
    # auto_offset_reset='earliest',
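define_custom_geo_shape_mapping is the project's own helper; its return value isn't shown in these snippets. A plausible body for the 'geometry' field, following Elasticsearch's standard geo_shape mapping (the exact shape the helper returns is an assumption):

geo_shape_mapping = {
    "mappings": {
        "properties": {
            # geo_shape fields accept GeoJSON geometries (points, polygons, ...)
            "geometry": {"type": "geo_shape"}
        }
    }
}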
Example #4
load_dotenv()

producer = KafkaProducer(
    bootstrap_servers=[
        "{}:{}".format(os.getenv('KAFKA_HOST'), os.getenv('KAFKA_PORT'))
    ],
    security_protocol=os.getenv('KAFKA_SECURITY_PROTOCOL', 'PLAINTEXT'),
    ssl_cafile=os.getenv('KAFKA_CA_FILE', None),
    ssl_certfile=os.getenv('KAFKA_CERT_FILE', None),
    ssl_keyfile=os.getenv('KAFKA_KEY_FILE', None),
    value_serializer=lambda m: json.dumps(m).encode('utf8'))

es = ElasticSearchClient(
    os.getenv('ES_HOST'),
    os.getenv('ES_PORT'),
    use_ssl=os.getenv('ES_USE_SSL', False),
    verify_certs=os.getenv('ES_VERIFY_CERTS', False),
    http_auth=(os.getenv('ES_USER'),
               os.getenv('ES_PASSWORD')) if os.getenv('ES_USER') else None,
    ca_certs=os.getenv('ES_CA_CERTS', None))

if es.es.count(index=ELASTICSEARCH_INDEX)['count'] != 0:
    produce_latest_data()
else:
    produce_historical_data()

# Assume all messages have been published and consumed successfully
producer.send(KAFKA_TOPIC_FINISH,
              'All messages are published and consumed successfully!')
producer.flush()
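Since the producer serializes values with json.dumps, the matching consumer side would deserialize symmetrically. A sketch of a consumer for the finish topic, reusing the snippet's environment variables and constants:

import json
import os
from kafka import KafkaConsumer

finish_consumer = KafkaConsumer(
    KAFKA_TOPIC_FINISH,
    bootstrap_servers=[
        "{}:{}".format(os.getenv('KAFKA_HOST'), os.getenv('KAFKA_PORT'))
    ],
    value_deserializer=lambda m: json.loads(m.decode('utf8')))

for message in finish_consumer:
    print(message.value)  # the finish notification sent above
    break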
Example #5
"""

import os
import json
from kafka import KafkaConsumer
from elastic import ElasticSearchClient
from dotenv import load_dotenv
from constants import *

load_dotenv()

es = ElasticSearchClient(
    os.getenv('ES_HOST'),
    os.getenv('ES_PORT'),
    use_ssl=os.getenv('ES_USE_SSL', False),
    verify_certs=os.getenv('ES_VERIFY_CERTS', False),
    http_auth=(os.getenv('ES_USER'),
               os.getenv('ES_PASSWORD')) if os.getenv('ES_USER') else None,
    ca_certs=os.getenv('ES_CA_CERTS', None))

# We don't need a location (geo_point) field for this index
# geo_point_mapping = es.define_geo_point_mapping()

es.create_index(ELASTICSEARCH_INDEX)

kafka_consumer = KafkaConsumer(
    KAFKA_TOPIC,
    bootstrap_servers=[
        "{}:{}".format(os.getenv('KAFKA_HOST'), os.getenv('KAFKA_PORT'))
    ],
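create_index here is the project's own wrapper rather than elasticsearch-py's API. A minimal sketch of such a wrapper (hypothetical, inferred only from its usage in these snippets), tolerating an index that already exists:

from elasticsearch.exceptions import RequestError

def create_index(es, index, body=None):
    """Create the index with an optional mappings body; swallow only the
    error raised when the index already exists."""
    try:
        es.indices.create(index=index, body=body)
    except RequestError as err:
        if err.error != 'resource_already_exists_exception':
            raise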
Example #6
"""

import os
import json
from kafka import KafkaConsumer
from elastic import ElasticSearchClient
from dotenv import load_dotenv
from constants import *


load_dotenv()

es = ElasticSearchClient(
    os.getenv('ES_HOST'),
    os.getenv('ES_PORT'),
    use_ssl=os.getenv('ES_USE_SSL', False),
    verify_certs=os.getenv('ES_VERIFY_CERTS', False),
    http_auth=(os.getenv('ES_USER'),
               os.getenv('ES_PASSWORD')) if os.getenv('ES_USER') else None,
    ca_certs=os.getenv('ES_CA_CERTS', None))

# We don't need a location field in this index
# geo_point_mapping = es.define_geo_point_mapping()

# This particular index should follow the previous index's time format,
# "2017/01/01 00:15:00", instead of the "2017/01/01T00:15:00" format we
# use in all of our new indices, so apply an explicit mapping for the
# 'date' field.

date_mapping = es.define_custom_date_mapping_format(
    date_field_name='date', format="yyyy/MM/dd HH:mm:ss")

es.create_index(ELASTICSEARCH_INDEX, date_mapping)
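In Elasticsearch terms, the explicit date mapping the comment describes is a properties entry with a custom format string. A sketch of the body define_custom_date_mapping_format presumably builds (the helper's internals are an assumption; the date format syntax is standard Elasticsearch):

date_mapping = {
    "mappings": {
        "properties": {
            "date": {
                "type": "date",
                "format": "yyyy/MM/dd HH:mm:ss"
            }
        }
    }
}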
Example #7
"""

import os
import json
import datetime
from kafka import KafkaConsumer
from elastic import ElasticSearchClient
from dotenv import load_dotenv
from constants import *

load_dotenv()

es = ElasticSearchClient(
    os.getenv('ES_HOST'),
    os.getenv('ES_PORT'),
    use_ssl=os.getenv('ES_USE_SSL', False),
    verify_certs=os.getenv('ES_VERIFY_CERTS', False),
    http_auth=(os.getenv('ES_USER'),
               os.getenv('ES_PASSWORD')) if os.getenv('ES_USER') else None,
    ca_certs=os.getenv('ES_CA_CERTS', None))

# For this particular index we don't want a single geo-point field named
# "location" but two separate fields, "location_before" and "location_after",
# so call the custom helper, passing the geo-point field name as a parameter.
geo_point_mapping_before = es.define_custom_geo_point_mapping(
    'location_before')
geo_point_mapping_after = es.define_custom_geo_point_mapping('location_after')

es.create_index(ELASTICSEARCH_INDEX, geo_point_mapping_before,
                geo_point_mapping_after)

kafka_consumer = KafkaConsumer(
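Since create_index accepts several mapping fragments at once here, define_custom_geo_point_mapping presumably returns just the field's mapping entry. A sketch (the exact shape is an assumption; only the geo_point type is standard Elasticsearch):

geo_point_mapping_before = {
    "location_before": {"type": "geo_point"}
}
geo_point_mapping_after = {
    "location_after": {"type": "geo_point"}
}
# create_index would then merge these under "mappings" -> "properties".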