Example #1
def insert_bulk(data, i):
    # Session, engine, logger and ProfileModel are shared module-level objects
    session_t = Session()
    logger.info(f"Inserting {len(data)} profiles in bulk ({i})")
    session_t.bulk_insert_mappings(ProfileModel, data)
    session_t.commit()
    session_t.close()
    engine.dispose()  # drop pooled connections so each bulk insert starts from a clean pool
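Session, engine, logger and ProfileModel are not arguments here. Example #3 below calls this function as ScenarioBuilder.insert_bulk, and Example #15 imports its shared objects from app_config, so the setup presumably lives in a config module. A minimal sketch of what that SQLAlchemy setup might look like (names are illustrative; the env vars mirror Example #4):

# Hypothetical app_config-style module assumed by insert_bulk (illustrative only):
import logging
import os

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

logger = logging.getLogger("scenario_builder")
# POSTGRES_URL and DATABASE_NAME follow the env vars used in Example #4
engine = create_engine(os.path.join(os.getenv("POSTGRES_URL"), os.getenv("DATABASE_NAME")))
Session = sessionmaker(bind=engine)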
Example #2
def attach_zones_to_mayors():
    """
    This function runs on a separate schedule because of how often the government
    releases the data: electoral-zone information comes out every minute while the
    mayors' data comes out every 10 seconds, so it did not seem prudent to run this
    step every 10 seconds.
    """
    logger.info("STARTING attach_zones_to_mayors")

    try:
        cities = get_cities()

        for city in cities.values():
            city_name, state = slugify(city['n']), city['e'].lower()
            # We will not keep >5000 files in the GitHub repo; only the largest one,
            # zone-sp-sao-paulo.json, is kept as a sample.
            zone = read_from_mock_s3(f"/mock_s3/zone-{state}-{city_name}.json")
            if zone:
                mayor_key = f"/cities/mayors-{state}-{city_name}.json"
                mayors_per_city = json.loads(redis_mayors.get(mayor_key))
                mayors_per_city["zone"] = zone
                redis_mayors.set(mayor_key, json.dumps(mayors_per_city))

    except Exception as ex:
        logger.exception(ex)
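The docstring's reasoning implies two timers running at different rates. A minimal scheduling sketch consistent with those frequencies (not taken from the project, which may use a real scheduler):

from time import sleep

def run_forever():
    tick = 0
    while True:
        slice_mayors_by_city()        # mayors feed arrives roughly every 10 seconds
        if tick % 6 == 0:
            attach_zones_to_mayors()  # zone data only changes about once a minute
        tick += 1
        sleep(10)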
Example #3
def populate_model(count=1000000):
    data = []
    for i in range(count):
        data.append(ScenarioBuilder.build_fake())

        # flush every 10000 fake profiles instead of holding all of them in memory
        if (i + 1) % 10000 == 0:
            ScenarioBuilder.insert_bulk(data, i)
            data = []
            logger.info(f"Loading fake data index({i})")

    # insert and commit whatever is left over from the last partial batch
    if data:
        ScenarioBuilder.insert_bulk(data, count)
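The same bounded-memory batching can be written with a generator and itertools.islice, which removes the manual modulo bookkeeping; a sketch, not the project's code:

from itertools import islice

def populate_model_batched(count=1000000, batch_size=10000):
    fakes = (ScenarioBuilder.build_fake() for _ in range(count))
    inserted = 0
    while True:
        batch = list(islice(fakes, batch_size))  # pull at most batch_size fakes
        if not batch:
            break
        ScenarioBuilder.insert_bulk(batch, inserted)
        inserted += len(batch)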
Example #4
def create_database():
    # connect to the default "postgres" database instead of the target one,
    # since the schema we are about to create does not exist yet
    tmp_engine = create_engine(os.path.join(os.getenv("POSTGRES_URL"),
                                            "postgres"),
                               echo=False)
    with tmp_engine.connect() as conn:
        conn.execute("commit")  # CREATE DATABASE cannot run inside a transaction
        try:
            conn.execute(
                f"CREATE DATABASE {os.getenv('DATABASE_NAME')} WITH OWNER postgres TABLESPACE pg_default"
            )
            logger.info(f"Database {os.getenv('DATABASE_NAME')} created")
        except ProgrammingError as ex:
            logger.error(ex)  # most likely the database already exists
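The manual conn.execute("commit") works because PostgreSQL refuses to run CREATE DATABASE inside a transaction block. SQLAlchemy can express the same thing with an autocommit engine; a sketch under the same env-var assumptions:

tmp_engine = create_engine(
    os.path.join(os.getenv("POSTGRES_URL"), "postgres"),
    isolation_level="AUTOCOMMIT",  # no transaction, so CREATE DATABASE is allowed
)
with tmp_engine.connect() as conn:
    conn.execute(f"CREATE DATABASE {os.getenv('DATABASE_NAME')}")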
Example #5
def subscriber():
    if os.getenv("QUEUE") == "REDIS":

        while True:
            message = redis_connection.lpop(os.getenv("CHANNEL_NAME"))
            if message:
                message = json.loads(message.decode("utf-8"))
                # 'p' is the phone number and 'm' the message body
                if message.get('p') and message.get('m'):
                    logger.info(
                        f"RECEIVING REDIS({redis_connection.llen(os.getenv('CHANNEL_NAME'))}) {message['p']}, {message['m']}"
                    )
                    push_data.delay(message['p'], message['m'])
            else:
                logger.info("Waiting...")

    elif os.getenv("QUEUE") == "SQS":

        while True:
            for message in sqs_queue.receive_messages(MaxNumberOfMessages=10):
                message_dict = json.loads(message.body)
                if 'i' in message_dict:
                    logger.info(
                        f"RECEIVING SQS ID: {message_dict['i']}, {message_dict['p']}, {message_dict['m']}"
                    )
                else:
                    logger.info(
                        f"RECEIVING SQS {message_dict['p']}, {message_dict['m']}"
                    )
                push_data.delay(message_dict['p'], message_dict['m'])
                message.delete()
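push_data is invoked with .delay(), which is Celery's calling convention, so the worker side presumably registers it as a task (Example #16 shows the body). A minimal sketch, assuming Celery and a broker URL that the listing does not show:

from celery import Celery
from time import sleep

celery_app = Celery("pusher", broker=os.getenv("CELERY_BROKER_URL"))  # broker URL is an assumption

@celery_app.task
def push_data(phone, message):
    sleep(1)  # simulate push latency, as in Example #16
    logger.info(f"Pushed {phone} - {message}")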
Example #6

def getAllSavedSearchesByEmail():
    query_params = request.args
    logger.info('Query Params: %s', query_params)

    email = query_params.get('email')

    if not email:
        msg = 'Please specify a valid Email.'
        logger.error(msg)
        return jsonify({'message': msg}), 400

    searches = t_advance_search.objects(email=email).all()
    logger.info('Successfully fetched all records for email=%s. Response size = %s', email, len(searches))
    searches = sorted(searches, key=lambda search: search.last_modified_time, reverse=True)
    return jsonify({'results': [search.toJson() for search in searches]})
Example #7

def delete_saved_search():
    query_params = request.args
    logger.info('Query Params: %s', query_params)

    id = query_params.get('id')
    email = query_params.get('email')

    if id is None or email is None:
        msg = 'Please specify ' + ('Email' if id else 'Id') + ' in the request.'
        logger.error(msg)
        return jsonify({'message': msg}), 400

    savedSearch = None

    try:
        savedSearch = t_advance_search.objects(id=id, email=email).first()

        if savedSearch is None:
            logger.info('No record found for id=%s and email=%s', id, email)
            return jsonify({'message': 'No records found.'}), 404

        savedSearch.delete()
    except Exception as ex:
        logger.error(ex)
        return jsonify({'message': str(ex)}), 500

    logger.info('Successfully deleted the record by id=%s and email=%s', savedSearch.id, savedSearch.email)
    # a 204 response must not carry a body, so return 200 with the deleted ID instead
    return jsonify({'ID': savedSearch.id}), 200
Example #8

def save_search():
    request_body = request.get_json()
    logger.info('Request Body: %s', request_body)

    # validate that every required field is present before touching the database
    for field in ('email', 'search_name', 'search_query'):
        if field not in request_body:
            msg = f'{field} is missing in the request body.'
            logger.error(msg)
            return jsonify({'message': msg}), 400

    email = request_body.get('email')
    search_name = request_body.get('search_name')
    search_query = request_body.get('search_query')

    logger.info('Parsed data: email=%s, search_name=%s, search_query=%s', email, search_name, search_query)

    savedSearch = None
    try:
        savedSearch = t_advance_search.create(email=email, search_query=search_query, search_name=search_name)
    except Exception as ex:
        logger.error(ex)
        return jsonify({'message': str(ex)}), 500

    logger.info('Successfully saved the data.')
    return jsonify({'ID': savedSearch.id}), 201
Example #9

def getSavedSearchByIdAndEmail():
    query_params = request.args
    logger.info('Query Params: %s', query_params)

    id = query_params.get('id')
    email = query_params.get('email')

    if id is None or email is None:
        msg = 'Please specify ' + ('Email' if id else 'Id') + ' in the request.'
        logger.error(msg)
        return jsonify({'message': msg}), 400

    try:
        savedSearch = t_advance_search.objects(id=id, email=email).first()
    except Exception as ex:
        logger.error(ex)
        return jsonify({'message': str(ex)}), 500

    if savedSearch is None:
        logger.info('No record found for id=%s and email=%s', id, email)
        # alternatives: Response(status=404) or abort(404, 'No Saved Search found.')
        return jsonify({'message': 'No records found.'}), 404

    logger.info('Successfully fetched data by id=%s and email=%s. response = %s', id, email, savedSearch)
    return jsonify(savedSearch.toJson())
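The four handlers above read request.args and request.get_json(), so they are presumably Flask views. A sketch of how they might be registered (the URL paths are assumptions):

from flask import Flask

app = Flask(__name__)
app.add_url_rule('/searches', view_func=getAllSavedSearchesByEmail, methods=['GET'])
app.add_url_rule('/search', view_func=getSavedSearchByIdAndEmail, methods=['GET'])
app.add_url_rule('/search/save', view_func=save_search, methods=['POST'])
app.add_url_rule('/search/delete', view_func=delete_saved_search, methods=['DELETE'])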
Example #10
def slice_mayors_by_city():
    """
    This function slices a 2 MB file containing every city in the country and stores the
    pieces in REDIS, keyed by the part of the URL the client side will request.
    The files are sent by the government to a hub that uploads them to S3 in real time,
    at high frequency, often every 10 seconds while votes are being counted.
    """
    logger.info("STARTING slice_mayors_by_city")

    try:
        released_votes = read_from_mock_s3('/mock_s3/mayors.json')
        cities = get_cities()
        last_update = date_parse(
            f"{released_votes['last_updated']['date']} {released_votes['last_updated']['time']}"
        )

        if redis_mayors.get("last_mayors_update"):
            last_update_redis = date_parse(
                redis_mayors.get("last_mayors_update").decode("utf-8"))
            # For testing purposes mayors.json is never updated by the government feed,
            # so the staleness check below is bypassed with "and False"
            if last_update_redis >= last_update and False:
                logger.info("UPDATE NOT REQUIRED")
                return

        mayors = {}
        for id_city, city in released_votes["municipios"].items():
            state, city_name = cities[id_city]["e"].lower(), slugify(
                city["nm"])
            mayors[f"/cities/mayors-{state}-{city_name}.json"] = json.dumps(
                city)

        redis_mayors.set("last_mayors_update", last_update.isoformat())
        redis_mayors.mset(mayors)

    except Exception as ex:
        logger.exception(ex)
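The cache key doubles as the URL path the client requests, so a handler can read one city's slice straight back out; for example, with the sample city the repo keeps:

payload = redis_mayors.get("/cities/mayors-sp-sao-paulo.json")  # key format built above
if payload:
    city_votes = json.loads(payload)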
Example #11
def build_redis_cache():

    if os.getenv("CLEAR_GEO_REDIS_BEFORE_INSERT", "1") == "1":
        redis_conn.delete(os.getenv("REDIS_GEOCACHE_NAME"))

    logger.info("Querying database")
    profiles = session.query(ProfileModel).all()

    logger.info("Copying data to REDIS")
    for profile in profiles:
        # geoadd(name, longitude, latitude, member), as in redis-py 3.x
        redis_conn.geoadd(os.getenv("REDIS_GEOCACHE_NAME"),
                          float(profile.longitude), float(profile.latitude),
                          profile.to_json())

    logger.info(f"Added {len(profiles)} profiles to the geo cache")
Example #12
def create_model():
    try:
        base.metadata.create_all(engine)
        logger.info("Profile model created")
    except Exception as ex:
        # create_all skips tables that already exist, so log the exception too
        logger.info(f"Profile model already created ({ex})")
Example #13
def create_extension_postgis():
    with engine.connect() as conn:
        conn.execute("CREATE EXTENSION IF NOT EXISTS postgis")
        logger.info("postgis extension installed")
Example #14
def __init__(self):
    logger.info("Scenario Builder executing")
    ScenarioBuilder.create_database()
    ScenarioBuilder.create_extension_postgis()
    ScenarioBuilder.create_model()
    ScenarioBuilder.populate_model()
Example #15
import json
import os
import random
from time import sleep

from app_config import fake, redis_connection, logger, sqs_queue

for i in range(100):
    phone_number = fake.phone_number()
    message = fake.sentence()

    if os.getenv("QUEUE") == "REDIS":
        redis_connection.rpush(
            os.getenv("CHANNEL_NAME"),
            json.dumps(dict(p=phone_number, m=message, i=i)))
        logger.info(
            f"PUBLISHED ID: {i} REDIS QUEUE {redis_connection.llen(os.getenv('CHANNEL_NAME'))}"
        )

    elif os.getenv("QUEUE") == "SQS" and sqs_queue:
        # MessageGroupId is required for FIFO queues
        response = sqs_queue.send_message(
            MessageBody=json.dumps(dict(p=phone_number, m=message, i=i)),
            MessageGroupId=f"Group_{os.getenv('QUEUE')}")
        logger.info(f"PUBLISHED ID: {i} SQS QUEUE")
    else:
        raise NotImplementedError()

    # PUBLISHER_DELAY toggles the random delay; DELAY holds its upper bound in seconds
    if os.getenv("PUBLISHER_DELAY"):
        sleep(random.uniform(0, float(os.getenv("DELAY"))))
Example #16
def push_data(phone, message):
    # simulate the latency of pushing the message out to the phone
    sleep(1)
    logger.info(f"Pushed {phone} - {message}")