Example #1
0
def kafka_consumer_batching_main(config_path):
    """Run the batching Kafka consumer worker until it stops.

    Loads the JSON configuration at *config_path*, wipes temp
    directories left over from a previous run, builds the app and
    drives a ``Worker`` on a dedicated event loop.

    Args:
        config_path: Path to a JSON config file; must contain the
            key ``'hume.tmp_dir'``.
    """
    with open(config_path) as fp:
        config = json.load(fp)

    # Clear stale per-run state; it is fine if the dirs do not exist.
    for subdir in ('kafka_msgs', 'cdrs_tmp'):
        shutil.rmtree(os.path.join(config['hume.tmp_dir'], subdir),
                      ignore_errors=True)

    app = create_app_batching(config)

    # Resolve the log level once (defaults to INFO) instead of
    # repeating the environment lookup for basicConfig and Worker.
    loglevel = logging.getLevelName(os.environ.get('LOGLEVEL', 'INFO'))
    logging.basicConfig(level=loglevel)

    # asyncio.get_event_loop() is deprecated (3.10+) when no loop is
    # running; create and install the loop explicitly instead.
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    worker = Worker(app, loop=loop, loglevel=loglevel)
    try:
        loop.run_until_complete(start_worker(worker))
    finally:
        # Always attempt a clean shutdown, then release the loop.
        loop.run_until_complete(worker.stop())
        loop.close()
Example #2
0
        # update kafka table
        key = f'{msg.latitude},{msg.longitude}'
        earthquake_table[key] += 1


async def query_earthquake(start_time: str):
    """Fetch earthquake events from the USGS FDSN service.

    Args:
        start_time: Date/time string passed as the ``starttime``
            query parameter (e.g. ``'2023-01-01'``).

    Returns:
        A list of ``EQRecord`` instances, one per returned feature.
    """
    parameters = {"format": "geojson", "starttime": start_time}
    server = 'https://earthquake.usgs.gov/fdsnws/event/1/query'
    async with aiohttp.ClientSession() as session:
        async with session.get(url=server, params=parameters) as response:
            # Use a distinct name so the aiohttp response object is not
            # shadowed by the decoded JSON payload.
            payload = await response.json()

    datasets = []
    for feature in payload['features']:
        props = feature['properties']
        # GeoJSON positions are ordered [longitude, latitude, depth]
        # (RFC 7946), so index 0 is longitude and index 1 latitude —
        # the original assignment had them swapped.
        coords = feature['geometry']['coordinates']
        record = EQRecord(title=props['title'],
                          place=props['place'],
                          type=props['type'],
                          mag=props['mag'],
                          magType=props['magType'],
                          time=props['time'],
                          latitude=coords[1],
                          longitude=coords[0])
        datasets.append(record)
    # Lazy %-formatting: the string is only built if INFO is enabled.
    logging.info('Query EarthQuake %d records at %s', len(datasets), start_time)
    return datasets


if __name__ == '__main__':
    # Launch the faust-style worker from the command line at INFO level.
    Worker(app=app, loglevel=logging.INFO).execute_from_commandline()