示例#1
0
def run_consumer():
    """Poll the Kafka consumer in a loop and print each message value.

    Runs until interrupted (Ctrl-C); the consumer is always closed on exit
    so final offsets are committed.

    Raises:
        KafkaException: when the broker delivers an error event.
    """
    logger = logging.getLogger('consumer')
    logger.setLevel(logging.DEBUG)
    # Only attach a handler once: calling run_consumer() repeatedly in the
    # same process would otherwise stack StreamHandlers on the named logger
    # and duplicate every log line.
    if not logger.handlers:
        handler = logging.StreamHandler()
        handler.setFormatter(
            logging.Formatter('%(asctime)-15s %(levelname)-8s %(message)s'))
        logger.addHandler(handler)

    consumer = Consumer(conf)
    consumer.subscribe(topics=config.resolve_config("CONSUMER_TOPICS"))

    try:
        while True:
            msg = consumer.poll(timeout=1.0)
            if msg is None:
                # Poll timed out with nothing to deliver; keep waiting.
                continue
            if msg.error():
                raise KafkaException(msg.error())
            # Proper message: emit the raw payload.
            print(msg.value())
    except KeyboardInterrupt:
        sys.stderr.write('%% Aborted by user\n')
    finally:
        # Close down consumer to commit final offsets.
        consumer.close()
示例#2
0
def produce(message, topic=None, count=1):
    """Produce `count` numbered copies of `message` to a Kafka topic.

    Args:
        message: payload prefix; each record is "<message>: <i>".
        topic: destination topic; defaults to the configured CONSUMER_TOPICS.
        count: number of records to produce.
    """
    _topic = topic or config.resolve_config("CONSUMER_TOPICS")
    try:
        for i in range(count):
            producer.produce(_topic,
                             message + ": " + str(i),
                             on_delivery=on_delivery)
    except Exception as e:
        # str(e) is required: concatenating the exception object itself
        # raised a TypeError inside this handler, masking the real error.
        sys.stderr.write("Exception " + e.__class__.__name__ + " :: " + str(e))
示例#3
0
def create_influx():
    """
    Contextmanager that will create and teardown a session.
    """
    config = resolve_config()
    influx = InfluxDB(host=config.INFLUXDB_HOST, database=config.INFLUXDB_DATABASE)
    try:
        yield influx
    finally:
        # Close even when the with-block raises; without try/finally an
        # exception in the caller would skip close() and leak the session.
        influx.close()
示例#4
0
import sys

import config
from confluent_kafka.cimpl import Producer

# Producer configuration: broker address is resolved from project config.
conf = {'bootstrap.servers': config.resolve_config("BROKER")}


def on_delivery(err, msg):
    """Delivery report callback: failures go to stderr, successes to stdout."""
    if err:
        sys.stderr.write('%% Message failed delivery: %s\n' % err)
        return
    sys.stdout.write('%% Message delivered to %s [%d] @ %d\n' %
                     (msg.topic(), msg.partition(), msg.offset()))


# Module-level producer shared by produce(), built from `conf` above.
# NOTE(review): this unpacks the config dict as keyword args (Producer(**conf))
# while the consumer module passes its dict directly (Consumer(conf)) —
# confirm confluent_kafka accepts both forms before unifying the style.
producer = Producer(**conf)


def produce(message, topic=None, count=1):
    _topic = topic or config.resolve_config("CONSUMER_TOPICS")
    try:
        for i in range(count):
            producer.produce(_topic,
                             message + ": " + str(i),
                             on_delivery=on_delivery)
    except Exception as e:
        sys.stderr.write("Exception " + e.__class__.__name__ + " :: " + e)
示例#5
0
                    default=200)
# Optional flag: enable verbose output while rebuilding history.
parser.add_argument("-v",
                    "--verbose",
                    help="allow verbosity",
                    action="store_true")

args = parser.parse_args()


def create_history(pair: str, timeframe: str, limit: int, verbose: bool):
    """Fetch a candle series for pair/timeframe and rebuild its atomics."""
    candles: Series = get_candles(pair=pair, timeframe=timeframe, limit=limit)
    summary = f"Creating {candles.pair}{candles.timeframe}. Size: {candles.close.size}"
    print(summary)
    Atomics(candles).remake(verbose)


if args.pair and args.timeframe:
    # argparse's store_true already yields a bool; bool() replaces the
    # redundant `True if args.verbose else False` conditional.
    verbose = bool(args.verbose)
    # Setup proper config
    Config = resolve_config()
    # Create app
    app = create_app(Config)
    # Generate history inside the application context (create_history's
    # dependencies appear to be app-bound — presumably DB access; confirm).
    with app.app_context():
        create_history(pair=args.pair,
                       timeframe=args.timeframe,
                       limit=args.limit,
                       verbose=verbose)
    exit(0)
示例#6
0
import logging
import sys
import os

import config
from confluent_kafka.cimpl import Consumer, KafkaException

# Consumer configuration: broker/group come from project config, SASL
# credentials from the environment.
conf = {
    "bootstrap.servers": config.resolve_config("BROKER"),
    "group.id": config.resolve_config("GROUP_ID"),
    # Session keepalive timeout in milliseconds.
    "session.timeout.ms": 6000,
    # Start from the earliest offset when no committed offset exists.
    "auto.offset.reset": "earliest",
    # SASL over SSL; key/secret are injected via environment variables
    # (None if unset — broker auth will then fail at connect time).
    "sasl.mechanisms": "PLAIN",
    "security.protocol": "SASL_SSL",
    "sasl.username": os.environ.get("CONF_API_KEY"),
    "sasl.password": os.environ.get("CONF_API_SECRET")
}


def run_consumer():
    logger = logging.getLogger('consumer')
    logger.setLevel(logging.DEBUG)
    handler = logging.StreamHandler()
    handler.setFormatter(
        logging.Formatter('%(asctime)-15s %(levelname)-8s %(message)s'))
    logger.addHandler(handler)

    consumer = Consumer(conf)
    consumer.subscribe(topics=config.resolve_config("CONSUMER_TOPICS"))

    try: