Example #1
 def test_kafka_consumer(self):
     """
     Test that the HerokuKafkaConsumer does not create any errors 
     and is an instance of KafkaConsumer (python_kafka)
     """
     consumer = HerokuKafkaConsumer(TOPIC1, **kafka_params)
     assert isinstance(consumer, KafkaConsumer)
     consumer.close()
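These tests rely on a kafka_params fixture and prefixed-topic constants that the page does not show. A minimal sketch of what they might look like, assuming the standard Heroku Kafka config vars (all names and values below are hypothetical, not from the original source):

import os

# Hypothetical test fixture: built from the config vars Heroku sets on any
# app with Kafka attached; not part of the original examples.
kafka_params = {
    "url": os.environ["KAFKA_URL"],                  # comma-separated broker URLs
    "ssl_cert": os.environ["KAFKA_CLIENT_CERT"],     # client certificate (PEM string)
    "ssl_key": os.environ["KAFKA_CLIENT_CERT_KEY"],  # client private key (PEM string)
    "ssl_ca": os.environ["KAFKA_TRUSTED_CERT"],      # trusted CA certificate (PEM string)
    "prefix": os.environ["KAFKA_PREFIX"],            # e.g. "witty-connector-1234."
}

TOPIC1 = "topic1"
TOPIC2 = "topic2"
TOPIC1_WITH_PREFIX = kafka_params["prefix"] + TOPIC1
TOPIC2_WITH_PREFIX = kafka_params["prefix"] + TOPIC2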
Example #2
def receiveFromKafka(mode):

    consumer = HerokuKafkaConsumer(
        # KAFKA_TOPIC,  # Optional: you don't need to pass any topic at all
        url=KAFKA_URL,  # URL string provided by Heroku
        ssl_cert=KAFKA_CLIENT_CERT,  # Client cert string
        ssl_key=KAFKA_CLIENT_CERT_KEY,  # Client cert key string
        ssl_ca=KAFKA_TRUSTED_CERT,  # Client trusted cert string
        prefix=KAFKA_PREFIX,  # Prefix provided by Heroku
        auto_offset_reset="earliest",  # kafka-python accepts only "earliest"/"latest", not "smallest"
        max_poll_records=10,
        enable_auto_commit=True,
        auto_commit_interval_ms=10,
        #group_id=KAFKA_GROUP_ID,
        api_version=(0, 9))
    """
    To subscribe to topic(s) after creating a consumer pass in a list of topics without the
    KAFKA_PREFIX.
    """
    partition = 1

    tp = TopicPartition(KAFKA_PREFIX + KAFKA_TOPIC_READ, partition)
    if mode == "subscribe":
        # NOTE: topics must be a list/tuple; (KAFKA_TOPIC_READ) alone is just a string
        consumer.subscribe(topics=[KAFKA_TOPIC_READ])
    elif mode == "assign":
        consumer.assign([tp])

    # display the list of assigned partitions
    assignments = consumer.assignment()
    for assignment in assignments:
        logger.debug(assignment)

    partitions = consumer.partitions_for_topic(KAFKA_PREFIX + KAFKA_TOPIC_READ)
    if partitions:
        for partition in partitions:
            logger.debug("Partition=" + str(partition))

    topics = consumer.topics()
    if topics:
        for topic in topics:
            logger.debug("Topic:" + topic)
    logger.debug('waiting ..')
    """
    .assign requires a full topic name with prefix
    """
    """
    Listening to events it is exactly the same as in kafka_python.
    Read the documention linked below for more info!
    """
    i = 0
    for message in consumer:
        #logger.debug (' %s : %s ' % (i, message))
        logger.debug("%i %s:%d:%d: key=%s value=%s" %
                     (i, message.topic, message.partition, message.offset,
                      message.key, message.value))
        #consumer.commit()
        i += 1
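Note the asymmetry the comments in this example describe: subscribe() is wrapped by HerokuKafkaConsumer and adds the Heroku prefix for you, while assign() hands raw TopicPartition objects straight to kafka-python and therefore needs the fully prefixed name. A minimal sketch of the two modes side by side (topic name hypothetical):

from kafka import TopicPartition

# Either: subscribe() with the bare topic name; the wrapper prepends
# KAFKA_PREFIX itself.
consumer.subscribe(topics=["interactions"])

# Or (never both on the same consumer): assign() bypasses the wrapper,
# so the TopicPartition must carry the full prefixed name explicitly.
consumer.assign([TopicPartition(KAFKA_PREFIX + "interactions", 1)])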
Example #3
 def test_kafka_consumer_multiple_topic(self):
     """
     Test that the consumer works with multiple topics and prefixes them correctly
     """
     consumer = HerokuKafkaConsumer(TOPIC1, TOPIC2, **kafka_params)
     assert consumer.subscription() == {
         TOPIC1_WITH_PREFIX, TOPIC2_WITH_PREFIX
     }
     consumer.close()
Example #4
 def test_kafka_consumer_single_topic(self):
     """
     Test that the consumer works with a single topic and prefixes it correctly
     """
     consumer = HerokuKafkaConsumer(TOPIC1, **kafka_params)
     assert consumer.subscription() == {
         TOPIC1_WITH_PREFIX,
     }
     consumer.close()
Example #5
 def test_kafka_consumer_subscribe(self):
     """
     Test that the consumer works with subscribe and prefixes topics correctly
     """
     consumer = HerokuKafkaConsumer(**kafka_params)
     consumer.subscribe(topics=(TOPIC1, TOPIC2))
     assert consumer.subscription() == {
         TOPIC1_WITH_PREFIX, TOPIC2_WITH_PREFIX
     }
     consumer.close()
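The three subscription tests above all assert the same invariant: subscription() reports fully prefixed names even though the caller never types the prefix. Reduced to a plain assertion (prefix value hypothetical):

# What the assertions boil down to, with a hypothetical prefix value:
prefix = "witty-connector-1234."
consumer = HerokuKafkaConsumer("topic1", "topic2", **kafka_params)
assert consumer.subscription() == {prefix + "topic1", prefix + "topic2"}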
Example #6
def receiveFromKafka_EDF(mode):

    consumer = HerokuKafkaConsumer(
        # KAFKA_TOPIC,  # Optional: you don't need to pass any topic at all
        url=KAFKA_URL,  # URL string provided by Heroku
        ssl_cert=KAFKA_CLIENT_CERT,  # Client cert string
        ssl_key=KAFKA_CLIENT_CERT_KEY,  # Client cert key string
        ssl_ca=KAFKA_TRUSTED_CERT,  # Client trusted cert string
        prefix=KAFKA_PREFIX,  # Prefix provided by Heroku
        auto_offset_reset="earliest",  # kafka-python accepts only "earliest"/"latest", not "smallest"
        max_poll_records=10,
        enable_auto_commit=True,
        auto_commit_interval_ms=100,
        group_id=KAFKA_GROUP_ID,
        api_version=(0, 9))
    """
    To subscribe to topic(s) after creating a consumer pass in a list of topics without the
    KAFKA_PREFIX.
    """
    partition = 1

    tp = TopicPartition(KAFKA_PREFIX + KAFKA_TOPIC_READ, partition)
    if mode == "subscribe":
        # NOTE: topics must be a list/tuple; (KAFKA_TOPIC_READ) alone is just a string
        consumer.subscribe(topics=[KAFKA_TOPIC_READ])
    elif mode == "assign":
        consumer.assign([tp])

    # display the list of assigned partitions
    assignments = consumer.assignment()
    for assignment in assignments:
        logger.debug(assignment)

    partitions = consumer.partitions_for_topic(KAFKA_PREFIX + KAFKA_TOPIC_READ)
    if partitions:
        for partition in partitions:
            logger.debug("Partition=" + str(partition))

    topics = consumer.topics()
    if topics:
        for topic in topics:
            logger.debug("Topic:" + topic)
    logger.debug('waiting ..')
    """
    .assign requires a full topic name with prefix
    """
    """
    Listening to events it is exactly the same as in kafka_python.
    Read the documention linked below for more info!
    """
    i = 0
    for message in consumer:
        logger.debug("\n ######################################")
        #logger.debug (' %s : %s ' % (i, message))
        logger.debug("%i %s:%d:%d: key=%s value=%s" %
                     (i, message.topic, message.partition, message.offset,
                      message.key, message.value))
        i += 1
        try:
            dictValue = ujson.loads(message.value)
            logger.debug(dictValue)
            sfid = dictValue['data']['payload']['IdObject__c']
            typeEvent = dictValue['data']['payload']['TypeEvent__c']

            if typeEvent == 'EJ':
                # generate a SQL request to retrieve all data about the
                # account object using the sfid
                data = postgres.__getAccountBySfId(sfid)
                dumped = ujson.dumps(data)
                # now send the data to another topic
                sendToKafka_EDF(dumped)
            consumer.commit()
        except Exception:
            import traceback
            traceback.print_exc()
            logger.debug("failed to process message")
            #consumer.commit()


#receiveFromKafka("subscribe")
#import ujson
#data = ujson.dumps({'key':'value'})
#print(data)
#sendToKafka_HardCoded(data)
#testKafkaHelperRCV()
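The happy path above assumes data.payload.IdObject__c and TypeEvent__c are always present and lets the broad except absorb anything else. A hedged sketch of the same lookup with explicit guards (key names taken from the example; the helper itself is hypothetical):

import ujson

def extract_event_fields(raw_value):
    # Hypothetical helper, not part of the original example: returns
    # (sfid, type_event), or (None, None) when the message does not have
    # the expected platform-event shape, instead of raising mid-loop.
    try:
        payload = ujson.loads(raw_value)["data"]["payload"]
        return payload["IdObject__c"], payload["TypeEvent__c"]
    except (ValueError, KeyError, TypeError):
        return None, None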
Example #7
def receiveFromKafka(mode, topic_override=None):

    TOPIC = KAFKA_TOPIC_READ
    if topic_override is not None:
        TOPIC = topic_override

    logger.info("Will use topic = {}".format(TOPIC))
    consumer = HerokuKafkaConsumer(
        # KAFKA_TOPIC,  # Optional: you don't need to pass any topic at all
        url=KAFKA_URL,  # URL string provided by Heroku
        ssl_cert=KAFKA_CLIENT_CERT,  # Client cert string
        ssl_key=KAFKA_CLIENT_CERT_KEY,  # Client cert key string
        ssl_ca=KAFKA_TRUSTED_CERT,  # Client trusted cert string
        prefix=KAFKA_PREFIX,  # Prefix provided by Heroku
        auto_offset_reset="earliest",  # kafka-python accepts only "earliest"/"latest", not "smallest"
        max_poll_records=100,
        enable_auto_commit=True,
        auto_commit_interval_ms=100,
        group_id=KAFKA_GROUP_ID,
        api_version=(0, 9),
    )

    """
    To subscribe to topic(s) after creating a consumer pass in a list of topics without the
    KAFKA_PREFIX.
    """
    partition=1
    
    tp = TopicPartition(KAFKA_PREFIX + TOPIC, partition)
    if mode == "subscribe":
        # NOTE: topics must be a list/tuple; (TOPIC) alone is just a string
        consumer.subscribe(topics=[TOPIC])
    elif mode == "assign":
        consumer.assign([tp])

    # display the list of assigned partitions
    assignments = consumer.assignment()
    for assignment in assignments:
        logger.debug(assignment)
    
    partitions = consumer.partitions_for_topic(KAFKA_PREFIX + TOPIC)
    if partitions:
        for partition in partitions:
            logger.debug("Partition=" + str(partition))
    
    
    topics = consumer.topics()
    if topics:
        for topic in topics:
            logger.debug("Topic:" + topic)
    logger.debug('waiting ..')
    """
    .assign requires a full topic name with prefix
    """
    

    """
    Listening to events it is exactly the same as in kafka_python.
    Read the documention linked below for more info!
    """
    i=0
    for message in consumer:
        try:
            logger.debug ("%i %s:%d:%d: key=%s value=%s" % (i, message.topic, message.partition,
                                              message.offset, message.key,
                                              message.value))

            dictValue = ujson.loads(message.value)
            logger.debug(dictValue)
            
            # check the value of the Action_Type__c field
            if "payload" in dictValue:
                if dictValue['payload']['Action_Type__c'] == 'PushNotification':
                    logger.info("about to send a BROWSER NOTIFICATION using PUSHER")
                    msg_text = dictValue['payload']['message__c']  # renamed: don't shadow the Kafka `message`
                    userid = dictValue['payload']['userid__c']  # a trailing comma here made this a tuple
                    notification.sendNotification(userid, msg_text)
            """
            # 'channel' in dictValue means it's coming from a Platform Event via Kafka
            if 'host_accept_guest__e' in dictValue['channel'].lower():
                logger.info("about to send a SMS using BLOWER")
                message = "Dear {} {} , {} {} is aware of your arrival and will be here shortly".format(
                    dictValue['data']['payload']['Guest_Firstname__c'],
                    dictValue['data']['payload']['Guest_Lastname__c'],
                    dictValue['data']['payload']['Host_Firstname__c'],
                    dictValue['data']['payload']['Host_Lastname__c'],
                )
                blower.sendMessage(message, dictValue['data']['payload']['Guest_Phone_Number__c'])
            elif ('send_smss__e' in dictValue['channel'].lower()):
                logger.info("about to send a SMS using BLOWER")
                message = dictValue['data']['payload']['message__c']
                phone_Number = dictValue['data']['payload']['phone_Number__c']
                blower.sendMessage(message, phone_Number)
            #{'schema': 'h7kPS4B7NEsigjlW7748lg', 
            #   'payload': {
            #           'CreatedById': '0051t000002FB13AAG', 
            #            'message__c': 'Hello ! ', 
            #            'Action_Type__c': 'PushNotification', 
            #            'CreatedDate': '2020-06-16T15:52:45.535Z', 
            #            'userid__c': 'dac11bb3-148e-4b27-a6f2-caf0af09fb0a'}, 'event': {'replayId': 14570697}}}    
            elif ('push_notification__e' in dictValue['channel'].lower()):
                logger.info("about to send a BROWSER NOTIFICATION using PUSHER")
                message = dictValue['data']['payload']['message__c']
                userid = dictValue['data']['payload']['userid__c']
                notification.sendNotification(userid, message)
            """
            consumer.commit()
        except Exception:
            import traceback
            traceback.print_exc()
            consumer.commit()  # commit even on failure, so a poison message is skipped rather than retried forever

        i += 1
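Committing in both the success and the failure branch, as above, deliberately skips a poison message instead of reprocessing it forever; with enable_auto_commit=True the explicit commits mostly just narrow the at-least-once window. For stricter control, a sketch of the same loop with auto-commit disabled, committing once per fully processed batch (the handle() function is hypothetical):

consumer = HerokuKafkaConsumer(
    url=KAFKA_URL,
    ssl_cert=KAFKA_CLIENT_CERT,
    ssl_key=KAFKA_CLIENT_CERT_KEY,
    ssl_ca=KAFKA_TRUSTED_CERT,
    prefix=KAFKA_PREFIX,
    group_id=KAFKA_GROUP_ID,
    enable_auto_commit=False,  # offsets advance only on explicit commit()
    auto_offset_reset="earliest",
)
consumer.subscribe(topics=[KAFKA_TOPIC_READ])

while True:
    batch = consumer.poll(timeout_ms=1000, max_records=100)
    for tp, messages in batch.items():
        for message in messages:
            handle(message)  # hypothetical per-message handler
    if batch:
        consumer.commit()  # one commit per successfully handled batch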
Example #8
producer = HerokuKafkaProducer(
    url=KAFKA_URL,  # URL string provided by Heroku
    ssl_cert=KAFKA_CLIENT_CERT,  # Client cert string
    ssl_key=KAFKA_CLIENT_CERT_KEY,  # Client cert key string
    ssl_ca=KAFKA_TRUSTED_CERT,  # Client trusted cert string
    prefix=KAFKA_PREFIX,  # Prefix provided by Heroku
    #partitioner="0",
)

consumer = HerokuKafkaConsumer(
    # KAFKA_TOPIC,  # Optional: you don't need to pass any topic at all
    url=KAFKA_URL,  # URL string provided by Heroku
    ssl_cert=KAFKA_CLIENT_CERT,  # Client cert string
    ssl_key=KAFKA_CLIENT_CERT_KEY,  # Client cert key string
    ssl_ca=KAFKA_TRUSTED_CERT,  # Client trusted cert string
    prefix=KAFKA_PREFIX,  # Prefix provided by Heroku
    auto_offset_reset="earliest",  # kafka-python accepts only "earliest"/"latest", not "smallest"
    max_poll_records=500,
    enable_auto_commit=True,
    auto_commit_interval_ms=10,
    #group_id=KAFKA_GROUP_ID,
    api_version=(0, 9))


def sendToKafka(data):
    """
    The .send method will automatically prefix your topic with the KAFKA_PREFIX.
    NOTE: If the message doesn't seem to be sending, try `producer.flush()` to force a send.
    """
    producer.send(KAFKA_TOPIC, data, partition=0)
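Per the NOTE in the docstring, .send only enqueues the message; in a short-lived script nothing may actually leave the process unless the buffer is flushed. A short usage sketch (payload hypothetical; kafka-python expects bytes unless a value_serializer is configured):

import ujson

data = ujson.dumps({"key": "value"}).encode("utf-8")  # send() needs bytes by default
sendToKafka(data)
producer.flush()  # block until the broker acknowledges delivery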