def run(self):
    """Main dispatch loop: pull events off the shared queue forever.

    Initializes the session once, then blocks on the queue service and
    forwards each event to the handlers registered for its eventName
    signal, passing the event itself as sender and this worker's session.
    """
    self._initializeSession()
    service_ctx = ServiceContext()
    event_queue = service_ctx.getQueueService()
    while True:
        evt = event_queue.get()
        dispatcher.send(signal=evt.eventName, sender=evt, session=self.session)
def checksvc(accountid):
    """Enqueue a TENANT_CHECK event for the given account.

    accountid: opaque account identifier, forwarded unchanged in the
    event payload and echoed in the log line.
    """
    ctx = ServiceContext()
    queue = ctx.getQueueService()
    # Renamed from `map`, which shadowed the builtin of the same name.
    payload = {"accountId": accountid, "eventName": "TENANT_CHECK"}
    event = EventFactory.getEvent("TENANT_CHECK", payload)
    queue.put(event)
    logger.info("trigger account(%s) tenant check" % accountid)
def run(self):
    """Consume avro-encoded kafka messages and push decoded events onto the queue.

    Reads topic/group/broker settings from the config service, decodes each
    message's avro payload, extracts the JSON "rawdata" record, tags it with
    the source topic, and enqueues the resulting event. Loops until the
    consumer iterator ends.
    """
    ctx = ServiceContext()
    config = ctx.getConfigService()
    queue = ctx.getQueueService()
    self.schema = avro.schema.parse(avro_schema)
    topics = config.get("Input Plugin: kafka_collector", "kafka_topics")
    group_id = config.get("Input Plugin: kafka_collector", "kafka_groupid")
    bootstrap_server = config.get("Message", "kafka_broker")
    # Call the constructor directly instead of eval() on an interpolated
    # string: eval was unsafe/fragile, and the old `str = ...` binding
    # shadowed the builtin, making `str(e)` below a TypeError.
    self.consumer = KafkaConsumer(topics,
                                  group_id=group_id,
                                  bootstrap_servers=bootstrap_server)
    for msg in self.consumer:
        value = bytearray(msg.value)
        topic = msg.topic
        # Skip the 5-byte prefix -- presumably a schema-registry wire-format
        # header (magic byte + schema id); confirm against the producer.
        bytes_reader = io.BytesIO(value[5:])
        decoder = avro.io.BinaryDecoder(bytes_reader)
        reader = avro.io.DatumReader(self.schema)
        kafkamsg = reader.read(decoder)
        try:
            jsondata = json.loads(kafkamsg['rawdata'])
            eventType = jsondata["eventName"]
            jsondata['topic'] = topic
            queue.put(EventFactory.getEvent(eventType, jsondata))
        except InputError as e:
            self.error(str(e))
        except Exception as e:
            # The original ended in a bare `except:` with no visible body;
            # log and keep consuming instead of dying on one bad message.
            self.error("has exception when resolving kafka message: %s" % e)
def checksvc(accountid):
    """Fire a TENANT_CHECK event for one account via the queue service.

    accountid: account identifier placed in the event payload as-is.
    """
    ctx = ServiceContext()
    queue = ctx.getQueueService()
    # `event_fields` replaces the original name `map`, which shadowed
    # the builtin.
    event_fields = {"accountId": accountid, "eventName": "TENANT_CHECK"}
    event = EventFactory.getEvent("TENANT_CHECK", event_fields)
    queue.put(event)
    logger.info("trigger account(%s) tenant check" % accountid)
def run(self):
    """Consume avro-wrapped kafka messages and enqueue the decoded events.

    Relies on self._initializeschema()/_initializeconsumer() to set up
    self.schema and self.consumer, then loops over incoming messages,
    decoding the avro payload and enqueueing one event per message.
    """
    ctx = ServiceContext()
    queue = ctx.getQueueService()
    self._initializeschema()
    self._initializeconsumer()
    for msg in self.consumer:
        value = bytearray(msg.value)
        topic = msg.topic
        # Skip the 5-byte prefix -- presumably a schema-registry wire-format
        # header; confirm against the producer side.
        bytes_reader = io.BytesIO(value[5:])
        decoder = avro.io.BinaryDecoder(bytes_reader)
        reader = avro.io.DatumReader(self.schema)
        kafkamsg = reader.read(decoder)
        try:
            jsondata = json.loads(kafkamsg['rawdata'])
            eventType = jsondata["eventName"]
            jsondata['topic'] = topic
            # Build the event once: the original constructed it twice
            # (once for a debug print, once for the queue).
            queue.put(EventFactory.getEvent(eventType, jsondata))
        except Exception as e:
            # Narrowed from a bare except; include the cause and fix the
            # original message typos ("excetpion", "resovle").
            self.error("has exception when resolving kafka message: %s" % e)
def run(self):
    """Consume plain-JSON events from kafka and push them onto the queue.

    Reads topic/group/broker settings from the config service, parses each
    message value as JSON, tags it with the source topic, and enqueues the
    resulting event.
    """
    ctx = ServiceContext()
    queue = ctx.getQueueService()
    config = ctx.getConfigService()
    topics = config.get("Input Plugin: event_collector", "event_topic")
    group_id = config.get("Input Plugin: event_collector", "event_groupid")
    bootstrap_server = config.get("Message", "kafka_broker")
    # Construct the consumer directly: the original eval() of an
    # interpolated string was unsafe/fragile and shadowed the builtin
    # `str` along the way.
    self.consumer = KafkaConsumer(topics,
                                  group_id=group_id,
                                  bootstrap_servers=bootstrap_server)
    for msg in self.consumer:
        topic = msg.topic
        try:
            jsondata = json.loads(msg.value)
            eventType = jsondata["eventName"]
            jsondata['topic'] = topic
            queue.put(EventFactory.getEvent(eventType, jsondata))
        except (KeyError, ValueError) as e:
            # Malformed JSON raises ValueError and a missing "eventName"
            # raises KeyError; the original caught IndexError, which
            # cannot occur here.
            self.error(e)
def run(self):
    """Kafka event-collector loop: decode JSON messages into queued events.

    Pulls consumer settings from the config service, then for every kafka
    message parses the value as JSON, records the originating topic in the
    payload, and enqueues an event built from its "eventName" field.
    """
    ctx = ServiceContext()
    queue = ctx.getQueueService()
    config = ctx.getConfigService()
    topics = config.get("Input Plugin: event_collector", "event_topic")
    group_id = config.get("Input Plugin: event_collector", "event_groupid")
    bootstrap_server = config.get("Message", "kafka_broker")
    # Direct constructor call replaces eval() on a format string, which
    # was unsafe and also rebound the builtin `str`.
    self.consumer = KafkaConsumer(topics,
                                  group_id=group_id,
                                  bootstrap_servers=bootstrap_server)
    for msg in self.consumer:
        topic = msg.topic
        try:
            jsondata = json.loads(msg.value)
            eventType = jsondata["eventName"]
            jsondata['topic'] = topic
            queue.put(EventFactory.getEvent(eventType, jsondata))
        except (KeyError, ValueError) as e:
            # json.loads raises ValueError on bad input and the key lookup
            # raises KeyError; IndexError (the original filter) never fires.
            self.error(e)