class TestMessageSerializer(unittest.TestCase):
    def setUp(self):
        # need to set up the serializer
        self.client = MockSchemaRegistryClient()
        self.ms = MessageSerializer(self.client)

    def assertMessageIsSame(self, message, expected, schema_id):
        self.assertTrue(message)
        self.assertTrue(len(message) > 5)
        magic, sid = struct.unpack('>bI', message[0:5])
        self.assertEqual(magic, 0)
        self.assertEqual(sid, schema_id)
        decoded = self.ms.decode_message(message)
        self.assertTrue(decoded)
        self.assertEqual(decoded, expected)

    def test_encode_with_schema_id(self):
        adv = Util.parse_schema_from_string(data_gen.ADVANCED_SCHEMA)
        basic = Util.parse_schema_from_string(data_gen.BASIC_SCHEMA)
        subject = 'test'
        schema_id = self.client.register(subject, basic)

        records = data_gen.BASIC_ITEMS
        for record in records:
            message = self.ms.encode_record_with_schema_id(schema_id, record)
            self.assertMessageIsSame(message, record, schema_id)

        subject = 'test_adv'
        adv_schema_id = self.client.register(subject, adv)
        self.assertNotEqual(adv_schema_id, schema_id)
        records = data_gen.ADVANCED_ITEMS
        for record in records:
            message = self.ms.encode_record_with_schema_id(
                adv_schema_id, record)
            self.assertMessageIsSame(message, record, adv_schema_id)

    def test_encode_record_for_topic(self):
        topic = 'test'
        basic = Util.parse_schema_from_string(data_gen.BASIC_SCHEMA)
        subject = 'test-value'
        schema_id = self.client.register(subject, basic)

        records = data_gen.BASIC_ITEMS
        for record in records:
            message = self.ms.encode_record_for_topic(topic, record)
            self.assertMessageIsSame(message, record, schema_id)

    def test_encode_record_with_schema(self):
        topic = 'test'
        basic = Util.parse_schema_from_string(data_gen.BASIC_SCHEMA)
        subject = 'test-value'
        schema_id = self.client.register(subject, basic)
        records = data_gen.BASIC_ITEMS
        for record in records:
            message = self.ms.encode_record_with_schema(topic, basic, record)
            self.assertMessageIsSame(message, record, schema_id)
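# For reference: the five-byte header these tests check is the Confluent wire
# format -- a zero "magic" byte followed by a big-endian 4-byte schema id, with
# the Avro-encoded body after it. A minimal stand-alone sketch of that framing
# using only the standard library (the serializer classes above handle the body):
import struct

MAGIC_BYTE = 0


def pack_confluent_header(schema_id):
    # '>bI' = big-endian: 1-byte magic, 4-byte unsigned schema id
    return struct.pack('>bI', MAGIC_BYTE, schema_id)


def split_confluent_message(message):
    magic, schema_id = struct.unpack('>bI', message[:5])
    if magic != MAGIC_BYTE:
        raise ValueError('not a Confluent-framed message')
    return schema_id, message[5:]  # schema id and the Avro payload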
def test_select(cluster):
    # type: (ClickHouseCluster) -> None

    schema_registry_client = cluster.schema_registry_client
    serializer = MessageSerializer(schema_registry_client)

    schema = avro.schema.make_avsc_object({
        'name': 'test_record',
        'type': 'record',
        'fields': [{'name': 'value', 'type': 'long'}],
    })

    buf = io.BytesIO()
    for x in range(0, 3):
        message = serializer.encode_record_with_schema('test_subject', schema,
                                                       {'value': x})
        buf.write(message)
    data = buf.getvalue()

    instance = cluster.instances["dummy"]  # type: ClickHouseInstance
    schema_registry_url = "http://{}:{}".format(cluster.schema_registry_host,
                                                cluster.schema_registry_port)

    run_query(instance,
              "create table avro_data(value Int64) engine = Memory()")
    settings = {'format_avro_schema_registry_url': schema_registry_url}
    run_query(instance, "insert into avro_data format AvroConfluent", data,
              settings)
    stdout = run_query(instance, "select * from avro_data")
    assert list(map(str.split, stdout.splitlines())) == [
        ["0"],
        ["1"],
        ["2"],
    ]
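# A related sketch: the same AvroConfluent payload could presumably be inserted
# through ClickHouse's HTTP interface instead of the test helper, passing the
# registry URL as a query-string setting. The port (8123) and the
# settings-as-URL-parameter behaviour are assumptions, not taken from the test above.
import requests


def insert_avro_confluent(data, schema_registry_url, host='localhost', port=8123):
    resp = requests.post(
        'http://{}:{}/'.format(host, port),
        params={
            'query': 'INSERT INTO avro_data FORMAT AvroConfluent',
            'format_avro_schema_registry_url': schema_registry_url,
        },
        data=data,  # concatenated Confluent-framed Avro messages, as built above
    )
    resp.raise_for_status()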
	def setup(self):
		'''
			Initializes the component
			@return: 0 if initialized successfully
		'''
		# Initialize the schema registry client
		
		self.client = CachedSchemaRegistryClient(url='http://localhost:8081')
		#self.client = CachedSchemaRegistryClient(url='http://eagle5.di.uoa.gr:8081')


		# Create the serializer
		
		self.serializer = MessageSerializer(self.client)
		
		self.initialized = True
		
		return 0
class StdOutListener(tweepy.StreamListener):
    def on_status(self, message):
        logging.debug(datetime.now().strftime("%A, %d. %B %Y %I:%M%p"))
        try:
            mls = createPayload(message)
            print(mls)
            timevar = datetime.utcnow() - datetime.strptime(
                mls[11], '%Y-%m-%d %H:%M:%S')
            print(datetime.utcnow(), datetime.strptime(mls[11],
                                                       '%Y-%m-%d %H:%M:%S'))
            print("Minutes and Seconds :", divmod(
                timevar.days * 86400 + timevar.seconds, 60))
        except Exception as e:
            logging.debug(
                'There was an error in creating the payload. The error is: %s'
                % e)
            print('Error in Payload Creation :', str(e))
            twitter_utils.sendErrorMail(
                'There was an error in creating the payload. The error is %s' %
                e)
            return True

        try:
            # Convert the payload into Avro format in preparation for loading into HBase
            avro_schema = Util.parse_schema_from_string(
                open('/**/**/twitter.avsc').read())
            client = CachedSchemaRegistryClient(url='http://192.168.**:8081')
            schema_id = client.register('twitter_avro_schema_stream4',
                                        avro_schema)
            avro_schema = client.get_by_id(schema_id)
            schema_id, avro_schema, schema_version = client.get_latest_schema(
                'twitter_avro_schema_stream4')
            schema_version = client.get_version('twitter_avro_schema_stream4',
                                                avro_schema)
            serializer = MessageSerializer(client)
            encoded = serializer.encode_record_with_schema(
                topicname, avro_schema, {
                    "authid": mls[0],
                    "screen_name": mls[1],
                    "description": mls[2],
                    "favourites_count": convert_long(mls[3]),
                    "followers_count": convert_long(mls[4]),
                    "friends_count": convert_long(mls[5]),
                    "listed_count": convert_long(mls[6]),
                    "location": mls[7],
                    "id_str": mls[8],
                    "time_zone": mls[9],
                    "statuses_count": convert_long(mls[10]),
                    "created_at": mls[11],
                    "favorite_count": convert_long(mls[12]),
                    "tid": mls[13],
                    "in_reply_to_status_id_str": mls[14],
                    "in_reply_to_user_id_str": mls[15],
                    "lang": mls[16],
                    "possibly_sensitive": mls[17],
                    "retweet_count": convert_long(mls[18]),
                    "text": mls[19],
                    "entities_url": mls[20],
                    "entities_expanded_url": mls[21],
                    "entities_media_url": mls[22],
                    "disgust": convert_long(mls[23]),
                    "fear": convert_long(mls[24]),
                    "sadness": convert_long(mls[25]),
                    "surprise": convert_long(mls[26]),
                    "trust": convert_long(mls[27]),
                    "negative": convert_long(mls[28]),
                    "positive": convert_long(mls[29]),
                    "neutral": convert_long(mls[30]),
                    "celebrities": (mls[31]),
                    "events": (mls[32]),
                    "brands": (mls[33]),
                    "accessories": (mls[34])
                })
        except Exception as e:
            logging.debug(
                'There was an error in the generation of the avro file. The error is: %s'
                % e)
            print('Error in avro generation :', e)
            print(mls)
            twitter_utils.sendErrorMail(
                'There was an error in the generation of the avro file. The error is %s. This is likely due to an error in the schema. Please check the schema file under twitter_avro_schema.avsc'
                % e)
            return True
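# The excerpt above stops after building `encoded`; presumably those bytes are
# then published to Kafka. A minimal sketch of that step with kafka-python --
# the broker address and the topic name are assumptions, not from the source:
from kafka import KafkaProducer


def publish_encoded(encoded, topic='twitter_avro_stream', brokers='localhost:9092'):
    producer = KafkaProducer(bootstrap_servers=brokers)
    producer.send(topic, encoded)  # `encoded` is the Confluent-framed Avro payload
    producer.flush()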
class RKConsumer:
	
	def __init__(self, args):
		
		self.node_name = rospy.get_name().replace('/','')
		self.desired_freq = args['desired_freq'] 
		# Checks value of freq
		if self.desired_freq <= 0.0 or self.desired_freq > MAX_FREQ:
			rospy.loginfo('%s::init: Desired freq (%f) is not possible. Setting desired_freq to %f'%(self.node_name,self.desired_freq, DEFAULT_FREQ))
			self.desired_freq = DEFAULT_FREQ
	
		
		self.real_freq = 0.0
		
		# Saves the state of the component
		self.state = State.INIT_STATE
		# Saves the previous state
		self.previous_state = State.INIT_STATE
		# flag to control the initialization of the component
		self.initialized = False
		# flag to control the initialization of ROS stuff
		self.ros_initialized = False
		# flag to control that the control loop is running
		self.running = False
		# Variable used to control the loop frequency
		self.time_sleep = 1.0 / self.desired_freq
		# State msg to publish
		self.msg_state = State()
		# Timer to publish state
		self.publish_state_timer = 1
		
		self.t_publish_state = threading.Timer(self.publish_state_timer, self.publishROSstate)
		
		self.location_x = 0.0
		self.location_y = 0.0
		self.goal_received = False
		self.goal_sent = False
		self.abort_received = False
		
		self.rp = rospkg.RosPack()

		self.tranformWGS = WGS.WGS84toNED()

			
	def setup(self):
		'''
			Initializes the component
			@return: 0 if initialized successfully
		'''
		# Initialize the schema registry client
		# (the registry URL is redacted in the original source; the standalone
		#  copy of this method above uses http://localhost:8081)
		self.client = CachedSchemaRegistryClient(url='http://*****:*****')

		# Create the serializer
		self.serializer = MessageSerializer(self.client)

		self.initialized = True

		return 0

	# NOTE: the original snippet elides everything between setup() and shutdown(),
	# including rosSetup() and the creation of self._state_publisher and
	# self.goal_consumer used below.

	def shutdown(self):
		'''
			Shuts down the component
			@return: 0 if it's performed successfully, -1 if there's any problem or the component is running
		'''
		if self.running or not self.initialized:
			return -1
		rospy.loginfo('%s::shutdown'%self.node_name)
		
		# Cancels current timers
		self.t_publish_state.cancel()
		
		self._state_publisher.unregister()
		
		self.initialized = False
		
		return 0
	
	
	def rosShutdown(self):
		'''
			Shutdows all ROS components
			@return: 0 if it's performed successfully, -1 if there's any problem or the component is running
		'''
		if self.running or not self.ros_initialized:
			return -1
		
		# Performs ROS topics & services shutdown
		self._state_publisher.unregister()
		
		self.ros_initialized = False
		
		return 0
			
	
	def stop(self):
		'''
			Creates and inits ROS components
		'''
		self.running = False
		
		return 0
	
	
	def start(self):
		'''
			Runs ROS configuration and the main control loop
			@return: 0 if OK
		'''
		self.rosSetup()
		
		if self.running:
			return 0
			
		self.running = True
		
		self.controlLoop()
		
		return 0
	
	
	def controlLoop(self):
		'''
			Main loop of the component
			Manages actions by state
		'''
		
		while self.running and not rospy.is_shutdown():
			t1 = time.time()
			
			if self.state == State.INIT_STATE:
				self.initState()
				
			elif self.state == State.STANDBY_STATE:
				self.standbyState()
				
			elif self.state == State.READY_STATE:
				self.readyState()
				
			elif self.state == State.EMERGENCY_STATE:
				self.emergencyState()
				
			elif self.state == State.FAILURE_STATE:
				self.failureState()
				
			elif self.state == State.SHUTDOWN_STATE:
				self.shutdownState()
				
			self.allState()
			
			t2 = time.time()
			tdiff = (t2 - t1)
			
			
			t_sleep = self.time_sleep - tdiff
			
			if t_sleep > 0.0:
				try:
					rospy.sleep(t_sleep)
				except rospy.exceptions.ROSInterruptException:
					rospy.loginfo('%s::controlLoop: ROS interrupt exception'%self.node_name)
					self.running = False
			
			t3 = time.time()
			self.real_freq = 1.0/(t3 - t1)
		
		self.running = False
		# Performs component shutdown
		self.shutdownState()
		# Performs ROS shutdown
		self.rosShutdown()
		rospy.loginfo('%s::controlLoop: exit control loop'%self.node_name)
		
		return 0
		
		
	def rosPublish(self):
		'''
			Publish topics at standard frequency
		'''
					
		return 0
		
	
	def initState(self):
		'''
			Actions performed in init state
		'''
		
		if not self.initialized:
			self.setup()
			
		else: 		
			self.switchToState(State.STANDBY_STATE)
		
		
		return
	
	
	def standbyState(self):
		'''
			Actions performed in standby state
		'''
		self.switchToState(State.READY_STATE)
		
		return
	
	
	def readyState(self):
		'''
			Actions performed in ready state
		'''
					 
		# decode a message from kafka

		try:
			for msg in self.goal_consumer:
				if msg.partition == 6:
					goal_decoded_object = self.serializer.decode_message(msg.value)
					goal_location = goal_decoded_object.get('location')
					goal_header = goal_decoded_object.get('header')
					
					#(41.186809, -8.703597) origin parking lot/pass to radians
					#(41.1872237, -8.7040693) origin corridor/pass to radians
					#pointA = {'latitude' : 0.718845850137 , 'longitude' : -0.151905701075,'height' : 0}
					pointA = {'latitude' : 0.718852663 , 'longitude' : -0.151914668,'height' : 0}
					pointB = {}

					pointB['latitude'] = goal_location.get('latitude')
					pointB['longitude'] = goal_location.get('longitude')
					pointB['height'] = goal_location.get('height')
					ned = self.tranformWGS.displacement(pointA,pointB)
					print(ned)
					goal_x = ned.get('north')
					goal_y = ned.get('east')
					goal_time = goal_header.get('time')
					self.goal_received = True
					print ("Goal Received")
					break
				else:
					print ("Nothing in partition 6")
					break
		except :
			pass

		'''	
		try:
			for msg in self.abort_consumer:
				abort_decoded_object = self.serializer.decode_message(msg.value)
				abort_header = abort_decoded_object.get('header')
				abort_time = abort_header.get('time')

				self.abort_received = True
				print ("Abort command Received")
				break
		except :
			pass
			
		'''	
		
		if self.goal_received:
			#Simple Action Client
			self.sac = actionlib.SimpleActionClient('move_base', MoveBaseAction )

			#create goal
			goal = MoveBaseGoal()
			
			#set goal
			goal.target_pose.pose.position.x = goal_x
			goal.target_pose.pose.position.y = -1 * goal_y
			goal.target_pose.pose.orientation.w = 1
			print(goal.target_pose.pose)
			goal.target_pose.header.frame_id = 'odom'
			goal.target_pose.header.stamp = rospy.Time.now()

			#start listner
			print ("Waiting for server to come up")			
			self.sac.wait_for_server()
			#send goal
			self.sac.send_goal(goal)
			print ("Goal Sent")
			self.goal_received = False
			self.goal_sent = True

		#finish
		if self.goal_sent:
			self.sac.wait_for_result(rospy.Duration.from_sec(5.0))
			#print self.sac.get_state()
				
			if self.sac.get_state() == 3:
				rospy.loginfo("the base reached the goal")
				self.goal_sent = False
				print(self.sac.get_result())

			if (self.sac.get_state() != 1 and self.sac.get_state() != 3) or self.abort_received:
				self.sac.cancel_goal()
				rospy.loginfo("Navigation Failed")
				self.goal_sent = False
				self.abort_received = False
				print(self.sac.get_result())
		
		return
		
	
	def shutdownState(self):
		'''
			Actions performed in shutdown state 
		'''
		if self.shutdown() == 0:
			self.switchToState(State.INIT_STATE)
		
		return
	
	
	def emergencyState(self):
		'''
			Actions performed in emergency state
		'''
		
		return
	
	
	def failureState(self):
		'''
			Actions performed in failure state
		'''
		
			
		return
	
	
	def switchToState(self, new_state):
		'''
			Performs the change of state
		'''
		if self.state != new_state:
			self.previous_state = self.state
			self.state = new_state
			rospy.loginfo('%s::switchToState: %s'%(self.node_name, self.stateToString(self.state)))
		
		return
	
		
	def allState(self):
		'''
			Actions performed in all states
		'''
		self.rosPublish()
		
		return
	
	
	def stateToString(self, state):
		'''
			@param state: state to set
			@type state: State
			@returns the equivalent string of the state
		'''
		if state == State.INIT_STATE:
			return 'INIT_STATE'
				
		elif state == State.STANDBY_STATE:
			return 'STANDBY_STATE'
			
		elif state == State.READY_STATE:
			return 'READY_STATE'
			
		elif state == State.EMERGENCY_STATE:
			return 'EMERGENCY_STATE'
			
		elif state == State.FAILURE_STATE:
			return 'FAILURE_STATE'
			
		elif state == State.SHUTDOWN_STATE:
			return 'SHUTDOWN_STATE'
		else:
			return 'UNKNOWN_STATE'
	
		
	def publishROSstate(self):
		'''
			Publish the State of the component at the desired frequency
		'''
		self.msg_state.state = self.state
		self.msg_state.state_description = self.stateToString(self.state)
		self.msg_state.desired_freq = self.desired_freq
		self.msg_state.real_freq = self.real_freq
		self._state_publisher.publish(self.msg_state)
		
		self.t_publish_state = threading.Timer(self.publish_state_timer, self.publishROSstate)
		self.t_publish_state.start()
	
		
	"""
    def get_message_serializer(self):
        schema_registry_url = self.get_schema_registry_url()
        logger.debug('loading schema registry: ' + schema_registry_url)
        schema_client = CachedSchemaRegistryClient(url=schema_registry_url)
        return MessageSerializer(schema_client)
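# A hedged usage sketch for a helper like the one above, written as a free
# function so it is self-contained. It assumes confluent-kafka-python's avro
# helpers; the snippets in this listing may come from a different schema-registry
# client package. The registry URL, topic, schema, and record are placeholders:
from confluent_kafka import avro as confluent_avro
from confluent_kafka.avro.cached_schema_registry_client import CachedSchemaRegistryClient
from confluent_kafka.avro.serializer.message_serializer import MessageSerializer


def encode_example(registry_url='http://localhost:8081'):
    schema_client = CachedSchemaRegistryClient(url=registry_url)
    serializer = MessageSerializer(schema_client)
    value_schema = confluent_avro.loads(
        '{"type": "record", "name": "example", '
        '"fields": [{"name": "value", "type": "long"}]}')
    return serializer.encode_record_with_schema('example-topic', value_schema, {'value': 1})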
from pyspark import SparkContext
from pyspark.streaming import StreamingContext
from pyspark.streaming.kafka import KafkaUtils
from sys import argv
from config import KAFKA_URL, KAFKA_BROKER_LIST
# Assumed imports for the registry client and serializer used below; adjust to
# whichever schema-registry client package is installed:
from confluent_kafka.avro.cached_schema_registry_client import CachedSchemaRegistryClient
from confluent_kafka.avro.serializer.message_serializer import MessageSerializer

topic = argv[1]  # topic name, passed as the first script argument
schema_registry_url = argv[2]  # schema registry URL (e.g. http://localhost:8081)
if len(argv) > 3 and argv[3] == 'reset':
    auto_offset_reset = 'smallest'
else:
    auto_offset_reset = 'largest'

schema_registry_client = CachedSchemaRegistryClient(url=schema_registry_url)
serializer = MessageSerializer(schema_registry_client)


# simple decoder to replace Kafka streaming's built-in UTF-8 value decoding
def decoder(s):
    decoded_message = serializer.decode_message(s)
    return decoded_message


# Spark Streaming from Kafka
master = 'local[2]'
app_name = 'kafka_consumer'
sc = SparkContext(master, app_name)
ssc = StreamingContext(sc, 60)
kvs = KafkaUtils.createDirectStream(
    ssc, [topic],
    {
        "metadata.broker.list": KAFKA_BROKER_LIST,
        # the original snippet breaks off here; the offset option and the Avro
        # value decoder defined above are presumably wired in like this:
        "auto.offset.reset": auto_offset_reset,
    },
    valueDecoder=decoder)
def writeToavro(p, mls):
    # Convert the payload into Avro format in preparation for loading into HBase
    try:
        avro_schema = Util.parse_schema_from_string(
            open('/root/quest/twitter_avro_schema.avsc').read())
        client = CachedSchemaRegistryClient(url='http://192.168.111.12:8081')
        schema_id = client.register('twitter_avro__schema_stream4',
                                    avro_schema)
        avro_schema = client.get_by_id(schema_id)
        schema_id, avro_schema, schema_version = client.get_latest_schema(
            'twitter_avro__schema_stream4')
        schema_version = client.get_version('twitter_avro__schema_stream4',
                                            avro_schema)
        serializer = MessageSerializer(client)
        encoded = serializer.encode_record_with_schema(
            topicname, avro_schema, {
                "authid": mls[0],
                "screen_name": mls[1],
                "description": mls[2],
                "favourites_count": convert_long(mls[3]),
                "followers_count": convert_long(mls[4]),
                "friends_count": convert_long(mls[5]),
                "listed_count": convert_long(mls[6]),
                "location": mls[7],
                "id_str": mls[8],
                "time_zone": mls[9],
                "statuses_count": convert_long(mls[10]),
                "created_at": mls[11],
                "favorite_count": convert_long(mls[12]),
                "tid": mls[13],
                "in_reply_to_status_id_str": mls[14],
                "in_reply_to_user_id_str": mls[15],
                "lang": mls[16],
                "possibly_sensitive": mls[17],
                "retweet_count": convert_long(mls[18]),
                "text": mls[19],
                "entities_url": mls[20],
                "entities_expanded_url": mls[21],
                "entities_media_url": mls[22],
                "disgust": convert_long(mls[23]),
                "fear": convert_long(mls[24]),
                "sadness": convert_long(mls[25]),
                "surprise": convert_long(mls[26]),
                "trust": convert_long(mls[27]),
                "negative": convert_long(mls[28]),
                "positive": convert_long(mls[29]),
                "neutral": convert_long(mls[30]),
                "celebrities": (mls[31]),
                "events": (mls[32]),
                "brands": (mls[33]),
                "accessories": (mls[34])
            })
    except Exception as e:
        logging.debug(
            'There was an error in the generation of the avro file. The error is: %s'
            % e)
        print('Error in avro generation :', e)
        print(mls)
        twitter_utils.sendErrorMail(
            'There was an error in the generation of the avro file. The error is %s'
            % e)
        return True