def _convertFrom(this, oldClass: type): logger.debug( "CTU_Host._convertFrom(): Converting self from class %s to class CTU_Host..." % str(oldClass)) # Create the model/proxy for the node's GPS module. this.node.gps_module = gps.GPS_Module(this.node) # Create the GPS Manager object to manage the state of the node's GPS module. this.node.gps_manager = gps.GPS_Manager(this.node) # Create an "inbox" (really, WatchBox) to track the most recently- # received PPSCNTR message from the real CTU. this.pps_inbox = utils.WatchBox(lock=this._lock) # Create a "publisher" object to allow other entities to subscribe # to be notified of each message from the CTU of a given type # (currently, it only produces messages of type 'PPSCNTR'). The # advantage of this over the WatchBox mechanism is that it # guarantees that no messages will be missed. this.publisher = publisher.Publisher() # Change the node's class as well. this.node.become(CTU_Node)
def __init__(self):
    import publisher
    import printer

    self.publisher = publisher.Publisher()
    self.printer = printer.BoardPrinter()
    self.run_printer_in_thread()
    self.new_game()
def main():
    if len(sys.argv) == 1:  # no args
        cmd = "torcs"
    elif sys.argv[1] == 'bot':
        cmd = ["torcs", "-r", "/vagrant/torcs-1.3.6/src/raceman/quickrace.xml"]
        # start the pyclient
        runserver = ["python", "/vagrant/python_svr/pyclient.py"]
        subprocess.Popen(runserver, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

    # Run command
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

    # ZMQ setup
    ip = "127.0.0.1"
    port = 8690
    list_of_topics = ['simulator=']
    ZMQp = pub.Publisher(ip, port, list_of_topics)

    # Print stdout buffer
    for line in iter(p.stdout.readline, b''):
        pyout = {}
        splitline = line.split('\t')
        for info in splitline:
            nameValue = info.split(':')
            if len(nameValue) == 2:
                pyout[nameValue[0]] = nameValue[1]
        jsonout = json.dumps(pyout)
        # print(jsonout)
        ZMQp.send_message('simulator=', jsonout)
def _convertFrom(this, oldClass: type): logger.debug( "ShowerDetectorHost._convertFrom(): Converting self from class %s to class ShowerDetectorHost..." % str(oldClass)) # TODO: Create sub-objects representing the various threshold-level DACs. # Also, create sub-objects representing the various input channels. # Create a fixed tuple of sub-objects representing the three pulse- # form input channels. They are all initialized to a state indicating # that no data has yet been received on that channel. this.input_channels = (PulseformChannel(1), PulseformChannel(2), PulseformChannel(3)) # NOTE: These are not yet used. # These watch-boxes allow inspecting the last message that occurred # of a given type, or waiting for the next one to occur. this.daclevs_inbox = utils.WatchBox(lock=this._lock) this.ncpuls_inbox = utils.WatchBox(lock=this._lock) this.fifull_inbox = utils.WatchBox(lock=this._lock) this.conpuls_inbox = utils.WatchBox(lock=this._lock) this.lostpuls_inbox = utils.WatchBox(lock=this._lock) # The Publisher interface allows subscribers to register callbacks # to be called on every message of a given type. this.publisher = publisher.Publisher()
def test_read_and_publish(self):
    data_publisher = publisher.Publisher(self.power_supplies,
                                         self.google_cloud_project_id,
                                         self.pubsub_topic_name)
    data_publisher.read_data_and_publish()

    for ps in self.power_supplies:
        ps.get_voltage_level.assert_called_once_with()
        ps.get_current_level.assert_called_once_with()
        ps.get_display_voltage.assert_called_once_with()
        ps.get_display_current.assert_called_once_with()
def main():
    def on_publish(client, userdata, mid):
        print("move to successful:", mid)
        sqllogger.move_to_successful(mid)

    connected_broker = False
    running = True
    sqllogger = SQL_Lite_Logger.SQL_Lite_Logger("test.db")

    # init the publisher object
    MQTT_publisher = publisher.Publisher('iot.eclipse.org', out_topic="fonagotouch",
                                         on_publish=on_publish)  # init the object
    MQTT_publisher.start()
    try:
        MQTT_publisher.start()
    except Exception as error:
        print('\033[91mCannot create the publisher \033[0m')
        print(error)

    if MQTT_publisher.status():
        print('\033[94mPublisher connected \033[0m')
        # MQTT_publisher.start()  # start the thread
    else:
        print('\033[91mPublisher not connected \033[0m')

    try:
        cont = 0
        prom = 0
        while running:
            if not MQTT_publisher.status():
                MQTT_publisher.start()
            start = time.time()
            # time.sleep(0.5)
            latitude = 93 + random.random() / 88855    # closing to CR
            longitude = -86 + random.random() / 88855  # closing to CR
            status = randint(1, 2)                     # values {1 or 2}
            speed = random.uniform(1, 180)             # km/h
            altitude = random.uniform(3100, 3600)      # meters
            diction = {'date': utils.getTime(), 'latitude': latitude,
                       'longitude': longitude, 'status': status,
                       'speed': speed, 'altitude': altitude}
            message_id = -1
            print("Connection status:", MQTT_publisher.status())
            print("Client status:", MQTT_publisher.client._state)
            if MQTT_publisher.status():
                message_id = MQTT_publisher.publish_data(str(diction))
                # print(message_id)
            sqllogger.backup(diction, message_id)
            end = time.time()
            # print("elapsed time", end - start)
            cont += 1
            prom += (end - start)
            # print("prom time:", prom/cont)
    except (KeyboardInterrupt, SystemExit):
        if connected_broker:
            MQTT_publisher.stop()
        running = False
        print("bye")
def main():
    global pub

    numberOfRobots = 2
    rows = 40
    columns = 40
    sendDelay = 1.0
    lastTime = time.time()

    '''
    while True:
        try:
            numberOfRobots = int(raw_input("number of robots: "))
            break
        except:
            print("wrong input, try again")
    '''

    print '''
    ##########
    CALIBRATION STARTING
    ##########
    '''

    playField, positionColor, teamColors = ceilingCam.calibrate_everything(
        int(numberOfRobots))

    mainGrid = grid.Grid(playField[2], playField[3], cols=columns, rows=rows,
                         colors=teamColors)

    pub = publisher.Publisher(50007)
    pub.start()

    ## THE ACTUAL GAME LOOP
    while True:
        posret = ceilingCam.detectRobots(playField, positionColor, teamColors)
        if not posret:
            continue
        mainGrid.update(posret)
        # mainGrid.draw_grid()

        if time.time() - lastTime > sendDelay:
            gridData, positionData = mainGrid.get_data()
            pub.publish(gridData, positionData, (playField[2], playField[3]),
                        (columns, rows))  # (gridData, positionData)
            lastTime = time.time()
def setUp(self):
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_memcache_stub()
    ndb.get_context().clear_cache()

    self.video_queue_client = mock.Mock()
    self.video_queue_client.pop.return_value = (
        video_queue.Video(self.VIDEO_ID, self.VIDEO_NAME))
    self.videos_client = mock.Mock()

    self.publisher = publisher.Publisher(
        video_queue_client=self.video_queue_client,
        videos_client=self.videos_client)
    self.publisher._get_now = mock.Mock()
    self.publisher._get_now.return_value = self.NOW
def main():
    config = {
        "service-port": 9091,
        "block-publish-port": 9092,
        "tx-publish-port": 9093,
        "database": "/home/genjix/database",
        "stop-secret": "surfing2",
        "dbhost": "localhost",
        "dbname": "bccache",
        "dbuser": "******",
        "dbpassword": "******"
    }
    # Load config here.
    node = fullnode.FullNode()

    print "Starting node..."
    if not node.start(config):
        return 1
    print "Node started."

    print "Starting publisher..."
    publish = publisher.Publisher(node)
    if not publish.start(config):
        return 1
    print "Publisher started."

    print "Starting QTable..."
    table = qtable.QueryCacheTable(node)
    if not table.start(config):
        return 1
    print "QTable started."

    print 'Starting the server...'
    serv = service.ServiceServer()
    serv.start(config, node)
    while not serv.stopped:
        table.update()
        time.sleep(1)
    print "Server stopped."

    print "Stopping node..."
    if not node.stop():
        return 1
    print "Node stopped."
    return 0
def send_request(table, streamtype):
    event = threading.Event()
    pause_events[table][streamtype] = event
    pub.Publisher(event, table, streamtype).run()
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
app.upload_path = Path(os.path.join(APP_ROOT, 'static/uploads'))
app.config.update(
    # SESSION_COOKIE_SECURE=True,
    SESSION_COOKIE_HTTPONLY=True,
    SESSION_COOKIE_SAMESITE='Lax',
)

# path to the app directory
cwd = os.path.dirname(os.path.realpath(__file__))

redis = redis.Redis()
app.queue = publisher.Publisher("localhost", "slyko-exchange", "resize")


def sendMessage(msg):
    payload = {
        "openpath": msg['openpath'],
        "savepath": msg['savepath'],
        "exp": (datetime.datetime.utcnow() + datetime.timedelta(days=1))
    }
    send = jwt.encode(payload, app.jwt_secret_key, algorithm='HS256')
    app.queue.publish(send)
    return


def checkFormat(filepath):
    tmp = filepath.split(".")[-1]
def __init__(self):
    threading.Thread.__init__(self)
    self.publisher = publisher.Publisher()
def publish(self, project_path=None):
    """ Publishes the current file based on the file name and location. """
    if not project_path:
        project_path = self.get_project_path()

    # Check that the environment is valid
    if environment.is_valid(software=self.get_software()):
        # Check if the file has been modified
        if self.is_project_modified():
            if self.file_not_saved_dlg():
                self._save()
            else:
                return False

        # Define directories
        proj_dir = os.path.dirname(project_path)
        raw_proj_name, proj_ext = os.path.splitext(os.path.basename(project_path))
        self.debug_msg("project basename = " + os.path.basename(project_path))
        archive_dir = os.path.join(proj_dir, 'archive')
        publish_dir = os.path.join(proj_dir, 'publish')

        # Append 'PUBLISH' to file before archiving it
        archive_name, ext = os.path.splitext(os.path.basename(project_path))
        archive_name = self.remove_temp_suffix(archive_name)
        archive_name = archive_name + '_PUBLISH' + ext

        # Create publish name
        pub_name = self.create_pub_name(os.path.basename(project_path))

        publisher_dlg = publisher.Publisher(publish_dir, pub_name)
        if publisher_dlg.exec_():
            pub_name = publisher_dlg.get_name()

            # If a valid name comes back from the dialog, copy it to the publish directory
            if pub_name:
                # Create archive directory if it doesn't exist
                try:
                    os.makedirs(archive_dir)
                except OSError:
                    if not os.path.isdir(archive_dir):
                        raise
                copyfile(project_path, os.path.join(archive_dir, archive_name))

                # Create publish folder if it doesn't exist
                try:
                    os.makedirs(publish_dir)
                except OSError:
                    if not os.path.isdir(publish_dir):
                        raise
                copyfile(project_path, os.path.join(publish_dir, pub_name))

                if publisher_dlg.get_del_state():
                    if raw_proj_name.endswith(TEMP_FILE_SUFFIX):
                        if os.path.isfile(project_path):
                            self.debug_msg("Deleting this file: " + project_path)
                            os.remove(project_path)

                        # If a temporary file is currently open, open the versioned file instead
                        versioned_file = self.remove_temp_suffix(project_path)
                        if versioned_file != project_path and os.path.isfile(versioned_file):
                            self.debug_msg("Opening versioned file")
                            self.open_project(versioned_file)
    else:
        self.debug_msg("Environment is not valid!")
cold = [4, 5, 6]
hot = [0, 1, 2, 3]
hotring = 1
coldring = 2


def order_received_callback(message):
    # check order
    if message.order_type in cold:
        order_queue.put((message.order_type, coldring))
    elif message.order_type in hot:
        order_queue.put((message.order_type, hotring))
    else:
        rospy.loginfo("Error unrecognised drink ordered: " + str(message.order_type))


def setup_subscriber():
    rospy.Subscriber('orders', Order, order_received_callback)


if __name__ == '__main__':
    try:
        rospy.init_node('director')
        setup_subscriber()
        pub = publisher.Publisher()
        prepare_thread = preparethread.PrepareThread()
        prepare_thread.daemon = True
        prepare_thread.start()
        rospy.spin()
    except:
        # wait for the prepare thread to finish before exiting
        prepare_thread.join()
        pass
import publisher
import random
from random import randint
import time
import os
import datetime

# init the publisher object
connected = False
MQTT_publisher = None
try:
    MQTT_publisher = publisher.Publisher()    # init the object
    connected = MQTT_publisher.isConnected    # is it connected?
    if connected:
        print("MQTT publisher is connected")
    else:
        print("MQTT publisher is not connected")
    MQTT_publisher.start()                    # start the thread
except Exception:
    print(" Error MQTT creating the publisher")

try:
    counter = 0
    while connected:
        time.sleep(2)
        value = raw_input('Enter the message:')
        diction = {'data': value}
        MQTT_publisher.publish_data(str(diction))
if __name__ == '__main__':
    print(" Connect to Redis")
    redis_host = os.environ['REDIS'] if "REDIS" in os.environ else "127.0.0.1"
    r = redis.Redis(host=redis_host, port=6379, password="******")
    # store the routing key so it can be read back below
    r.set("routing", routing_key)
    p = r.get("routing")
    print(p)
    time.sleep(30)

    print(" Starting publisher!")
    sys.stdout.flush()

    pub = publisher.Publisher(exchange_name, routing_key)
    pub.connect_to_rabbit_blocking()
    pub.channel.queue_declare(queue='friends.talk.back')

    try:
        app.run(host='0.0.0.0')
    finally:
        pub.connection.close()