def pubData(publisher: zmq.Socket, topic: str):
    """Publish a square wave on *topic* forever at 10 Hz.

    Sends "<topic> 1" for 100 ticks, then "<topic> 0" for 100 ticks,
    and repeats. Useful as a trivially predictable mock data source.
    """
    while True:
        for value in ("1", "0"):
            for _ in range(100):
                publisher.send_string("%s %s" % (topic, value))
                time.sleep(0.10)  # 10hz
async def push_game_state(game_state: GameState, sock: Socket) -> None:
    """Continuously publish the serialized game state at the server tick rate.

    Runs until the surrounding task is cancelled; cancellation is the
    expected shutdown path and is swallowed silently.
    """
    try:
        while True:
            payload = game_state.to_json()
            sock.send_string(payload)
            await asyncio.sleep(1 / SERVER_TICK)
    except asyncio.CancelledError:
        # Normal termination: the owning task was cancelled.
        pass
def pubData(publisher: zmq.Socket, topic: str):
    """Read raw IMU samples and publish them on *topic* at ~50 Hz until Ctrl-C.

    Reads accelerometer + gyroscope (LSM6DS33) and magnetometer (LIS3MDL)
    raw triples each tick, optionally median-filters every axis, then sends
    "<topic> ax ay az gx gy gz mx my mz" as a single space-separated string.

    Fix over original: the nine copy-pasted MedianFilter creations and nine
    copy-pasted filter applications are collapsed into one filter list that
    is zipped against the samples in the same (accel, gyro, mag) order.
    """
    # Create and enable the IMU devices.
    accGyro = LSM6DS33()
    accGyro.enableLSM()
    mag = LIS3MDL()
    mag.enableLIS()

    # One median filter per axis, in publish order: ax ay az gx gy gz mx my mz.
    filters = None
    if config.USE_MEDIAN_FILTER:
        filters = [MedianFilter(config.MF_WINDOW_SIZE) for _ in range(9)]

    while True:
        try:
            # Read raw IMU values.
            ax, ay, az = accGyro.getAccelerometerRaw()
            gx, gy, gz = accGyro.getGyroscopeRaw()
            mx, my, mz = mag.getMagnetometerRaw()
            values = [ax, ay, az, gx, gy, gz, mx, my, mz]

            if filters is not None:
                # Smooth each axis through its own median filter.
                values = [int(f.filt(v)) for f, v in zip(filters, values)]

            # Publish onto topic (and echo locally for debugging).
            publisher.send_string("%s %i %i %i %i %i %i %i %i %i" % (topic, *values))
            print("'%s': %i %i %i %i %i %i %i %i %i" % (topic, *values))
            time.sleep(0.020)  # 50hz
        except KeyboardInterrupt:
            break
def zmq_export(sock: zmq.Socket, topic: str, data, datatype: str = "pyobj"):
    """
    Author:
    Alexander Heilmeier & Tim Stahl

    Description:
    Sends data via ZMQ.

    Inputs:
    sock:       ZMQ socket (see below how to create it)
    topic:      ZMQ topic to use
    data:       data to send
    datatype:   string that indicates if it should be sent as Python object (pyobj), json (json) or string (str)

    Raises:
    RuntimeError if datatype is not one of "pyobj", "json", "str".

    Hint: To send an object as string it must be converted to a string at first. Conversion of Python objects to
    json objects is handled by PyZMQ and therefore must not be done by hand if sending a json.

    How to create a ZMQ socket to export data?
    import zmq
    zmq_context = zmq.Context()
    sock = zmq_context.socket(zmq.PUB)
    sock.bind("tcp://*:%s" % port)
    """

    # ------------------------------------------------------------------------------------------------------------------
    # FUNCTION BODY ----------------------------------------------------------------------------------------------------
    # ------------------------------------------------------------------------------------------------------------------

    # Fix: validate BEFORE sending. The original raised only after the topic
    # frame was already queued with SNDMORE, leaving the socket stuck in a
    # half-finished multipart message on bad input.
    if datatype not in ("pyobj", "json", "str"):
        raise RuntimeError("Specified datatype is not supported!")

    sock.send_string(topic, zmq.SNDMORE)

    if datatype == "pyobj":
        sock.send_pyobj(data)
    elif datatype == "json":
        sock.send_json(data)
    else:  # "str"
        sock.send_string(data)
def _publish_message(message: Message, publish_socket: zmq.Socket):
    """Log a chat message and broadcast it to all subscribers on the ALL channel."""
    formatted = message.formatted()
    _logger.info("CHAT: {}".format(formatted))
    publish_socket.send_string("ALL {}".format(formatted))
def send_message(soc: zmq.Socket, msg: str) -> None:
    """Send a message string over the given ZMQ socket."""
    soc.send_string(msg)
def pubMock(publisher: zmq.Socket, topic: str, filePath: str):
    """Replay recorded mock IMU data on *topic* at ~50 Hz.

    Concatenates the configured mock CSV files found in
    config.MOCK_DATA_FOLDER into a temporary "combined.csv", then streams
    the rows out one per tick (optionally median-filtered), formatted as
    "<topic> ax ay az gx gy gz mx my mz". Stops on Ctrl-C or end of data,
    then deletes the temporary file.

    Note: *filePath* is unused; kept for interface compatibility.

    Fix over original: the "combined.csv" file handle was opened and never
    closed (resource leak — and an open handle blocks the final os.remove
    on Windows); it is now managed by a `with` block. The nine duplicated
    median-filter creations/applications are also collapsed into one list.
    """
    # Set working directory to the mock data folder.
    os.chdir(config.MOCK_DATA_FOLDER)

    # Keep only the configured mock files that actually exist here.
    dir_files = os.listdir()
    all_filenames = [f for f in config.MOCK_DATA_PATHS if f in dir_files]
    print(all_filenames)

    # Combine all files in the list.
    # Note: Xavier the dataset files all have header so will need to account for that. I removed the header in the
    # test data in mock_data to test first
    combined_csv = pd.concat([pd.read_csv(f, header=None) for f in all_filenames])

    # Export to csv (replacing any stale copy from a previous run).
    if os.path.isfile("combined.csv"):
        os.remove("combined.csv")
    combined_csv.to_csv("combined.csv", index=False, encoding='utf-8-sig', header=None)

    # One median filter per axis, in publish order: ax ay az gx gy gz mx my mz.
    filters = None
    if config.USE_MEDIAN_FILTER:
        filters = [MedianFilter(config.MF_WINDOW_SIZE) for _ in range(9)]

    # Fix: close the stream deterministically (original leaked the handle).
    with open("combined.csv", newline='') as stream:
        csvFile = csv.reader(stream, delimiter=',')
        while True:
            try:
                # Read one row of IMU values (column 0 is skipped, as before).
                r = next(csvFile)
                values = [float(x) for x in r[1:10]]

                if filters is not None:
                    # Smooth each axis through its own median filter.
                    values = [int(f.filt(v)) for f, v in zip(filters, values)]

                # Publish onto topic (and echo locally for debugging).
                publisher.send_string("%s %i %i %i %i %i %i %i %i %i" % (topic, *values))
                print("'%s': %i %i %i %i %i %i %i %i %i" % (topic, *values))
                time.sleep(0.020)  # 50hz
            except (KeyboardInterrupt, StopIteration):
                break

    # Clean up the temporary combined file.
    os.remove("combined.csv")
def send_json(socket: zmq.Socket, data: dict, **kw) -> None:
    """Serialize *data* to JSON and send it as a string frame."""
    socket.send_string(json.dumps(data), **kw)
def send_string(socket: zmq.Socket, data: str, **kw) -> None:
    """Send *data* as a string frame, forwarding any extra keyword arguments."""
    socket.send_string(data, **kw)
def pubMock(publisher: zmq.Socket, topic: str, filePath: str):
    """Replay recorded left/right-foot IMU trial data on *topic* at ~50 Hz.

    Walks config.MOCK_DATA_FOLDER for trial folders, collects the left
    ("s2.csv") and right ("s3.csv") sensor files, concatenates them into
    temporary "left_data.csv"/"right_data.csv", handshakes with the
    Predictor over a REQ socket, then streams one row pair per tick as
    "<topic> lwx lwy lwz lax lay laz rwx rwy rwz rax ray raz gt".
    Stops on Ctrl-C or end of data, then deletes the temporary files.

    Note: *filePath* is unused; kept for interface compatibility.

    Fix over original: both CSV file handles were opened and never closed
    (resource leak — and open handles block the trailing os.remove calls
    on Windows); they are now managed by a `with` block.
    """
    left_data = []
    right_data = []

    # NOTE(review): os.getcwd() is re-read AFTER each chdir, so with more than
    # one trial folder the computed path compounds incorrectly and later reads
    # resolve from the wrong directory — confirm the mock data set is expected
    # to contain a single trial folder before relying on multi-folder replay.
    for root, dirs, files in os.walk(config.MOCK_DATA_FOLDER, topdown=False):
        for folder in dirs:
            cwd = os.getcwd() + "/"
            os.chdir(cwd + os.path.join(root, folder))
            print(os.getcwd())
            print("Testing on trial data:", folder)
            # Getting left ("s2.csv") and right ("s3.csv") data readings.
            for file in os.listdir():
                if 's2.csv' in file:
                    left_data.append(file)
                if 's3.csv' in file:
                    right_data.append(file)

    # Combine all files in the list (skip each file's header row).
    left_csv = pd.concat([pd.read_csv(f, header=None, skiprows=1) for f in left_data])
    right_csv = pd.concat([pd.read_csv(f, header=None, skiprows=1) for f in right_data])

    # Export to csv, replacing any stale copies from a previous run.
    for tmp in ("left_data.csv", "right_data.csv"):
        if os.path.isfile(tmp):
            os.remove(tmp)
    left_csv.to_csv("left_data.csv", index=False, encoding='utf-8-sig', header=None)
    right_csv.to_csv("right_data.csv", index=False, encoding='utf-8-sig', header=None)

    # Wait for Predictor to be ready for data (blocking REQ/REP handshake).
    context = zmq.Context()
    client = context.socket(zmq.REQ)
    client.connect(config.PREDICT_READY_SOCK)
    print("Waiting for Predictor to be ready...")
    client.send("Ready?".encode())
    if (client.poll() & zmq.POLLIN) != 0:
        if client.recv().decode() == "Yes":
            print("Predictor is ready!")

    if config.WAIT_FOR_USER:
        input("Press something to start...")

    print("Publishing data")

    # Fix: close both streams deterministically (original leaked the handles).
    with open("left_data.csv", newline='') as L_stream, \
         open("right_data.csv", newline='') as R_stream:
        L_csvFile = csv.reader(L_stream, delimiter=',')
        R_csvFile = csv.reader(R_stream, delimiter=',')
        while True:
            try:
                # Read IMU values from left and right data streams (column 0 skipped).
                l = next(L_csvFile)
                r = next(R_csvFile)
                lwx, lwy, lwz = float(l[1]), float(l[2]), float(l[3])
                lax, lay, laz = float(l[4]), float(l[5]), float(l[6])
                rwx, rwy, rwz = float(r[1]), float(r[2]), float(r[3])
                rax, ray, raz = float(r[4]), float(r[5]), float(r[6])
                gt = float(r[10])  # presumably a ground-truth label column — TODO confirm

                # Publish onto topic (%i truncates the floats, as in the original).
                publisher.send_string("%s %i %i %i %i %i %i %i %i %i %i %i %i %i"
                                      % (topic, lwx, lwy, lwz, lax, lay, laz,
                                         rwx, rwy, rwz, rax, ray, raz, gt))
                time.sleep(0.020)  # 50hz
            except (KeyboardInterrupt, StopIteration):
                break

    # Clean up the temporary working CSVs.
    os.remove("left_data.csv")
    os.remove("right_data.csv")