def start_json_server(queue_object, port=9999, host="0.0.0.0"):
    """Run a blocking accept/recv loop and forward received payloads to a queue.

    Bug fix: the original constructed ``Server(host, 9999)``, silently
    ignoring the ``port`` parameter; it now binds to the requested port.

    Arguments:
        queue_object -- queue-like object (supports ``put``) that receives
            every valid payload
        port {int} -- port to listen on (default 9999, the original value)
        host {string} -- interface to bind (default all interfaces)

    Note: runs forever; never returns.
    """
    server = Server(host, port)
    while True:
        data = server.accept().recv()
        print("data: ", data)
        if not data:
            # Falsy payload -> treat as invalid and report the error back.
            server.send({"status": "error", "message": "not valid json"})
        else:
            server.send({"status": "ok"})
            queue_object.put(data)
def main():
    """Spawn one worker thread per accepted device and wait for all of them.

    Relies on module-level ``host``, ``port``, ``accepted_devices``,
    ``threads`` and ``threaded_func`` being defined elsewhere in the file.
    """
    server = Server(host, port)
    # For each accepted device create a thread to execute the "server" function
    for dev in accepted_devices:
        worker = Thread(target=threaded_func, args=(server, str(dev)))
        worker.start()
        threads.append(worker)
    print(":main info: thread finished...exiting")
    # Wait for all threads to complete
    for worker in threads:
        worker.join()
    print(":main info: exiting Main Thread")
    server.close()
def __init__(self, host_name, port, key, num_slots):
    """Initialize a server process

    Arguments:
        host_name {string} -- host ip
        port {int} -- socket port
        key -- stored as ``self._key``; presumably an auth/shared secret — TODO confirm
        num_slots {int} -- total number of slots for this worker

    Raises:
        OSError -- happens when resolving host or creating socket
    """
    # Resolve the hostname once up-front; raises OSError on failure.
    host_ip = socket.gethostbyname(host_name)
    # docker daemon
    self.docker = docker.from_env()
    # socket server
    self.server = Server(host_ip, port)
    self._key = key
    # Slot accounting: total capacity vs. currently available.
    self._total_slots = num_slots
    self._avail_slots = num_slots
    self._job_queue = []
    # because it's highly likely to receive multiple jobs at once
    # and each job can take very long, we don't want to block following
    # jobs, so we use one thread for each job received and use these
    # data structures to keep track of threads, indexed by _job_id
    # we use {} instead of [] as they are more robust for multi-threads
    self._threads = {}
    self._thread_stops = {}
    self._running_jobs = {}
    # signal handler to kill the process
    signal.signal(signal.SIGINT, self._gracefully_exit)
    signal.signal(signal.SIGTERM, self._gracefully_exit)
    # image maintaince, image&tag as key, last updated time as value
    self._last_checked = {}
def iradio_req_process():
    """Serve radio-list requests forever on the module-level host/port.

    For each accepted connection: receive a query, pass it to ``_process``,
    then reply with the dummy ``hard_coded()`` payload (testing stub).

    Fix: the ``from file_parser import hard_coded`` was executed on every
    loop iteration; hoisted to the top of the function. Module caching makes
    this behavior-identical while avoiding the per-iteration lookup.

    Note: runs forever; never returns.
    """
    # testing with dummy data
    from file_parser import hard_coded

    # Create jsonserver object
    server = Server(host, port)
    # Accepting client requests indefinitely
    print("ready to listen to requests")
    # Keeping track of no. of clients
    count_clients = 0
    while True:
        server.accept()
        data = server.recv()
        _process(data)
        server.send(hard_coded().__dict__)
        # NOTE(review): clients are numbered from 0 — first client sees "#0".
        print("Radio list sent to client #{}".format(count_clients))
        count_clients += 1
        print(count_clients)
help="IP for host") ap.add_argument("-p", "--port", type=str, default=8000, help="port for host") print_mode = True args = vars(ap.parse_args()) GLOBAL_CLOUD_RECOGNIZER = pickle.loads(open(args["cloud_recognizer"], "rb").read()) GLOBAL_CLOUD_LE = pickle.loads(open(args["cloud_le"], "rb").read()) host = args['host'] port = args['port'] # Server code: server = Server(host, port) # Read until video is completed print('ready to accept connections at server') while(True): # Capture frame-by-frame server.accept() received_query = server.recv() frame = received_query['frame'] embedding_vec = np.array([float(x) for x in received_query['emb']]).reshape(1,-1) if print_mode: print(' ') print('server query at frame: ', frame)