def run(self):
    """Reader thread: pull lines from the serial device forever and push
    them onto the shared global queue, recreating the device whenever it
    has been torn down (signalled by self.serial being set to None)."""
    global queue
    while True:
        # if the device has been destroyed, recreate. This is forced by setting it to None
        # NOTE(review): prefer `is None` over `== None`
        if self.serial == None:
            self.createSerial()
            continue
        try:
            #line = self.serial.readline (None, '\r\n')
            line = self.serial.readline()
            if len(line) == 0:
                # print ("empty line: %s" % line)
                # this happens after timeout; the call to getRI causes an exception
                # if the device has been disconnected; we use this as a side effect
                # to force recreating the device after a timeout
                ri = self.serial.getRI()
                ## should the call work and the line is down, recreate
                #if not ri:
                #    print ("RI is down, try to reconnect")
                #    time.sleep (5)
                #    self.serial = None
                continue
        except serial.serialutil.SerialException as exc:
            # this happens when the cable is disconnected, but not on the Stylistic
            print("SerialException: ", exc)
            self.serial.close()
            time.sleep(1)
            self.serial = None
            continue
        # NOTE(review): in pySerial, `portNotOpenError` is historically an
        # exception *instance*, not a class — `except serial.portNotOpenError`
        # would raise TypeError if this branch were ever selected; modern
        # pySerial spells the class `serial.PortNotOpenError`. Confirm the
        # installed version. This handler is also shadowed by the
        # SerialException handler above if the class subclasses it.
        except serial.portNotOpenError as exc:
            print("portNotOpenError serial exception: ", exc)
            self.serial.close()
            time.sleep(1)
            self.serial = None
            continue
        except IOError as exc:
            print("IOError serial exception: ", exc)
            self.serial.close()
            time.sleep(5)
            self.serial = None
            continue
        except Exception as exc:
            # catch-all: log the class name so unknown failure modes are visible
            print("other serial exception: ", exc, exc.__class__.__name__)
            self.serial.close()
            time.sleep(5)
            self.serial = None
            continue
        # print ("put: %s" % line)
        print("type of line: ", line.__class__.__name__)
        # bounded queue: when full, the oldest entry is popped and printed,
        # and the freshly read line is dropped (not enqueued)
        if queue.full():
            print("throw away: ", queue.get())
        else:
            queue.put(line)
def run(self):
    """Enqueue this thread object on the shared global queue under the
    `self.serviced` condition lock and notify one waiting consumer.

    If the queue is already full, report "full" and drop the request.
    """
    with self.serviced:
        if queue.full():
            print("full")
            return
        # FIX: `threading.current_thread` was printed without calling it,
        # which printed the function object instead of the current thread.
        print(threading.current_thread(), "joined")
        queue.put(self)
        self.serviced.notify()
def publishQueueItems(queue):
    """Fill *queue* to capacity with 1s, reporting the size after each put.

    Each put is balanced by an immediate task_done() so a later
    queue.join() will not block on the items this producer added.
    """
    while not queue.full():
        queue.put(1)
        print("{} appended an item to the queue, current size is {}".format(
            threading.current_thread().name, queue.qsize()))
        queue.task_done()
def productor():
    """Producer loop: forever push a random 1-10 integer onto the shared
    global queue whenever it has room, pausing 1-3 seconds between rounds."""
    while True:
        if not queue.full():
            item = random.randint(1, 10)
            queue.put(item)
            logging.info(f'Este es el numero elemento en la cola {item}')
        # random back-off between production attempts
        time.sleep(random.randint(1, 3))
def PutDiscard(queue, item):
    """Put *item* on *queue*, evicting the oldest entry first when the
    queue is already at capacity.

    Known benign race: a consumer may drain the queue between the full()
    check and the get(), causing a needless discard — acceptable under CPU
    starvation or for best-effort data.
    """
    if queue.full():
        _ = queue.get()  # drop the oldest entry to make room
    queue.put(item)
def run(self):
    """Scan worksheet rows (global `ws`) starting after two header rows,
    build one record dict per row from fixed spreadsheet columns, and push
    the records onto the global `queue` in batches of up to 1000.

    Fixes over the previous version:
    - the final partial batch (< 1000 records) is now flushed to the queue
      instead of being silently dropped;
    - the CPU-burning `while queue.full(): pass` spin is replaced by the
      queue's own blocking put().
    """
    # column letter -> record field for straight 1:1 copies
    column_map = {
        "H": "name", "I": "street", "K": "city", "L": "state",
        "M": "zipCode", "P": "contact", "U": "title",
        "AK": "workCity", "AM": "workState", "AN": "workZipCode",
    }
    data = []
    i = 2  # worksheet row counter (rows 1-2 are headers)
    for row in ws.iter_rows(row_offset=2):
        d = {
            "name": "", "contact": "", "state": "", "city": "",
            "zipCode": "", "street": "", "title": "", "salary": "",
            "visaType": "H-1B", "workState": "", "workCity": "",
            "workZipCode": ""
        }
        i += 1
        for cell in row:
            if not cell.value:
                continue
            col = get_column_letter(cell.column)
            if col in column_map:
                d[column_map[col]] = cell.value
            elif col == "AA":
                d["salary"] = "$" + cell.value   # salary amount
            elif col == "AB":
                d["salary"] += "/" + cell.value  # salary period suffix
            # NOTE(review): the original `elif col == "E" and value != "H-1B":
            # continue` only skipped the current *cell*, not the row — it was
            # a no-op and is preserved as such.
        data.append(d)
        if len(data) >= 1000:
            queue.put(data)  # blocks while the queue is full
            data = []
    if data:
        # flush the final partial batch (previously lost)
        queue.put(data)
def _proxy_custom(self, queue):
    """Producer: feed proxy addresses into *queue*.

    Iterates the entries returned by self._get_proxy() and puts the first
    field of each onto the queue. The previous implementation busy-spun
    (`while True: if queue.full(): continue`), burning a full CPU core
    whenever the queue was full; Queue.put() already blocks until a slot
    frees up, with identical externally visible behavior.
    """
    proxies = self._get_proxy()
    for proxy in proxies:
        queue.put(proxy[0])  # blocks while the queue is full
def producer():
    """Producer loop: forever push a random 1-10 integer onto the shared
    global queue whenever it has room, pausing 1-3 seconds between rounds."""
    while True:
        if not queue.full():
            item = random.randint(1, 10)
            queue.put(item)
            logging.info(f'Nuevo elemento dentro de la cola {item}')
        # random back-off between production attempts
        time.sleep(random.randint(1, 3))
def generate_coord(queue, event):
    """Worker loop: every 0.5 s produce a hard-coded seed coordinate,
    push it onto *queue* and hand it to visualize_coord()."""
    # n_tracts, tracker, seed = inp
    print("Start generate_coord\n")
    """Pretend we're getting a number from the network."""
    # print(f"This is event {event.is_set()}")
    # NOTE(review): with `or`, the loop keeps running after `event` is set
    # as long as the queue is not full — if the intent was "stop as soon as
    # the event fires", this should be `and`. Confirm against the consumer.
    while not event.is_set() or not queue.full():
        time.sleep(0.5)
        print("Enter generate_coord\n")
        # fixed 1x3 coordinate — presumably a tractography seed point;
        # TODO confirm units/reference frame with the caller
        seed = np.array([[-8.49, -8.39, 2.5]])
        queue.put(seed)
        visualize_coord(seed, "generate_coord")
def run(self):
    """Sender thread: once per second, put a random 1-99 integer onto the
    shared queue (under `lock`) until `exit` becomes truthy."""
    print("SendThread.run: running")
    # NOTE(review): `exit` is presumably a module-global stop flag; if no
    # such global exists this resolves to the `exit` builtin, which is
    # always truthy, and the loop body would never run — confirm.
    while not exit:
        # NOTE(review): acquire/release without try/finally — `with lock:`
        # would be safer if queue.put could ever raise
        lock.acquire()
        if not queue.full():
            data = random.randrange(1, 100, 1)
            queue.put(data)
            print("SendThread.run: put %s" % (data))
        lock.release()
        time.sleep(1)
    print("SendThread.run: exit")
def run(self):
    """Feeder thread: tail self.filename forever, pushing each line (as
    ASCII bytes — the downstream socket rejects unicode str) onto the
    shared global queue. When the queue is full, the oldest queued entry
    is popped and printed, and the new line is dropped.
    """
    global queue
    src = open(self.filename, 'r')
    while True:
        time.sleep(0.02)
        # the socket does not accept str which is unicode
        chunk = bytes(src.readline(), 'ascii')
        if not chunk:
            # at EOF readline() returns '' — keep polling for new data
            continue
        # print ("put: %s" % line)
        if not queue.full():
            queue.put(chunk)
        else:
            print("throw away: ", queue.get())
def sendPendingOutput(self):
    """This method will take any outgoing data sent to us from a child component and stick it on a queue to the outside world."""
    # Python 2 dicts need iteritems(); on Python 3, items() is already a view
    if python_lang_type == 2:
        items = self.childOutboxMapping.iteritems()
    else:
        items = self.childOutboxMapping.items()
    for childSource, parentSink in items:
        # external queue paired with this child outbox
        queue = self.outQueues[childSource]
        # drain everything waiting on the child's outbox into the external
        # queue; stop early (leaving data buffered) once the queue is full
        while self.dataReady(parentSink):
            if not queue.full():
                msg = self.recv(parentSink)
                # TODO - what happens when the wrapped component terminates itself? We keep on going. Not optimal.
                queue.put_nowait(msg)
            else:
                break
def run(self):
    """Scan worksheet rows (global `ws`) starting after two header rows,
    build one record dict per row from fixed spreadsheet columns, and push
    the records onto the global `queue` in batches of up to 1000.

    Fixes:
    - the final partial batch (< 1000 records) is now flushed to the queue
      instead of being silently dropped;
    - the CPU-burning `while queue.full(): pass` spin is replaced by the
      queue's own blocking put().
    """
    # column letter -> record field for straight 1:1 copies
    field_by_column = {
        "H": "name", "I": "street", "K": "city", "L": "state",
        "M": "zipCode", "P": "contact", "U": "title",
        "AK": "workCity", "AM": "workState", "AN": "workZipCode",
    }
    data = []
    i = 2  # worksheet row counter (rows 1-2 are headers)
    for row in ws.iter_rows(row_offset=2):
        d = {"name": "", "contact": "", "state": "", "city": "",
             "zipCode": "", "street": "", "title": "", "salary": "",
             "visaType": "H-1B", "workState": "", "workCity": "",
             "workZipCode": ""}
        i += 1
        for cell in row:
            if not cell.value:
                continue
            col = get_column_letter(cell.column)
            if col in field_by_column:
                d[field_by_column[col]] = cell.value
            elif col == "AA":
                d["salary"] = "$" + cell.value   # salary amount
            elif col == "AB":
                d["salary"] += "/" + cell.value  # salary period suffix
            # NOTE(review): the original `elif col == "E" and value != "H-1B":
            # continue` only skipped the current *cell*, not the row — it was
            # a no-op and is preserved as such.
        data.append(d)
        if len(data) >= 1000:
            queue.put(data)  # blocks while the queue is full
            data = []
    if data:
        # flush the final partial batch (previously lost)
        queue.put(data)
def recieve_message():
    """Flask handler: accept a JSON body carrying a PGP-encrypted message,
    decrypt it, validate the decrypted JSON against `property_schema`, and
    enqueue it for block creation.

    Returns jsonify({'message': 'valid'}) on success, otherwise one of the
    invalid_input()/invalid_error() responses. (The `recieve` typo is kept:
    the function name is the public handler interface.)
    """
    body = request.get_json()
    try:
        message: str = body.get("message", {})
        decrypted = json.loads(
            gpg.decrypt(message, passphrase=passphrase).data)
        # FIX: isinstance() instead of `type(x) != dict`
        if not isinstance(decrypted, dict):
            return invalid_input('invalid message, message has to json')
        try:
            validate(instance=decrypted, schema=property_schema)
        except Exception:
            # schema violation (jsonschema ValidationError) — reject.
            # FIX: was a bare `except:` which also caught SystemExit etc.
            return invalid_input('invalid message schema')
        if decrypted:
            if queue.full():
                # queue at capacity: kick off block creation to drain it
                executor.submit(create_block)
                # queue.join()
            queue.put({"decrypted": decrypted, "message": message})
            return jsonify({'message': 'valid'})
        return invalid_input("invalid pgp key")
    except Exception:
        # decryption/parse failure falls through to the generic error;
        # FIX: was a bare `except:` (also swallowed KeyboardInterrupt)
        pass
    return invalid_error('unknown error')
def _show_progress(last_known_progress, end_event, queue):
    """Render a spinner + milestone-percentage progress line on stdout
    until *end_event* is set.

    Updates arrive over *queue* as (type, value) tuples: UPDATE_PROGRESS
    carries a new percentage; anything else carries an info message that
    is printed on its own line above the progress bar.

    FIX: the original gated consumption on `not queue.full()`, which is
    true for any non-full queue — pending updates were ignored unless the
    queue was at capacity. `queue.empty()` is the correct test for
    "no update waiting".
    """
    progress_values = [i for i in range(0, 110, 10)]  # [0, 10, ..., 100]
    chars = '|/-\\'
    msg = None
    while True:
        if queue.empty():
            # nothing in the queue yet, keep showing the last known progress
            progress = last_known_progress
        else:
            update = queue.get()
            # figure out what kind of update is being requested
            if update[0] == CmdProgressBarUpdateTypes.UPDATE_PROGRESS:
                progress = update[1]
                last_known_progress = progress
            else:
                msg = update[1]
            # signal that the value has been consumed
            queue.task_done()
        # number of milestone percentages reached (bisect on the sorted list)
        num_progress_vals = bs(progress_values, progress)
        progress_info = '..'.join(
            [''.join((str(i), '%')) for i in progress_values[:num_progress_vals]])
        progress_info = ''.join((progress_info, '.' * (53 - len(progress_info))))
        # for info msg updates, display the message above the bar
        if msg is not None:
            sys.stdout.write(''.join(('\r', ' ' * 70, '\r')))
            sys.stdout.write(''.join((msg, '\n')))
            msg = None
        # show progress: one spinner revolution per outer iteration
        for c in chars:
            sys.stdout.write('\r[ {0} ..{1}.. ]'.format(c, progress_info))
            sys.stdout.flush()
            time.sleep(0.4)
        if end_event.is_set():
            break
def run(self):
    """Main select() loop of the file server.

    Accepts client connections (capacity-limited via the shared global
    `queue`), hands login and command handling off to worker threads, and
    maintains a marker file whose existence signals that the connection
    queue is at capacity.
    """
    link_status_file = os.path.join(self.base_dir, 'db', 'link_status.txt')
    self.server_bind()
    conn_is_login = []   # sockets that completed login
    conn_not_login = []  # accepted sockets awaiting login
    rlist = [
        self.server,
    ]
    wlist = []
    wdata = {}  # per-socket buffered command data
    while True:
        rl, wl, xl = select.select(rlist, wlist, [], 0.5)
        for sock in rl:
            if sock == self.server:
                conn, caddr = self.server.accept()
                # If a client connects and the queue is not full, enqueue its
                # address; if the queue becomes full after the put, create the
                # marker file — its existence indicates the queue is at capacity.
                if conn:
                    if not queue.full():
                        queue.put(caddr)
                        if queue.full():
                            set_file.write_file(link_status_file, 'w')
                        if queue.empty():
                            os.remove(link_status_file)
                        rlist.append(conn)
                        conn_not_login.append(conn)
            else:
                if sock in conn_not_login:
                    # first traffic from a not-yet-logged-in socket: run the
                    # login handshake in a worker thread (joined, so this
                    # blocks the select loop until login completes)
                    t = Thread(target=self.login, args=(sock, wdata))
                    t.start()
                    t.join()
                    conn_is_login.append(sock)
                    conn_not_login.remove(sock)
                    break
                while True:
                    try:
                        cmd_dict = set_struct.recv_message(sock)
                        cmd = cmd_dict['cmd']
                        if not cmd:
                            # empty command == client gone: free a queue slot
                            queue.get()
                            os.remove(link_status_file)
                            print(self.put_response_code['203'])
                            sock.close()
                            rlist.remove(sock)
                            wdata.pop(sock)
                            break
                        wlist.append(sock)
                        wdata[sock].append(cmd)
                        break
                    except BlockingIOError:
                        # no complete message available on the socket yet
                        break
                    except Exception:
                        queue.get()
                        # Concurrency is throttled via the marker file: when a
                        # client disconnects, remove it so new clients may connect.
                        if os.path.exists(link_status_file):
                            os.remove(link_status_file)
                        print(self.put_response_code['203'])
                        sock.close()
                        # During login, rlist/wdata entries may not exist yet, so
                        # a reconnecting client can trigger an error here.
                        # NOTE(review): list.remove raises ValueError, not
                        # KeyError — only wdata.pop is actually guarded; confirm.
                        try:
                            rlist.remove(sock)
                            wdata.pop(sock)
                        except KeyError:
                            break
                        break
        for sock in wl:
            # wdata[sock][1] holds the raw command line, e.g. "get <name>"
            request_method = wdata[sock][1].split()[0]
            if hasattr(self, request_method):
                # Commands of different lengths are dispatched separately
                # because the handlers are called with different arguments.
                if len(wdata[sock][1].split()) == 2:
                    request_content = wdata[sock][1].split()[1]
                    func = getattr(self, request_method)
                    # after the reflection check, run the handler in its own
                    # thread rather than calling func() inline
                    t = Thread(target=func, args=(request_content, sock, wdata, rlist))
                    t.start()
                    rlist.remove(sock)
                    wlist.remove(sock)
                    # without deleting this entry, every download served the same file
                    del wdata[sock][1]
                else:
                    func = getattr(self, request_method)
                    t = Thread(target=func, args=(sock, wdata, rlist))
                    t.start()
                    rlist.remove(sock)
                    wlist.remove(sock)
                    del wdata[sock][1]
def writer(imgnames, masknames, config_dict, queue):
    '''
    This funtion creates Frame objects which read images from the disk,
    detects features and feature descriptors using settings in the config
    file. It then puts the object into a multi-process queue.
    This function is designed to run in a separate heap and thus takes
    everything it needs in form of parameteres and doesn't rely on global
    variables.
    '''
    #TILE_KP = config_dict['use_tiling_non_max_supression']
    USE_MASKS = config_dict['use_masks']
    USE_CLAHE = config_dict['use_clahe']
    FEATURE_DETECTOR_TYPE = config_dict['feature_detector_type']
    FEATURE_DESCRIPTOR_TYPE = config_dict['feature_descriptor_type']
    vslog.info("FEATURE_DETECTOR_TYPE: {} FEATURE_DESCRIPTOR_TYPE: {}".format(
        FEATURE_DETECTOR_TYPE, FEATURE_DESCRIPTOR_TYPE))
    USE_CACHING = False
    #RADIAL_NON_MAX = config_dict['radial_non_max']
    #detector = cv2.ORB_create(**config_dict['ORB_settings'])
    # camera intrinsics/distortion shared by all Frames via class attributes
    Frame.K = np.array(config_dict['K'])
    Frame.D = np.array(config_dict['D'])
    Frame.config_dict = config_dict
    # select the feature detector implementation from config
    if FEATURE_DETECTOR_TYPE == 'orb':
        Frame.detector = cv2.ORB_create(**config_dict['ORB_settings'])
        feature_detector_config = config_dict['ORB_settings']
    elif FEATURE_DETECTOR_TYPE == 'zernike':
        Frame.detector = MultiHarrisZernike(**config_dict['ZERNIKE_settings'])
        feature_detector_config = config_dict['ZERNIKE_settings']
    elif FEATURE_DETECTOR_TYPE == 'sift':
        Frame.detector = cv2.xfeatures2d.SIFT_create(
            **config_dict['SIFT_settings'])
        feature_detector_config = config_dict['SIFT_settings']
    elif FEATURE_DETECTOR_TYPE == 'surf':
        Frame.detector = cv2.xfeatures2d.SURF_create(
            **config_dict['SURF_settings'])
        feature_detector_config = config_dict['SURF_settings']
    else:
        assert False, "Specified feture detector not available"
    # descriptor: reuse the detector object when the types match
    if FEATURE_DESCRIPTOR_TYPE == FEATURE_DETECTOR_TYPE:
        Frame.descriptor = Frame.detector
    elif FEATURE_DESCRIPTOR_TYPE == 'orb':
        Frame.descriptor = cv2.ORB_create(**config_dict['ORB_settings'])
        feature_descriptor_config = config_dict['ORB_settings']
    elif FEATURE_DESCRIPTOR_TYPE == 'zernike':
        Frame.descriptor = MultiHarrisZernike(
            **config_dict['ZERNIKE_settings'])
        feature_descriptor_config = config_dict['ZERNIKE_settings']
    elif FEATURE_DESCRIPTOR_TYPE == 'sift':
        Frame.descriptor = cv2.xfeatures2d.SIFT_create(
            **config_dict['SIFT_settings'])
        feature_descriptor_config = config_dict['SIFT_settings']
    elif FEATURE_DESCRIPTOR_TYPE == 'surf':
        Frame.descriptor = cv2.xfeatures2d.SURF_create(
            **config_dict['SURF_settings'])
        feature_descriptor_config = config_dict['SURF_settings']
    else:
        vslog.fatal("Asserting")
        assert False, "Specified feture descriptor not available"
    # optional ground-truth pose file (platform-specific config key)
    if sys.platform == 'darwin':
        gt_pose_file = config_dict.get('osx_ground_truth_poses')
    else:
        gt_pose_file = config_dict.get('linux_ground_truth_poses')
    if gt_pose_file:
        Frame.groundtruth_pose_dict = read_metashape_poses(gt_pose_file)
    #settings_hash_string = str(hash(frozenset(a.items()))).replace('-','z')
    # cache-folder suffix derived from the detector settings so different
    # settings never share cached frames
    settings_string = ''.join([
        '_%s' % feature_detector_config[k]
        for k in sorted(feature_detector_config.keys())
    ])
    local_temp_dir = os.path.dirname(os.path.abspath(__file__)) + '/temp_data'
    img_folder_name = os.path.dirname(imgnames[0]).replace('/', '_')[1:]
    temp_obj_folder = local_temp_dir + '/' + img_folder_name + settings_string
    os.makedirs(temp_obj_folder, exist_ok=True)
    vslog.info("K: \t" + str(Frame.K).replace('\n', '\n\t\t'))
    vslog.info("D: \t" + str(Frame.D))
    if USE_CLAHE:
        Frame.clahe_obj = cv2.createCLAHE(**config_dict['CLAHE_settings'])
    Frame.is_config_set = True
    '''
    if TILE_KP:
        tiling=[config_dict['tiling_non_max_tile_y'], config_dict['tiling_non_max_tile_x']]
    else:
        tiling = None
    if RADIAL_NON_MAX:
        RADIAL_NON_MAX_RADIUS = config_dict['radial_non_max_radius']
    else:
        RADIAL_NON_MAX_RADIUS = None
    '''
    vslog.info('Starting writer process...')
    try:
        for i in range(len(imgnames)):
            frame_obj_filename = temp_obj_folder + '/' + imgnames[i].split(
                '/')[-1] + '.pkl'
            if os.path.isfile(frame_obj_filename) and USE_CACHING:
                # cached pickled Frame available: load instead of re-detecting
                vslog.debug(Fore.GREEN + "File exisits, reusing ..." + Style.RESET_ALL)
                fr = Frame.load_frame(frame_obj_filename)
                Frame.last_id += 1
                queue.put(fr)
            else:
                if queue.empty():
                    vslog.debug(Fore.RED + "Queue empty, reading is slow..." + Style.RESET_ALL)
                # back off while the consumer catches up
                while queue.full():
                    time.sleep(0.01)
                    #print(Fore.GREEN+"Writer queue full, waiting..."+Style.RESET_ALL)
                if USE_MASKS:
                    fr = Frame(imgnames[i], mask_name=masknames[i])
                else:
                    fr = Frame(imgnames[i])
                queue.put(fr)
                if USE_CACHING:
                    Frame.save_frame(fr, frame_obj_filename)
    except KeyboardInterrupt:
        vslog.critical("Keyboard interrupt from me")
        pass
    except:
        traceback.print_exc(file=sys.stdout)
    vslog.info("Finished pre-processing all images")
def run(self, queue):
    """Accept one client connection, split the incoming stream into a
    4096-byte title header and a data payload (both buffered in temporary
    files), hard-link the data file under the decoded title, and put the
    resulting path onto *queue*.
    """
    # TODO : Refomat the server3.py ! Done
    # TODO : Create a thread Class ! Done 2:25 PM
    print("Server is listening for incoming Data ")
    # while True:
    if not queue.full():
        conn, address = self._server.accept()
        titleData = 0  # bytes of the 4096-byte title header received so far
        print('client connected ... ' + str(address[0]) + ":" + str(address[1]))
        # We create a temporary files
        # The title we don't need it so we delete it !
        createFolder(self.clientId)
        titleFile = tempfile.NamedTemporaryFile('w+b', dir=self.clientId, delete=True)
        dataFile = tempfile.NamedTemporaryFile('w+b', dir=self.clientId, delete=True)
        print(str(self.threadId))
        while self.pending:
            data = conn.recv(self.bufferSize)
            if not data:
                self.pending = False
                break
            # header already complete: everything is payload
            if titleData >= 4096:
                dataFile.write(data)
                print('writing file .... temp_data ... ', len(data))
            if titleData < 4096:
                # chunk straddles the header/payload boundary: split it
                if titleData + len(data) > 4096:
                    print('EXCEPTION')
                    titleFile.write(data[:4096 - titleData])
                    print('writing file .... temp_title ... ', 4096 - titleData)
                    dataFile.write(data[4096 - titleData:])
                    print('writing file .... temp_data ... ', len(data[4096 - titleData:]))
                    titleData = titleData + len(data)
                else:
                    titleFile.write(data)
                    print('writing file .... temp_title ... ', len(data))
                    titleData = titleData + len(data)
        print('split done')
        try:
            titleFile.seek(0)
            # title is NUL-padded to 4096 bytes; strip the padding
            title = titleFile.read().decode("utf-8").replace('\x00', "")
            print(title)
            original_filename = dataFile.name
            print(original_filename)
            # Windows-style path join; NOTE(review): consider os.path.join
            path = os.getcwd() + "\\" + self.clientId + "\\" + title
            # path = "%s'\\'%s'\\'%s" % os.getcwd() % self.clientId % title
            os.link(original_filename, path)
        except Exception as e:
            # NOTE(review): if the failure happens before `path` is assigned,
            # the queue.put(path) below raises NameError — confirm intent
            print(e)
            pass
        titleFile.close()
        dataFile.close()
        queue.put(path)
        logging.debug('Putting ' + str(path) + ' : ' + str(queue.qsize()) + ' items in queue')
        print('finished writing file')
        conn.close()
        print('client disconnected')
    return queue
# Sliding window over the most recent `maxSize` exon lines of a tab-separated
# annotation file (GTF-like): comment lines are copied straight through, exon
# lines are buffered (oldest evicted when the window is full), and on each
# transcript/gene line the buffered exons are flushed before the
# transcript/gene line itself is written.
queue = queue.Queue(maxSize)  # NOTE(review): shadows the `queue` module from here on
tmp = read_file.readline()
array = tmp.split("\t")
count = 0
while tmp != "":
    if tmp.startswith("#"):
        # header/comment line: copy straight through
        write_file.writelines(tmp)
        tmp = read_file.readline()
        array = tmp.split("\t")
    elif array[2] == "exon":
        # buffer the exon, evicting the oldest when the window is full
        if queue.full():
            queue.get()
        queue.put(tmp)
        tmp = read_file.readline()
        array = tmp.split("\t")
    elif 'transcript' == array[2] or 'gene' == array[2]:
        # flush the buffered exons ahead of the transcript/gene record
        while not queue.empty():
            write_file.writelines(queue.get())
        write_file.writelines(tmp)
        tmp = read_file.readline()
        array = tmp.split("\t")
    else:
        # FIX: the original `else:` branch was empty (truncated), which is a
        # syntax error and — once reached — would loop forever on the same
        # line. Reconstructed to skip any other feature type and advance;
        # adjust if such lines should instead be copied through.
        tmp = read_file.readline()
        array = tmp.split("\t")
def __start(self, queue:queue.Queue, sleepTime:int, randomUpperRange:float, randomLowerRange:float):
    """Background sampling loop: every *sleepTime* seconds emit one
    Measurement carrying the current unix timestamp and two values drawn
    uniformly from [randomLowerRange, randomUpperRange].

    When *queue* is full its oldest entry is discarded first, so the queue
    behaves as a bounded sliding window.

    NOTE(review): the parameter `queue` is polled/popped, but new
    Measurements are put on `self.measurementsQueue` — presumably the
    caller passes the same object; confirm, otherwise the full()/get()
    guard drains the wrong queue.
    """
    while True:
        if queue.full():
            queue.get()
        self.measurementsQueue.put(Measurement(int(datetime.datetime.now().timestamp()), random.uniform(randomLowerRange, randomUpperRange), random.uniform(randomLowerRange, randomUpperRange)))
        time.sleep(sleepTime)
def write_data():
    """Push the integers 0..199 onto the shared global queue, then report
    whether the queue ended up full."""
    global queue
    for value in range(200):
        queue.put(value)
    print(queue.full())
def myPublisher(queue):
    """Keep appending 1s to *queue* until it reaches capacity, logging the
    publishing thread and the queue size once per second."""
    while True:
        if queue.full():
            break
        queue.put(1)
        print("{} Appended 1 to queue:{}".format(threading.current_thread(), queue.qsize()))
        time.sleep(1)
# Replace the old warehouse file with the freshly written intermediate one.
os.remove('warehouse.csv')
os.rename('inter_warehouse.csv', 'warehouse.csv')


# Pretty printing of data
def prettyprint(data):
    """Print *data* (a sequence of rows of strings) as an aligned table.

    NOTE(review): raises ValueError on empty *data* (max() of an empty
    sequence) — confirm callers never pass an empty table.
    """
    # the widest word anywhere in the table sets one shared column width
    col_width = max(len(word) for row in data for word in row) + 2  # padding
    for row in data:
        print("".join(word.ljust(col_width) for word in row))


# start of REPL; Ctrl+Z to exit the REPL
# NOTE(review): this chunk is truncated — the `try:` below has no visible
# except/finally clause; the remainder of the loop body lies outside this view.
while True:
    try:
        if queue.full():
            # Logging in batches of 2
            with open('pmsoftware.log', 'a', newline='\n') as logfile:
                logfile.write(
                    str(datetime.datetime.now().strftime("%m/%d/%Y %I:%M:%S")) + " \t" + queue.get() + "\n")
                logfile.write(
                    str(datetime.datetime.now().strftime("%m/%d/%Y %I:%M:%S")) + " \t" + queue.get() + "\n")
        run = input("Enter the command:")
        # print("Here: ", type(run))
        queue.put(run)
        replArgs = run.split(" ")
        # noOfArgs = len(replArgs)
        # print(replArgs)