import socket, videosocket import io from videofeed import VideoFeed import sys import time import threading import multiprocessing as mp import cv2 import queue #import numpy as np myqueue = queue.Queue(1) class Client: def __init__(self, ip_addr="169.254.162.107"): self.client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.client_socket.connect((ip_addr, 6000)) self.vsock = videosocket.videosocket(self.client_socket) self.videofeed = VideoFeed(1, "client", 1) self.data = io.StringIO() def mysend(self): reFrameCnt = 0 fps = 0 tt = 0 while True: reFrameCnt = reFrameCnt + 1 if reFrameCnt % 10 == 0: t1 = time.time()
# # a=queue.Queue() # a.put("hello") # a.task_done() # print(a.get()) # a.put("python") # a.task_done() # a.put("php") # a.task_done() # a.put("java") # a.task_done() # print(a.get()) # print(a.get()) # print(a.get()) urlqueue = queue.Queue() headers = ( "user-agent", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36" ) opener = urllib.request.build_opener() opener.addheaders = [headers] urllib.request.install_opener(opener) listurl = [] def use_proxy(proxy_addr, url): try: proxy = urllib.request.ProxyHandler({'http': proxy_addr}) opener = urllib.request.build_opener(proxy, urllib.request.HTTPHandler) urllib.request.install_opener(opener)
... out_q.put(copy.deepcopy(data)) # A thread that consumes data def consumer(in_q): while True: # Get some data data = in_q.get() # Process the data ... import queue q = queue.Queue() try: data = q.get(block=False) except queue.Empty: ... try: q.put(item, block=False) except queue.Full: ... try: data = q.get(timeout=5.0) except queue.Empty: ... def producer(q):
"same as queuetest2.py, but uses threading, not _threads" numconsumers = 2 # how many consumers to start numproducers = 4 # how many producers to start nummessages = 4 # messages per producer to put import threading, queue, time, sys safeprint = threading.Lock() # else prints may overlap dataQueue = queue.Queue() # shared global, infinite size def producer(idnum, dataqueue): for msgnum in range(nummessages): time.sleep(idnum) dataqueue.put('[producer id=%d, count=%d]' % (idnum, msgnum)) def consumer(idnum, dataqueue): while True: time.sleep(0.1) try: data = dataqueue.get(block=False) except queue.Empty: pass else: with safeprint: print('consumer', idnum, 'got =>', data) if __name__ == '__main__': for i in range(numconsumers):
def comformation(self):
    """Run one full round of committee formation for this PoS node.

    High-level sequence (all coordination happens through the shared
    ``glovar`` module state):
      1. signal running committee threads to stop and wait until every
         entry's running flag (``each[6]``) clears;
      2. re-broadcast the latest block, if any;
      3. wait until the next ``CLEAR_TIME`` boundary;
      4. broadcast a fresh random number, collect everyone's numbers,
         hash them into a candidate seed, and agree on the smallest
         seed seen (``glovar.HashSeed``);
      5. stake-sample ``nodenum`` positions from the agreed seed and
         record every committee this node's stake range covers;
      6. start a BlockProcessing/VerifyProcessing thread per committee;
      7. clear the per-round global lists.

    NOTE(review): relies on module-level helpers and constants defined
    elsewhere in the project (``broadMessage``, ``BlockProcessing``,
    ``VerifyProcessing``, ``RANBROAD_TIME``, ``CLEAR_TIME``) — confirm
    their availability against the full file.
    """
    # glovar.ComList.clear()
    # Inform other PoS node process to stop
    glovar.ComChange = 1
    # Poll until every committee entry reports it has stopped; each[6]
    # is the per-committee "process" flag (set to 1 when created below).
    notend = True
    while notend:
        notend = False
        for each in glovar.ComList:
            if each[6]:
                notend = True
                break
        time.sleep(0.05)
    glovar.ComList.clear()
    # glovar.broadLock.acquire()
    # broadnum = glovar.broadqueue.qsize()
    # glovar.broadqueue.queue.clear()
    # broadnumafter = glovar.broadqueue.qsize()
    # glovar.broadLock.release()
    #
    # logcontent = "broadqueue number:" + str(broadnum) + " braodqueue number after:" + str(broadnumafter)
    # self.logger.info(logcontent)
    #
    # # Clear messages generated by last committees
    # glovar.msgLock.acquire()
    # msgnum = glovar.msgqueue.qsize()
    # glovar.msgqueue.queue.clear()
    # msgnumafter = glovar.msgqueue.qsize()
    # glovar.msgLock.release()
    # logcontent = "message queue number:" + str(msgnum) + " message queue after number:" + str(msgnumafter)
    # self.logger.info(logcontent)
    # Send the last block
    if len(glovar.BLOCKCHAIN):
        newblock = glovar.BLOCKCHAIN[len(glovar.BLOCKCHAIN)-1]
        # Hash the serialized message to build its unique message id.
        beforesend = {'type':'syncblock','No':1,'content':newblock}
        temp = str(beforesend)
        hashvalue = hashlib.sha256(temp.encode('utf-8')).hexdigest()
        senddata = {'messageid':hashvalue,'type':'syncblock','No':1,'content':newblock}
        glovar.messageLock.acquire()
        # NOTE(review): records newblock[1] here (other branches record the
        # message hash) — presumably the block's own id; confirm intent.
        glovar.MessageList.append(newblock[1])
        glovar.messageLock.release()
        broadMessage(senddata)
        logcontent = "Send a syncblock:" + str(newblock[8])
        self.logger.info(logcontent)
        time.sleep(RANBROAD_TIME)
    # Wait until the next CLEAR_TIME boundary so all nodes start the
    # random-number exchange in the same window.
    cur_time = int(time.time())
    prev_time = cur_time - (cur_time % CLEAR_TIME)
    while True:
        cur_time = int(time.time())
        if cur_time > prev_time + CLEAR_TIME:
            break
        else:
            time.sleep(0.5)
    glovar.ComChange = 0
    self.logger.info('--------------------------------------------------')
    # Start committee formation
    random.seed()
    genrandom = random.uniform(0,1)
    glovar.ranLock.acquire()
    glovar.RanList.append(str(genrandom))
    glovar.ranLock.release()
    # self.logger.info('put random number to RanList.')
    # Broadcast our random contribution (comrandom round 1).
    content = {'genrandom':genrandom}
    beforesend = {'type':'comrandom','No':1,'content':content}
    temp = str(beforesend)
    hashvalue = hashlib.sha256(temp.encode('utf-8')).hexdigest()
    senddata = {'messageid':hashvalue,'type':'comrandom','No':1,'content':content}
    glovar.messageLock.acquire()
    glovar.MessageList.append(hashvalue)
    glovar.messageLock.release()
    # glovar.broadLock.acquire()
    # broadnum = glovar.broadqueue.qsize()
    # glovar.broadqueue.queue.clear()
    # broadnumafter = glovar.broadqueue.qsize()
    # glovar.broadLock.release()
    broadMessage(senddata)
    logcontent = 'Send a random number: ' + str(genrandom)
    self.logger.info(logcontent)
    # Wait one or two RANBROAD_TIME duration for generating hashvalue
    time.sleep(RANBROAD_TIME)
    logcontent = 'Received random numbers: ' + str(len(glovar.RanList))
    self.logger.info(logcontent)
    # self.logger.info(glovar.RanList)
    # self.logger.info('Generate hash value of random numbers.')
    # Fold every collected random number into one candidate seed.
    glovar.ranLock.acquire()
    randomstring = "".join(glovar.RanList)
    glovar.ranLock.release()
    hashvalue = int(hashlib.sha256(randomstring.encode('utf-8')).hexdigest(), 16)
    glovar.hashLock.acquire()
    glovar.HashList.append(hashvalue)
    # Keep the smallest candidate as the agreed seed (0 means unset).
    if ( glovar.HashSeed == 0 or hashvalue < glovar.HashSeed ):
        logcontent = "Replace HashSeed:\n" +str(glovar.HashSeed) + \
            " with hashvalue:\n" + str(hashvalue)
        self.logger.info(logcontent)
        glovar.HashSeed = hashvalue
    glovar.hashLock.release()
    # Broadcast our candidate seed plus the list it was derived from
    # (comrandom round 2).
    content = {'ranhash':hashvalue,'ranlist':glovar.RanList}
    beforesend = {'type':'comrandom','No':2,'content':content}
    temp = str(beforesend)
    hashvalue = hashlib.sha256(temp.encode('utf-8')).hexdigest()
    senddata = {'messageid':hashvalue,'type':'comrandom','No':2,'content':content}
    glovar.messageLock.acquire()
    glovar.MessageList.append(hashvalue)
    glovar.messageLock.release()
    broadMessage(senddata)
    # Wait one or two RANBROAD_TIME duration for agree on the smallest hashvalue
    time.sleep(RANBROAD_TIME)
    logcontent = 'Received hashvalue list numbers: ' + str(len(glovar.HashList))
    self.logger.info(logcontent)
    # self.logger.info(glovar.HashList)
    logcontent = 'The final HashSeed is:\n' + str(glovar.HashSeed)
    self.logger.info(logcontent)
    # Generate random number to choose members for each committee
    nodenum = glovar.Firstcommem * glovar.Firstcomno + glovar.Secondcommem
    comnum = glovar.Firstcomno + 1
    # Seed with the agreed value so every node draws the same positions.
    random.seed(glovar.HashSeed)
    for i in range(nodenum):
        glovar.PosList.append(random.uniform(0,glovar.Stakesum))
    logcontent = 'PosList:' + str(len(glovar.PosList)) + '\n' + str(glovar.PosList)
    self.logger.info(logcontent)
    # logcontent = 'NodeId: ' + str(glovar.NodeId)
    # self.logger.info(logcontent)
    # logcontent = 'Stakemin: ' + str(glovar.Stakemin)
    # self.logger.info(logcontent)
    # logcontent = 'Stakemax: ' + str(glovar.Stakemax)
    # self.logger.info(logcontent)
    # logcontent = 'Firstcommem: ' + str(glovar.Firstcommem)
    # self.logger.info(logcontent)
    # logcontent = 'Firstcomno: ' + str(glovar.Firstcomno)
    # self.logger.info(logcontent)
    # logcontent = 'Secondcommem: ' + str(glovar.Secondcommem)
    # self.logger.info(logcontent)
    # Inform other PoS node process to stop
    # glovar.ComChange = 1
    # notend = True
    # while notend:
    #     notend = False
    #     for each in glovar.ComList:
    #         if each[6]:
    #             notend = True
    #             break
    #
    #     time.sleep(0.05)
    #
    # glovar.ComList.clear()
    # glovar.ComChange = 0
    # Record the status of committee: every sampled position that falls
    # inside this node's stake range [Stakemin, Stakemax] puts this node
    # into the corresponding committee.
    for i in range (nodenum):
        if (glovar.PosList[i] > glovar.Stakemin and glovar.PosList[i] <= glovar.Stakemax):
            # Committee index: positions are chunked Firstcommem at a time;
            # the final chunk (incomno == comnum) is the second committee.
            incomno = i // glovar.Firstcommem + 1
            if (incomno < comnum):
                commember = glovar.PosList[(incomno-1)*glovar.Firstcommem:incomno*glovar.Firstcommem]
            else:
                commember = glovar.PosList[(incomno-1)*glovar.Firstcommem:]
            # Fresh mutable per-committee state shared with the worker thread.
            newblock = []
            commitlist = []
            commitblocklist = []
            addfirstlist = []
            transactionlist = []
            secondcommitlist = []
            newsecondblock = []
            comstatus = {'genblock':0, 'blockhash':'0', 'stage':0, 'commit':0, 'commitlist':commitlist, \
                'newblock':newblock, 'verify':0, 'commitblocklist':commitblocklist, \
                'transactionlist':transactionlist, 'secondblockhash':'0', \
                'secondcommit':0, 'secondcommitlist':secondcommitlist, \
                'newsecondblock':newsecondblock, 'addfirstlist':addfirstlist}
            comnodequeue = queue.Queue()
            commemberLock = threading.Lock()
            process = 1  # running flag, index 6 of cominfo (polled above)
            cominfo = [incomno, glovar.PosList[i], commember, comstatus, comnodequeue, commemberLock, process]
            glovar.ComList.append(cominfo)
    self.logger.info('--------------------------------------------')
    if len(glovar.ComList):
        logcontent = 'Node is selected in committee:\n'
        for each in glovar.ComList:
            logcontent += str(each) + '\n'
        self.logger.info(logcontent)
    for each in glovar.ComList:
        # Run the node in Firstcommittee
        if each[0] <= glovar.Firstcomno:
            block_generation = BlockProcessing(each, self.logdirectory)
            block_generation.start()
        else:
            block_verify = VerifyProcessing(each, self.logdirectory)
            block_verify.start()
    # Clear global status
    glovar.ranLock.acquire()
    glovar.RanList.clear()
    glovar.ranLock.release()
    glovar.hashLock.acquire()
    glovar.HashSeed = 0
    glovar.HashList.clear()
    glovar.hashLock.release()
    glovar.PosList.clear()
def init_nonserializables(self):
    """(Re)create the attributes that cannot be serialized.

    NOTE(review): the name suggests this is called after unpickling or
    across process boundaries — confirm against the callers; only the
    three assignments below are visible here.
    """
    # Fresh FIFO for incoming items.
    self._input = queue.Queue()
    # NOTE(review): presumably threading.Event (project import not
    # visible from this chunk) — confirm.
    self.input_is_available = Event()
    # Project-defined output buffer; semantics not visible here.
    self._output = TextBuffer()
def __init__(self, read_from_actuator):
    """Wire this object to an actuator read callback.

    Parameters
    ----------
    read_from_actuator : callable (presumed from the name — confirm
        against callers; it is only stored here, never invoked).
    """
    # Stored handle used elsewhere to pull values from the actuator.
    self.read = read_from_actuator
    # Unbounded FIFO of past values/readings.
    self.memory = queue.Queue()
    # Last known value; starts at 0 until updated elsewhere.
    self._present_value = 0
def __init__(
        self,  # pylint: disable = too-many-arguments, too-many-locals, too-many-branches, too-many-statements
        source=0,
        y_tube=False,
        backend=0,
        colorspace=None,
        logging=False,
        time_delay=0,
        transform=None,
        downsample=None,
        buffer_size=96,
        **options):
    """Open a video source and prime the (optionally threaded) reader.

    Parameters
    ----------
    source : int | str
        Camera index, file path, or URL (YouTube URL when ``y_tube``).
    y_tube : bool
        Resolve ``source`` through pafy as a YouTube stream URL.
    backend : int
        Optional OpenCV capture backend id.
    colorspace : str | None
        Name of a cv2 colorspace conversion applied to each frame.
    logging : bool
        Enable debug logging via the module LOGGER.
    time_delay : int | float
        Seconds to sleep after opening the stream (webcam warm-up).
    transform : callable | None
        Per-frame transform applied before queueing.
    downsample : int | None
        Stored as ``self.downsample``; defaults to 1 when None.
        NOTE(review): not used elsewhere in this constructor — confirm
        its consumer in the rest of the class.
    buffer_size : int
        Max size of the internal frame queue (threaded mode).
    **options
        ``THREADED_QUEUE_MODE`` plus cv2 capture properties by name.

    Raises
    ------
    ValueError
        When YouTube mode is on and the URL cannot be processed.
    RuntimeError
        When the underlying stream fails to produce a first frame.
    """
    if downsample is None:
        downsample = 1
    self.downsample = downsample
    self.buffer_size = buffer_size
    # enable logging if specified
    self.__logging = False
    self.transform = transform
    if logging:
        self.__logging = logging
    # check if Youtube Mode is ON (True)
    if y_tube:
        try:
            # import pafy and parse youtube stream url
            import pafy  # pylint: disable = import-outside-toplevel
            # validate
            video_url = youtube_url_validator(source)
            if video_url:
                source_object = pafy.new(video_url)
                vo_source = source_object.getbestvideo("webm", ftypestrict=True)
                va_source = source_object.getbest("webm", ftypestrict=False)
                # select the best quality: prefer the audio+video stream
                # unless a strictly larger video-only stream exists
                if vo_source is None or (va_source.dimensions >= vo_source.dimensions):
                    source = va_source.url
                else:
                    source = vo_source.url
                if self.__logging:
                    LOGGER.debug("YouTube source ID: %s, Title: %s",
                                 video_url, source_object.title)
            else:
                raise RuntimeError(
                    "Invalid `{}` Youtube URL cannot be processed!".format(
                        source))
        except Exception as exc:
            if self.__logging:
                LOGGER.exception(str(exc))
            # re-raise every failure in YouTube resolution as ValueError
            raise ValueError(
                "[CamGear:ERROR] :: YouTube Mode is enabled and the input YouTube URL is "
                "incorrect!")
    # youtube mode variable initialization
    self.__youtube_mode = y_tube
    # assigns special parameter to global variable and clear
    self.__threaded_queue_mode = options.pop("THREADED_QUEUE_MODE", True)
    if not isinstance(self.__threaded_queue_mode, bool):
        # reset improper values
        self.__threaded_queue_mode = True
    self.__queue = None
    # initialize deque for video files only (string sources)
    if self.__threaded_queue_mode and isinstance(source, str):
        # import deque
        # NOTE(review): `deque` is imported but unused below — the queue
        # is actually a queue.Queue; left in place (doc-only change).
        from collections import deque  # pylint: disable = import-outside-toplevel
        # define deque and assign it to global var
        # max len self.buffer_size to check overflow
        self.__queue = queue.Queue(maxsize=self.buffer_size)
        # log it
        if self.__logging:
            LOGGER.debug(
                "Enabling Threaded Queue Mode for the current video source!"
            )
    else:
        # otherwise disable it
        self.__threaded_queue_mode = False
        # log it
        if self.__logging:
            LOGGER.warning(
                "Threaded Queue Mode is disabled for the current video source!"
            )
    # stream variable initialization
    self.stream = None
    if backend and isinstance(backend, int):
        # add backend if specified and initialize the camera stream
        if check_CV_version() == 3:
            # Different OpenCV 3.4.x statement
            self.stream = cv2.VideoCapture(source + backend)
        else:
            # Two parameters are available since OpenCV 4+ (master branch)
            self.stream = cv2.VideoCapture(source, backend)
        LOGGER.debug("Setting backend %s for this source.", backend)
    else:
        # initialize the camera stream
        self.stream = cv2.VideoCapture(source)
    # initializing colorspace variable
    self.color_space = None
    # apply attributes to source if specified
    options = {str(k).strip(): v for k, v in options.items()}
    for key, value in options.items():
        propty = capPropId(key)
        if not (propty is None):
            self.stream.set(propty, value)
    # handle colorspace value
    if not (colorspace is None):
        self.color_space = capPropId(colorspace.strip())
        if self.__logging and not (self.color_space is None):
            LOGGER.debug("Enabling %s colorspace for this video stream!",
                         colorspace.strip())
    # initialize and assign frame-rate variable
    self.framerate = 0.0
    _fps = self.stream.get(cv2.CAP_PROP_FPS)
    if _fps > 1.0:
        self.framerate = _fps
    # applying time delay to warm-up webcam only if specified
    if time_delay:
        time.sleep(time_delay)
    # frame variable initialization: grab the first frame to validate
    (grabbed, self.frame) = self.stream.read()
    # check if valid stream
    if grabbed:
        # render colorspace if defined
        if not (self.color_space is None):
            self.frame = cv2.cvtColor(self.frame, self.color_space)
        if self.__threaded_queue_mode:
            if self.transform:
                try:
                    self.frame = self.transform(self.frame)
                except AttributeError:
                    LOGGER.error(
                        "[slave thread] Failed to transform the input video frame. "
                        "Setting stop state of reader.",
                        exc_info=True)
                    self.__terminate = True
                    return
            # initialize and append to queue
            self.__queue.put(self.frame)
    else:
        raise RuntimeError(
            "[CamGear:ERROR] :: Source is invalid, CamGear failed to intitialize stream on "
            "this source!")
    # thread initialization
    self.__thread = None
    # initialize termination flag
    self.__terminate = False
def write(self, data): self._getlock() self.f.write(data) if data == "\n": self._droplock() def flush(self): self._getlock() self.f.flush() self._droplock() sys.stdout = ThreadSafeFile(sys.stdout) tests_queue = queue.Queue() results_queue = queue.Queue() def runner(): global results_queue global tests_queue broken = False try: # Start up a shared watchman instance for the tests. inst = WatchmanInstance.Instance({"watcher": args.watcher}, debug_watchman=args.debug_watchman) inst.start() # Allow tests to locate this default instance WatchmanInstance.setSharedInstance(inst)
# python language
# FIFO queue demonstrations: the stdlib queue.Queue vs. a hand-rolled
# list-backed queue with enqueue/dequeue helpers.
import queue

data_queue = queue.Queue()
data_queue.put("coding")
data_queue.put(1)
data_queue.qsize()  # 2
data_queue.get()    # "coding"
data_queue.qsize()  # 1 (fixed comment: one item still queued here, not 0)
data_queue.get()    # 1 (fixed: was `data_ququq.get()`, a NameError typo)

# Shared backing store for the list-based queue below.
queue_list = list()


def enqueue(data):
    """Append *data* to the tail of the list-backed queue."""
    queue_list.append(data)


def dequeue():
    """Remove and return the item at the head of the list-backed queue.

    Raises IndexError when the queue is empty (same as the original
    `queue_list[0]` access).
    """
    data = queue_list[0]
    del queue_list[0]
    return data
def runpilogger(dhtSensorDict=g_dhtSensor, serialDict=g_serial):
    # save to this file
    """Log serial-port (and optionally DHT) temperature readings forever.

    Appends CSV rows (Hostname,Date,Time,Seconds,Temperature,Humidity)
    to the module-global path ``g_savePath``, writing the header first
    if the file does not exist yet. Never returns (infinite loop).

    Parameters
    ----------
    dhtSensorDict : dict
        DHT config; keys used: 'enable', 'intervalSeconds',
        'sensorType', 'pin'. Defaults to module-global ``g_dhtSensor``.
    serialDict : dict
        Serial config; keys used: 'port', 'baud'.
        Defaults to module-global ``g_serial``.

    Fix: the DHT branch previously indexed the undefined name
    ``dhtSensor`` (NameError as soon as a DHT read was attempted);
    it now consistently uses the ``dhtSensorDict`` parameter, matching
    the existing ``dhtSensorDict['enable']`` check.
    """
    # Disabled: make directory if necc.
    # if not os.path.exists(g_savePath):
    #     os.makedirs(g_savePath)
    print('runpilogger start')
    inSerialQueue = queue.Queue()    # put commands here to send out serial port
    outSerialQueue = queue.Queue()   # receive data coming in on the serial port
    errorSerialQueue = queue.Queue()
    mySerialThread = SerialThread(inSerialQueue, outSerialQueue, errorSerialQueue,
                                  serialDict['port'], serialDict['baud'])
    mySerialThread.daemon = True
    mySerialThread.start()
    print('runpilogger started SerialThread')

    hostname = socket.gethostname()
    print('runpilogger g_savePath:', g_savePath)

    # make file with header if necc.
    if not os.path.isfile(g_savePath):
        with open(g_savePath, 'a') as f:
            headerLine = 'Hostname,Date,Time,Seconds,Temperature,Humidity' + '\n'
            f.write(headerLine)

    # initialize
    lastTimeSeconds = 0
    while True:
        # todo: merge nowSeconds and datetime_now (we are sampling time twice)
        nowSeconds = time.time()  # epoch in seconds since January 1 1970
        datetime_now = datetime.now()
        theDate = datetime_now.strftime('%Y-%m-%d')
        theTime = datetime_now.strftime('%H:%M:%S')

        # serial in from arduino: if we get data in outSerialQueue then
        # process it (this happens when arduino spits out a temperature value)
        try:
            # expecting serialReceive to just be a number
            # (timeout is meaningless with block=False, so it was dropped)
            serialReceive = outSerialQueue.get(block=False)
        except queue.Empty:
            pass
        else:
            if serialReceive:
                serialReceive = serialReceive.strip()
                temperature = serialReceive
                humidity = ''
                oneLine = (hostname + ',' + theDate + ',' + theTime + ',' +
                           str(nowSeconds) + "," + str(temperature) + "," +
                           str(humidity) + "\n")
                print(oneLine.strip())
                with open(g_savePath, 'a') as f:
                    f.write(oneLine)

        # 20180705, turned off DHT for now
        # this needs to be a background thread
        # DHT sensor hooked up to Pi, read it at an interval
        if (Adafruit_DHT is not None and dhtSensorDict['enable']
                and nowSeconds > (lastTimeSeconds + dhtSensorDict['intervalSeconds'])):
            print('reading')
            humidity, temperature = Adafruit_DHT.read_retry(
                dhtSensorDict['sensorType'], dhtSensorDict['pin'])
            if humidity is not None and temperature is not None:
                humidity = round(humidity, 2)
                temperature = round(temperature, 2)
            else:
                humidity = ''
                temperature = ''
                print('Failed to get DHT reading.')
            lastTimeSeconds = nowSeconds  # even when we fail
            oneLine = (hostname + ',' + theDate + ',' + theTime + ',' +
                       str(nowSeconds) + "," + str(temperature) + "," +
                       str(humidity) + "\n")
            print(oneLine)
            with open(g_savePath, 'a') as f:
                f.write(oneLine)

        time.sleep(0.2)  # just so this code does not hang the system

    # NOTE: unreachable — the `while True` loop above never exits.
    print('runpilogger stop')