def step2(tstamp):
    """Stamp the shared diff image with framerates and display it.

    Returns the timestamp unchanged so it can propagate to the next
    pipeline stage.
    """
    # Fetch the shared diff image once instead of indexing the table twice.
    image = common[tstamp]['image_diff']
    rates = framerate2.tick()
    util.writeOSD(image, ('{:.2f}, {:.2f}, {:.2f} fps'.format(*rates),))
    cv2.imshow('diff average 4', image)
    # Give HighGUI a chance to process its event queue.
    cv2.waitKey(1)
    return tstamp
def doTask(self, tstamp): """Compute difference between given image and accumulation, then accumulate and set result with the difference. Initialize accumulation if needed (if opacity is 100%.)""" # Compute the alpha value. alpha, self.tstamp_prev = util.getAlpha(self.tstamp_prev) image = common[tstamp]['image_in'] # Initalize accumulation if so indicated. if self.image_acc is None: self.image_acc = np.empty(np.shape(image)) # Allocate shared memory for the diff image. shape = np.shape(image) dtype = image.dtype image_diff = sharedmem.empty(shape, dtype) # Compute difference. cv2.absdiff( self.image_acc.astype(image.dtype), image, image_diff, ) # Write the framerate on top of the image. fps_text = '{:.2f}, {:.2f}, {:.2f} fps process'.format(*self.framerate.tick()) util.writeOSD(image_diff, ('', fps_text,)) # First line is blank (written to later.) # Write diff image (actually, reference thereof) to process-shared table. hello = common[tstamp] hello['image_diff'] = image_diff common[tstamp] = hello # Propagate result to the next stage. self.putResult(tstamp) # Accumulate. hello = cv2.accumulateWeighted( image, self.image_acc, alpha, )
class Postprocessor(mpipe.OrderedWorker):
    """Draws detection rectangles and an OSD onto the shared image."""

    def doTask(self, task):
        """Augment the input image with results of processing.

        *task* is a (tstamp, rects) pair, where rects is a list of
        per-classifier lists of (x, y, width, height, color) tuples.
        Returns the timestamp for the next stage.
        """
        tstamp, rects = task
        target = images[tstamp]
        # Flatten the list-of-lists of rectangles.
        flat = []
        for sublist in rects:
            flat.extend(sublist)
        # Draw each rectangle; (x2, y2) are width/height offsets, so the
        # opposite corner is the origin plus the offsets.
        for x1, y1, x2, y2, color in flat:
            cv2.rectangle(
                target,
                (x1, y1),
                (x1 + x2, y1 + y2),
                color=color,
                thickness=2,
            )
        # Stamp image dimensions and the measured framerate.
        size = np.shape(target)[:2]
        fps_text = '{:.2f} fps'.format(*framerate.tick())
        util.writeOSD(
            target,
            ('{0}x{1}'.format(size[1], size[0]), fps_text),
        )
        return tstamp
import coils
import util

# Command-line configuration: device index, frame size, and run duration.
DEVICE = int(sys.argv[1])
WIDTH = int(sys.argv[2])
HEIGHT = int(sys.argv[3])
DURATION = float(sys.argv[4])

# Monitor framerates for the given seconds past.
framerate = coils.RateTicker((1, 5, 10))

# Create the OpenCV video capture object.
cap = cv2.VideoCapture(DEVICE)
cap.set(3, WIDTH)   # 3 == CV_CAP_PROP_FRAME_WIDTH
cap.set(4, HEIGHT)  # 4 == CV_CAP_PROP_FRAME_HEIGHT

# Create the display window.
title = 'playing OpenCV capture'
cv2.namedWindow(title, cv2.cv.CV_WINDOW_NORMAL)

# Capture, stamp, and display frames until the duration elapses.
end = datetime.datetime.now() + datetime.timedelta(seconds=DURATION)
while end > datetime.datetime.now():
    # Take a snapshot, write framerate on it, and display it.
    retval, image = cap.read()
    if not retval:
        # Frame grab failed (device busy or disconnected) -- skip this
        # iteration instead of crashing on a None image below.
        continue
    fps_text = '{:.2f}, {:.2f}, {:.2f} fps'.format(*framerate.tick())
    util.writeOSD(image, (fps_text,))
    cv2.imshow(title, image)
    cv2.waitKey(1)  # Allow HighGUI to process events.
# Compute alpha value. alpha, tstamp_prev = util.getAlpha(tstamp_prev) # Initalize accumulation if so indicated. if image_acc is None: image_acc = np.empty(np.shape(image)) # Compute difference. image_diff = cv2.absdiff( image_acc.astype(image.dtype), image, ) # Accumulate. hello = cv2.accumulateWeighted( image, image_acc, alpha, ) # Write the framerate on top of the image. fps_text = '{:.2f}, {:.2f}, {:.2f} fps'.format(*framerate.tick()) util.writeOSD(image_diff, (fps_text, )) # Display the image. cv2.imshow('diff average 1', image_diff) # Allow HighGUI to process event. cv2.waitKey(1)
class Postprocessor(mpipe.OrderedWorker):
    # Draws (3x-upscaled) detection rectangles and drives a pan/tilt camera
    # toward the first detected face, rate-limited by a wait timer.
    def doTask(self, (tstamp, rects,)):
        first = True
        # NOTE(review): the string below is not a real docstring — it follows
        # the `first = True` statement, so it is just a no-op expression.
        """Augment the input image with results of processing."""
        size = np.shape(images[tstamp])[:2]
        # Make a flat list from a list of lists .
        rects = [item for sublist in rects for item in sublist]
        # Draw rectangles.
        for x1, y1, x2, y2, color in rects:
            # Scale coordinates up 3x — presumably detection ran on a
            # downscaled image; confirm against the detector stage.
            x1 *= 3
            y1 *= 3
            x2 *= 3
            y2 *= 3
            cv2.rectangle(
                images[tstamp],
                (x1, y1),
                (x1+x2, y1+y2),
                color=color,
                thickness=2,
            )
            # Only the first rectangle of the frame steers the camera.
            if first == True:
                global lastpan
                global lasttilt
                global camwaittime
                global avcount
                global avx
                global avy
                now = datetime.datetime.now()
                # Skip steering while the camera is still settling from the
                # previous move (rate limiting).
                if camwaittime < now:
                    # Find face center (x2/y2 are width/height offsets).
                    x = float(x1 + (x2 / 2))
                    y = float(y1 + (y2 / 2))
                    first = False
                    #print(x, y, 'face center', size)
                    # Image center; size is (height, width).
                    ixc = float((size[1] / 2))
                    iwc = float((size[0] / 2))
                    #print(ixc, iwc, 'image center')
                    # Fractional offset of the face from center, -1..1.
                    offsetx = float(((ixc - x) / ixc))
                    offsety = float(((y - iwc) / iwc))
                    #print(offsetx, offsety, 'percent off center')
                    #print(lastpan, lasttilt)
                    # Convert to degrees; 27.0 / 20.5 are presumably the
                    # camera's half field-of-view angles — confirm.
                    aovx = (offsetx * 27.0)
                    aovy = (offsety * 20.5)
                    # Average over 10 samples before moving the camera.
                    if avcount < 10:
                        avcount += 1
                        avx += aovx
                        avy += aovy
                    else:
                        # NOTE(review): avox/avoy (the averages) are computed
                        # but never used — the move below uses the latest
                        # aovx/aovy instead.  Also avx/avy are never reset to
                        # zero when avcount restarts; both look like bugs —
                        # confirm intended behavior before relying on this.
                        avox = avx / 10
                        avoy = avy / 10
                        print(aovx, aovy, 'Angle of view')
                        # Wait time scales with the size of the move
                        # (90 degrees ~ 6 seconds).
                        if abs(aovx) > abs(aovy):
                            camwaitsecs = ((abs(aovx) / 90) * 6)
                        else:
                            camwaitsecs = ((abs(aovy) / 90) * 6)
                        print(camwaitsecs, 'Cam wait secs')
                        camwaittime = now + datetime.timedelta(seconds=abs(camwaitsecs))
                        # Command the camera relative to its last position.
                        nextpan = int(lastpan + aovx)
                        nexttilt = int(lasttilt + aovy)
                        #print(nextpan, nexttilt)
                        pan(nextpan)
                        tilt(nexttilt)
                        lastpan = nextpan
                        lasttilt = nexttilt
                        avcount = 0
        # Write image dimensions and framerate.
        fps_text = '{:.2f} fps'.format(*framerate.tick())
        util.writeOSD(
            images[tstamp],
            ('{0}x{1}'.format(size[1], size[0]), fps_text),
        )
        return tstamp
import coils
import util

# Command-line configuration: device index, frame size, and run duration.
DEVICE = int(sys.argv[1])
WIDTH = int(sys.argv[2])
HEIGHT = int(sys.argv[3])
DURATION = float(sys.argv[4])

# Monitor framerates for the given seconds past.
framerate = coils.RateTicker((1, 5, 10))

# Create the OpenCV video capture object.
cap = cv2.VideoCapture(DEVICE)
cap.set(3, WIDTH)   # 3 == CV_CAP_PROP_FRAME_WIDTH
cap.set(4, HEIGHT)  # 4 == CV_CAP_PROP_FRAME_HEIGHT

# Create the display window.
title = 'playing OpenCV capture'
cv2.namedWindow(title, cv2.cv.CV_WINDOW_NORMAL)

# Capture, stamp, and display frames until the duration elapses.
end = datetime.datetime.now() + datetime.timedelta(seconds=DURATION)
while end > datetime.datetime.now():
    # Take a snapshot, write framerate on it, and display it.
    retval, image = cap.read()
    if not retval:
        # Frame grab failed (device busy or disconnected) -- skip this
        # iteration instead of crashing on a None image below.
        continue
    fps_text = '{:.2f}, {:.2f}, {:.2f} fps'.format(*framerate.tick())
    util.writeOSD(image, (fps_text, ))
    cv2.imshow(title, image)
    cv2.waitKey(1)  # Allow HighGUI to process events.
def step2(image):
    """Show *image* in the display window after stamping framerates on it."""
    rates = framerate.tick()
    util.writeOSD(image, ('{:.2f}, {:.2f}, {:.2f} fps'.format(*rates),))
    cv2.imshow('diff average 2', image)
    # Let HighGUI pump its event queue.
    cv2.waitKey(1)
def step2(image):
    """Overlay the current framerate readings on *image* and display it.

    Called once per frame; returns None.
    """
    text = '{:.2f}, {:.2f}, {:.2f} fps'.format(*framerate.tick())
    util.writeOSD(image, (text, ))
    cv2.imshow('diff average 2', image)
    cv2.waitKey(1)  # allow HighGUI to process its events
# Run every configured cascade classifier over the image and collect hits.
# NOTE(review): `result` is presumably initialized (to []) above this
# fragment — confirm against the surrounding code.
for classi in util.cascade.classifiers:
    rects = classi.detectMultiScale(
        image,
        scaleFactor=1.3,
        minNeighbors=3,
        # Bound detection sizes relative to the frame; x/20 and x/2 are
        # Python 2 integer division on the (height, width) values.
        minSize=tuple([x/20 for x in size]),
        maxSize=tuple([x/2 for x in size]),
    )
    if len(rects):
        # Tag each hit with this classifier's display color.
        for a,b,c,d in rects:
            result.append((a,b,c,d, util.cascade.colors[classi]))

# Draw the rectangles; (x2, y2) are width/height offsets from (x1, y1).
for x1, y1, x2, y2, color in result:
    cv2.rectangle(
        image,
        (x1, y1),
        (x1+x2, y1+y2),
        color=color,
        thickness=2,
    )

# Write image dimensions and framerate; size is (height, width).
fps_text = '{:.2f}, {:.2f}, {:.2f} fps'.format(*framerate.tick())
util.writeOSD(
    image,
    ('{0}x{1}'.format(size[1], size[0]), fps_text),
)

cv2.imshow('object detection 1', image)
cv2.waitKey(1)  # Allow HighGUI to process events.
# NOTE(review): this fragment appears to continue a `for classi in ...`
# loop that starts before this view — `classi` is defined above; confirm.
rects = classi.detectMultiScale(
    image,
    scaleFactor=1.3,
    minNeighbors=3,
    # Bound detection sizes relative to the frame; Python 2 integer
    # division on the (height, width) values.
    minSize=tuple([x / 20 for x in size]),
    maxSize=tuple([x / 2 for x in size]),
)
if len(rects):
    # Tag each hit with this classifier's display color.
    for a, b, c, d in rects:
        result.append((a, b, c, d, util.cascade.colors[classi]))

# Draw the rectangles; (x2, y2) are width/height offsets from (x1, y1).
for x1, y1, x2, y2, color in result:
    cv2.rectangle(
        image,
        (x1, y1),
        (x1 + x2, y1 + y2),
        color=color,
        thickness=2,
    )

# Write image dimensions and framerate; size is (height, width).
fps_text = '{:.2f}, {:.2f}, {:.2f} fps'.format(*framerate.tick())
util.writeOSD(
    image,
    ('{0}x{1}'.format(size[1], size[0]), fps_text),
)

cv2.imshow('object detection 1', image)
cv2.waitKey(1)  # Allow HighGUI to process events.