def main():
    """Poll the beemon FTP server for the newest hive video, run the tracker
    on it, and append the running bee-flow total to a local log file.

    Relies on module-level state visible elsewhere in this file: totalFlow,
    dirs, files, arrivals, departures, and the splitDirLine / splitFileLine /
    sortDirsByDate / sortFilesByTime helpers that populate and order them.
    Loops until the user quits via tools.handle_keys().
    """
    global totalFlow
    # Credentials/host live in a local 'auth' file: user, password, server,
    # one per line. 'with' guarantees the handle is closed even on error
    # (the original left it open if a readline raised).
    with open('auth', 'r') as f:
        user = f.readline().strip()
        pw = f.readline().strip()
        server = f.readline().strip()
    ftp = FTP(server, user, pw)
    lastVideo = None
    lastWait = 256  # ms; doubled each poll while no new video has appeared
    app = App()
    # NOTE: the deprecated time.clock() warm-up call was removed -- it was
    # removed from Python 3.8 and its return value was discarded anyway.
    with open('log', 'w') as log:
        while True:
            # Find the most recent day directory on the server.
            ftp.cwd('/usr/local/bee/beemon/pit1')
            ftp.retrlines('LIST', splitDirLine)
            sortDirsByDate(dirs)
            # NOTE(review): the sibling main() takes dirs[0] as the newest
            # directory; dirs[1] here looked like an off-by-one and was
            # aligned to dirs[0] -- confirm against sortDirsByDate ordering.
            newestDir = dirs[0]
            ftp.cwd("{0}/video".format(newestDir))

            # Find the most recent video file within that directory.
            ftp.retrlines('LIST', splitFileLine)
            sortFilesByTime(files)
            newestFile = files[0]

            if newestFile == lastVideo:
                # Nothing new yet: back off exponentially while staying
                # responsive to a quit keypress (handle_keys returns 1 on quit).
                waitTime = lastWait * 2
                print("Waiting for {0}ms for next video".format(waitTime))
                if tools.handle_keys(waitTime) == 1:
                    break
                lastWait = waitTime
                continue
            lastWait = 256

            # Download the new video to a temp file and analyze it.
            with open('tempfile.h264', 'wb') as tempfile:
                ret = ftp.retrbinary("RETR %s" % newestFile, tempfile.write)
                print(ret)
            app.openNewVideo('tempfile.h264')
            cv2.namedWindow('Tracking')
            cv2.namedWindow('Mask')
            app.run()

            # Net flow accumulates arrivals minus departures across videos.
            totalFlow += app.arrivals
            totalFlow -= app.departures

            # Log the running total with a human-readable timestamp.
            timeStr = time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime())
            log.write("{0} {1}\n".format(timeStr, str(totalFlow)))
            arrivals.append(app.arrivals)
            departures.append(app.departures)

            # Clean up for the next poll: drop the temp video and reset the
            # module-level listing buffers in place.
            os.remove('tempfile.h264')
            del files[:]
            del dirs[:]
            lastVideo = newestFile
    ftp.quit()
def main():
    """Poll the beemon FTP server for the newest hive video, run the tracker
    on it, and append the running bee-flow total to 'Log.txt'.

    Relies on module-level state visible elsewhere in this file: totalFlow,
    dirs, files, arrivals, departures, and the splitDirLine / splitFileLine /
    sortDirsByDate / sortFilesByTime helpers that populate and order them.
    Loops until the user quits via tools.handle_keys().
    """
    global totalFlow
    # SECURITY: the original hardcoded the username/password/host in source.
    # Credentials now come from the local 'auth' file (user, password, server,
    # one per line), matching the sibling main() in this file.
    with open('auth', 'r') as f:
        user = f.readline().strip()
        pw = f.readline().strip()
        server = f.readline().strip()
    ftp = FTP(server, user, pw)
    lastVideo = None
    lastWait = 256  # ms; doubled each poll while no new video has appeared
    app = App()
    # NOTE: the deprecated time.clock() warm-up call was removed -- it was
    # removed from Python 3.8 and its return value was discarded anyway.
    with open('Log.txt', 'w') as log:
        while True:
            # Find the most recent day directory on the server.
            ftp.cwd('/usr/local/bee/beemon/pit1')
            ftp.retrlines('LIST', splitDirLine)
            sortDirsByDate(dirs)
            newestDir = dirs[0]
            ftp.cwd("{0}/video".format(newestDir))

            # Find the most recent video file within that directory.
            ftp.retrlines('LIST', splitFileLine)
            sortFilesByTime(files)
            newestFile = files[0]

            if newestFile == lastVideo:
                # Nothing new yet: back off exponentially while staying
                # responsive to a quit keypress (handle_keys returns 1 on quit).
                waitTime = lastWait * 2
                print("Waiting for {0}ms for next video".format(waitTime))
                if tools.handle_keys(waitTime) == 1:
                    break
                lastWait = waitTime
                continue
            lastWait = 256

            # Download the new video to a temp file and analyze it.
            with open('tempfile.h264', 'wb') as tempfile:
                ret = ftp.retrbinary("RETR %s" % newestFile, tempfile.write)
                print(ret)
            app.openNewVideo('tempfile.h264')
            cv2.namedWindow('Tracking')
            cv2.namedWindow('Mask')
            app.run()

            # Net flow accumulates arrivals minus departures across videos.
            totalFlow += app.arrivals
            totalFlow -= app.departures

            # Log the running total with a human-readable timestamp.
            timeStr = time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime())
            log.write("{0} {1}\n".format(timeStr, str(totalFlow)))
            arrivals.append(app.arrivals)
            departures.append(app.departures)

            # Clean up for the next poll: drop the temp video and reset the
            # module-level listing buffers in place.
            os.remove('tempfile.h264')
            del files[:]
            del dirs[:]
            lastVideo = newestFile
    ftp.quit()
def run(self, as_script=True):
    """Main track-based analysis loop: read frames, segment foreground bees,
    detect blobs, and maintain tracks until the video ends or the user quits.

    :param as_script: when True, run straight through the video; the False
                      branch is a placeholder for caller-driven stepping.
    Side effects: updates self.areas, self.frame_idx, self.nextTrackID and
    the track bookkeeping; shows GUI windows unless self.invisible; releases
    self.cam on exit.
    """
    if self.invisible:
        cv2.namedWindow("Control")
    prev_gray = None
    prev_points = []
    self.nextTrackID = 0
    while True:
        # Get frame; a failed read means the video is exhausted.
        ret, frame = self.cam.read()
        if not ret:
            break
        frame_gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)

        # Segment: keep only definite-foreground pixels, then clean up the
        # mask with a morphological open/close.
        fg_mask = self.operator.apply(frame)
        fg_mask = ((fg_mask == 255) * 255).astype(np.uint8)
        fg_mask = morph_openclose(fg_mask)

        # Detect blobs. OpenCV 3.x findContours returns (image, contours,
        # hierarchy); other versions return (contours, hierarchy).
        # BUG FIX: the old substring test ('"3.0." in cv2.__version__')
        # missed OpenCV 3.1+ -- compare the parsed major version instead.
        if int(cv2.__version__.split('.')[0]) == 3:
            _, contours, _ = cv2.findContours(fg_mask.copy(), cv2.RETR_EXTERNAL,
                                              cv2.CHAIN_APPROX_TC89_L1)
        else:
            contours, _ = cv2.findContours(fg_mask.copy(), cv2.RETR_EXTERNAL,
                                           cv2.CHAIN_APPROX_TC89_L1)
        areas, detections = drawing.draw_min_ellipse(contours, frame,
                                                     MIN_AREA, MAX_AREA,
                                                     draw=False)
        self.areas += areas

        # Track: predict, associate detections, update matched/unmatched
        # tracks, prune lost ones, and spawn tracks for new detections.
        self.predictNewLocations(frame)
        assignments, unmatchedTracks, unmatchedDetections = \
            self.assignTracks(detections, frame)
        self.updateMatchedTracks(assignments, detections)
        self.updateUnmatchedTracks(unmatchedTracks)
        self.deleteLostTracks()
        self.createNewTracks(detections, unmatchedDetections)
        self.showTracks(frame)
        self.checkTrackCrosses()

        # Store frame state and advance to the next frame.
        prev_gray = frame_gray
        prev_points = detections
        self.frame_idx += 1

        if not self.invisible:
            self.draw_overlays(frame, fg_mask)
            cv2.imshow('Tracking', frame)
            cv2.imshow("Mask", fg_mask)
            # handle_keys returns 1 when the user asks to quit.
            if handle_keys(FRAME_DELAY) == 1:
                break

        # Should we continue running or yield info about the current frame?
        if as_script:
            continue
        else:
            pass

    self.cam.release()
def run(self, as_script=True):
    """Optical-flow analysis loop: segment bees, track ellipse centers with
    Lucas-Kanade flow between frames, and count ROI crossings.

    :param as_script: when True, run straight through the video; the False
                      branch is a placeholder for caller-driven stepping.
    :return: the accumulated list of blob areas (self.areas).
    Side effects: updates self.arrivals, self.departures, self.areas and
    self.frame_idx; shows GUI windows unless self.invisible.
    """
    if self.invisible:
        cv2.namedWindow("Control")
    prev_gray = None
    prev_points = None
    while True:
        ret, frame = self.cam.read()
        if not ret:
            break

        # Segment: keep only definite-foreground pixels, then clean up the
        # mask with a morphological open/close.
        fg_mask = self.operator.apply(frame)
        fg_mask = ((fg_mask == 255) * 255).astype(np.uint8)
        fg_mask = morph_openclose(fg_mask)
        frame_gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)

        if prev_gray is not None and prev_points is not None:
            p0 = np.float32([point for point in prev_points]).reshape(-1, 1, 2)
            if drawing.draw_prev_points(frame, prev_points):
                # Blank out background pixels so LK flow only latches onto
                # foreground (bee) texture.
                frame_gray[fg_mask == 0] = 255
                p1, st, err = cv2.calcOpticalFlowPyrLK(prev_gray, frame_gray,
                                                       p0, None, **lk_params)
                for p_i, p_f in zip(p0.reshape(-1, 2), p1.reshape(-1, 2)):
                    # cross() reports +1 for an arrival into the ROI,
                    # -1 for a departure, based on the point's motion.
                    # BUG FIX: was 'result is 1' / 'result is -1' -- identity
                    # comparison with int literals is a CPython caching
                    # accident (SyntaxWarning on 3.8+); use equality.
                    result = cross(ROI, ROI_W, ROI_H, p_i, p_f)
                    if result == 1:
                        self.arrivals += 1
                        if not self.quiet:
                            print("Arrival")
                    elif result == -1:
                        self.departures += 1
                        if not self.quiet:
                            print("Departure")
                    if self.drawTracks:
                        drawing.draw_line(frame, tuple(p_i), tuple(p_f))

        # Store this frame and its detections as the flow baseline.
        prev_gray = frame_gray
        contours, hier = drawing.draw_contours(frame, fg_mask)
        areas, prev_points = drawing.draw_min_ellipse(contours, frame,
                                                      MIN_AREA, MAX_AREA)
        self.areas += areas
        self.frame_idx += 1

        if not self.invisible:
            self.draw_overlays(frame, fg_mask)
            cv2.imshow("Fas", frame_gray)
            cv2.imshow('Tracking', frame)
            cv2.imshow("Mask", fg_mask)
            delay = 33
        else:
            delay = 1
        if handle_keys(delay) == 1:
            break

        # Should we continue running or yield info about the current frame?
        if as_script:
            continue
        else:
            pass

    return self.areas
def run(self, as_script=True):
    """Optical-flow analysis loop: segment bees, track ellipse centers with
    Lucas-Kanade flow between frames, and count ROI crossings.

    :param as_script: when True, run straight through the video; the False
                      branch is a placeholder for caller-driven stepping.
    :return: the accumulated list of blob areas (self.areas).
    Side effects: updates self.arrivals, self.departures, self.areas and
    self.frame_idx; shows GUI windows unless self.invisible.
    """
    if self.invisible:
        cv2.namedWindow("Control")
    prev_gray = None
    prev_points = None
    while True:
        ret, frame = self.cam.read()
        if not ret:
            break

        # Segment: keep only definite-foreground pixels, then clean up the
        # mask with a morphological open/close.
        fg_mask = self.operator.apply(frame)
        fg_mask = ((fg_mask == 255) * 255).astype(np.uint8)
        fg_mask = morph_openclose(fg_mask)
        frame_gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)

        if prev_gray is not None and prev_points is not None:
            p0 = np.float32([point for point in prev_points]).reshape(-1, 1, 2)
            if drawing.draw_prev_points(frame, prev_points):
                # Blank out background pixels so LK flow only latches onto
                # foreground (bee) texture.
                frame_gray[fg_mask == 0] = 255
                p1, st, err = cv2.calcOpticalFlowPyrLK(
                    prev_gray, frame_gray, p0, None, **lk_params)
                for p_i, p_f in zip(p0.reshape(-1, 2), p1.reshape(-1, 2)):
                    # cross() reports +1 for an arrival into the ROI,
                    # -1 for a departure, based on the point's motion.
                    # BUG FIX: was 'result is 1' / 'result is -1' -- identity
                    # comparison with int literals is a CPython caching
                    # accident (SyntaxWarning on 3.8+); use equality.
                    result = cross(ROI, ROI_W, ROI_H, p_i, p_f)
                    if result == 1:
                        self.arrivals += 1
                        if not self.quiet:
                            print("Arrival")
                    elif result == -1:
                        self.departures += 1
                        if not self.quiet:
                            print("Departure")
                    if self.drawTracks:
                        drawing.draw_line(frame, tuple(p_i), tuple(p_f))

        # Store this frame and its detections as the flow baseline.
        prev_gray = frame_gray
        contours, hier = drawing.draw_contours(frame, fg_mask)
        areas, prev_points = drawing.draw_min_ellipse(
            contours, frame, MIN_AREA, MAX_AREA)
        self.areas += areas
        self.frame_idx += 1

        if not self.invisible:
            self.draw_overlays(frame, fg_mask)
            cv2.imshow("Fas", frame_gray)
            cv2.imshow('Tracking', frame)
            cv2.imshow("Mask", fg_mask)
            delay = 33
        else:
            delay = 1
        if handle_keys(delay) == 1:
            break

        # Should we continue running or yield info about the current frame?
        if as_script:
            continue
        else:
            pass

    return self.areas
def run(self, as_script=True):
    """Drive the track-based analysis over the whole video.

    Each iteration grabs a frame, segments the foreground, finds blob
    detections, and feeds them through the predict/assign/update track
    pipeline; ROI crossings are tallied by checkTrackCrosses(). Stops when
    the source runs dry or the user quits, then releases the capture.

    :param as_script: when True, play straight through; the False branch is
                      a placeholder for caller-driven stepping.
    """
    if self.invisible:
        cv2.namedWindow("Control")

    prev_gray = None
    prev_points = []
    self.nextTrackID = 0

    while True:
        grabbed, frame = self.cam.read()
        if not grabbed:
            # Source exhausted.
            break

        gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)

        # Foreground segmentation: keep only definite-foreground pixels,
        # then tidy the mask with a morphological open/close.
        mask = self.operator.apply(frame)
        mask = ((mask == 255) * 255).astype(np.uint8)
        mask = morph_openclose(mask)

        # findContours returns (image, contours, hierarchy) under OpenCV 3
        # but (contours, hierarchy) under other versions.
        major = int(re.findall(r'\d+', cv2.__version__)[0])
        if major == 3:
            _, contours, _ = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL,
                                              cv2.CHAIN_APPROX_TC89_L1)
        else:
            contours, _ = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL,
                                           cv2.CHAIN_APPROX_TC89_L1)

        areas, detections = drawing.draw_min_ellipse(contours, frame,
                                                     MIN_AREA, MAX_AREA,
                                                     draw=False)
        self.areas += areas

        # Tracking pipeline: predict, associate, update matched/unmatched,
        # prune lost tracks, spawn new ones, then count ROI crossings.
        self.predictNewLocations(frame)
        assignments, unmatchedTracks, unmatchedDetections = \
            self.assignTracks(detections, frame)
        self.updateMatchedTracks(assignments, detections)
        self.updateUnmatchedTracks(unmatchedTracks)
        self.deleteLostTracks()
        self.createNewTracks(detections, unmatchedDetections)
        self.showTracks(frame)
        self.checkTrackCrosses()

        # Carry this frame's state forward and advance the counter.
        prev_gray = gray
        prev_points = detections
        self.frame_idx += 1

        if not self.invisible:
            self.draw_overlays(frame, mask)
            cv2.imshow('Tracking', frame)
            cv2.imshow("Mask", mask)
            # handle_keys returns 1 on a quit keypress.
            if handle_keys(FRAME_DELAY) == 1:
                break

        if not as_script:
            # Placeholder: yielding per-frame info is not implemented.
            pass

    self.cam.release()