Code Example #1
def launch_FLIR_GUI(bg=None):

    if bg is None:
        cap = fc2.Context()
        cap.connect(*cap.get_camera_from_index(0))
        cap.set_video_mode_and_frame_rate(fc2.VIDEOMODE_640x480Y8,
                                          fc2.FRAMERATE_15)
        m, f = cap.get_video_mode_and_frame_rate()
        p = cap.get_property(fc2.FRAME_RATE)
        cap.set_property(**p)
        cap.start_capture()
        img = fc2.Image()
        cap.retrieve_buffer(img)
        frame = np.array(img)

        im = frame.copy()
        im = np.expand_dims(im, 2)

        r = cv2.selectROI(im, fromCenter=False)

        cap.stop_capture()
        cap.disconnect()
        cv2.destroyAllWindows()

    else:
        r = cv2.selectROI(bg, fromCenter=False)
        cv2.destroyAllWindows()

    return r
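
The value returned here is the (x, y, w, h) rectangle produced by cv2.selectROI; a caller would typically unpack it the way Examples #10 and #23 do. A minimal usage sketch (the `frame` below is an assumed, separately grabbed image, not something this function returns):

r = launch_FLIR_GUI()
x, y, w, h = r
roi = frame[y:y + h, x:x + w]  # `frame` is assumed to be a previously captured image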
Code Example #2
def camera(n_frames, save_dir):

	cap = fc2.Context()
	cap.connect(*cap.get_camera_from_index(0))

	cap.set_video_mode_and_frame_rate(fc2.VIDEOMODE_640x480Y8, fc2.FRAMERATE_30)
	m, f = cap.get_video_mode_and_frame_rate()
	p = cap.get_property(fc2.FRAME_RATE)
	cap.set_property(**p)
	cap.start_capture()

	baseTime = time.time()

	for i in range(n_frames):

		img = fc2.Image()
		cap.retrieve_buffer(img)
		frame = np.array(img)

		j = str(i)
		k = str(j.zfill(8))
		cv2.imwrite(save_dir+'/{}.jpg'.format(k), frame)

		# Stop early if 'q' is pressed (requires an OpenCV window to have focus)
		if cv2.waitKey(1) & 0xFF == ord('q'):
			break

	cap.stop_capture()
	cap.disconnect()
	cv2.destroyAllWindows()
	cv2.waitKey()
Code Example #3
    def _run(self, keep=True):
        print(fc2.get_library_version())
        c = fc2.Context()
        if (c.get_num_of_cameras() < 1): return
        c.connect(*c.get_camera_from_index(0))
        m, f = c.get_video_mode_and_frame_rate()
        print(c.get_format7_configuration())
        c.start_capture()
        im = fc2.Image()
        while True:
            c.retrieve_buffer(im)
            a = np.array(im)
            a = cv2.resize(a, (640, 480))
            a = cv2.flip(a, 0)
            if self.ctrl:
                self.ctrl.lock.acquire()
                self.ctrl.imageQueue.append(a)
                self.ctrl.haveImage = True
                self.ctrl.lock.release()
                self.ctrl.event()
                #cv2.imshow('rgb1', a)
                #cv2.waitKey(1)
            else:
                cv2.imshow('rgb', a)

                if not keep:
                    key = cv2.waitKey()
                    break
                else:
                    key = cv2.waitKey(1)
                    if key & 0xFF == ord('q'): break
        c.stop_capture()
        c.disconnect()
Code Example #4
File: ClosedLoop.py, Project: flybrains/cl-ui
	def run(self):
		self.threadactive=True


		if self.testMode==True:
			cap = cv2.VideoCapture('/home/patrick/Desktop/out16.mp4')
		else:
			try:
				cap = fc2.Context()
				cap.connect(*cap.get_camera_from_index(0))
				cap.set_video_mode_and_frame_rate(fc2.VIDEOMODE_640x480Y8, fc2.FRAMERATE_30)
				m, f = cap.get_video_mode_and_frame_rate()
				p = cap.get_property(fc2.FRAME_RATE)
				cap.set_property(**p)
				cap.start_capture()
			except fc2.ApiError:
				msg = 'Make sure a camera is connected  '
				self.error = ErrorMsg(msg)
				self.error.show()
				self.threadactive=False
				self.finished.emit()
				return None


		i = 0

		while (i < self.nFrames and self.threadactive):

			if self.testMode==True:
				ret, frame = cap.read()
				frame = cv2.resize(frame, (420,420))
				frame = frame.copy()
			else:
				img = fc2.Image()
				cap.retrieve_buffer(img)
				frame = np.array(img)
				frame = cv2.cvtColor(frame,cv2.COLOR_GRAY2RGB)

			h, w, ch = frame.shape
			bytesPerLine = ch * w
			convertToQtFormat = QImage(frame.data, w, h, bytesPerLine, QImage.Format_RGB888)
			p = convertToQtFormat.scaled(420, 420, QtCore.Qt.KeepAspectRatio)
			self.changePixmap.emit(p)
			if self.write==True:
				j = str(i)
				k = str(j.zfill(6))
				cv2.imwrite(self.saveDir+'/{}.jpg'.format(k), frame)
			i = i+1
			prog = int(100*(i/self.nFrames))
			if self.write==True:
				self.count.emit(prog)
		self.finished.emit()

		if self.testMode==True:
			cap.release()

		else:
			cap.stop_capture()
			cap.disconnect()
Code Example #5
 def __init__(self):
     self.capturing = False
     self.context = fc2.Context()
     print('number of cameras:', self.context.get_num_of_cameras())
     # c.set_video_mode_and_frame_rate(fc2.VIDEOMODE_1280x960Y16, fc2.FRAMERATE_7_5)
     if self.context.get_num_of_cameras() == 0:
         raise RuntimeError('No camera connected')
     self.context.connect(*self.context.get_camera_from_index(0))
     print('camera info:', self.context.get_camera_info())
Code Example #6
    def __init__(self):
        self.home_path = os.environ['HOME']
        self.temp_dir = os.path.join(self.home_path, "tmp/high_speed_temp_dir/")
        if not os.path.exists(self.temp_dir):
            os.makedirs(self.temp_dir)

        self.c = fc2.Context()
#         c.get_num_of_cameras()
        self.connect()
Code Example #7
 def __init__(self, camnum):
     fourcc = 0
     fpsec = 70
     resolution = (1504, 1500)
     self.cam = fc2.Context()
     self.cam.connect(*self.cam.get_camera_from_index(camnum))
     self.serial_num = self.cam.get_camera_info()
     self.im = fc2.Image()
     self.video = cv2.VideoWriter('cam'+str(camnum)+'.AVI', fourcc, fpsec,
                                  resolution, False)
     self.cam.start_capture()
     self.cycles = []
     self.camID = camnum
Code Example #8
 def initializeCamera(self):
     cam = fc2.Context()
     num_cam = cam.get_num_of_cameras()
     if num_cam == 0:
         raise ValueError("No Firefly Camera detected.")
     elif num_cam > 1:
         print "Warning: Multiple Firefly Cameras detected. First one picked."
     cam.connect(*cam.get_camera_from_index(0))
     #cam.set_video_mode_and_frame_rate(fc2.VIDEOMODE_1280x960Y8, fc2.FRAMERATE_7_5)
     cam.set_video_mode_and_frame_rate(fc2.VIDEOMODE_1280x960Y8,
                                       fc2.FRAMERATE_15)
     #cam.set_video_mode_and_frame_rate(fc2.VIDEOMODE_640x480Y8, fc2.FRAMERATE_30)
     #         self.cam.set_property()
     return cam
Code Example #9
File: sibal.py, Project: sinsinsin00/I_SYN_Project
 def cameraSettings(self):
     print(fc2.get_library_version())
     self.c = fc2.Context()
     numberCam = self.c.get_num_of_cameras()
     print(numberCam)    
     self.c.connect(*self.c.get_camera_from_index(0))
     print(self.c.get_camera_info())
     m, f = self.c.get_video_mode_and_frame_rate()
     print(m, f)
     print(self.c.get_property_info(fc2.FRAME_RATE))
     p = self.c.get_property(fc2.FRAME_RATE)
     print(p)
     self.c.set_property(**p)
     self.c.start_capture()
Code Example #10
def getBG():
    # if address is not None:
    #   bg = cv2.imread(config.bg)
    #   bg = cv2.cvtColor(bg, cv2.COLOR_GRAY2RGB)
    # else:
    cap = fc2.Context()
    cap.connect(*cap.get_camera_from_index(0))
    cap.set_video_mode_and_frame_rate(fc2.VIDEOMODE_640x480Y8,
                                      fc2.FRAMERATE_15)
    m, f = cap.get_video_mode_and_frame_rate()
    p = cap.get_property(fc2.FRAME_RATE)
    cap.set_property(**p)
    cap.start_capture()

    temp = np.ndarray((480, 640, 60), dtype=np.uint8)

    for i in range(60):

        img = fc2.Image()
        cap.retrieve_buffer(img)
        frame = np.array(img)

        temp[:, :, i] = frame[:, :]

    bgAvg = np.mean(temp, axis=2)

    #_, bg = cv2.threshold(bgAvg, 150, 255, 0)

    bg = bgAvg.astype(np.uint8)

    cap.stop_capture()
    cap.disconnect()

    bgBig = cv2.resize(bg, None, fx=2, fy=2)

    r = cv2.selectROI(bgBig, fromCenter=False)

    x, y, w, h = r[0], r[1], r[2], r[3]

    bgBig[y:y + h, x:x + w] = np.ones((h, w), dtype=np.uint8) * 160

    bg = cv2.resize(bgBig, None, fx=0.5, fy=0.5)

    cv2.destroyAllWindows()

    return bg
Code Example #11
def GenericCamera(n_frames, save_dir, block):
    cap = fc2.Context()
    cap.connect(*cap.get_camera_from_index(0))
    cap.set_video_mode_and_frame_rate(fc2.VIDEOMODE_640x480Y8,
                                      fc2.FRAMERATE_15)
    m, f = cap.get_video_mode_and_frame_rate()
    p = cap.get_property(fc2.FRAME_RATE)
    cap.set_property(**p)
    cap.start_capture()

    photos = []

    time0 = time.time()
    for i in range(n_frames):
        print(time.time() - time0)

        if block == 'pre':
            logging.debug("{},{},{}".format(str(time.time() - globalZeroTime),
                                            'pre', str(i)))
        else:
            logging.debug("{},{},{}".format(str(time.time() - globalZeroTime),
                                            'post', str(i)))

        img = fc2.Image()
        cap.retrieve_buffer(img)
        frame = np.array(img)
        photos.append(frame)
        # smallFrame = cv2.resize(frame, None, fx=0.5, fy=0.5)
        cv2.imshow("Frame", frame)
        #cv2.imwrite(save_dir+'/{}.jpg'.format(i), frame)

        # Video keyboard interrupt
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break

    if save_dir is not None:
        with open(save_dir + '/photos.pkl', 'wb') as f:
            pickle.dump(photos, f)
    else:
        pass

    cap.stop_capture()
    cap.disconnect()
    cv2.destroyAllWindows()
    cv2.waitKey()
Code Example #12
    def __init__(self):
        self.fly = flycapture2.Context()
        a = self.fly.get_camera_from_index(0)
        b = self.fly.connect(a[0], a[1], a[2], a[3])

        self.videomodes = {
            '640x480_8bit': flycapture2.VIDEOMODE_640x480Y8,
            '640x480_16bit': flycapture2.VIDEOMODE_640x480Y16
        }

        self.framerates = {
            '7.5': flycapture2.FRAMERATE_7_5,
            '15': flycapture2.FRAMERATE_15,
            '30': flycapture2.FRAMERATE_30,
            '60': flycapture2.FRAMERATE_60
        }

        self.use16bit = True
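
The two lookup tables above are defined but never applied in this snippet. A minimal sketch of a hypothetical configure method that would use them, relying only on the set_video_mode_and_frame_rate and start_capture calls shown in the other examples, might look like this:

    def configure(self, framerate='15'):
        # Hypothetical helper (not part of the original class): pick a video
        # mode from the tables above based on the use16bit flag, apply it with
        # the requested frame rate, then start streaming.
        key = '640x480_16bit' if self.use16bit else '640x480_8bit'
        self.fly.set_video_mode_and_frame_rate(self.videomodes[key],
                                               self.framerates[framerate])
        self.fly.start_capture()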
Code Example #13
    def open(self):
        self.context = fc2.Context()
        self.context.connect(
            *self.context.get_camera_from_index(self.device_no))

        # self.reset_camera()

        if self.fast_and_small_video:
            width, height = self.set_resolution(3 * 480, 3 * 480)
            self.framerate = framerate_focus
        else:
            self.framerate = framerate_full
            self.set_resolution()  # maximize resolution

        if self.triggered:
            self.set_software_trigger()
            print('Framerate set to: {:.1f} Hz'.format(self.framerate))

        self.context.start_capture()
        self.im = fc2.Image()
Code Example #14
def test():
    print(fc2.get_library_version())
    c = fc2.Context()
    print(c.get_num_of_cameras())
    c.connect(*c.get_camera_from_index(0))
    print(c.get_camera_info())
    c.set_video_mode_and_frame_rate(fc2.VIDEOMODE_1280x960Y16,
            fc2.FRAMERATE_7_5)
    m, f = c.get_video_mode_and_frame_rate()
    print(m, f)
    print(c.get_video_mode_and_frame_rate_info(m, f))
    print(c.get_property_info(fc2.FRAME_RATE))
    p = c.get_property(fc2.FRAME_RATE)
    print(p)
    c.set_property(**p)
    c.start_capture()
    im = fc2.Image()
    print([np.array(c.retrieve_buffer(im)).sum() for i in range(80)])
    a = np.array(im)
    print(a.shape, a.base)
    c.stop_capture()
    c.disconnect()
Code Example #15
def connectCapture(cam_index):
    # cam_index selects which attached camera to open (0 or 1)
    c = fc2.Context()
    c.connect(*c.get_camera_from_index(cam_index))

    print("Connecting to camera:", cam_index)
    print(c.get_camera_info())

    if cam_index == 0:
        c.set_format7_configuration(fc2.MODE_0, 800, 400, 800, 800,
                                    fc2.PIXEL_FORMAT_RGB8)
    else:
        c.set_format7_configuration(fc2.MODE_0, 860, 500, 800, 800,
                                    fc2.PIXEL_FORMAT_RGB8)

    c.start_capture()
    return c
Code Example #16
    def __init__(self):
        """Setup the communications with the flycapture2 device."""
        import flycapture2 as fc2

        # FlyCapture Info printing and setup:
        print "\n\nlibrary version: {}\n".format(fc2.get_library_version())
        self.context = fc2.Context()
        print "Number of Cameras: {}\n".format(
            self.context.get_num_of_cameras())
        self.context.connect(*self.context.get_camera_from_index(0))
        print "Camera Info: {}\n".format(self.context.get_camera_info())
        m, f = self.context.get_video_mode_and_frame_rate()
        print "Video Mode: {}\nFrame Rate:{}\n".format(m, f)
        print "Frame Rate Property Info: {}\n".format(
            self.context.get_property_info(fc2.FRAME_RATE))
        p = self.context.get_property(fc2.FRAME_RATE)
        print "Frame Rate Property: {}\n".format(p)
        self.context.set_property(**p)
        self.context.start_capture()
        self.fc2_image = fc2.Image()
        print "done with flycap2 setup"
        self.cam_on = True

        self.image = None
        cv2.namedWindow('raw', cv2.WINDOW_NORMAL)
        cv2.waitKey(5)

        self.__acquisition_thread = None
        self.lock = threading.Lock()

        self.run = True
        self.__acquisition_thread = threading.Thread(group=None,
                                                     target=self.acquire,
                                                     name='acquisition_thread',
                                                     args=(),
                                                     kwargs={})
        self.__acquisition_thread.start()
Code Example #17
    def __init__(self, idx=0):
        """
        :param idx: Camera ID to use
        :return:
        """
        self.img = np.zeros(
            (4000, 3000,
             3))  # Keep a copy of the current working image, init to zeros
        self.ctx = fc2.Context()

        # assume only one camera
        self.ctx.connect(*self.ctx.get_camera_from_index(idx))

        # Set to Format7, mode 10, RAW8 pixels. This captures raw bayer images at the highest resolution
        self.ctx.set_format7_configuration(fc2.MODE_10, 0, 0, 4000, 3000,
                                           fc2.PIXEL_FORMAT_RAW8)

        # Reading back the mode and frame rate appears to be needed to validate the configuration
        m, f = self.ctx.get_video_mode_and_frame_rate()
        p = self.ctx.get_property(fc2.AUTO_EXPOSURE)
        p['auto_manual_mode'] = False
        self.ctx.set_property(**p)

        return
Code Example #18
 def __init__(self, index=0, **k):
     self.ctx = fc2.Context()
     self.ctx.connect(*self.ctx.get_camera_from_index(index))
     super(Fc2Capture, self).__init__(**k)
Code Example #19
File: gen_example.py, Project: julianarhee/pyflycap

def produce(consumer):
    """Produces a set of values and forwards them to the pre-defined consumer
    function"""
    while True:
        data = get_data()
        #print('Produced {}'.format(data))
        consumer.send(data)
        yield


if __name__ == '__main__':

    print(fc2.get_library_version())
    c = fc2.Context()
    print(c.get_num_of_cameras())
    c.connect(*c.get_camera_from_index(0))
    print(c.get_camera_info())
    # c.set_video_mode_and_frame_rate(fc2.VIDEOMODE_1280x960Y16,
    #         fc2.FRAMERATE_7_5)
    m, f = c.get_video_mode_and_frame_rate()
    print(m, f)
    # print(c.get_video_mode_and_frame_rate_info(m, f))
    print(c.get_property_info(fc2.FRAME_RATE))
    p = c.get_property(fc2.FRAME_RATE)
    print(p)
    c.set_property(**p)
    c.start_capture()

    cv2.startWindowThread()
Code Example #20
                print("Bound to PvAPI camera (name: %s, uid: %s)" %
                      (camera.name, camera.uid))

                camera.request_frame_rate(frame_rate)

            except Exception as e:

                # print("%s" % e)
                cam_info = None

    else:

        import flycapture2 as fc2
        print(fc2.get_library_version())
        camera = fc2.Context()
        print(camera.get_num_of_cameras())
        camera.connect(*camera.get_camera_from_index(0))
        print(camera.get_camera_info())
        # c.set_video_mode_and_frame_rate(fc2.VIDEOMODE_1280x960Y16,
        #         fc2.FRAMERATE_7_5)
        m, f = camera.get_video_mode_and_frame_rate()
        print(m, f)
        # print(c.get_video_mode_and_frame_rate_info(m, f))
        print(camera.get_property_info(fc2.FRAME_RATE))
        p = camera.get_property(fc2.FRAME_RATE)
        print(p)
        camera.set_property(**p)
        # camera.start_capture()

    if camera is None:
Code Example #21
File: __init__.py, Project: apleonhardt/flytracker
 def __init__(self, serial_number):
     self.context = cap.Context()
     self.connected = False
     self.serial_number = serial_number
     self.image_buffer = cap.Image()
Code Example #22
File: __init__.py, Project: apleonhardt/flytracker
def get_camera_number():
    temp_context = cap.Context()
    return temp_context.get_num_of_cameras()
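
A caller would typically use this helper as a connection guard, in the same spirit as Examples #5 and #8; a minimal usage sketch (the error message is illustrative):

n = get_camera_number()
if n == 0:
    raise RuntimeError('No FlyCapture2 camera detected')
print('Detected {} camera(s)'.format(n))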
Code Example #23
    def run(self):
        cap = fc2.Context()
        cap.connect(*cap.get_camera_from_index(0))
        cap.set_video_mode_and_frame_rate(fc2.VIDEOMODE_640x480Y8,
                                          fc2.FRAMERATE_15)
        m, f = cap.get_video_mode_and_frame_rate()
        p = cap.get_property(fc2.FRAME_RATE)
        cap.set_property(**p)
        cap.start_capture()

        cy = r[1] + int(r[3] / 2)
        cx = r[0] + int(r[2] / 2)

        x = r[0]
        y = r[1]
        w = r[2]
        h = r[3]

        photos = []
        slidingWindow = []

        for i in range(self.nFrames):
            img = fc2.Image()
            cap.retrieve_buffer(img)
            frame = np.array(img)

            photos.append(frame)

            smallFrame = frame[y:y + h, x:x + w]
            smallFrame = np.multiply(smallFrame, mask)
            smallFrame[np.where(smallFrame == 0)] = 255

            detect, trackedFrame, pos = track(smallFrame, 120, config.flySize)

            if pos[1] is None:
                slidingWindow.append(np.mean(slidingWindow))
            else:
                slidingWindow.append(pos[1])

            cv2.imshow("Frame", trackedFrame)

            #cv2.imwrite(self.saveDir+'/{}.jpg'.format(i), frame)

            # Code for walking direction
            # 0 = No walking
            # 1 = Walking towards top
            # 2 = Walking towards bottom

            if i > 2:
                slidingWindow.pop(0)
                d = np.diff(slidingWindow)
                da = np.mean(d)

                if da > 2:
                    signal = 2
                elif da < -2:
                    signal = 1
                else:
                    signal = 0
            else:
                signal = 0

            logging.debug('{},{},{},{},{}'.format(
                str(time.time() - globalZeroTime), 'reinforcement', str(i),
                str(pos[0]), str(pos[1])))

            if not q.full():
                # This will be set conditionally by the tracker
                item = signal
                q.put(item)
                #logging.debug('Putting {} in queue'.format(signal))

            # Video keyboard interrupt
            if cv2.waitKey(1) & 0xFF == ord('q'):
                break
        if self.save_dir is not None:
            with open(self.save_dir + '/photos.pkl', 'wb') as f:
                pickle.dump(photos, f)
        else:
            pass

        cap.stop_capture()
        cap.disconnect()
        cv2.destroyAllWindows()
        cv2.waitKey()

        return
Code Example #24
File: ClosedLoop.py, Project: flybrains/cl-ui
	def runExperiment(self):

		self.saveVideo = bool(self.saveVideoCheckBox.isChecked())

		if (self.arduinoCommText.toPlainText() == ""):
			msg = 'Must specify Arduino COMM port  '
			self.error = ErrorMsg(msg)
			self.error.show()
			return None

		if (self.arduinoBaudText.toPlainText() == ''):
			msg = 'Must specify Arduino Baudrate  '
			self.error = ErrorMsg(msg)
			self.error.show()
			return None

		try:
			self.savePath
		except AttributeError:
			msg = 'Must select unique save location '
			self.error = ErrorMsg(msg)
			self.error.show()
			return None



		self.progressBar.show()
		self.setWindowTitle(self.title)
		self.label = QLabel(self)
		self.label.move(10, 40)
		self.label.resize(420, 420)

		self.comm = str(self.arduinoCommText.toPlainText())
		self.baud = int(self.arduinoBaudText.toPlainText())

		try:
			self.ser = serial.Serial(self.comm, self.baud)
		except serial.serialutil.SerialException:
			msg = 'Unable to establish connection with Arduino. Check COMM and Baud and connection with board. Re-upload program if necessary '
			self.error = ErrorMsg(msg)
			self.error.show()
			return None

		cap = fc2.Context()
		cap.connect(*cap.get_camera_from_index(0))
		cap.set_video_mode_and_frame_rate(fc2.VIDEOMODE_640x480Y8, fc2.FRAMERATE_30)
		m, f = cap.get_video_mode_and_frame_rate()
		p = cap.get_property(fc2.FRAME_RATE)
		cap.set_property(**p)
		cap.start_capture()


		time.sleep(1)

		dt = datetime.now()
		self.datetimeString = str(dt.month)+"_"+str(dt.day)+"_"+str(dt.year)+"_"+str(dt.hour)+str(dt.minute)

		timeList = [float(block.duration) for block in self.blockList]
		cfgList = [[block.lightColor, block.lightIntensity] for block in self.blockList]
		self.programLists = [timeList, cfgList]
		timeSum = np.sum(timeList)
		nFrames = int(timeSum*30)

		outFolder = self.savePath + "/" + self.datetimeString

		if os.path.exists(outFolder):
			outFolder = outFolder + '_a'
		if not os.path.exists(outFolder):
			os.mkdir(outFolder)

		if 'videos' not in os.listdir(self.savePath):
			os.mkdir(self.savePath + '/videos')

		try:
			self.camThread = CameraThread(nFrames, outFolder, write=True)
			self.camThread.changePixmap.connect(self.setImage)
			self.camThread.count.connect(self.updatePGB)
			self.camThread.start()
			self.camThread.finished.connect(self.setBG)
		except fc2.ApiError:
			msg = 'Make sure a camera is connected  '
			self.error = ErrorMsg(msg)
			self.error.show()
			return None

		self.lightThread = LightThread(self.ser, self.programLists)
		self.lightThread.start()
		self.lightThread.lightsFinished.connect(self.serialCleanup)
Code Example #25
def get_available_flycap_cameras():
    return fc2.Context().get_num_of_cameras()
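
Pulling the pattern shared by most of the examples above into one place, a minimal, self-contained sketch that grabs a single frame as a NumPy array could look like the following (camera index 0 and the 640x480 Y8 / 15 fps mode are assumptions carried over from the snippets, not requirements of the library):

import flycapture2 as fc2
import numpy as np

def grab_single_frame(index=0):
    # Connect to the first camera, configure it the way the examples above do,
    # retrieve one frame into a numpy array, then release the camera.
    c = fc2.Context()
    if c.get_num_of_cameras() < 1:
        raise RuntimeError('No camera connected')
    c.connect(*c.get_camera_from_index(index))
    c.set_video_mode_and_frame_rate(fc2.VIDEOMODE_640x480Y8, fc2.FRAMERATE_15)
    p = c.get_property(fc2.FRAME_RATE)
    c.set_property(**p)
    c.start_capture()
    im = fc2.Image()
    c.retrieve_buffer(im)
    frame = np.array(im)
    c.stop_capture()
    c.disconnect()
    return frame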