class PMTWidgetUI(QWidget):
    """Qt widget for PMT (photomultiplier-tube) imaging via galvo scanning.

    Builds a pyqtgraph-based GUI with four panels: the live PMT image, a
    small ROI preview, a contour-selection panel, and a galvo scanning
    control panel.  Raster scans are driven through ``pmtimagingTest`` and
    contour scans through ``pmtimagingTest_contour`` (both project classes —
    presumably NI-DAQ backed; confirm against their module).

    Signals:
        SignalForContourScanning(int, int, int, ndarray, ndarray):
            Emitted by ``generate_contour`` with (number of contour points,
            DAQ sample rate, duration of one contour sweep in ms, x voltage
            samples, y voltage samples).
        MessageBack(str): Free-text status messages for a main GUI to display.
    """

    # waveforms_generated = pyqtSignal(object, object, list, int)
    SignalForContourScanning = pyqtSignal(int, int, int, np.ndarray, np.ndarray)
    MessageBack = pyqtSignal(str)

    def __init__(self, *args, **kwargs):
        """Build all widgets and wire button signals; no hardware is touched here."""
        super().__init__(*args, **kwargs)
        # os.chdir('./')# Set directory to current folder.
        self.setFont(QFont("Arial"))
        self.setMinimumSize(1200,850)
        self.setWindowTitle("PMTWidget")
        self.layout = QGridLayout(self)
        #------------------------Initiating class-------------------
        # Raster-scan and contour-scan measurement backends (project classes).
        self.pmtTest = pmtimagingTest()
        self.pmtTest_contour = pmtimagingTest_contour()
        # Default dump directory for saved PMT images (network share).
        self.savedirectory = r'M:\tnw\ist\do\projects\Neurophotonics\Brinkslab\Data\Octoscope\pmt_image_default_dump'
        self.prefixtextboxtext = '_fromGalvoWidget'
        #**************************************************************************************************************************************
        #--------------------------------------------------------------------------------------------------------------------------------------
        #-----------------------------------------------------------GUI for PMT tab------------------------------------------------------------
        #--------------------------------------------------------------------------------------------------------------------------------------
        #**************************************************************************************************************************************
        # --- Main PMT image panel ---
        pmtimageContainer = QGroupBox("PMT image")
        self.pmtimageLayout = QGridLayout()
        self.pmtvideoWidget = pg.ImageView()
        self.pmtvideoWidget.ui.roiBtn.hide()
        self.pmtvideoWidget.ui.menuBtn.hide()
        self.pmtvideoWidget.resize(400,400)
        self.pmtimageLayout.addWidget(self.pmtvideoWidget, 0, 0)

        # --- ROI preview panel (shows the image region under the polygon ROI) ---
        pmtroiContainer = QGroupBox("PMT ROI")
        self.pmtimageroiLayout = QGridLayout()
        self.pmt_roiwidget = pg.GraphicsLayoutWidget()
        self.pmt_roiwidget.resize(150,150)
        self.pmt_roiwidget.addLabel('ROI', row=0, col=0)
        # create ROI
        self.vb_2 = self.pmt_roiwidget.addViewBox(row=1, col=0, lockAspect=True, colspan=1)
        self.vb_2.name = 'ROI'
        self.pmtimgroi = pg.ImageItem()
        self.vb_2.addItem(self.pmtimgroi)
        #self.roi = pg.RectROI([20, 20], [20, 20], pen=(0,9))
        #r1 = QRectF(0, 0, 895, 500)
        ROIpen = QPen()  # creates a default pen
        ROIpen.setStyle(Qt.DashDotLine)
        # NOTE(review): QPen.setWidth expects an int; 0.5 is truncated to 0
        # (cosmetic hairline pen). setWidthF would be the float variant — confirm intent.
        ROIpen.setWidth(0.5)
        ROIpen.setBrush(QColor(0,161,255))
        # Closed polygon ROI whose handles define the contour-scan path.
        self.roi = pg.PolyLineROI([[0,0], [80,0], [80,80], [0,80]], closed=True, pen=ROIpen)#, maxBounds=r1
        #self.roi.addScaleHandle([1,0], [1, 0])
        self.roi.sigHoverEvent.connect(lambda: self.show_handle_num())  # update handle numbers
        self.pmtvb = self.pmtvideoWidget.getView()
        self.pmtimageitem = self.pmtvideoWidget.getImageItem()
        self.pmtvb.addItem(self.roi)# add ROIs to main image
        self.pmtimageroiLayout.addWidget(self.pmt_roiwidget, 0, 0)
        pmtimageContainer.setMinimumWidth(850)
        pmtroiContainer.setMaximumHeight(380)
        # pmtroiContainer.setMaximumWidth(300)
        pmtimageContainer.setLayout(self.pmtimageLayout)
        pmtroiContainer.setLayout(self.pmtimageroiLayout)

        #----------------------------Contour-----------------------------------
        pmtContourContainer = QGroupBox("Contour selection")
        pmtContourContainer.setFixedWidth(280)
        self.pmtContourLayout = QGridLayout()
        #contour_Description = QLabel("Handle number updates when parking mouse cursor upon ROI. Points in contour are divided evenly between handles.")
        #contour_Description.setStyleSheet('color: blue')
        #self.pmtContourLayout.addWidget(contour_Description,0,0)
        self.pmt_handlenum_Label = QLabel("Handle number: ")
        self.pmtContourLayout.addWidget(self.pmt_handlenum_Label,1,0)
        # Interpolation strategy: 'Manual' = equal point count per segment,
        # 'Uniform' = equal spatial spacing along the whole contour.
        self.contour_strategy = QComboBox()
        self.contour_strategy.addItems(['Manual','Uniform'])
        self.pmtContourLayout.addWidget(self.contour_strategy, 1, 1)
        self.pointsinContour = QSpinBox(self)
        self.pointsinContour.setMinimum(1)
        self.pointsinContour.setMaximum(1000)
        self.pointsinContour.setValue(100)
        self.pointsinContour.setSingleStep(100)
        self.pmtContourLayout.addWidget(self.pointsinContour, 2, 1)
        self.pmtContourLayout.addWidget(QLabel("Points in contour:"), 2, 0)
        self.contour_samprate = QSpinBox(self)
        self.contour_samprate.setMinimum(0)
        self.contour_samprate.setMaximum(1000000)
        self.contour_samprate.setValue(50000)
        self.contour_samprate.setSingleStep(50000)
        self.pmtContourLayout.addWidget(self.contour_samprate, 3, 1)
        self.pmtContourLayout.addWidget(QLabel("Sampling rate:"), 3, 0)
        # NOTE(review): 'sacn' is a recurring typo for 'scan'; kept as-is because
        # these are attribute names other code may reference.
        self.generate_contour_sacn = StylishQT.generateButton()
        self.pmtContourLayout.addWidget(self.generate_contour_sacn, 4, 1)
        self.generate_contour_sacn.clicked.connect(lambda: self.generate_contour())
        self.do_contour_sacn = StylishQT.runButton("Contour")
        self.do_contour_sacn.setFixedHeight(32)
        self.pmtContourLayout.addWidget(self.do_contour_sacn, 5, 0)
        self.do_contour_sacn.clicked.connect(lambda:self.buttonenabled('contourscan', 'start'))
        self.do_contour_sacn.clicked.connect(lambda: self.measure_pmt_contourscan())
        self.stopButton_contour = StylishQT.stop_deleteButton()
        self.stopButton_contour.setFixedHeight(32)
        self.stopButton_contour.clicked.connect(lambda:self.buttonenabled('contourscan', 'stop'))
        self.stopButton_contour.clicked.connect(lambda: self.stopMeasurement_pmt_contour())
        self.stopButton_contour.setEnabled(False)
        self.pmtContourLayout.addWidget(self.stopButton_contour, 5, 1)
        pmtContourContainer.setLayout(self.pmtContourLayout)

        #----------------------------Control-----------------------------------
        controlContainer = QGroupBox("Galvo Scanning Panel")
        controlContainer.setFixedWidth(280)
        self.controlLayout = QGridLayout()
        self.pmt_fps_Label = QLabel("Per frame: ")
        self.controlLayout.addWidget(self.pmt_fps_Label, 5, 0)
        self.saveButton_pmt = StylishQT.saveButton()
        self.saveButton_pmt.clicked.connect(lambda: self.saveimage_pmt())
        self.controlLayout.addWidget(self.saveButton_pmt, 5, 1)
        self.startButton_pmt = StylishQT.runButton("")
        self.startButton_pmt.setFixedHeight(32)
        self.startButton_pmt.setCheckable(True)
        self.startButton_pmt.clicked.connect(lambda:self.buttonenabled('rasterscan', 'start'))
        self.startButton_pmt.clicked.connect(lambda: self.measure_pmt())
        self.controlLayout.addWidget(self.startButton_pmt, 6, 0)
        self.stopButton = StylishQT.stop_deleteButton()
        self.stopButton.setFixedHeight(32)
        self.stopButton.clicked.connect(lambda:self.buttonenabled('rasterscan', 'stop'))
        self.stopButton.clicked.connect(lambda: self.stopMeasurement_pmt())
        self.stopButton.setEnabled(False)
        self.controlLayout.addWidget(self.stopButton, 6, 1)
        #-----------------------------------Galvo scanning------------------------------------------------------------------------
        # DAQ sampling rate for raster scans (samples/s).
        self.textboxAA_pmt = QSpinBox(self)
        self.textboxAA_pmt.setMinimum(0)
        self.textboxAA_pmt.setMaximum(1000000)
        self.textboxAA_pmt.setValue(500000)
        self.textboxAA_pmt.setSingleStep(100000)
        self.controlLayout.addWidget(self.textboxAA_pmt, 1, 1)
        self.controlLayout.addWidget(QLabel("Sampling rate:"), 1, 0)
        #self.controlLayout.addWidget(QLabel("Galvo raster scanning : "), 1, 0)
        # Symmetric galvo voltage range: scan spans [-value, +value] volts on both axes.
        self.textbox1B_pmt = QSpinBox(self)
        self.textbox1B_pmt.setMinimum(-10)
        self.textbox1B_pmt.setMaximum(10)
        self.textbox1B_pmt.setValue(3)
        self.textbox1B_pmt.setSingleStep(1)
        self.controlLayout.addWidget(self.textbox1B_pmt, 2, 1)
        self.controlLayout.addWidget(QLabel("Volt range:"), 2, 0)
        # Square image: this pixel count is used for both x and y.
        self.Scanning_pixel_num_combobox = QSpinBox(self)
        self.Scanning_pixel_num_combobox.setMinimum(0)
        self.Scanning_pixel_num_combobox.setMaximum(1000)
        self.Scanning_pixel_num_combobox.setValue(500)
        self.Scanning_pixel_num_combobox.setSingleStep(244)
        self.controlLayout.addWidget(self.Scanning_pixel_num_combobox, 3, 1)
        self.controlLayout.addWidget(QLabel("Pixel number:"), 3, 0)
        # Number of frames averaged per displayed image.
        self.textbox1H_pmt = QSpinBox(self)
        self.textbox1H_pmt.setMinimum(1)
        self.textbox1H_pmt.setMaximum(20)
        self.textbox1H_pmt.setValue(1)
        self.textbox1H_pmt.setSingleStep(1)
        self.controlLayout.addWidget(self.textbox1H_pmt, 4, 1)
        self.controlLayout.addWidget(QLabel("average over:"), 4, 0)
        controlContainer.setLayout(self.controlLayout)

        #---------------------------Set tab1 layout---------------------------
        # pmtmaster = QGridLayout()
        self.layout.addWidget(pmtimageContainer, 0,0,3,1)
        self.layout.addWidget(pmtroiContainer,1,1)
        self.layout.addWidget(pmtContourContainer,2,1)
        self.layout.addWidget(controlContainer,0,1)
        # self.layout.setLayout(pmtmaster)

    #&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
    #--------------------------------------------------------------------------------------------------------------------------------------
    #------------------------------------------------------Functions for TAB 'PMT'---------------------------------------------------------
    #--------------------------------------------------------------------------------------------------------------------------------------
    #**************************************************************************************************************************************
    def buttonenabled(self, button, switch):
        """Toggle start/stop button enabled states for the named scan mode.

        button: 'rasterscan' or 'contourscan'; switch: 'start' or 'stop'.
        """
        if button == 'rasterscan':
            if switch == 'start':
                self.startButton_pmt.setEnabled(False)
                self.stopButton.setEnabled(True)
            elif switch == 'stop':
                self.startButton_pmt.setEnabled(True)
                self.stopButton.setEnabled(False)
        elif button == 'contourscan':
            if switch == 'start':
                #disable start button and enable stop button
                self.do_contour_sacn.setEnabled(False)
                self.stopButton_contour.setEnabled(True)
            elif switch == 'stop':
                self.do_contour_sacn.setEnabled(True)
                self.stopButton_contour.setEnabled(False)

    def measure_pmt(self):
        """Configure and start a continuous galvo raster scan.

        Reads sample rate, voltage range, pixel count and averaging from the
        control panel, programs the scan waveform via ``pmtTest.setWave`` and
        starts acquisition; frames arrive through ``update_pmt_Graphs``.
        """
        self.Daq_sample_rate_pmt = int(self.textboxAA_pmt.value())
        # Voltage settings, by default it's equal range square.
        self.Value_voltXMax = self.textbox1B_pmt.value()
        self.Value_voltXMin = self.Value_voltXMax*-1
        Value_voltYMin = self.Value_voltXMin
        Value_voltYMax = self.Value_voltXMax
        self.Value_xPixels = int(self.Scanning_pixel_num_combobox.value())
        Value_yPixels = self.Value_xPixels
        self.averagenum =int(self.textbox1H_pmt.value())
        # setWave returns the total sample count of the generated scan waveform.
        Totalscansamples = self.pmtTest.setWave(self.Daq_sample_rate_pmt, self.Value_voltXMin, self.Value_voltXMax, Value_voltYMin, Value_voltYMax, self.Value_xPixels, Value_yPixels, self.averagenum)
        time_per_frame_pmt = Totalscansamples/self.Daq_sample_rate_pmt
        # Samples per scan line (per averaged frame); currently unused downstream.
        ScanArrayXnum = int((Totalscansamples/self.averagenum)/Value_yPixels)
        #r1 = QRectF(500, 500, ScanArrayXnum, int(Value_yPixels))
        #self.pmtimageitem.setRect(r1)
        self.pmtTest.pmtimagingThread.measurement.connect(self.update_pmt_Graphs) #Connecting to the measurement signal
        self.pmt_fps_Label.setText("Per frame: %.4f s" % time_per_frame_pmt)
        self.pmtTest.start()

    def measure_pmt_contourscan(self):
        """Start continuous contour scanning along the previously generated path.

        Requires ``generate_contour`` to have been run first: it creates
        ``handle_viewbox_coordinate_position_array_expanded_forDaq`` and
        ``contour_point_number`` — an AttributeError results otherwise.
        """
        self.Daq_sample_rate_pmt = int(self.contour_samprate.value())
        self.pmtTest_contour.setWave_contourscan(self.Daq_sample_rate_pmt, self.handle_viewbox_coordinate_position_array_expanded_forDaq, self.contour_point_number)
        # Full-contour repetition frequency in Hz.
        contour_freq = self.Daq_sample_rate_pmt/self.contour_point_number
        #r1 = QRectF(500, 500, ScanArrayXnum, int(Value_yPixels))
        #self.pmtimageitem.setRect(r1)
        #self.pmtTest_contour.pmtimagingThread_contour.measurement.connect(self.update_pmt_Graphs) #Connecting to the measurement signal
        self.pmt_fps_Label.setText("Contour frequency: %.4f Hz" % contour_freq)
        self.pmtTest_contour.start()
        self.MessageToMainGUI('---!! Continuous contour scanning !!---'+'\n')

    def saveimage_pmt(self):
        """Save the latest PMT frame as a timestamped .tif in ``savedirectory``.

        Requires at least one frame to have arrived (``data_pmtcontineous``).
        """
        Localimg = Image.fromarray(self.data_pmtcontineous) #generate an image object
        Localimg.save(os.path.join(self.savedirectory, 'PMT_'+ self.prefixtextboxtext + '_' +datetime.now().strftime('%Y-%m-%d_%H-%M-%S')+'.tif')) #save as tif
        #np.save(os.path.join(self.savedirectory, 'PMT'+ self.saving_prefix +datetime.now().strftime('%Y-%m-%d_%H-%M-%S')), self.data_pmtcontineous)

    def update_pmt_Graphs(self, data):
        """Slot for incoming frames: update main image and the ROI preview."""
        self.data_pmtcontineous = data
        self.pmtvideoWidget.setImage(data)
        # Extract the pixels under the polygon ROI and show them in the preview box.
        self.pmtimgroi.setImage(self.roi.getArrayRegion(data, self.pmtimageitem), levels=(0, data.max()))
        # #self.pmtvideoWidget.update_pmt_Window(self.data_pmtcontineous)

    def show_handle_num(self):
        """Refresh the displayed count of ROI handles (hover callback)."""
        self.ROIhandles = self.roi.getHandles()
        self.ROIhandles_nubmer = len(self.ROIhandles)  # NOTE: 'nubmer' typo kept (attribute name).
        self.pmt_handlenum_Label.setText("Handle number: %.d" % self.ROIhandles_nubmer)

    def generate_contour(self):
        """Build the galvo voltage waveform tracing the polygon ROI.

        Interpolates points between the ROI handles (strategy 'Manual':
        equal point count per segment; 'Uniform': equal spatial spacing),
        maps scene coordinates to viewbox coordinates, converts them to galvo
        voltages, checks speed/acceleration against hardware limits, and
        emits ``SignalForContourScanning``.

        getLocalHandlePositions IS THE FUNCTION TO GRAB COORDINATES FROM
        IMAGEITEM REGARDLESS OF IMAGEITEM ZOOMING OR PANNING!!!

        NOTE(review): reads ``self.Value_xPixels`` / ``self.Value_voltXMax``,
        which are only set by ``measure_pmt`` — running this before any raster
        scan raises AttributeError; confirm intended call order.
        """
        self.ROIhandles = self.roi.getHandles()
        self.ROIhandles_nubmer = len(self.ROIhandles)
        self.contour_point_number = int(self.pointsinContour.value())
        self.handle_scene_coordinate_position_raw_list = self.roi.getSceneHandlePositions()
        self.handle_local_coordinate_position_raw_list = self.roi.getLocalHandlePositions()
        self.Daq_sample_rate_pmt = int(self.contour_samprate.value())
        # self.galvo_contour_label_1.setText("Points in contour: %.d" % self.contour_point_number)
        # self.galvo_contour_label_2.setText("Sampling rate: %.d" % self.Daq_sample_rate_pmt)
        #put scene positions into numpy array
        self.handle_scene_coordinate_position_array = np.zeros((self.ROIhandles_nubmer, 2))# n rows, 2 columns
        for i in range(self.ROIhandles_nubmer):
            # Each raw-list entry is (name, QPointF) — index [1] is the point.
            self.handle_scene_coordinate_position_array[i] = np.array([self.handle_scene_coordinate_position_raw_list[i][1].x(), self.handle_scene_coordinate_position_raw_list[i][1].y()])

        if self.contour_strategy.currentText() == 'Manual':
            #Interpolation: equal number of points on every handle-to-handle segment.
            self.point_num_per_line = int(self.contour_point_number/self.ROIhandles_nubmer)
            self.Interpolation_number = self.point_num_per_line-1
            # try to initialize an array then afterwards we can append on it
            #self.handle_scene_coordinate_position_array_expanded = np.array([[self.handle_scene_coordinate_position_array[0][0], self.handle_scene_coordinate_position_array[0][1]], [self.handle_scene_coordinate_position_array[1][0], self.handle_scene_coordinate_position_array[1][1]]])
            # -------------------------------------------------------------------------Interpolation from first to last----------------------------------------------------------------------------
            for i in range(self.ROIhandles_nubmer-1):
                self.Interpolation_x_diff = self.handle_scene_coordinate_position_array[i+1][0] - self.handle_scene_coordinate_position_array[i][0]
                self.Interpolation_y_diff = self.handle_scene_coordinate_position_array[i+1][1] - self.handle_scene_coordinate_position_array[i][1]
                self.Interpolation_x_step = self.Interpolation_x_diff/self.point_num_per_line
                self.Interpolation_y_step = self.Interpolation_y_diff/self.point_num_per_line
                # Seed array holds the segment's two endpoints; interpolated points
                # are inserted at position 1 (so they end up in reverse-walk order).
                Interpolation_temp = np.array([[self.handle_scene_coordinate_position_array[i][0], self.handle_scene_coordinate_position_array[i][1]], [self.handle_scene_coordinate_position_array[i+1][0], self.handle_scene_coordinate_position_array[i+1][1]]])
                for j in range(self.Interpolation_number):
                    Interpolation_temp=np.insert(Interpolation_temp,1,[self.handle_scene_coordinate_position_array[i+1][0] - (j+1)*self.Interpolation_x_step,self.handle_scene_coordinate_position_array[i+1][1] - (j+1)*self.Interpolation_y_step],axis = 0)
                # Drop the duplicated starting endpoint.
                Interpolation_temp = np.delete(Interpolation_temp, 0, 0)
                if i == 0:
                    self.handle_scene_coordinate_position_array_expanded = Interpolation_temp
                else:
                    self.handle_scene_coordinate_position_array_expanded=np.append(self.handle_scene_coordinate_position_array_expanded, Interpolation_temp, axis=0)
            #self.handle_scene_coordinate_position_array_expanded=np.delete(self.handle_scene_coordinate_position_array_expanded, 0, 0)
            # Interpolation between last and first (closing segment of the polygon).
            self.Interpolation_x_diff = self.handle_scene_coordinate_position_array[0][0] - self.handle_scene_coordinate_position_array[-1][0]
            self.Interpolation_y_diff = self.handle_scene_coordinate_position_array[0][1] - self.handle_scene_coordinate_position_array[-1][1]
            self.Interpolation_x_step = self.Interpolation_x_diff/self.point_num_per_line
            self.Interpolation_y_step = self.Interpolation_y_diff/self.point_num_per_line
            Interpolation_temp = np.array([[self.handle_scene_coordinate_position_array[-1][0], self.handle_scene_coordinate_position_array[-1][1]], [self.handle_scene_coordinate_position_array[0][0], self.handle_scene_coordinate_position_array[0][1]]])
            for j in range(self.Interpolation_number):
                Interpolation_temp=np.insert(Interpolation_temp,1,[self.handle_scene_coordinate_position_array[0][0] - (j+1)*self.Interpolation_x_step,self.handle_scene_coordinate_position_array[0][1] - (j+1)*self.Interpolation_y_step],axis = 0)
            Interpolation_temp = np.delete(Interpolation_temp, 0, 0)
            #Interpolation_temp = np.flip(Interpolation_temp, 0)
            self.handle_scene_coordinate_position_array_expanded=np.append(self.handle_scene_coordinate_position_array_expanded, Interpolation_temp, axis=0)
            #self.handle_scene_coordinate_position_array_expanded=np.delete(self.handle_scene_coordinate_position_array_expanded, 0, 0)
            #-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
            self.handle_viewbox_coordinate_position_array_expanded = np.zeros((self.contour_point_number, 2))# n rows, 2 columns
            # Maps from scene coordinates to the coordinate system displayed inside the ViewBox
            for i in range(self.contour_point_number):
                # NOTE(review): QPoint truncates the float scene coordinates to int;
                # QPointF would preserve sub-pixel precision — confirm acceptable.
                qpoint_Scene = QPoint(self.handle_scene_coordinate_position_array_expanded[i][0], self.handle_scene_coordinate_position_array_expanded[i][1])
                qpoint_viewbox = self.pmtvb.mapSceneToView(qpoint_Scene)
                self.handle_viewbox_coordinate_position_array_expanded[i] = np.array([qpoint_viewbox.x(),qpoint_viewbox.y()])
            #print(self.handle_scene_coordinate_position_array)
            #print(self.handle_scene_coordinate_position_array_expanded)
            #print(self.handle_viewbox_coordinate_position_array_expanded)
            constants = HardwareConstants()
            '''Transform into Voltages to galvos'''
            '''coordinates in the view box(handle_viewbox_coordinate_position_array_expanded_x) are equivalent to voltages sending out'''
            # Pixel -> volt mapping only implemented for the 500 px / ±3 V configuration.
            if self.Value_xPixels == 500:
                if self.Value_voltXMax == 3:
                    # for 500 x axis, the real ramp region sits around 52~552 out of 0~758
                    self.handle_viewbox_coordinate_position_array_expanded[:,0] = ((self.handle_viewbox_coordinate_position_array_expanded[:,0])/500)*6-3 #(self.handle_viewbox_coordinate_position_array_expanded[:,0]-constants.pmt_3v_indentation_pixels)
                    self.handle_viewbox_coordinate_position_array_expanded[:,1] = ((self.handle_viewbox_coordinate_position_array_expanded[:,1])/500)*6-3
                    self.handle_viewbox_coordinate_position_array_expanded = np.around(self.handle_viewbox_coordinate_position_array_expanded, decimals=3)
            # shape into (n,) and stack
            self.handle_viewbox_coordinate_position_array_expanded_x = np.resize(self.handle_viewbox_coordinate_position_array_expanded[:,0],(self.contour_point_number,))
            self.handle_viewbox_coordinate_position_array_expanded_y = np.resize(self.handle_viewbox_coordinate_position_array_expanded[:,1],(self.contour_point_number,))
            self.handle_viewbox_coordinate_position_array_expanded_forDaq = np.vstack((self.handle_viewbox_coordinate_position_array_expanded_x,self.handle_viewbox_coordinate_position_array_expanded_y))
            print(self.handle_viewbox_coordinate_position_array_expanded)
            '''Speed and acceleration check'''
            #for i in range(self.contour_point_number):
            #    speed_between_points = ((self.handle_viewbox_coordinate_position_array_expanded_x[i+1]-self.handle_viewbox_coordinate_position_array_expanded_x[i])**2+(self.handle_viewbox_coordinate_position_array_expanded_y[i+1]-self.handle_viewbox_coordinate_position_array_expanded_y[i])**2)**(0.5)
            self.Daq_sample_rate_pmt = int(self.contour_samprate.value())
            time_gap = 1/self.Daq_sample_rate_pmt
            # Finite-difference velocity/acceleration of the commanded galvo trajectory.
            contour_x_speed = np.diff(self.handle_viewbox_coordinate_position_array_expanded_x)/time_gap
            contour_y_speed = np.diff(self.handle_viewbox_coordinate_position_array_expanded_y)/time_gap
            contour_x_acceleration = np.diff(contour_x_speed)/time_gap
            contour_y_acceleration = np.diff(contour_y_speed)/time_gap
            constants = HardwareConstants()
            speedGalvo = constants.maxGalvoSpeed #Volt/s
            aGalvo = constants.maxGalvoAccel #Acceleration galvo in volt/s^2
            print(np.amax(abs(contour_x_speed)))
            print(np.amax(abs(contour_y_speed)))
            print(np.amax(abs(contour_x_acceleration)))
            print(np.amax(abs(contour_y_acceleration)))
            print(str(np.mean(abs(contour_x_speed)))+' and mean y speed:'+str(np.mean(abs(contour_y_speed))))
            print(str(np.mean(abs(contour_x_acceleration)))+' and mean y acceleration:'+str(np.mean(abs(contour_y_acceleration))))
            if speedGalvo > np.amax(abs(contour_x_speed)) and speedGalvo > np.amax(abs(contour_y_speed)):
                print('Contour speed is OK')
                self.MessageToMainGUI('Contour speed is OK'+'\n')
            else:
                QMessageBox.warning(self,'OverLoad','Speed too high!',QMessageBox.Ok)
            if aGalvo > np.amax(abs(contour_x_acceleration)) and aGalvo > np.amax(abs(contour_y_acceleration)):
                print('Contour acceleration is OK')
                self.MessageToMainGUI('Contour acceleration is OK'+'\n')
            else:
                QMessageBox.warning(self,'OverLoad','Acceleration too high!',QMessageBox.Ok)

        if self.contour_strategy.currentText() == 'Uniform':
            # Calculate the total distance (perimeter of the polygon, in scene units).
            self.total_distance = 0
            for i in range(self.ROIhandles_nubmer):
                if i != (self.ROIhandles_nubmer-1):
                    Interpolation_x_diff = self.handle_scene_coordinate_position_array[i+1][0] - self.handle_scene_coordinate_position_array[i][0]
                    Interpolation_y_diff = self.handle_scene_coordinate_position_array[i+1][1] - self.handle_scene_coordinate_position_array[i][1]
                    distance_vector = (Interpolation_x_diff**2+Interpolation_y_diff**2)**(0.5)
                    self.total_distance = self.total_distance + distance_vector
                else:
                    # Closing segment: last handle back to the first.
                    Interpolation_x_diff = self.handle_scene_coordinate_position_array[0][0] - self.handle_scene_coordinate_position_array[-1][0]
                    Interpolation_y_diff = self.handle_scene_coordinate_position_array[0][1] - self.handle_scene_coordinate_position_array[-1][1]
                    distance_vector = (Interpolation_x_diff**2+Interpolation_y_diff**2)**(0.5)
                    self.total_distance = self.total_distance + distance_vector
            # Target arc-length spacing between consecutive contour points.
            self.averaged_uniform_step = self.total_distance/self.contour_point_number
            print(self.averaged_uniform_step)
            print(self.handle_scene_coordinate_position_array)
            # Walk the polygon segment by segment, carrying the leftover distance
            # (Interpolation_remaining_fornextround) into the next segment so the
            # spacing stays uniform across handle boundaries.
            for i in range(self.ROIhandles_nubmer):
                if i == 0:
                    Interpolation_x_diff = self.handle_scene_coordinate_position_array[i+1][0] - self.handle_scene_coordinate_position_array[i][0]
                    Interpolation_y_diff = self.handle_scene_coordinate_position_array[i+1][1] - self.handle_scene_coordinate_position_array[i][1]
                    distance_vector = (Interpolation_x_diff**2+Interpolation_y_diff**2)**(0.5)
                    num_of_Interpolation = distance_vector//self.averaged_uniform_step
                    #Interpolation_remaining = distance_vector%self.averaged_uniform_step
                    self.Interpolation_remaining_fornextround = self.averaged_uniform_step*(1-(distance_vector/self.averaged_uniform_step-num_of_Interpolation))
                    print('Interpolation_remaining_fornextround: '+str(self.Interpolation_remaining_fornextround))
                    self.Interpolation_x_step = Interpolation_x_diff/(distance_vector/self.averaged_uniform_step)
                    self.Interpolation_y_step = Interpolation_y_diff/(distance_vector/self.averaged_uniform_step)
                    Interpolation_temp = np.array([[self.handle_scene_coordinate_position_array[i][0], self.handle_scene_coordinate_position_array[i][1]], [self.handle_scene_coordinate_position_array[i+1][0], self.handle_scene_coordinate_position_array[i+1][1]]])
                    for j in range(int(num_of_Interpolation)):
                        # NOTE(review): the y term indexes [i+1][1] while the x term
                        # indexes [i][0]; the later branches use [i][1] — possible
                        # index bug, verify against intended geometry.
                        Interpolation_temp=np.insert(Interpolation_temp,-1,[self.handle_scene_coordinate_position_array[i][0] + (j+1)*self.Interpolation_x_step,self.handle_scene_coordinate_position_array[i+1][1] + (j+1)*self.Interpolation_y_step],axis = 0)
                    Interpolation_temp = np.delete(Interpolation_temp,-1,axis=0)
                    self.handle_scene_coordinate_position_array_expanded_uniform = Interpolation_temp
                elif i != (self.ROIhandles_nubmer-1):
                    Interpolation_x_diff = self.handle_scene_coordinate_position_array[i+1][0] - self.handle_scene_coordinate_position_array[i][0]
                    Interpolation_y_diff = self.handle_scene_coordinate_position_array[i+1][1] - self.handle_scene_coordinate_position_array[i][1]
                    distance_vector = (Interpolation_x_diff**2+Interpolation_y_diff**2)**(0.5)
                    num_of_Interpolation = (distance_vector-self.Interpolation_remaining_fornextround)//self.averaged_uniform_step
                    print('Interpolation_remaining_fornextround: '+str(self.Interpolation_remaining_fornextround))
                    if self.Interpolation_remaining_fornextround != 0:
                        self.Interpolation_remaining_fornextround_x =Interpolation_x_diff/(distance_vector/self.Interpolation_remaining_fornextround)#(self.Interpolation_remaining_fornextround/distance_vector)*Interpolation_x_diff
                        self.Interpolation_remaining_fornextround_y =Interpolation_y_diff/(distance_vector/self.Interpolation_remaining_fornextround)#(self.Interpolation_remaining_fornextround/distance_vector)*Interpolation_y_diff
                    else:
                        self.Interpolation_remaining_fornextround_x = 0
                        self.Interpolation_remaining_fornextround_y = 0
                    # Reset the starting point
                    Interpolation_x_diff = self.handle_scene_coordinate_position_array[i+1][0] - self.handle_scene_coordinate_position_array[i][0] - self.Interpolation_remaining_fornextround_x
                    Interpolation_y_diff = self.handle_scene_coordinate_position_array[i+1][1] - self.handle_scene_coordinate_position_array[i][1] - self.Interpolation_remaining_fornextround_y
                    self.Interpolation_x_step = Interpolation_x_diff/((distance_vector-self.Interpolation_remaining_fornextround)/self.averaged_uniform_step)
                    self.Interpolation_y_step = Interpolation_y_diff/((distance_vector-self.Interpolation_remaining_fornextround)/self.averaged_uniform_step)
                    Interpolation_temp = np.array([[self.handle_scene_coordinate_position_array[i][0]+self.Interpolation_remaining_fornextround_x, self.handle_scene_coordinate_position_array[i][1]+self.Interpolation_remaining_fornextround_y], [self.handle_scene_coordinate_position_array[i+1][0], self.handle_scene_coordinate_position_array[i+1][1]]])
                    for j in range(int(num_of_Interpolation)):
                        Interpolation_temp=np.insert(Interpolation_temp,-1,[self.handle_scene_coordinate_position_array[i][0]+self.Interpolation_remaining_fornextround_x + (j+1)*self.Interpolation_x_step,self.handle_scene_coordinate_position_array[i][1]+\
                                                                            self.Interpolation_remaining_fornextround_y + (j+1)*self.Interpolation_y_step],axis = 0)
                    Interpolation_temp = np.delete(Interpolation_temp,-1,axis=0)
                    self.handle_scene_coordinate_position_array_expanded_uniform=np.append(self.handle_scene_coordinate_position_array_expanded_uniform, Interpolation_temp, axis=0)
                    # Carry leftover arc length into the next segment.
                    self.Interpolation_remaining_fornextround = self.averaged_uniform_step*(1-((distance_vector-self.Interpolation_remaining_fornextround)/self.averaged_uniform_step-num_of_Interpolation))
                else:
                    # connect the first and the last
                    Interpolation_x_diff = self.handle_scene_coordinate_position_array[0][0] - self.handle_scene_coordinate_position_array[-1][0]
                    Interpolation_y_diff = self.handle_scene_coordinate_position_array[0][1] - self.handle_scene_coordinate_position_array[-1][1]
                    distance_vector = (Interpolation_x_diff**2+Interpolation_y_diff**2)**(0.5)
                    num_of_Interpolation = (distance_vector-self.Interpolation_remaining_fornextround)//self.averaged_uniform_step
                    #self.Interpolation_remaining_fornextround = self.averaged_uniform_step*(1-((distance_vector-self.Interpolation_remaining_fornextround)/self.averaged_uniform_step-num_of_Interpolation))
                    self.Interpolation_remaining_fornextround_x =(self.Interpolation_remaining_fornextround/distance_vector)*Interpolation_x_diff
                    self.Interpolation_remaining_fornextround_y =(self.Interpolation_remaining_fornextround/distance_vector)*Interpolation_y_diff
                    # Reset the starting point
                    Interpolation_x_diff = self.handle_scene_coordinate_position_array[0][0] - self.handle_scene_coordinate_position_array[i][0] + self.Interpolation_remaining_fornextround_x
                    Interpolation_y_diff = self.handle_scene_coordinate_position_array[0][1] - self.handle_scene_coordinate_position_array[i][1] + self.Interpolation_remaining_fornextround_y
                    self.Interpolation_x_step = Interpolation_x_diff/((distance_vector-self.Interpolation_remaining_fornextround)/self.averaged_uniform_step)
                    self.Interpolation_y_step = Interpolation_y_diff/((distance_vector-self.Interpolation_remaining_fornextround)/self.averaged_uniform_step)
                    Interpolation_temp = np.array([[self.handle_scene_coordinate_position_array[-1][0]+self.Interpolation_remaining_fornextround_x, self.handle_scene_coordinate_position_array[-1][1]+self.Interpolation_remaining_fornextround_y], [self.handle_scene_coordinate_position_array[0][0], self.handle_scene_coordinate_position_array[0][1]]])
                    for j in range(int(num_of_Interpolation)):
                        Interpolation_temp=np.insert(Interpolation_temp,-1,[self.handle_scene_coordinate_position_array[-1][0]+self.Interpolation_remaining_fornextround_x + (j+1)*self.Interpolation_x_step,self.handle_scene_coordinate_position_array[-1][1]+\
                                                                            self.Interpolation_remaining_fornextround_y + (j+1)*self.Interpolation_y_step],axis = 0)
                    Interpolation_temp = np.delete(Interpolation_temp,-1,axis=0)
                    self.handle_scene_coordinate_position_array_expanded_uniform=np.append(self.handle_scene_coordinate_position_array_expanded_uniform, Interpolation_temp, axis=0)
            print(self.handle_scene_coordinate_position_array_expanded_uniform)
            print(self.handle_scene_coordinate_position_array_expanded_uniform.shape)
            #-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
            self.handle_viewbox_coordinate_position_array_expanded = np.zeros((self.contour_point_number, 2))# n rows, 2 columns
            # Maps from scene coordinates to the coordinate system displayed inside the ViewBox
            for i in range(self.contour_point_number):
                qpoint_Scene = QPoint(self.handle_scene_coordinate_position_array_expanded_uniform[i][0], self.handle_scene_coordinate_position_array_expanded_uniform[i][1])
                qpoint_viewbox = self.pmtvb.mapSceneToView(qpoint_Scene)
                self.handle_viewbox_coordinate_position_array_expanded[i] = np.array([qpoint_viewbox.x(),qpoint_viewbox.y()])
            #print(self.handle_scene_coordinate_position_array)
            #print(self.handle_scene_coordinate_position_array_expanded)
            #print(self.handle_viewbox_coordinate_position_array_expanded)
            '''Transform into Voltages to galvos'''
            constants = HardwareConstants()
            if self.Value_xPixels == 500:
                if self.Value_voltXMax == 3:
                    # for 500 x axis, the real ramp region sits around 52~552 out of 0~758
                    self.handle_viewbox_coordinate_position_array_expanded[:,0] = ((self.handle_viewbox_coordinate_position_array_expanded[:,0])/500)*6-3 #self.handle_viewbox_coordinate_position_array_expanded[:,0]-constants.pmt_3v_indentation_pixels
                    self.handle_viewbox_coordinate_position_array_expanded[:,1] = ((self.handle_viewbox_coordinate_position_array_expanded[:,1])/500)*6-3
                    self.handle_viewbox_coordinate_position_array_expanded = np.around(self.handle_viewbox_coordinate_position_array_expanded, decimals=3)
            # shape into (n,) and stack
            self.handle_viewbox_coordinate_position_array_expanded_x = np.resize(self.handle_viewbox_coordinate_position_array_expanded[:,0],(self.contour_point_number,))
            self.handle_viewbox_coordinate_position_array_expanded_y = np.resize(self.handle_viewbox_coordinate_position_array_expanded[:,1],(self.contour_point_number,))
            self.handle_viewbox_coordinate_position_array_expanded_forDaq = np.vstack((self.handle_viewbox_coordinate_position_array_expanded_x,self.handle_viewbox_coordinate_position_array_expanded_y))
            print(self.handle_viewbox_coordinate_position_array_expanded)
            '''Speed and acceleration check'''
            #for i in range(self.contour_point_number):
            #    speed_between_points = ((self.handle_viewbox_coordinate_position_array_expanded_x[i+1]-self.handle_viewbox_coordinate_position_array_expanded_x[i])**2+(self.handle_viewbox_coordinate_position_array_expanded_y[i+1]-self.handle_viewbox_coordinate_position_array_expanded_y[i])**2)**(0.5)
            self.Daq_sample_rate_pmt = int(self.contour_samprate.value())
            time_gap = 1/self.Daq_sample_rate_pmt
            contour_x_speed = np.diff(self.handle_viewbox_coordinate_position_array_expanded_x)/time_gap
            contour_y_speed = np.diff(self.handle_viewbox_coordinate_position_array_expanded_y)/time_gap
            contour_x_acceleration = np.diff(contour_x_speed)/time_gap
            contour_y_acceleration = np.diff(contour_y_speed)/time_gap
            constants = HardwareConstants()
            speedGalvo = constants.maxGalvoSpeed #Volt/s
            aGalvo = constants.maxGalvoAccel #Acceleration galvo in volt/s^2
            print(np.amax(abs(contour_x_speed)))
            print(np.amax(abs(contour_y_speed)))
            print(np.amax(abs(contour_x_acceleration)))
            print(np.amax(abs(contour_y_acceleration)))
            print(str(np.mean(abs(contour_x_speed)))+' and mean y speed:'+str(np.mean(abs(contour_y_speed))))
            print(str(np.mean(abs(contour_x_acceleration)))+' and mean y acceleration:'+str(np.mean(abs(contour_y_acceleration))))
            # NOTE(review): unlike the 'Manual' branch, over-limit speed/acceleration
            # here produces no warning dialog (no else) — confirm this is intentional.
            if speedGalvo > np.amax(abs(contour_x_speed)) and speedGalvo > np.amax(abs(contour_y_speed)):
                print('Contour speed is OK')
                self.MessageToMainGUI('Contour speed is OK'+'\n')
            if aGalvo > np.amax(abs(contour_x_acceleration)) and aGalvo > np.amax(abs(contour_y_acceleration)):
                print('Contour acceleration is OK')
                self.MessageToMainGUI('Contour acceleration is OK'+'\n')

        # Third argument is the duration of one contour sweep in milliseconds.
        self.SignalForContourScanning.emit(self.contour_point_number, self.Daq_sample_rate_pmt,
                                           (1/int(self.contour_samprate.value())*1000)*self.contour_point_number,
                                           self.handle_viewbox_coordinate_position_array_expanded_x,
                                           self.handle_viewbox_coordinate_position_array_expanded_y)

    def generate_contour_for_waveform(self):
        """Tile the single-contour x/y samples to fill ``textbox1L`` ms of scanning.

        Returns the 2 x N stacked (x, y) voltage waveform for the DAQ.
        NOTE(review): ``self.textbox1L`` is not created in this widget's
        ``__init__`` — presumably set externally; verify before calling.
        """
        self.contour_time = int(self.textbox1L.value())
        # Duration of one contour sweep in ms.
        self.time_per_contour = (1/int(self.contour_samprate.value())*1000)*self.contour_point_number
        repeatnum_contour = int(self.contour_time/self.time_per_contour)
        self.repeated_contoursamples_1 = np.tile(self.handle_viewbox_coordinate_position_array_expanded_x, repeatnum_contour)
        self.repeated_contoursamples_2 = np.tile(self.handle_viewbox_coordinate_position_array_expanded_y, repeatnum_contour)
        self.handle_viewbox_coordinate_position_array_expanded_forDaq_waveform = np.vstack((self.repeated_contoursamples_1,self.repeated_contoursamples_2))
        return self.handle_viewbox_coordinate_position_array_expanded_forDaq_waveform

    def generate_galvos_contour_graphy(self):
        """Plot the repeated contour y-waveform against time on ``self.pw``.

        NOTE(review): ``self.pw`` is not created here — assumed to be a
        pyqtgraph plot widget assigned elsewhere; confirm.
        """
        self.xlabelhere_galvos = np.arange(len(self.handle_viewbox_coordinate_position_array_expanded_forDaq_waveform[1,:]))/self.Daq_sample_rate_pmt
        self.PlotDataItem_galvos = PlotDataItem(self.xlabelhere_galvos, self.handle_viewbox_coordinate_position_array_expanded_forDaq_waveform[1,:])
        self.PlotDataItem_galvos.setDownsampling(auto=True, method='mean')
        self.PlotDataItem_galvos.setPen('w')
        self.pw.addItem(self.PlotDataItem_galvos)
        self.textitem_galvos = pg.TextItem(text='Contour', color=('w'), anchor=(1, 1))
        self.textitem_galvos.setPos(0, 5)
        self.pw.addItem(self.textitem_galvos)

    def MessageToMainGUI(self, text):
        """Forward a status message to the main GUI via the MessageBack signal."""
        self.MessageBack.emit(text)

    def stopMeasurement_pmt(self):
        """Stop the raster-scan acquisition."""
        self.pmtTest.aboutToQuitHandler()

    def stopMeasurement_pmt_contour(self):
        """Stop the contour-scan acquisition and notify the main GUI."""
        self.pmtTest_contour.aboutToQuitHandler()
        self.MessageToMainGUI('---!! Contour stopped !!---'+'\n')

    # def closeEvent(self, event):
    #     # QtWidgets.QApplication.quit()
    #     event.accept()

'''
class imMobilize(QtWidgets.QWidget): def __init__(self, parent=None, *args): # run the widget, get the form QtWidgets.QWidget.__init__(self, parent, *args) self.ui = Ui_Form() self.ui.setupUi(self) os.chdir("D:\\") self.working_directory_selected = False """ ============================================================== Connecting the buttons and labels in the serial control group ============================================================== """ self.arduino = Arduino() # populate serial available combobox self.update_serial_available() # connect scan button self.ui.buttonSerialScan.clicked.connect(self.update_serial_available) # connect connect/disconnect button self.ui.buttonSerialConnect.clicked.connect( self.serial_connect_disconnect) # try: # self.serial_connect_disconnect() # except: # QtWidgets.QMessageBox.warning(self, "No controller found", "Could not connect to a suitable controller \n Connect Manually.") self.ui.lineeditWriteToSerial.editingFinished.connect( self.write_to_serial) self.ui.buttonWriteToSerial.clicked.connect(self.write_to_serial) self.ui.checkBoxReadSerialLive.clicked.connect(self.listen_to_serial) self.logged_temperature = np.array([]) self.temp_plot_xvals = np.array([]) """ ===================================================================== Connecting the buttons and controls in the light stim controls group ===================================================================== """ # connect the save_stim button self.ui.buttonSaveStim.clicked.connect(self.save_lightstim) # Style the three sliders (color the handles) # self.ui.sliderRed.setStyleSheet("QSlider::handle:vertical {background-color: red; border: 1px outset; border-radius: 3px;}") # self.ui.sliderGreen.setStyleSheet("QSlider::handle:vertical {background-color: lime; border: 1px outset; border-radius: 3px;}") # self.ui.sliderBlue.setStyleSheet("QSlider::handle:vertical {background-color: cyan; border: 1px outset; border-radius: 3px;}") 
self.ui.toggleWhiteColor.valueChanged.connect( self.toggle_lightstim_type) self.toggle_lightstim_type() """ ==================================================================== Connecting buttons and controls in vibration stimuli group ==================================================================== """ self.ui.buttonSaveVibrationStim.clicked.connect( self.save_vibration_stim) """ ===================================================================== Connecting the buttons and controls in the stimuli manager group ===================================================================== """ self.Stims = StimuliManager(arduino=self.arduino) self.set_experiment_view() self.set_lightstim_name_lineedit() self.set_vibrationstim_name_lineedit() self.ui.buttonDeleteSelectedStim.clicked.connect( self.delete_selected_stim) self.ui.buttonDeleteAllStims.clicked.connect(self.delete_all_stims) self.ui.buttonLoadStimulusProfile.clicked.connect(self.load_stimuli) self.ui.buttonSaveStimulusProfile.clicked.connect(self.save_stimuli) """ ===================================================================== Connecting the buttons and controls in the MetaData entry group ===================================================================== """ self.ui.buttonSetWorkingDirectory.clicked.connect( self.set_working_directory) self.ui.labelMetaDataDrugName.setVisible(False) self.ui.lineeditMetaDataDrugName.setVisible(False) self.ui.labelMetaDataGenetics.setVisible(False) self.ui.lineeditMetaDataGenetics.setVisible(False) self.ui.spinBoxExperimentDurationMinutes.valueChanged.connect( self.set_experiment_view) self.ui.spinBoxExperimentDurationSeconds.valueChanged.connect( self.set_experiment_view) self.ui.checkBoxExperimentAutonaming.clicked.connect( self.set_experiment_autonaming) self.set_experiment_autonaming() self.experiment_live = False self.ui.buttonStartExperiment.clicked.connect(self.start_experiment) self.path = os.curdir self.experiment_name = self.ui.lineeditExperimentName.text() 
self.experiment_path = os.path.join(self.path, "Exp_" + self.experiment_name) """ ====================================================================== Connecting buttons and controls to the camera manager group ====================================================================== """ self.detect_cameras() self.ui.buttonCameraConnectDisconnect.clicked.connect( self.camera_connect_disconnect) self.ui.buttonCameraPreview.clicked.connect(self.toggle_preview) self.ui.buttonScanForCameras.clicked.connect(self.detect_cameras) self.ui.spinBoxFramerate.valueChanged.connect( self.set_camera_framerate) for x in range(-2, -12, -1): self.ui.comboboxCameraExposure.addItem(str(2**x)) self.ui.comboboxCameraExposure.currentTextChanged.connect( self.set_camera_exposure) self.ui.sliderCameraBrightness.sliderMoved.connect( self.set_camera_brightness) self.ui.sliderCameraGamma.sliderMoved.connect(self.set_camera_gamma) self.ui.buttonCameraResetProperties.clicked.connect( self.reset_camera_properties) #This is for IR light, so actually belongs to arduino, but it is located here in the GUI. self.ui.sliderIRLight.sliderReleased.connect(self.set_IR_light) """ ====================================================================== Connecting buttons and controls to the video manager ====================================================================== """ self.set_autonaming(True) self.ui.buttonSetPath.clicked.connect(self.set_path) self.ui.checkboxAutoNaming.toggled.connect(self.set_autonaming) self.ui.buttonRecordVideo.clicked.connect(self.record_video) def detect_cameras(self): self.ui.comboBoxConnectedCameras.clear() for ii in range(3): cap = cv2.VideoCapture(ii) if not cap.isOpened(): pass else: cam = "Camera " + str(ii + 1) self.ui.comboBoxConnectedCameras.addItem(cam) def camera_connect_disconnect(self): if not hasattr(self, "cam") or not self.cam.cap.isOpened(): camera_number = int(self.ui.comboBoxConnectedCameras.currentText(). 
split(" ")[1]) - 1 self.cam = Camera(camera=camera_number) if self.cam.cap.isOpened(): self.ui.groupBoxCameraControls.setEnabled(True) self.ui.groupBoxVideoControls.setEnabled(True) self.ui.comboBoxConnectedCameras.setEnabled(False) self.ui.labelCameraConnectionStatus.setText( "Connected to Camera " + str(camera_number + 1)) self.ui.labelCameraConnectionStatus.setStyleSheet( "color: green") self.ui.buttonCameraConnectDisconnect.setText("Disconnect") self.ui.buttonCameraConnectDisconnect.setStyleSheet( "color: red") t = self.ui.comboboxCameraExposure.findText( str(2**(self.cam.exposure))) self.ui.comboboxCameraExposure.setCurrentIndex(t) self.ui.sliderCameraBrightness.setValue(self.cam.brightness) self.ui.labelCameraBrighness.setNum(self.cam.brightness) self.ui.sliderCameraGamma.setValue(self.cam.gamma * 10) self.ui.labelCameraGamma.setNum(self.cam.gamma) self.ui.sliderCameraGamma.setEnabled(False) else: self.toggle_preview(False) self.cam.stop() self.cam.cap.release() self.ui.buttonCameraPreview.setChecked(False) self.ui.groupBoxCameraControls.setEnabled(False) self.ui.groupBoxVideoControls.setEnabled(False) self.ui.comboBoxConnectedCameras.setEnabled(True) self.ui.buttonCameraConnectDisconnect.setText("Connect") self.ui.buttonCameraConnectDisconnect.setStyleSheet("color: black") self.ui.labelCameraConnectionStatus.setText( "Status: Not Connected") self.ui.labelCameraConnectionStatus.setStyleSheet("color: black") def toggle_lightstim_type(self): val = self.ui.toggleWhiteColor.value() if val == 1: self.lightStimType = "White" self.ui.labelLightStimTypeLED.setEnabled(False) self.ui.labelLightStimTypeWhite.setEnabled(True) self.ui.widgetLEDSliders.setEnabled(False) self.ui.toggleWhiteColor.setStyleSheet( "QSlider::handle:horizontal {background-color: rgba(255,255,255,255); border: 2px outset; width: 10px; height: 10px; border-radius: 5px;}" "QSlider::groove:horizontal {height: 10px; width: 30px; background-color: rgba(255, 0,0, 100); border-radius: 5px; border: 1px 
solid;}" ) self.ui.sliderRed.setStyleSheet( "QSlider::handle:vertical {background-color: grey; border: 1px outset; border-radius: 3px;}" ) self.ui.sliderGreen.setStyleSheet( "QSlider::handle:vertical {background-color: grey; border: 1px outset; border-radius: 3px;}" ) self.ui.sliderBlue.setStyleSheet( "QSlider::handle:vertical {background-color: grey; border: 1px outset; border-radius: 3px;}" ) if val == 0: self.lightStimType = "LED" self.ui.labelLightStimTypeLED.setEnabled(True) self.ui.labelLightStimTypeWhite.setEnabled(False) self.ui.widgetLEDSliders.setEnabled(True) self.ui.toggleWhiteColor.setStyleSheet( "QSlider::handle:horizontal {background-color: rgba(255,100,0,100); border: 2px outset; width: 10px; height: 10px; border-radius: 5px;}" "QSlider::groove:horizontal {height: 10px; width: 30px; background-color: rgba(255, 255, 255,100); border-radius: 5px; border: 1px solid;}" ) self.ui.sliderRed.setStyleSheet( "QSlider::handle:vertical {background-color: red; border: 1px outset; border-radius: 3px;}" ) self.ui.sliderGreen.setStyleSheet( "QSlider::handle:vertical {background-color: lime; border: 1px outset; border-radius: 3px;}" ) self.ui.sliderBlue.setStyleSheet( "QSlider::handle:vertical {background-color: cyan; border: 1px outset; border-radius: 3px;}" ) def set_experiment_view(self): self.ui.graphicsView.clear() plot = PlotDataItem(pen="k") duration = (self.ui.spinBoxExperimentDurationMinutes.value() * 60) + self.ui.spinBoxExperimentDurationSeconds.value() xs = np.linspace(0, duration, 2) ys = [1, 1] plot.setData(xs, ys) self.ui.graphicsView.addItem(plot) for ii in range(len(self.Stims.df)): start = self.Stims.df.time_on.iloc[ii] stop = self.Stims.df.time_off.iloc[ii] if self.Stims.df.message_on.iloc[ii].startswith("w"): box = LinearRegionItem(values=(start, stop), brush=(255, 255, 250, 230), movable=False) self.ui.graphicsView.addItem(box) elif self.Stims.df.message_on.iloc[ii].startswith("n"): on = self.Stims.df.message_on.iloc[ii][1:] r = int(on[:3]) g = 
int(on[3:6]) b = int(on[6:]) box = LinearRegionItem(values=(start, stop), brush=(r, g, b, 50), movable=False) self.ui.graphicsView.addItem(box) elif self.Stims.df.message_on.iloc[ii].startswith("v"): box = LinearRegionItem(values=(start, stop), brush="k", movable=False) self.ui.graphicsView.addItem(box) self.ui.comboBoxSelectStimId.clear() for stim_id in set(self.Stims.df.id): self.ui.comboBoxSelectStimId.addItem(stim_id) # def checkStimSafeguard(self): # if self.ui.checkboxDirectStim.isChecked() is False: # self.ui.listwidgetAllStims.itemDoubleClicked.disconnect(self.single_stim) # elif self.ui.checkboxDirectStim.isChecked(): # self.ui.listwidgetAllStims.itemDoubleClicked.connect(self.single_stim) def update_serial_available(self): """Scans the available comports and updates the combobox with the resulst""" self.ui.comboboxSerialAvailable.clear() self.arduino.scan_comports() for port in list(self.arduino.port_dict.keys()): self.ui.comboboxSerialAvailable.addItem(port) def serial_connect_disconnect(self): """Connects to selected serial if disconnected and vice versa""" if self.ui.buttonSerialConnect.text().lower() == "connect": port = self.ui.comboboxSerialAvailable.currentText() self.arduino.connect(port) if self.arduino.ser.isOpen(): self.ui.labelSerialConnected.setText("Connected: " + self.arduino.port) self.ui.labelSerialConnected.setStyleSheet("color: green") self.ui.buttonSerialConnect.setText("Disconnect") self.ui.checkBoxReadSerialLive.setEnabled(True) self.ui.sliderIRLight.setEnabled(True) elif self.ui.buttonSerialConnect.text().lower() == "disconnect": self.ui.checkBoxReadSerialLive.setChecked(False) self.arduino.disconnect() if self.arduino.ser.isOpen() == False: self.ui.labelSerialConnected.setText("Status: Not connected") self.ui.labelSerialConnected.setStyleSheet("color: black") self.ui.buttonSerialConnect.setText("Connect") self.ui.checkBoxReadSerialLive.setEnabled(False) self.ui.checkBoxReadSerialLive.setChecked(False) 
self.ui.sliderIRLight.setEnabled(False) def listen_to_serial(self): """Starts a thread that listens on the self.Arduino.ser serial connection and updates the GUI with incoming messages""" if self.ui.checkBoxReadSerialLive.isChecked(): self.ui.graphicsViewSerial.clear() self.temperature_plot = PlotDataItem(pen=(0, 153, 153)) self.temperature_plot_2 = PlotDataItem(pen=(255, 128, 0)) self.temperature_plot.setDownsampling(auto=True, method="mean") self.temperature_plot_2.setDownsampling(auto=True, method="mean") self.ui.graphicsViewSerial.addItem(self.temperature_plot) self.ui.graphicsViewSerial.addItem(self.temperature_plot_2) t = Thread(target=self.start_listening, args=()) t.start() def start_listening(self): """To be started in separate thread: listens to Serial port and updates gui via separate method if new message comes in""" self.arduino.ser.flushInput() self.logged_temperature = np.array([]) self.logged_temperature_2 = np.array([]) self.logged_temperature_time = np.array([]) start = time.time() while self.ui.checkBoxReadSerialLive.isChecked( ) and self.arduino.ser.isOpen(): if self.arduino.ser.in_waiting > 0: message = self.arduino.ser.readline().decode() elapsed_time = time.time() - start self.update_serial_display(message, np.round(elapsed_time, 2)) else: time.sleep(0.01) else: pass def update_serial_display(self, message, elapsed_time): temp1, temp2 = message.split(" ") self.ui.labelSerialLiveIncoming.setText(str(elapsed_time)) self.ui.labelSerialLiveTemperature.setText(message) temp1 = float(temp1) temp2 = float(temp2) self.logged_temperature_time = np.hstack( [self.logged_temperature_time, elapsed_time]) self.logged_temperature = np.hstack([self.logged_temperature, temp1]) self.logged_temperature_2 = np.hstack( [self.logged_temperature_2, temp2]) self.temperature_plot.setData(x=self.logged_temperature_time, y=self.logged_temperature) self.temperature_plot_2.setData(x=self.logged_temperature_time, y=self.logged_temperature_2) def write_to_serial(self): 
"""Send message in the lineEdit widget to the serial port""" message = self.ui.lineeditWriteToSerial.text() self.arduino.write(message) self.ui.lineeditWriteToSerial.clear() def set_lightstim_name_lineedit(self): c = 1 while "LightStim" + str(len(self.Stims.df) + c) in self.Stims.df.id: c += 1 newname = "LightStim" + str(len(self.Stims.df) + c) self.ui.lineeditLightStimName.setText(newname) def save_lightstim(self): """Takes the current settings in the lightstim box and adds them to stim manager""" start_time = self.ui.spinboxStimStart.value() stop_time = self.ui.spinboxStimDuration.value() + start_time stim_id = self.ui.lineeditLightStimName.text() if self.lightStimType == "LED": start_message = "n" for slider in [ self.ui.sliderRed, self.ui.sliderGreen, self.ui.sliderBlue ]: start_message += str(slider.value()).zfill(3) stop_message = "nC" elif self.lightStimType == "White": start_message = "wS" stop_message = "wC" self.Stims.add_stimulus(start_time, stop_time, start_message, stop_message, stim_id) self.set_lightstim_name_lineedit() self.ui.comboBoxSelectStimId.clear() stim_ids = list(set(self.Stims.df.id)) stim_ids.sort() for stim_id in stim_ids: self.ui.comboBoxSelectStimId.addItem(stim_id) self.set_experiment_view() def set_vibrationstim_name_lineedit(self): c = 1 while "VibrationStim" + str(len(self.Stims.df) + c) in self.Stims.df.id: c += 1 newname = "VibrationStim" + str(len(self.Stims.df) + c) self.ui.lineeditVibrationStimName.setText(newname) def save_vibration_stim(self): """takes the current settings in the vibration stim box and adds it to the stim manager""" start_time = self.ui.spinBoxVibrationStart.value() duration = self.ui.spinBoxVibrationDuration.value() freq = self.ui.spinBoxVibrationFrequency.value() start_message = "v" + str(freq).zfill(3) + str(int( duration * 1000)).zfill(4) stop_message = "" stim_id = self.ui.lineeditVibrationStimName.text() self.Stims.add_stimulus(start_time, start_time + duration, start_message, stop_message, stim_id) 
self.set_vibrationstim_name_lineedit() self.ui.comboBoxSelectStimId.clear() stim_ids = list(set(self.Stims.df.id)) stim_ids.sort() for stim_id in stim_ids: self.ui.comboBoxSelectStimId.addItem(stim_id) self.set_experiment_view() def delete_selected_stim(self): stim_id = self.ui.comboBoxSelectStimId.currentText() self.Stims.delete_stimulus(stim_id) self.set_experiment_view() def delete_all_stims(self): self.Stims.delete_all_stims() self.set_experiment_view() def load_stimuli(self): file = QtWidgets.QFileDialog.getOpenFileName(self, "Select a Stimuli File") file = file[0] if os.path.exists(file) and file.endswith(".txt"): df = pd.read_csv(file, delimiter="\t") self.Stims.df = df self.set_experiment_view() elif not file: pass else: QtWidgets.QMessageBox.warning(self, "Invalid Filename", "Invalid Stimulus File selected.") def save_stimuli(self): filename_selected = False while not filename_selected: filename = QtWidgets.QFileDialog.getSaveFileName( self, "Save Stimuli file as")[0] if not filename: break else: if not filename.endswith(".txt"): filename += ".txt" self.Stims.df.to_csv(filename, sep="\t") filename_selected = True def toggle_preview(self, event): if event: self.cam.preview() self.ui.buttonCameraPreview.setText("Stop") self.ui.buttonCameraPreview.setStyleSheet("color: red") else: self.ui.buttonCameraPreview.setText("Preview") self.ui.buttonCameraPreview.setStyleSheet("color: Black") self.cam.stop() def set_autonaming(self, bool): if bool: self.ui.lineeditVideoName.setText( time.strftime("%Y%m%d_%H%M%S_video")) def set_experiment_autonaming(self): if self.ui.checkBoxExperimentAutonaming.isChecked(): name = time.strftime("%Y%m%d_%H%M%S") name += "_" name += str(self.ui.spinboxCrowdsize.value()) name += "_" name += str(self.ui.spinBoxExperimentDurationMinutes.value()) + "m" name += str( self.ui.spinBoxExperimentDurationSeconds.value()) + "s_" if self.ui.checkboxMetaDataDrugs.isChecked(): name += self.ui.lineeditMetaDataDrugName.text() + "_" else: name += 
"None_" if self.ui.checkboxMetaDataGenetics.isChecked(): name += self.ui.lineeditMetaDataGenetics.text() + "_" else: name += "None_" if len(self.Stims.df) > 0: name += "Light" else: name += "None" self.ui.lineeditExperimentName.setText(name) else: pass def set_camera_framerate(self, fps): self.cam.framerate = fps # if self.cam.do_gamma == False: # self.ui.sliderCameraGamma.setVisible(False) # self.ui.labelCameraGamma.setVisible(False) # else: # self.ui.sliderCameraGamma.setVisible(True) # self.ui.labelCameraGamma.setVisible(True) def set_camera_brightness(self, brightness): self.cam.brightness = brightness def set_camera_gamma(self, gamma): gamma = gamma / 10 self.cam.gamma = gamma self.ui.labelCameraGamma.setNum(gamma) def set_camera_exposure(self, exp): # If set exposure is longer than framerate allows, then adjust framerate exp = float(exp) if exp > (1 / self.cam.framerate): print( "\n WARNING: Adjusting framerate to accomodate for exposure \n" ) # self.set_camera_framerate(1 / exp) self.ui.spinBoxFramerate.setValue(int(1 / exp)) exp = np.log2(exp) self.cam.exposure = exp def set_IR_light(self): val = self.ui.sliderIRLight.value() self.arduino.write("i" + str(val).zfill(3)) sys.stdout.write('\n IR Light set to {}. {}% of power'.format( val, (val / 255) * 100)) def set_path(self): self.path = QtGui.QFileDialog.getExistingDirectory() print(self.path) def set_working_directory(self): self.working_directory_selected = False while not self.working_directory_selected: self.path = QtGui.QFileDialog.getExistingDirectory() if os.path.exists(self.path): os.chdir(self.path) self.working_directory_selected = True else: q = QtWidgets.QMessageBox.question( self, "No path selected", "Do you wish to not select a path? 
\n Path then defaults to D:\\" ) if q == QtWidgets.QMessageBox.Yes: self.path = "D:\\" self.working_directory_selected = True def reset_camera_properties(self): self.cam.brightness = 0 self.ui.sliderCameraBrightness.setValue(0) self.ui.labelCameraBrighness.setNum(0) self.cam.exposure = -5.0 t = self.ui.comboboxCameraExposure.findText(str(2**-5)) self.ui.comboboxCameraExposure.setCurrentIndex(t) self.cam.gamma = 1 self.ui.sliderCameraGamma.setValue(10) self.ui.labelCameraGamma.setNum(1) self.cam.framerate = 30 self.ui.spinBoxFramerate.setValue(30) def record_video(self, event): if event: self.ui.buttonRecordVideo.setText("Stop") self.ui.buttonRecordVideo.setStyleSheet("color:red") self.ui.groupBoxCameraControls.setEnabled(False) self.ui.groupBoxCameraConnection.setEnabled(False) if self.ui.checkboxAutoNaming.isChecked(): self.ui.lineeditVideoName.setText( time.strftime("%Y%m%d_%H%M" + "_experiment")) name = os.path.join(self.path, self.ui.lineeditVideoName.text()) duration = self.ui.spinBoxVideoTimeMinutes.value() * 60 duration += self.ui.spinBoxVideoTimeSeconds.value() preview = self.ui.checkBoxPreviewWhileRecording.isChecked() self.cam.video(name, duration, preview) t = Thread(target=self.wait_for_recording_end) t.start() else: self.ui.groupBoxCameraControls.setEnabled(True) self.ui.groupBoxCameraConnection.setEnabled(True) self.ui.buttonRecordVideo.setText("Record") self.ui.buttonRecordVideo.setStyleSheet("color:Black") self.cam.stop() def wait_for_recording_end(self): time.sleep(0.1) while self.cam.alive == True: time.sleep(0.1) else: self.record_video(False) # self.ui.buttonRecordVideo.setChecked(False) # """ ========================================================================================== METHODS TO RUN AN ENTIRE EXPERIMENT ========================================================================================== """ def start_experiment(self, event): if event: if not hasattr(self, "cam"): QtWidgets.QMessageBox.warning( self, "Not Connected Warning", 
"No open connection with Camera") self.ui.buttonStartExperiment.setChecked(False) elif not self.cam.cap.isOpened(): QtWidgets.QMessageBox.warning( self, "Not Connected Warning", "No open connection with Camera") self.ui.buttonStartExperiment.setChecked(False) elif not self.arduino.connected: QtWidgets.QMessageBox.warning( self, "Not Connected Warning", "No open connection with Controller") self.ui.buttonStartExperiment.setChecked(False) else: if not self.working_directory_selected: QtWidgets.QMessageBox.warning( self, "No valid working directory", "Select a directory to save your experiment") self.set_working_directory() self.experiment_live = True self.ui.buttonStartExperiment.setStyleSheet( 'color: rgba(255,0,0,255)') self.ui.buttonStartExperiment.setText('Interrupt') self.set_experiment_autonaming() self.ui.widgetMetaData.setEnabled(False) self.experiment_name = self.ui.lineeditExperimentName.text() self.experiment_path = os.path.join( self.path, "Exp_" + self.experiment_name) if not os.path.exists( self.experiment_path) or not os.path.isdir( self.experiment_path): os.mkdir(self.experiment_path) if self.ui.checkBoxReadSerialLive.isChecked(): self.ui.checkBoxReadSerialLive.setChecked(False) self.ui.checkBoxReadSerialLive.setChecked(True) self.listen_to_serial() duration = ( self.ui.spinBoxExperimentDurationMinutes.value() * 60) + self.ui.spinBoxExperimentDurationSeconds.value() preview = self.ui.checkBoxPreviewWhileRecording.isChecked() self.cam.video( os.path.join(self.experiment_path, self.experiment_name), duration, preview) self.Stims.start_stimuli() t = Thread(target=self.wait_for_experiment_end, args=()) t.start() else: self.experiment_live = False self.cam.stop() self.ui.checkBoxReadSerialLive.setChecked(False) self.ui.buttonStartExperiment.setStyleSheet("color: Black") self.ui.buttonStartExperiment.setText("Start Experiment") df = pd.DataFrame() df["date"] = [time.strftime("%Y%m%d")] df["time"] = [time.strftime("%H%M%S")] df["duration"] = [ 
(self.ui.spinBoxExperimentDurationMinutes.value() * 60) + self.ui.spinBoxExperimentDurationSeconds.value() ] df["drugs"] = [self.ui.lineeditMetaDataDrugName.text()] df["genetics"] = [self.ui.lineeditMetaDataGenetics.text()] df["age"] = [self.ui.spinboxAge] df["framerate"] = [self.cam.framerate] df["dechorionated"] = [ self.ui.checkboxMetaDataDechorionation.isChecked() ] df["exposture"] = [ float(self.ui.comboboxCameraExposure.currentText()) ] df["gamma"] = [self.cam.gamma] df["brightness"] = [self.cam.brightness] df["infrared"] = [self.ui.sliderIRLight.value()] df.to_csv(os.path.join(self.experiment_path, "metadata.txt"), sep="\t") self.Stims.df.to_csv(os.path.join(self.experiment_path, "stimuli_profile.txt"), sep="\t") df = pd.DataFrame(np.hstack([ self.logged_temperature_time.reshape(-1, 1), self.logged_temperature.reshape(-1, 1), self.logged_temperature_2.reshape(-1, 1) ]), columns=["time", "temperature", "temperature2"]) df.set_index("time", inplace=True) df.to_csv(os.path.join(self.experiment_path, "logged_temperatures.txt"), sep="\t") self.ui.widgetMetaData.setEnabled(True) def wait_for_experiment_end(self): time.sleep(0.2) # Give camera a chance to start. 
while self.cam.alive: time.sleep(0.2) else: if self.experiment_live: self.start_experiment(False) self.ui.buttonStartExperiment.setChecked(False) # def start_experiment(self): # # if self.experiment_live: # self.experiment_live = False # self.cam.stop() # self.ui.checkBoxReadSerialLive.setChecked(False) # self.ui.buttonStartExperiment.setStyleSheet("color: Black") # self.ui.buttonStartExperiment.setText("Start Experiment") # # df = pd.DataFrame() # df["date"] = [time.strftime("%Y%m%d")] # df["time"] = [time.strftime("%H%M%S")] # df["duration"] = [(self.ui.spinBoxExperimentDurationMinutes.value() * 60) + self.ui.spinBoxExperimentDurationSeconds.value()] # df["drugs"] = [self.ui.lineeditMetaDataDrugName.text()] # df["genetics"] = [self.ui.lineeditMetaDataGenetics.text()] # df["framerate"] = [self.cam.framerate] # df["dechorionated"] = [self.ui.checkboxMetaDataDechorionation.isChecked()] # df["exposture"] = [float(self.ui.comboboxCameraExposure.currentText())] # df["gamma"] = [self.cam.brightness] # df["brightness"] = [self.cam.brightness] # df["infrared"] = [self.ui.sliderIRLight.value] # # # df.to_csv(os.path.join(self.experiment_path, "metadata.txt"), sep="\t") # # self.Stims.df.to_csv(os.path.join(self.experiment_path, "stimuli_profile.txt"), sep="\t") # df = pd.DataFrame(np.hstack([self.logged_temperature_time.reshape(-1,1), self.logged_temperature.reshape(-1,1), self.logged_temperature_2.reshape(-1,1)]), columns = ["time", "temperature", "temperature2"]) # df.set_index("time", inplace=True) # df.to_csv(os.path.join(self.experiment_path, "logged_temperatures.txt"), sep="\t") # # # self.ui.groupboxMetaData.setEnabled(True) # # elif not self.experiment_live: # if not hasattr(self, "cam"): # QtWidgets.QMessageBox.warning(self, "Not Connected Warning", "No open connection with Camera or Controller") # elif self.cam.cap.isOpened() != True or self.arduino.ser.isOpen() != True: # QtWidgets.QMessageBox.warning(self, "Not Connected Warning", "No open connection with 
Camera or Controller") # else: # self.experiment_live = True # self.ui.buttonStartExperiment.setStyleSheet('color: rgba(255,0,0,255)') # self.ui.buttonStartExperiment.setText('Interrupt') # # self.set_experiment_autonaming() # self.ui.groupboxMetaData.setEnabled(False) # self.experiment_name = self.ui.lineeditExperimentName.text() # # self.experiment_path = os.path.join(self.path, "Exp_" + self.experiment_name) # if not os.path.exists(self.experiment_path) or not os.path.isdir(self.experiment_path): # os.mkdir(self.experiment_path) # # if self.ui.checkBoxReadSerialLive.isChecked(): # self.ui.checkBoxReadSerialLive.setChecked(False) # self.ui.checkBoxReadSerialLive.setChecked(True) # self.listen_to_serial() # # duration = (self.ui.spinBoxExperimentDurationMinutes.value() * 60) + self.ui.spinBoxExperimentDurationSeconds.value() # self.cam.video(os.path.join(self.experiment_path, self.experiment_name), duration) # self.Stims.start_stimuli() # t = Thread(target=self.wait_for_experiment_end, args=()) # t.start() # def wait_for_experiment_end(self): # time.sleep(0.2) # Give camera a chance to start. # while self.cam.alive: # time.sleep(0.1) # else: # if self.experiment_live: # self.start_experiment() def closeEvent(self, event): QtWidgets.QMessageBox.warning(self, "Closing", "Nothing you can do about it") self.arduino.ser.close() self.cam.cap.release() event.accept() print("yay")