def testZernike(self):
    # Fit the first 10 Zernike modes on a random image restricted to a
    # circular pupil, then rebuild the fitted surface.
    # Note: circle() is presumably skimage.draw.circle, which newer
    # scikit-image releases replace with disk((250, 250), 100).
    img = np.random.rand(500, 500)
    mask = np.ones((500, 500), dtype=bool)
    rr, cc = circle(250, 250, 100)
    mask[rr, cc] = 0                      # unmask the pupil pixels
    masked_ima = np.ma.masked_array(img, mask=mask)
    coef, mat = zernike.zernikeFit(masked_ima, np.arange(10) + 1)
    zernike.zernikeSurface(masked_ima, coef, mat)
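# Usage sketch (added for illustration, not part of the original test): the
# same fit/subtract pattern as a standalone helper. It assumes numpy,
# scikit-image >= 0.19 (where disk replaces the removed circle) and that the
# zernike module used above is already imported at module level.
def example_zernike_fit_on_random_pupil():
    import numpy as np
    from skimage.draw import disk

    img = np.random.rand(500, 500)
    mask = np.ones((500, 500), dtype=bool)
    rr, cc = disk((250, 250), 100)        # circular pupil, radius 100 px
    mask[rr, cc] = 0                      # unmask the pupil pixels
    masked_ima = np.ma.masked_array(img, mask=mask)
    coef, mat = zernike.zernikeFit(masked_ima, np.arange(10) + 1)
    surf = zernike.zernikeSurface(masked_ima, coef, mat)
    return masked_ima - surf              # first 10 modes removed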
def _createCubeTTrFromCube(self, fitEx=None):
    # it takes forever to do the image extension
    #cube = self._readCube()
    cube_ttr = None
    if fitEx is None:
        # remove tip/tilt frame by frame with a Zernike fit (modes 2 and 3)
        for i in range(self._cube.shape[2]):
            image = self._cube[:, :, i]
            coef, mat = zernike.zernikeFit(image, np.array([2, 3]))
            surf = zernike.zernikeSurface(image, coef, mat)
            image_ttr = image - surf
            if cube_ttr is None:
                cube_ttr = image_ttr
            else:
                cube_ttr = np.ma.dstack((cube_ttr, image_ttr))
    else:
        # remove tip/tilt using the interferometric fit
        for i in range(self._cube.shape[2]):
            coef, interf_coef = tip_tilt_interf_fit.fit(self._cube[:, :, i])
            image_ttr = self._ttd.ttRemoverFromCoeff(coef, self._cube[:, :, i])
            if cube_ttr is None:
                cube_ttr = image_ttr
            else:
                cube_ttr = np.ma.dstack((cube_ttr, image_ttr))
    self._saveCube(cube_ttr, 'Total_Cube_ttr.fits')
    return cube_ttr
def piston_noise(self, data_file_path):
    '''
    Remove tip and tilt from each image and average the residuals
    .. note:: a variation over time is expected

    Parameters
    ----------
    data_file_path: string
        measurement data folder

    Returns
    -------
    mean: numpy array
        vector containing the mean of each tip/tilt-subtracted image
    time: numpy array
        vector of the times at which the images were taken
    spe: numpy array
        spectrum of the piston time series
    freq: numpy array
        frequency vector associated with spe
    '''
    file_list = glob.glob(os.path.join(data_file_path, '*.h5'))
    image_number = len(file_list)
    time = np.arange(image_number) * (1 / 27.58)   # acquisition rate 27.58 Hz
    mean_list = []
    for j in range(image_number):
        name = 'img_%04d.h5' % j
        file_name = os.path.join(data_file_path, name)
        image = self._ic.from4D(file_name)
        zernike_coeff_array, mat = zernike.zernikeFit(image,
                                                      np.array([2, 3]))
        sur = zernike.zernikeSurface(image, zernike_coeff_array, mat)
        image_ttr = image - sur
        mean = image_ttr.mean()
        mean_list.append(mean)
    # spectrum of the piston time series
    spe, freq = self._fft(np.array(mean_list))
    return np.array(mean_list), time, spe, freq
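# Usage sketch (added): assumed call pattern, with `noise` a hypothetical
# instance of the analysis class this method belongs to.
#
#     mean, time, spe, freq = noise.piston_noise(data_file_path)
#     plt.plot(time, mean); plt.xlabel('time [s]'); plt.ylabel('piston [m]')
#     plt.figure(); plt.plot(freq, spe); plt.xlabel('frequency [Hz]')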
def longTerm_rmsConvection(self):
    where = fold_name.OPD_SERIES_ROOT_FOLDER
    #where = '/mnt/m4storage/Data/M4Data/OPTData/OPD_series/'
    #where = '/home/labot/data/M4/Data/M4Data/OPTData/OPD_series'
    path = os.path.join(where, self.tt)
    D1 = sorted(glob.glob(os.path.join(path, '*.fits')))
    #D = D1[0:-int(len(D1)/8)]
    D = D1[0:-2]

    # stack all frames and compute the series mean
    cube = None
    for name in D:
        print(name)
        image = read_data.readFits_maskedImage(name)
        if cube is None:
            cube = image
        else:
            cube = np.ma.dstack((cube, image))
    mean = np.ma.mean(cube, axis=2)

    # rms of each frame after subtracting the mean and the first 8 Zernike modes
    rms_list = []
    for i in range(cube.shape[2]):
        ima = cube[:, :, i] - mean
        coef, mat = zernike.zernikeFit(ima, np.arange(8) + 1)
        surf = zernike.zernikeSurface(ima, coef, mat)
        new_ima = ima - surf
        rms_list.append(new_ima.std())
    rms = np.array(rms_list)

    # x axis labels from the timestamp in each file name
    x = np.arange(rms.size)
    x_list = []
    for i in range(rms.size):
        aa = D[i].split('_')[-1]
        x_list.append(aa.split('.')[0])
    x_time = np.array(x_list)

    ntick = 11
    step = max(1, len(x_time) // ntick)
    plt.figure(figsize=(10, 6))
    plt.plot(x, rms, '-')
    plt.xticks(x[::step], x_time[::step], rotation=45)
    plt.locator_params(nbins=ntick, axis='x', tight=True)
    plt.ylabel('rms [m]')
    plt.title('%s' % self.tt)

    results_path = os.path.join(fold_name.OUT_FOLDER, 'LongTermStability')
    dove = os.path.join(results_path, self.tt)
    if not os.path.exists(dove):
        os.makedirs(dove)
    name = os.path.join(dove, 'rmsMeanDiff.fits')
    pyfits.writeto(name, rms, overwrite=True)
    name = os.path.join(dove, '%s-rms.png' % self.tt)
    if os.path.isfile(name):
        os.remove(name)
    plt.savefig(name)
    return rms
def tipTiltDetrend(self, image, roi, final_index, analysis_ind=None):
    """
    Parameters
    ----------
    image: numpy masked array
        image to be analyzed
    roi: list
        rois of the image (boolean masks)
    final_index: int
        index of the roi from which tip/tilt is removed
    analysis_ind: numpy array
        indices of the rois to be used for the analysis

    Returns
    -------
    image_ttr: numpy masked array
        image without tip and tilt
    """
    roi_copy = np.copy(roi)
    self._logger.debug('Removal of tip-tilt from roi[%d]', final_index)
    self._totalMatList = []
    coefList = []
    # fit piston, tip and tilt on every roi that contains valid pixels
    for r in roi:
        if np.any(r == 0):
            imag = np.ma.masked_array(image.data, mask=r)
            ima = np.ma.MaskedArray.copy(imag)
            coef, mat = zernike.zernikeFit(ima, np.array([1, 2, 3]))
            self._totalMatList.append(mat[:, 1:])
            coefList.append(coef)
    if analysis_ind is None:
        analysis_ind = np.array([1, 2])
        #from roi
        #coef_list = coefList
        #del coef_list[final_index]
    coef_list = []
    for i in range(len(analysis_ind)):
        coef_list.append(coefList[analysis_ind[i]])
    # average the coefficients of the analysis rois
    piston, tip, tilt = np.average(coef_list, axis=0)
    surfcoef = np.array([tip, tilt])
    image_with_tt = np.ma.masked_array(image.data, mask=roi[final_index])
    zernike_surface_to_subtract = zernike.zernikeSurface(
        image_with_tt, surfcoef, self._totalMatList[final_index])
    image_ttr = image_with_tt - zernike_surface_to_subtract
    return image_ttr
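# Usage sketch (added): expected call pattern, assuming `roi` is a list of
# boolean masks (0/False marking valid pixels) and `ttd` a hypothetical
# instance of the class this method belongs to. The tip/tilt averaged on
# roi[1] and roi[2] is subtracted from the image restricted to roi[3].
#
#     image_ttr = ttd.tipTiltDetrend(image, roi, final_index=3,
#                                    analysis_ind=np.array([1, 2]))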
def tiptilt_fit(ima):
    '''
    Parameters
    ----------
    ima: numpy masked array
        image to be analyzed

    Returns
    -------
    ima_ttr: numpy masked array
        image without tip and tilt
    '''
    if ima is None:
        return None
    coef, mat = zernike.zernikeFit(ima, np.array([2, 3]))
    surf = zernike.zernikeSurface(ima, coef, mat)
    new_image = ima - surf
    ima_ttr = np.ma.masked_array(new_image, mask=ima.mask)
    return ima_ttr
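# Usage sketch (added): check tiptilt_fit on a synthetic tilted wavefront
# over a circular pupil; the residual RMS should be close to zero. Assumes
# only numpy and the zernike module already imported by this file.
def example_tiptilt_fit():
    import numpy as np
    y, x = np.mgrid[0:200, 0:200]
    tilted = 1e-7 * (x - 100) + 5e-8 * (y - 100)          # pure tip/tilt plane [m]
    outside_pupil = (x - 100)**2 + (y - 100)**2 > 90**2   # True = masked out
    ima = np.ma.masked_array(tilted, mask=outside_pupil)
    ima_ttr = tiptilt_fit(ima)
    print('rms before: %g  after: %g' % (ima.std(), ima_ttr.std()))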
def _rmsFromCube(self, cube_to_process):
    '''
    Parameters
    ----------
    cube_to_process: numpy masked array [pixel, pixel, number of images]
        cube generated by the analyzer_iffunctions

    Returns
    -------
    rms_mean: float
        rms averaged over the modes used in the iff's acquisition
    quad_tt: float
        quadratic sum of tip and tilt averaged over the modes used in the
        iff's acquisition
    tilt: float
        tilt averaged over the modes used in the iff's acquisition
    ptv_mean: float
        peak-to-valley averaged over the modes used in the iff's acquisition
    '''
    self._logger.debug('Calculation of rms, tip and tilt')
    rms_list = []
    coef_tilt_list = []
    coef_tip_list = []
    quad_list = []
    ptv_list = []
    for i in range(cube_to_process.shape[2]):
        image = cube_to_process[:, :, i]
        # remove piston, tip and tilt before computing rms and ptv
        coef, mat = zernike.zernikeFit(image, np.array([1, 2, 3]))
        sur = zernike.zernikeSurface(image, coef, mat)
        image_ttr = image - sur
        ptv_list.append(np.max(image_ttr) - np.min(image_ttr))
        rms_list.append(image_ttr.std())
        coef_tip_list.append(coef[1])
        coef_tilt_list.append(coef[2])
        quad_list.append(np.sqrt(coef[1]**2 + coef[2]**2))
    ptv_vector = np.array(ptv_list)
    rms_vector = np.array(rms_list)
    tip = np.array(coef_tip_list).mean()
    tilt = np.array(coef_tilt_list).mean()
    quad_tt = np.array(quad_list).mean()
    rms_mean = np.mean(rms_vector)
    ptv_mean = np.mean(ptv_vector)
    return rms_mean, quad_tt, tilt, ptv_mean
def validFrames(self):
    '''Create the plot of the individual RMS of each frame (without tip/tilt)
    together with the RMS of the average frame
    '''
    rs_img = self._readRsImg()
    coef, mat = zernike.zernikeFit(rs_img, np.array([2, 3]))
    surf = zernike.zernikeSurface(rs_img, coef, mat)
    rs_ttr = rs_img - surf
    r0 = rs_ttr.std()

    cube_ttr = self._readCube(1)
    rs_vect = np.zeros(cube_ttr.shape[2])
    for j in range(cube_ttr.shape[2]):
        rs_vect[j] = cube_ttr[:, :, j].std()

    plt.figure()
    plt.plot(np.arange(cube_ttr.shape[2]), rs_vect, label='Data')
    plt.yscale('log')
    plt.plot(np.zeros(cube_ttr.shape[2]) + r0, label='Average')
    plt.ylabel('m RMS')
    plt.xlabel('# frames')
    plt.title('Images WFE')
    plt.legend()
def analysis_whit_structure_function(self, data_file_path, tau_vector,
                                     h5_or_fits=None):
    '''
    .. note:: 4000 = total number of images in the hdf5 dataset

    Parameters
    ----------
    data_file_path: string
        measurement data folder
    tau_vector: numpy array
        vector of tau values to use

    Other Parameters
    ----------------
    h5_or_fits: if None, the h5 data analysis is performed

    Returns
    -------
    rms_medio: numpy array
        rms calculated on the difference of the images (tau apart)
    quad_med: numpy array
        quadratic sum of tip and tilt calculated on the difference of
        the images
    n_meas: int
        total number of difference measurements used
    '''
    if h5_or_fits is None:
        file_list = glob.glob(os.path.join(data_file_path, '*.h5'))
    else:
        listtot = glob.glob(os.path.join(data_file_path, '*.fits'))
        listtot.sort()
        file_list = listtot[0:-2]
    image_number = len(file_list)
    i_max = int((image_number - tau_vector[tau_vector.shape[0] - 1]) /
                (tau_vector[tau_vector.shape[0] - 1] * 2))
    if i_max <= 10:
        raise OSError('tau = %s too large. i_max = %d'
                      % (tau_vector[tau_vector.shape[0] - 1], i_max))
    rms_medio_list = []
    quad_med_list = []
    for j in range(tau_vector.shape[0]):
        dist = tau_vector[j]
        print(dist)
        rms_list = []
        quad_list = []
        for i in range(i_max):
            k = i * dist * 2
            if h5_or_fits is None:
                name = 'img_%04d.h5' % k
                file_name = os.path.join(data_file_path, name)
                image_k = self._ic.from4D(file_name)
                name = 'img_%04d.h5' % (k + dist)
                file_name = os.path.join(data_file_path, name)
                image_dist = self._ic.from4D(file_name)
            else:
                image_k = read_data.readFits_maskedImage(file_list[k])
                image_dist = read_data.readFits_maskedImage(file_list[k + dist])

            # difference of two frames a distance tau apart, tip/tilt removed
            image_diff = image_k - image_dist
            zernike_coeff_array, mat = zernike.zernikeFit(image_diff,
                                                          np.array([2, 3]))
            sur = zernike.zernikeSurface(image_diff, zernike_coeff_array, mat)
            image_ttr = image_diff - sur
            quad = np.sqrt(zernike_coeff_array[0]**2 +
                           zernike_coeff_array[1]**2)
            rms_list.append(image_ttr.std())
            quad_list.append(quad)
        rms_vector = np.array(rms_list)
        rms_medio_list.append(rms_vector.mean())
        quad_med_list.append(np.array(quad_list).mean())
    rms_medio = np.array(rms_medio_list)
    quad_med = np.array(quad_med_list)
    # for the calculation of the statistical amplitude of convection
    n_meas = rms_vector.shape[0] * 2 * tau_vector.shape[0]
    return rms_medio, quad_med, n_meas
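# Usage sketch (added): typical call, with `an` a hypothetical instance of
# the noise-analysis class this method belongs to and tau_vector in frames.
#
#     tau_vector = np.array([1, 2, 4, 8, 16, 32])
#     rms_medio, quad_med, n_meas = an.analysis_whit_structure_function(
#         data_file_path, tau_vector)
#     plt.plot(tau_vector, rms_medio, '-o'); plt.xlabel('tau [frames]')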
def robustImageFromDataSet(n_images, data_file_path, zernike_vector_to_subtract,
                           offset=None):
    '''
    Create a robust image from FITS or h5 files, with optional offset
    subtraction

    Parameters
    ----------
    n_images: int
        number of images to analyze
    data_file_path: string
        total path for data analysis
    zernike_vector_to_subtract: numpy array
        Zernike modes to subtract from the final image

    Other Parameters
    ----------------
    offset: if None, the data analysis splits n_images in two;
        otherwise the offset image saved in the tt folder is re-read and
        subtracted from each image during cube creation

    Returns
    -------
    robust_image: numpy masked array
        robust image from the data set
    '''
    last_name = data_file_path.split('/')[-1]
    if last_name == 'hdf5':
        list_tot = glob.glob(os.path.join(data_file_path, '*.h5'))
        tt = data_file_path.split('/')[-2]
        ext = 1
    else:
        list_tot = glob.glob(os.path.join(data_file_path, '*.fits'))
        tt = data_file_path.split('/')[-1]
        ext = 0
    list_tot.sort()
    image_list = list_tot[0:n_images]

    if offset is None:
        # split the data set in two halves and difference their means
        half = int(len(image_list) / 2)
        list1 = image_list[0:half]
        list2 = image_list[half:]

        cube1 = None
        print('Creating cube 1')
        for name in list1:
            image = read_data.read_phasemap(name, ext)
            if cube1 is None:
                cube1 = image
            else:
                cube1 = np.ma.dstack((cube1, image))
        cube2 = None
        print('Creating cube 2')
        for name in list2:
            image = read_data.read_phasemap(name, ext)
            if cube2 is None:
                cube2 = image
            else:
                cube2 = np.ma.dstack((cube2, image))
        mean1 = np.ma.mean(cube1, axis=2)
        mean2 = np.ma.mean(cube2, axis=2)
        final_image = mean2 - mean1
    else:
        # subtract the saved optical offset from each frame, then average
        fits_file_name = os.path.join(config.OUT_FOLDER, 'Req', tt,
                                      'OptOffset.fits')
        image_optOffset = read_data.readFits_maskedImage(fits_file_name)
        cube = None
        print('Creating cube')
        for name in image_list:
            ima = read_data.read_phasemap(name, ext)
            image = ima - image_optOffset
            if cube is None:
                cube = image
            else:
                cube = np.ma.dstack((cube, image))
        final_image = np.ma.mean(cube, axis=2)

    coef, mat = zernike.zernikeFit(final_image, zernike_vector_to_subtract)
    surf = zernike.zernikeSurface(final_image, coef, mat)
    image_ttr = final_image - surf
    return image_ttr
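# Usage sketch (added): average the first 100 frames of a data set and
# remove piston, tip and tilt (Zernike modes 1-3) from the robust image.
#
#     robust_image = robustImageFromDataSet(100, data_file_path,
#                                           np.array([1, 2, 3]))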