def online_dark():
    """Compute the master dark frame for the observation set described in json.txt.

    Reads the pipeline configuration, averages the frames of each dark
    directory with ``xyy.online_mean`` and writes the result to
    ``<redrive>/<path>/Dark/dark.fits``.  The computation is skipped when
    that file already exists.
    """
    # Pipeline configuration (paths and processing flags).
    with open(r"/home/wangxinhua/level1/Level1rev02/json.txt", 'r') as f:
        para = json.load(f)
    path = para['path']        # e.g. "/home/wangxinhua/20190518/HA"
    redrive = para['redrive']  # e.g. "/home/wangxinhua/nvst"
    dark_flag = int(para['dark_flag'])  # unused here; read for parity with the other entry points
    flat_flag = int(para['flat_flag'])  # unused here
    darked_path = para['darked_path']   # unused here
    # Split the observation tree into data / flat / dark sub-directories.
    datapath, flatpath, darkpath = xyy.path_paser(path)
    # Strip the Windows drive prefix ("G:..." -> drop 2 chars) when present,
    # otherwise drop the leading path separator.
    try:
        path.split(':')[1]
        path = path[2:]
    except IndexError:
        path = path[1:]
    darkfile = os.path.join(redrive, path, 'Dark', 'dark.fits')
    print(darkfile)
    if os.path.exists(darkfile):
        print('dark have been calculated,pass')
    else:
        for i in darkpath:
            # NOTE(review): every iteration writes to the same dark.fits, so only
            # the last dark directory survives — confirm this is intended.
            darkeddata = xyy.online_mean(i)
            xyy.mkdir(os.path.join(redrive, path, 'Dark'))
            xyy.writefits(darkfile, darkeddata)
    print('Dark is over')
def online_flat():
    """Compute master flats and dark/flat-correct the science frames.

    For every flat directory the mean flat is computed (or reloaded when it
    already exists) and each science frame of the matching band/offset is
    corrected as ``(data - dark) / (flat - dark) * max(flat - dark)``, then
    written under ``redrive`` mirroring the source tree.
    """
    with open(r"/home/wangxinhua/level1/Level1/json.txt", 'r') as f:
        para = json.load(f)
    path = para['path']        # e.g. "/home/wangxinhua/20190518/HA"
    redrive = para['redrive']  # e.g. "/home/wangxinhua/nvst"
    dark_flag = int(para['dark_flag'])  # unused here
    flat_flag = int(para['flat_flag'])  # unused here
    darked_path = para['darked_path']   # unused here
    datapath, flatpath, darkpath = xyy.path_paser(path)
    # Master dark produced by online_dark().
    darkdata = xyy.readfits(
        os.path.join(redrive, path[1:], 'Dark', 'dark.fits'))[0]
    for i in flatpath:
        flatfile = os.path.join(redrive, i[1:], 'flat.fits')
        if os.path.exists(flatfile):
            print('flat have been calculated')
            # Fix: reload the existing master flat so the correction below does
            # not use an undefined (or stale, from a previous iteration)
            # `flatdata` — the original raised NameError on this path.
            flatdata = xyy.readfits(flatfile)[0]
        else:
            xyy.mkdir(os.path.join(redrive, i[1:]))
            flatdata = xyy.online_mean(i)
            xyy.writefits(flatfile, flatdata)
            print(flatfile)
        # Apply (data-dark)/(flat-dark)*max(flat-dark) to every data directory
        # whose name contains this flat's band/offset tag.
        for j in datapath:
            bandoff = i.split('/')[-1]
            if bandoff in j and bandoff in i:
                for k in os.listdir(j):
                    xyy.mkdir(os.path.join(redrive, j[1:]))
                    print(os.path.join(redrive, j[1:], k))
                    data = xyy.readfits(os.path.join(j, k))[0]
                    xyy.writefits(
                        os.path.join(redrive, j[1:], k),
                        (data - darkdata) / (flatdata - darkdata)
                        * np.max(flatdata - darkdata))
    print('flat is over')
# NOTE(review): this top-level fragment appears truncated — the flat-field loop
# at the bottom ends abruptly after `fdir=flatpath[i]`; the rest of the flat
# computation is missing from this file.
print('暗场数据文件夹:', darkpath)
print()
#=========================================================================
print('开始计算暗场!')
# darkpath entries look like Windows paths, e.g. ('G:', '\\20190518\\HA\\Dark');
# splitdrive strips the drive letter before re-rooting under redrive.
redarkpath=os.path.join(redrive,os.path.splitdrive(darkpath[0])[1])
xyy.mkdir(redarkpath)
darkfile=os.path.join(redarkpath,'dark.fits')
# Average all frames of the first dark directory once; skip when a master
# dark already exists.
if not os.path.exists(darkfile):
    dark= xyy.dirfitsaddmean(darkpath[0])
    xyy.writefits(darkfile,dark)
else:
    print('暗场已计算过!')
print()
print()
#----------------------
print('开始计算平场!')
flatfile=[]
for i in range(len(flatpath)):  # e.g. ['G:\\20190518\\HA\\FLAT00']
    fdir=flatpath[i]
def Align(DirFits, DirFlat, DirDark, DeviceNumber):
    """Align, lucky-image and optionally deconvolve one group of FITS frames.

    Parameters
    ----------
    DirFits : str
        Directory of the raw FITS group to process (one group per call).
    DirFlat, DirDark : str
        Directories of the processed flat and dark calibration data.
    DeviceNumber : int
        CUDA device index to run on.

    Relies on module-level globals (win_host, winsr_host, sitfdata_host,
    gussf_host, redrive, rcxsize, rcysize, corstart, corsize, sobel,
    only_align_no_luckyimage, pfstart, pfsize, infrq, otfrq, fsp,
    postprocess_flag, srstx, srsty, srxsize, srysize, srsize, diameter,
    diaratio, maxfre, start_r0, step_r0, LatestFitsR0, LtstDtlFts).
    """
    # NOTE(review): the four self-assignments below are no-ops kept verbatim.
    DirFits = DirFits
    DirFlat = DirFlat
    DirDark = DirDark
    DeviceNumber = DeviceNumber
    print(DirFits)
    # Pin this worker to the requested GPU and upload the host-side constants.
    cp.cuda.Device(DeviceNumber).use()
    win = cp.array(win_host)          # Hann window for power-spectrum frame selection
    winsr = cp.array(winsr_host)      # window for the r0 estimation sub-region
    sitfdata = cp.array(sitfdata_host, '<f4')  # theoretical speckle transfer functions
    gussf = cp.array(gussf_host)      # Gaussian low-pass filter for the final image
    # List the raw frames of this group.
    data_path_fits = os.listdir(DirFits)
    Year = datetime.datetime.now().strftime('%Y')
    # Build the output name from the date-rooted part of DirFits,
    # e.g. .../2020/20200215/... -> <subdir>/<group>.fits.
    aligned_path = os.path.join(DirFits[DirFits.index(Year):-7],
                                DirFits[-6:] + '.fits')
    # Skip groups that have already been processed.
    SaveFits = redrive + os.path.splitdrive(aligned_path[0:-11])[1]
    xyy.mkdir(SaveFits)
    SaveFitsName = os.path.join(SaveFits, aligned_path[-11:])
    if os.path.exists(SaveFitsName) == 0:
        # Drop frames whose file size differs from a full frame
        # (2111040 bytes — presumably header + image; confirm).
        Filter = data_path_fits.copy()
        for i in data_path_fits:
            if os.path.getsize(os.path.join(DirFits, i)) != 2111040:
                Filter.remove(i)
        numb = len(Filter)
        # Load, dark/flat-correct and stack the whole group into a 3-D cube.
        cubedata = cp.array(xyy.cubedata(DirFlat, DirDark, DirFits, Filter,
                                         rcxsize, rcysize), dtype='<f4')
        try:
            start = time.time()
            # Reference frame: the first frame of the cube; the correlation is
            # computed on a sub-window [corstart, corstart+corsize).
            ini = cubedata[0, :, :]
            initmp = ini[corstart[0]:corstart[0] + corsize[0],
                         corstart[1]:corstart[1] + corsize[1]]
            if sobel == 1:
                # Edge-enhance before correlation to sharpen the peak.
                initmp = filters.sobel(filters.gaussian(initmp, 5.0))
            t = 1  # number of frames accumulated in only-align mode
            head = fits.getheader(os.path.join(DirFits, Filter[0]))
            # Pass 1: register every remaining frame against the reference.
            for j in range(1, numb):
                data = cubedata[j, :, :]
                datatmp = data[corstart[0]:corstart[0] + corsize[0],
                               corstart[1]:corstart[1] + corsize[1]]
                if sobel == 1:
                    datatmp = filters.sobel(filters.gaussian(datatmp, 5.0))
                cc, corr = xyy.corrmaxloc_gpu(initmp, datatmp)
                tmp = xyy.imgshift_gpu(data, [-cc[0], -cc[1]])  # shifted frame
                if only_align_no_luckyimage == 1:
                    # No frame selection: accumulate directly.
                    print('Pattern without lucky image')
                    ini += tmp
                    t += 1
                else:
                    # Keep the shifted frame for lucky-imaging selection.
                    cubedata[j, :, :] = tmp[0:rcxsize, 0:rcysize]
            if only_align_no_luckyimage == 1:
                averg = ini / t
            else:
                # Frame selection: rank frames by high-frequency power in an
                # annulus [infrq, otfrq] of the normalised power spectrum.
                cubepf = cubedata[:, pfstart[0]:pfstart[0] + pfsize[0],
                                  pfstart[1]:pfstart[1] + pfsize[1]]
                cubemean = cp.mean(cubepf, axis=0)
                psdcube = cp.empty([numb, pfsize[0], pfsize[1]],
                                   dtype=cp.float32)
                for nn in range(numb):
                    tmp = cubepf[nn, :, :].copy()
                    meantmp = cp.mean(tmp)
                    # Window around the mean to suppress FFT edge leakage.
                    tmp = (tmp - meantmp) * win + meantmp
                    psd = cp.abs(cp.fft.fftshift(cp.fft.fft2(tmp)))**2
                    psd = (psd / psd[pfsize[0] // 2, pfsize[1] // 2]).astype(
                        cp.float32)
                    psdcube[nn, :, :] = psd
                psdmean = cp.mean(psdcube, axis=0)
                psdcube = psdcube / psdmean
                [Y, X] = cp.meshgrid(cp.arange(pfsize[1]), cp.arange(pfsize[0]))
                dist = ((X - pfsize[0] // 2)**2.0
                        + (Y - pfsize[1] // 2)**2.0)**0.5
                ring = cp.where((dist >= infrq) & (dist <= otfrq), 1.0,
                                0.0).astype(cp.float32)
                psdcube = psdcube * ring
                ringcube = cp.mean(cp.mean(psdcube, axis=1), axis=1)
                index0 = cp.argsort(ringcube)[::-1]  # best frames first
                # Keep the top fsp fraction, re-align them to their mean, stack.
                cubesort0 = cubedata.copy()[index0][0:int(fsp * numb), :, :]
                ini = cp.mean(cubesort0, axis=0).astype(cp.float32)
                initmp = ini[corstart[0]:corstart[0] + corsize[0],
                             corstart[1]:corstart[1] + corsize[1]]
                if sobel == 1:
                    initmp = filters.sobel(
                        filters.gaussian(cp.asnumpy(initmp), 5.0))
                for nn in range(cubesort0.shape[0]):
                    data = cubesort0[nn, :, :].copy()
                    datatmp = data[corstart[0]:corstart[0] + corsize[0],
                                   corstart[1]:corstart[1] + corsize[1]]
                    if sobel == 1:
                        datatmp = filters.sobel(
                            filters.gaussian(cp.asnumpy(datatmp), 5.0))
                    cc, corr = xyy.corrmaxloc_gpu(initmp, datatmp)
                    tmp = xyy.imgshift_gpu(data, [-cc[0], -cc[1]])
                    cubesort0[nn, :, :] = tmp
                averg = cp.mean(cubesort0, axis=0).astype(cp.float32)  # stacked image
            if postprocess_flag == 1:
                print('Starting deconvolution')
                # Estimate r0 from a sub-region, pick the matching theoretical
                # transfer function, deconvolve and low-pass filter.
                cubesr = cubedata[:, srstx:srstx + srxsize,
                                  srsty:srsty + srysize]
                try:
                    r0, index = xyy.cubesrdevr0_gpu(
                        cubesr, srsize, winsr, sitfdata, diameter, diaratio,
                        maxfre, 0.00, 0.06, start_r0, step_r0)
                except Exception as e:
                    # NOTE(review): aborts the whole process on any r0 failure.
                    sys.exit()
                sitf = xyy.GetSitf_gpu(sitfdata, maxfre, rcxsize, index)
                img = xyy.ImgPSDdeconv_gpu(averg, sitf)
                head['CODE2'] = r0  # record r0 in the FITS header
                result = xyy.ImgFilted_gpu(img, gussf)
                # Rescale so the filtered image keeps the stacked image's median.
                result = result / np.median(
                    cp.asnumpy(result)) * np.median(cp.asnumpy(averg))
                try:
                    SaveFits = redrive + os.path.splitdrive(
                        aligned_path[0:-11])[1]
                    xyy.mkdir(SaveFits)
                    SaveFitsName = os.path.join(SaveFits, aligned_path[-11:])
                except Exception as e:
                    # NOTE(review): silently keeps the names computed above.
                    pass
                xyy.writefits(SaveFitsName,
                              cp.asnumpy(result).astype(np.float32), head)
            else:
                try:
                    SaveFits = redrive + os.path.splitdrive(
                        aligned_path[0:-11])[1]
                    xyy.mkdir(SaveFits)
                    SaveFitsName = os.path.join(SaveFits, aligned_path[-11:])
                except Exception as e:
                    pass
                result = averg
                xyy.writefits(SaveFitsName,
                              cp.asnumpy(result).astype(np.float32), head)
            print('elapse:' + str(time.time() - start) + 's')
            print('Complete waiting for next group data')
            try:
                os.mkdir(LatestFitsR0)
            except Exception as e:
                pass
            # Record the latest processed file and its r0 for monitoring.
            # NOTE(review): when postprocess_flag != 1, r0 is undefined here and
            # this call raises NameError, which the outer except swallows.
            LtstDtlFts(
                os.path.join(LatestFitsR0,
                             SaveFitsName.split('/')[-2] + '.latest'), r0,
                SaveFitsName)
        except Exception as e:
            # Broad catch: print and skip the group (e.g. empty directory).
            print(e)
            pass
def align():
    """CPU/GPU-mixed alignment of pre-processed NVST data (older variant).

    Reads all tuning parameters from json.txt, scans a (hard-coded) processed
    data tree, registers each group of frames to its first frame, optionally
    performs lucky-imaging frame selection and deconvolution, and writes
    aligned/post-processed FITS files next to the data.

    NOTE(review): this function's original indentation was lost; the nesting
    reproduced here follows the statement order of the source. In particular
    the frame-selection block sits inside the per-frame `else` branch, so it
    is recomputed for every frame (O(n^2) work) — the GPU variant of this
    function performs it once after the loop; confirm which was intended.
    """
    # ---- configuration -------------------------------------------------
    f = open(r"/home/wangxinhua/level1/Level1/json.txt",'r')
    para = json.load(f)
    f.close()
    rcxsize = int(para['rcxsize'])    # output frame size (x)
    rcysize = int(para['rcysize'])    # output frame size (y)
    corstart = re.findall('\d+',para['corstart'])  # correlation window origin
    corstart = [int(i) for i in corstart]
    corsize = re.findall('\d+',para['corsize'])    # correlation window size
    corsize = [int(i) for i in corsize]
    flated_path = para['flated_path']
    sobel = int(para['sobel'])        # 1 -> edge-enhance before correlation
    path = para['path']
    only_align_no_luckyimage = int(para['only_align_no_luckyimage'])
    redrive = para['redrive']
    only_align_no_luckyimage_path = para['only_align_no_luckyimage_path']
    pfstart = re.findall('\d+',para['pfstart'])    # power-spectrum window origin
    pfstart = [int(i) for i in pfstart]
    pfsize = re.findall('\d+',para['pfsize'])      # power-spectrum window size
    pfsize = [int(i) for i in pfsize]
    lucky_align_path = para['lucky_align_path']
    win=xyy.win(int(pfsize[0]),int(pfsize[1]),0.5,winsty='hann')  # Hann window
    diameter = float(para['diameter'])   # telescope aperture
    wavelen = float(para['wavelen'])     # wavelength (Angstrom, see maxfre)
    pixsca = float(para['pixsca'])       # pixel scale
    fsp = float(para['fsp'])             # fraction of frames kept by selection
    srstx = int(para['srstx'])
    srsty = int(para['srsty'])
    srxsize = int(para['srxsize'])
    srysize = int(para['srysize'])
    postprocess_flag = int(para['postprocess_flag'])
    srsize = int(para['srsize'])
    winsr=xyy.win(srsize,srsize, 0.5, winsty='hann')
    diaratio = float(para['diaratio'])
    start_r0 = float(para['start_r0'])
    step_r0 = float(para['step_r0'])
    # Diffraction cutoff frequency in pixels (wavelen assumed in Angstrom).
    maxfre=wavelen*10.0**(-10.0)/(2.0*diameter*pixsca)*(180.0*3600.0/np.pi)
    filename = para['filename']
    sitfdata=fits.getdata(filename)            # theoretical transfer functions
    gussf=xyy.gaussf2d(rcxsize,rcysize,1.5)    # Gaussian low-pass filter
    infrq=(pfsize[0]//2)*0.05/maxfre           # selection annulus inner radius
    otfrq=(pfsize[0]//2)*0.10/maxfre           # selection annulus outer radius
    # ---- classify sub-directories: F*/f* flats, D*/d* darks, rest data --
    datapath=[]
    flatpath=[]
    darkpath=[]
    subpaths = os.listdir(path)
    for i in range(len(subpaths)):
        subpath=os.path.join(path,subpaths[i])
        if ('F' in subpaths[i]) or ('f' in subpaths[i]) :
            flatpath.append(subpath)
        elif ('D' in subpaths[i]) or ('d' in subpaths[i]):
            darkpath.append(subpath)
        else:
            datapath.append(subpath)
    # ---- locate the pre-processed data to align ------------------------
    # NOTE(review): hard-coded path overrides the configuration above.
    proceed_path = r'F:/2019-12-29chengjiang/20190518/HA'
    dirs = xyy.nvst_dirsandfiles_path(proceed_path)
    roots = dirs[0]
    fitsfile = dirs[1]
    t = 0
    # Pick the (last) root whose name contains no d/f — the science data root.
    # NOTE(review): data_fits is only bound if such a root exists.
    for i in roots:
        i = i.split(':')[1]
        if 'f' not in i and 'd' not in i and 'F' not in i and 'D' not in i:
            data_root = i
            data_fits = dirs[1][t]
        t+=1
    for i in data_fits:
        data_path_fits = os.listdir(i)
        numb = len(data_path_fits)
        # NOTE(review): hard assert — a partial (last) group aborts the run.
        assert numb == 100
        cube = np.empty([numb,rcxsize,rcysize], dtype = np.float32)
        try:
            data_dir_fitstmp = os.path.join(i,data_path_fits[0])
        except Exception as e:
            print('warning:目录'+i+'下没有fits文件')  # directory has no FITS files
            continue
        # Reference frame = first file of the group.
        ini = xyy.readfits(data_dir_fitstmp)[0]
        initmp = ini[corstart[0]:corstart[0]+corsize[0],corstart[1]:corstart[1]+corsize[1]]
        # NOTE(review): the GPU copy is taken BEFORE the optional sobel below,
        # so the reference used for correlation is never edge-enhanced while
        # each datatmp is — likely unintended; confirm.
        initmp_gpu = cp.asarray(initmp)
        print('基准文件:'+ data_dir_fitstmp)  # base/reference file
        if sobel == 1:
            initmp = filters.sobel(filters.gaussian(initmp,5.0))
        t = 0
        for j in data_path_fits:
            head=fits.getheader(os.path.join(i,j))
            if t !=0:
                # NOTE(review): "\\" separator is Windows-specific.
                data = xyy.readfits(i+"\\"+j)[0]
                datatmp = data[corstart[0]:corstart[0]+corsize[0],corstart[1]:corstart[1]+corsize[1]]
                if sobel == 1:
                    datatmp = filters.sobel(filters.gaussian(datatmp,5.0))
                datatmp_gpu = cp.asarray(datatmp)
                cc,corr = xyy.corrmaxloc_gpu(initmp_gpu,datatmp_gpu)
                tmp = xyy.imgshift(data,[-cc[0],-cc[1]])  # shifted (aligned) frame
                if only_align_no_luckyimage == 1:
                    # Align-only mode: accumulate without frame selection.
                    print('不选帧对齐模式')
                    ini += tmp
                else:
                    # Lucky-imaging mode: store the shifted frame, then rank
                    # all frames by annular power-spectrum energy.
                    cube[t,:,:] = tmp[0:rcxsize,0:rcysize]
                    cubepf=cube[:,pfstart[0]:pfstart[0]+pfsize[0],pfstart[1]:pfstart[1]+pfsize[1]]
                    cubemean=np.mean(cubepf, axis=0)
                    psdcube = np.empty([numb,pfsize[0],pfsize[1]], dtype=np.float32)
                    for nn in range(numb):
                        tmp=cubepf[nn,:,:].copy()
                        meantmp=np.mean(tmp)
                        # Window about the mean to limit FFT edge leakage.
                        tmp=(tmp-meantmp)*win+meantmp
                        psd=np.abs(fft.fftshift(fft.fft2(tmp)))**2
                        psd=(psd/psd[pfsize[0]//2,pfsize[1]//2]).astype(np.float32)
                        psdcube[nn,:,:]=psd
                    psdmean=np.mean(psdcube, axis=0)
                    psdcube=psdcube/psdmean
                    [Y,X]=np.meshgrid(np.arange(pfsize[1]),np.arange(pfsize[0]))
                    dist=((X-pfsize[0]//2)**2.0+(Y-pfsize[1]//2)**2.0)**0.5
                    # Annulus [infrq, otfrq]: mid-frequency band used for ranking.
                    ring=np.where((dist>=infrq)&(dist<=otfrq), 1.0, 0.0).astype(np.float32)
                    psdcube=psdcube*ring
                    ringcube=np.mean(np.mean(psdcube, axis=1),axis=1)
                    index0=np.argsort(ringcube)[::-1]  # best frames first
                    # Keep the top fsp fraction, re-align to their mean, stack.
                    cubesort0=cube.copy()[index0][0:int(fsp*numb),:,:]
                    ini=np.mean(cubesort0, axis=0).astype(np.float32)
                    initmp=ini[corstart[0]:corstart[0]+corsize[0],corstart[1]:corstart[1]+corsize[1]]
                    if sobel==1:
                        initmp=filters.sobel(filters.gaussian(initmp,5.0))
                    initmp_gpu=cp.asarray(initmp)
                    for nn in range(cubesort0.shape[0]):
                        data=cubesort0[nn,:,:].copy()
                        datatmp=data[corstart[0]:corstart[0]+corsize[0],corstart[1]:corstart[1]+corsize[1]]
                        if sobel==1:
                            datatmp=filters.sobel(filters.gaussian(datatmp,5.0))
                        datatmp_gpu=cp.asarray(datatmp)
                        cc,corr=xyy.corrmaxloc_gpu(initmp_gpu, datatmp_gpu)
                        tmp=xyy.imgshift(data,[-cc[0],-cc[1]])
                        cubesort0[nn,:,:]=tmp
                    averg=np.mean(cubesort0, axis=0).astype(np.float32)  # stacked image
            t +=1
        # ---- save the aligned result for this group -----------------------
        aligned_path = i+'/aligned'
        print('对齐后文件存储位置:'+path+os.path.splitdrive(aligned_path)[1])
        if only_align_no_luckyimage == 1:
            try:
                os.mkdir(path+os.path.splitdrive(aligned_path)[1])
            except Exception as e:
                print('警告:'+aligned_path+'文件夹已经存在')  # folder already exists
            # NOTE(review): writes initmp (the correlation window), not the
            # accumulated full frame ini — looks wrong; confirm.
            xyy.writefits(path+os.path.splitdrive(aligned_path)[1]+'\\'+'aligned.fits',initmp/len(data_path_fits))
        else:
            try:
                os.mkdir(path+os.path.splitdrive(aligned_path)[1])
            except Exception as e:
                print(path+aligned_path+'文件夹已经存在')  # folder already exists
            xyy.writefits(path+os.path.splitdrive(aligned_path)[1]+'\\'+'aligned.fits',averg)
        # ---- optional deconvolution post-processing ----------------------
        if postprocess_flag == 1:
            cubesr=cube[:,srstx:srstx+srxsize,srsty:srsty+srysize]
            # Estimate r0, select the matching theoretical transfer function,
            # deconvolve and low-pass filter the stacked image.
            r0,index=xyy.cubesrdevr0(cubesr,srsize,winsr,sitfdata,diameter,diaratio,maxfre,0.00,0.06,start_r0,step_r0)
            sitf=xyy.GetSitf(sitfdata,maxfre,rcxsize,index)
            img=xyy.ImgPSDdeconv(averg,sitf)
            head['CODE2'] = r0  # record r0 in the FITS header
            result=xyy.ImgFilted(img,gussf)
            # Rescale so the filtered image keeps the stacked image's median.
            result=result/np.median(result)*np.median(averg)
            fitsname = path+os.path.splitdrive(aligned_path)[1]+'\\'+'post_aligned.fits'
            xyy.writefits(fitsname,result.astype(np.float32),head)
def dark():
    """Compute the master dark and, when requested, dark-subtract the raw data.

    Reads the pipeline configuration, classifies the sub-directories of
    ``path`` into dark and science data, averages the dark frames into
    ``<redrive>/.../dark.fits``, and — when ``dark_flag==1`` and
    ``flat_flag==0`` — writes dark-subtracted copies of every science frame
    under ``darked_path``.
    """
    with open(r"/home/wangxinhua/level1/Level1/json.txt", 'r') as f:
        para = json.load(f)
    path = para['path']
    redrive = para['redrive']
    dark_flag = int(para['dark_flag'])
    flat_flag = int(para['flat_flag'])
    darked_path = para['darked_path']
    # Classify sub-directories: names containing D/d are darks, names with
    # F/f are flats (ignored here), everything else is science data.
    darkpath = []
    datapath = []
    for name in os.listdir(path):
        subpath = os.path.join(path, name)
        if ('D' in name) or ('d' in name):
            darkpath.append(subpath)
        elif ('F' not in name) and ('f' not in name):
            datapath.append(subpath)
    if len(darkpath) == 0:
        print('没有暗场数据!')
        # Fix: keep darkpath a list — the original assigned the raw input
        # string, so darkpath[0] below picked up only its first character.
        darkpath = [input('请输入暗场的路径(格式例如:E:\dark20180312):')]
    elif len(datapath) == 0:
        print('没有观测数据,停止数据处理!')
    print('观测数据文件夹:', datapath)
    print('使用的暗场数据文件夹:', darkpath)
    print('开始计算暗场!')
    # Mirror the dark directory under redrive (drive letter stripped).
    redarkpath = redrive + os.path.splitdrive(darkpath[0])[1]
    print(redarkpath)
    xyy.mkdir(redarkpath)
    darkfile = os.path.join(redarkpath, 'dark.fits')
    if not os.path.exists(darkfile):
        dark = np.array(xyy.dirfitsaddmean(darkpath[0]), dtype=np.float32)
        xyy.writefits(darkfile, dark)
        print('暗场计算完毕')
    else:
        print('暗场已计算过!')
    if dark_flag == 1 and flat_flag == 0:
        # Dark-only processing requested: subtract the master dark from every
        # raw frame and save the results under darked_path.
        dark = xyy.readfits(redarkpath + '\\' + 'dark.fits')[0]
        print('开始计算只做暗场处理的请求')
        try:
            xyy.mkdir(darked_path)
        except Exception:
            print('folder has existed')
        # Pick the data directories (roots whose names contain no d/f).
        dirs = xyy.nvst_dirsandfiles_path(path)
        roots = dirs[0]
        fitsfile = dirs[1]
        t = 0
        for i in roots:
            if 'f' not in i and 'd' not in i and 'F' not in i and 'D' not in i:
                data_root = i
                data_fits = dirs[1][t]
            t += 1
        t = 0
        for i in data_fits:
            for j in os.listdir(i):
                savepath = darked_path + os.path.splitdrive(i)[1]
                xyy.mkdir(savepath)
                print('正在计算第' + str(t) + '组')
                # NOTE(review): the +32768 offset presumably re-biases signed
                # 16-bit data into the unsigned range — confirm.
                xyy.writefits(
                    savepath + '\\' + j,
                    np.array(xyy.readfits(os.path.join(i, j))[0] - dark + 32768,
                             dtype=np.float32))
            t += 1
        print('处理完成,文件保存在:' + darked_path)
def align():
    """GPU alignment, lucky imaging and deconvolution of pre-processed data.

    Reads all tuning parameters from json.txt, walks the dark/flat-corrected
    data tree under ``redrive``, registers each group of frames to its first
    frame, performs power-spectrum frame selection and stacking, and
    optionally deconvolves with a theoretical transfer function chosen by the
    estimated r0.

    NOTE(review): the original indentation was lost; the nesting reproduced
    here follows the statement order of the source — confirm against the
    sibling GPU variants where ambiguous.
    """
    # ---- configuration -------------------------------------------------
    f = open(r"/home/wangxinhua/level1/Level1rev02/json.txt", 'r')
    para = json.load(f)
    f.close()
    rcxsize = int(para['rcxsize'])    # output frame size (x)
    rcysize = int(para['rcysize'])    # output frame size (y)
    corstart = re.findall('\d+', para['corstart'])  # correlation window origin
    corstart = [int(i) for i in corstart]
    corsize = re.findall('\d+', para['corsize'])    # correlation window size
    corsize = [int(i) for i in corsize]
    flated_path = para['flated_path']
    sobel = int(para['sobel'])        # 1 -> edge-enhance before correlation
    path = para['path']
    only_align_no_luckyimage = int(para['only_align_no_luckyimage'])
    redrive = para['redrive']
    only_align_no_luckyimage_path = para['only_align_no_luckyimage_path']
    pfstart = re.findall('\d+', para['pfstart'])  # power-spectrum window origin
    pfstart = [int(i) for i in pfstart]
    pfsize = re.findall('\d+', para['pfsize'])    # power-spectrum window size
    pfsize = [int(i) for i in pfsize]
    lucky_align_path = para['lucky_align_path']
    win = xyy.win_gpu(int(pfsize[0]), int(pfsize[1]), 0.5, winsty='hann')  # Hann window
    diameter = float(para['diameter'])   # telescope aperture
    wavelen = float(para['wavelen'])     # wavelength (Angstrom, see maxfre)
    pixsca = float(para['pixsca'])       # pixel scale
    fsp = float(para['fsp'])             # fraction of frames kept by selection
    srstx = int(para['srstx'])
    srsty = int(para['srsty'])
    srxsize = int(para['srxsize'])
    srysize = int(para['srysize'])
    postprocess_flag = int(para['postprocess_flag'])
    srsize = int(para['srsize'])
    winsr = xyy.win_gpu(srsize, srsize, 0.5, winsty='hann')
    diaratio = float(para['diaratio'])
    start_r0 = float(para['start_r0'])
    step_r0 = float(para['step_r0'])
    # Diffraction cutoff frequency in pixels (wavelen assumed in Angstrom).
    maxfre = wavelen * 10.0**(-10.0) / (2.0 * diameter * pixsca) * (180.0 * 3600.0 / np.pi)
    filename = para['filename']
    sitfdata = cp.array(fits.getdata(filename), '<f4')  # theoretical transfer functions
    gussf = xyy.gaussf2d_gpu(rcxsize, rcysize, 1.5)     # Gaussian low-pass filter
    infrq = (pfsize[0] // 2) * 0.05 / maxfre  # selection annulus inner radius
    otfrq = (pfsize[0] // 2) * 0.10 / maxfre  # selection annulus outer radius
    # ---- locate the pre-processed (dark/flat-corrected) tree -----------
    # Strip the drive prefix when present, otherwise the leading separator.
    try:
        proceed_path = os.path.join(redrive, path[2:])
        proceed_path.split(':')[1]
    except Exception as e:
        proceed_path = os.path.join(redrive, path[1:])
    datapath, flatpath, darkpath = xyy.path_paser(proceed_path)
    for i in datapath:
        data_path_fits = os.listdir(i)
        numb = len(data_path_fits)
        # A full group has 100 frames; a partial group is the trailing one.
        try:
            assert numb == 100
        except Exception as e:
            print('You are working on the last set of data')
        cube = cp.empty([numb, rcxsize, rcysize], dtype=cp.float32)
        try:
            data_dir_fitstmp = os.path.join(i, data_path_fits[0])
        except Exception as e:
            print('warning:目录' + i + '下没有fits文件')  # directory has no FITS files
            continue
        # Reference frame = first file of the group.
        ini = xyy.readfits(data_dir_fitstmp)[0]
        initmp = ini[corstart[0]:corstart[0] + corsize[0],
                     corstart[1]:corstart[1] + corsize[1]]
        print('basefile:' + data_dir_fitstmp)
        if sobel == 1:
            initmp = filters.sobel(filters.gaussian(initmp, 5.0))
        t = 0
        # NOTE(review): slot 0 stores the correlation window, not the full
        # reference frame — shapes only match if corsize == (rcxsize, rcysize).
        cube[0, :, :] = initmp[0:rcxsize, 0:rcysize]
        # ---- pass 1: register every remaining frame to the reference ----
        for j in data_path_fits:
            head = fits.getheader(os.path.join(i, j))
            if t != 0:
                data = xyy.readfits(i + "/" + j)[0]
                datatmp = data[corstart[0]:corstart[0] + corsize[0],
                               corstart[1]:corstart[1] + corsize[1]]
                if sobel == 1:
                    datatmp = filters.sobel(filters.gaussian(datatmp, 5.0))
                cc, corr = xyy.corrmaxloc_gpu(cp.array(initmp, dtype='<f4'),
                                              cp.array(datatmp, dtype='<f4'))
                tmp = xyy.imgshift_gpu(cp.array(data, dtype='<f4'),
                                       [-cc[0], -cc[1]])  # shifted frame
                if only_align_no_luckyimage == 1:
                    # Align-only mode: accumulate without frame selection.
                    print('不选帧对齐模式')
                    ini += tmp
                else:
                    # Lucky-imaging mode: keep the shifted frame for selection.
                    cube[t, :, :] = tmp[0:rcxsize, 0:rcysize]
            t += 1
        # ---- frame selection: rank by annular power-spectrum energy -----
        cubepf = cube[:, pfstart[0]:pfstart[0] + pfsize[0],
                      pfstart[1]:pfstart[1] + pfsize[1]]
        cubemean = cp.mean(cubepf, axis=0)
        psdcube = cp.empty([numb, pfsize[0], pfsize[1]], dtype=cp.float32)
        for nn in range(numb):
            tmp = cubepf[nn, :, :].copy()
            meantmp = cp.mean(tmp)
            # Window about the mean to limit FFT edge leakage.
            tmp = (tmp - meantmp) * win + meantmp
            psd = cp.abs(cp.fft.fftshift(cp.fft.fft2(tmp)))**2
            psd = (psd / psd[pfsize[0] // 2, pfsize[1] // 2]).astype(
                cp.float32)
            psdcube[nn, :, :] = psd
        psdmean = cp.mean(psdcube, axis=0)
        psdcube = psdcube / psdmean
        [Y, X] = cp.meshgrid(cp.arange(pfsize[1]), cp.arange(pfsize[0]))
        dist = ((X - pfsize[0] // 2)**2.0 + (Y - pfsize[1] // 2)**2.0)**0.5
        # Annulus [infrq, otfrq]: mid-frequency band used for ranking.
        ring = cp.where((dist >= infrq) & (dist <= otfrq), 1.0,
                        0.0).astype(cp.float32)
        psdcube = psdcube * ring
        ringcube = cp.mean(cp.mean(psdcube, axis=1), axis=1)
        index0 = cp.argsort(ringcube)[::-1]  # best frames first
        # Fancy-indexing round-trips through host memory (CuPy limitation at
        # the time of writing), then back to the GPU.
        cube = cp.asnumpy(cube)
        index0 = cp.asnumpy(index0)
        # Keep the top fsp fraction of frames.
        cubesort0 = cube.copy()[index0][0:int(fsp * numb), :, :]
        cubesort0 = cp.array(cubesort0)
        cube = cp.array(cube, dtype='<f4')
        # ---- pass 2: re-align the selected frames to their mean, stack ---
        ini = cp.mean(cubesort0, axis=0).astype(cp.float32)
        initmp = ini[corstart[0]:corstart[0] + corsize[0],
                     corstart[1]:corstart[1] + corsize[1]]
        if sobel == 1:
            initmp = filters.sobel(filters.gaussian(cp.asnumpy(initmp), 5.0))
        for nn in range(cubesort0.shape[0]):
            data = cubesort0[nn, :, :].copy()
            datatmp = data[corstart[0]:corstart[0] + corsize[0],
                           corstart[1]:corstart[1] + corsize[1]]
            if sobel == 1:
                datatmp = filters.sobel(
                    filters.gaussian(cp.asnumpy(datatmp), 5.0))
            cc, corr = xyy.corrmaxloc_gpu(initmp, datatmp)
            tmp = xyy.imgshift_gpu(data, [-cc[0], -cc[1]])
            cubesort0[nn, :, :] = tmp
        averg = cp.mean(cubesort0, axis=0).astype(cp.float32)  # stacked image
        if only_align_no_luckyimage == 1:
            averg = ini / t
        # ---- save the aligned result for this group ----------------------
        aligned_path = i + '/aligned'
        try:
            print('location of aligned:' + path +
                  os.path.splitdrive(aligned_path)[1])
        except Exception as e:
            print('location of aligned:' + aligned_path)
        if only_align_no_luckyimage == 1:
            try:
                os.mkdir(path + os.path.splitdrive(aligned_path)[1])
            except Exception as e:
                print('warning:' + aligned_path + 'existed')
            # NOTE(review): writes initmp (the correlation window), not the
            # accumulated full frame ini — looks wrong; confirm.
            xyy.writefits(
                path + os.path.splitdrive(aligned_path)[1] + '/' +
                'aligned.fits', cp.asnumpy(initmp / len(data_path_fits)))
        else:
            try:
                os.mkdir(path + os.path.splitdrive(aligned_path)[1])
            except Exception as e:
                # Fall back to creating the directory next to the data.
                xyy.mkdir(aligned_path)
            # NOTE(review): nesting of this write relative to the except block
            # is ambiguous in the collapsed source; reproduced at branch level.
            xyy.writefits(aligned_path + '/' + 'aligned.fits',
                          cp.asnumpy(averg))
        # ---- optional deconvolution post-processing ----------------------
        if postprocess_flag == 1:
            cubesr = cube[:, srstx:srstx + srxsize, srsty:srsty + srysize]
            # Estimate r0, pick the matching theoretical transfer function,
            # deconvolve, and low-pass filter.
            try:
                r0, index = xyy.cubesrdevr0_gpu(cubesr, srsize, winsr,
                                                sitfdata, diameter, diaratio,
                                                maxfre, 0.00, 0.06, start_r0,
                                                step_r0)
            except Exception as e:
                # NOTE(review): aborts the whole process on any r0 failure.
                print(cube)
                print(cubesr)
                sys.exit()
            sitf = xyy.GetSitf_gpu(sitfdata, maxfre, rcxsize, index)
            img = xyy.ImgPSDdeconv_gpu(averg, sitf)
            head['CODE2'] = r0  # record r0 in the FITS header
            result = xyy.ImgFilted_gpu(img, gussf)
            # Rescale so the filtered image keeps the stacked image's median.
            result = result / np.median(cp.asnumpy(result)) * np.median(
                cp.asnumpy(averg))
            try:
                fitsname = redrive + os.path.splitdrive(
                    aligned_path)[1] + '/' + 'post_aligned.fits'
                xyy.mkdir(redrive + os.path.splitdrive(aligned_path)[1])
            except Exception as e:
                xyy.mkdir(os.path.join(redrive, i, 'aligned'))
                fitsname = os.path.join(redrive, i, 'aligned',
                                        'post_aligned.fits')
            xyy.writefits(fitsname,
                          cp.asnumpy(result).astype(np.float32), head)
def align(images, gpuid):
    """Align, lucky-image and optionally deconvolve groups of frames on one GPU.

    Parameters
    ----------
    images : iterable of str
        Data directories to process (one group of frames per directory).
    gpuid : int
        CUDA device index this worker should use.

    Relies on module-level globals for all tuning parameters (pfsize, srsize,
    filename, rcxsize, rcysize, corstart, corsize, sobel,
    only_align_no_luckyimage, pfstart, infrq, otfrq, fsp, postprocess_flag,
    srstx, srsty, srxsize, srysize, diameter, diaratio, maxfre, start_r0,
    step_r0, path, redrive).
    """
    import cupy as cp  # local import so the module can load without CUDA
    with cp.cuda.Device(gpuid):
        # Windows and theoretical transfer functions, uploaded per device.
        win=xyy.win_gpu(int(pfsize[0]),int(pfsize[1]),0.5,winsty='hann')
        winsr=xyy.win_gpu(srsize,srsize, 0.5, winsty='hann')
        sitfdata=cp.array(fits.getdata(filename),'<f4')
        gussf=xyy.gaussf2d_gpu(rcxsize,rcysize,1.5)
        # NOTE(review): MPI-based work partitioning was removed; `images` is
        # already this worker's share.
        local_data = images
        for i in local_data:
            data_path_fits = os.listdir(i)
            numb = len(data_path_fits)
            # A full group has 100 frames; a partial group is the trailing one.
            try:
                assert numb == 100
            except Exception as e:
                print('You are working on the last set of data')
            # NOTE(review): this allocation is dead — it is immediately
            # replaced by the cp.array(...) result below.
            cubedata = cp.empty([numb,rcxsize,rcysize], dtype = cp.float32)
            try:
                data_dir_fitstmp = os.path.join(i,data_path_fits[0])
            except Exception as e:
                print('warning:目录'+i+'下没有fits文件')  # directory has no FITS files
                continue
            # Load the whole group as a 3-D cube on the GPU.
            cubedata = cp.array(xyy.cubedata(i, data_path_fits,rcxsize,rcysize),dtype='<f4')
            # Reference frame = first frame; correlate on a sub-window.
            ini = cubedata[0,:,:]
            initmp = ini[corstart[0]:corstart[0]+corsize[0],corstart[1]:corstart[1]+corsize[1]]
            print('basefile:'+ data_dir_fitstmp)
            if sobel == 1:
                initmp = filters.sobel(filters.gaussian(initmp,5.0))
            # NOTE(review): t is never incremented in this variant, so the
            # only-align branch below divides by zero (`averg = ini/t`).
            t = 0
            head=fits.getheader(os.path.join(i,data_path_fits[0]))
            # ---- pass 1: register every remaining frame to the reference ---
            for j in range(1,numb):
                data = cubedata[j,:,:]
                datatmp = data[corstart[0]:corstart[0]+corsize[0],corstart[1]:corstart[1]+corsize[1]]
                if sobel == 1:
                    datatmp = filters.sobel(filters.gaussian(datatmp,5.0))
                cc,corr = xyy.corrmaxloc_gpu(initmp,datatmp)
                tmp = xyy.imgshift_gpu(data,[-cc[0],-cc[1]])  # shifted frame
                if only_align_no_luckyimage == 1:
                    # Align-only mode: accumulate without frame selection.
                    print('不选帧对齐模式')
                    ini += tmp
                else:
                    # Lucky-imaging mode: keep the shifted frame for selection.
                    cubedata[j,:,:] = tmp[0:rcxsize,0:rcysize]
            # ---- frame selection: rank by annular power-spectrum energy ----
            cubepf=cubedata[:,pfstart[0]:pfstart[0]+pfsize[0],pfstart[1]:pfstart[1]+pfsize[1]]
            cubemean=cp.mean(cubepf, axis=0)
            psdcube = cp.empty([numb,pfsize[0],pfsize[1]], dtype=cp.float32)
            for nn in range(numb):
                tmp=cubepf[nn,:,:].copy()
                meantmp=cp.mean(tmp)
                # Window about the mean to limit FFT edge leakage.
                tmp=(tmp-meantmp)*win+meantmp
                psd=cp.abs(cp.fft.fftshift(cp.fft.fft2(tmp)))**2
                psd=(psd/psd[pfsize[0]//2,pfsize[1]//2]).astype(cp.float32)
                psdcube[nn,:,:]=psd
            psdmean=cp.mean(psdcube, axis=0)
            psdcube=psdcube/psdmean
            [Y,X]=cp.meshgrid(cp.arange(pfsize[1]),cp.arange(pfsize[0]))
            dist=((X-pfsize[0]//2)**2.0+(Y-pfsize[1]//2)**2.0)**0.5
            # Annulus [infrq, otfrq]: mid-frequency band used for ranking.
            ring=cp.where((dist>=infrq)&(dist<=otfrq), 1.0, 0.0).astype(cp.float32)
            psdcube=psdcube*ring
            ringcube=cp.mean(cp.mean(psdcube, axis=1),axis=1)
            index0=cp.argsort(ringcube)[::-1]  # best frames first
            # Keep the top fsp fraction of frames.
            cubesort0=cubedata.copy()[index0][0:int(fsp*numb),:,:]
            # ---- pass 2: re-align the selected frames to their mean, stack --
            ini=cp.mean(cubesort0, axis=0).astype(cp.float32)
            initmp=ini[corstart[0]:corstart[0]+corsize[0],corstart[1]:corstart[1]+corsize[1]]
            if sobel==1:
                initmp=filters.sobel(filters.gaussian(cp.asnumpy(initmp),5.0))
            for nn in range(cubesort0.shape[0]):
                data=cubesort0[nn,:,:].copy()
                datatmp=data[corstart[0]:corstart[0]+corsize[0],corstart[1]:corstart[1]+corsize[1]]
                if sobel==1:
                    datatmp=filters.sobel(filters.gaussian(cp.asnumpy(datatmp),5.0))
                cc,corr=xyy.corrmaxloc_gpu(initmp, datatmp)
                tmp=xyy.imgshift_gpu(data,[-cc[0],-cc[1]])
                cubesort0[nn,:,:]=tmp
            averg=cp.mean(cubesort0, axis=0).astype(cp.float32)  # stacked image
            if only_align_no_luckyimage == 1:
                averg = ini/t
            # ---- save the aligned result for this group ---------------------
            aligned_path = i+'/aligned'
            try:
                print('location of aligned:'+path+os.path.splitdrive(aligned_path)[1])
            except Exception as e:
                print('location of aligned:'+aligned_path)
            if only_align_no_luckyimage == 1:
                try:
                    os.mkdir(path+os.path.splitdrive(aligned_path)[1])
                except Exception as e:
                    print('warning:'+aligned_path+'existed')
                # NOTE(review): writes initmp (the correlation window), not the
                # accumulated full frame ini — looks wrong; confirm.
                xyy.writefits(path+os.path.splitdrive(aligned_path)[1]+'/'+'aligned.fits',cp.asnumpy(initmp/len(data_path_fits)))
            else:
                try:
                    os.mkdir(path+os.path.splitdrive(aligned_path)[1])
                except Exception as e:
                    # Fall back to creating the directory next to the data.
                    xyy.mkdir(aligned_path)
                # NOTE(review): nesting of this write relative to the except
                # block is ambiguous in the collapsed source; reproduced at
                # branch level.
                xyy.writefits(aligned_path+'/'+'aligned.fits',cp.asnumpy(averg))
            # ---- optional deconvolution post-processing ---------------------
            if postprocess_flag == 1:
                cubesr=cubedata[:,srstx:srstx+srxsize,srsty:srsty+srysize]
                # Estimate r0, pick the matching theoretical transfer function,
                # deconvolve, and low-pass filter.
                try:
                    r0,index=xyy.cubesrdevr0_gpu(cubesr,srsize,winsr,sitfdata,diameter,diaratio,maxfre,0.00,0.06,start_r0,step_r0)
                except Exception as e:
                    # NOTE(review): aborts the whole process on any r0 failure.
                    print(cubesr)
                    sys.exit()
                sitf=xyy.GetSitf_gpu(sitfdata,maxfre,rcxsize,index)
                img=xyy.ImgPSDdeconv_gpu(averg,sitf)
                head['CODE2'] = r0  # record r0 in the FITS header
                result=xyy.ImgFilted_gpu(img,gussf)
                # Rescale so the filtered image keeps the stacked image's median.
                result=result/np.median(cp.asnumpy(result))*np.median(cp.asnumpy(averg))
                try:
                    fitsname = redrive+os.path.splitdrive(aligned_path)[1]+'/'+'post_aligned.fits'
                    xyy.mkdir(redrive+os.path.splitdrive(aligned_path)[1])
                except Exception as e:
                    xyy.mkdir(os.path.join(redrive,i,'aligned'))
                    fitsname = os.path.join(redrive,i,'aligned','post_aligned.fits')
                xyy.writefits(fitsname,cp.asnumpy(result).astype(np.float32),head)
def process_IN_CREATE(self, event):
    """pyinotify IN_CREATE handler: when a new time-stamped observation
    folder appears, dark/flat-correct its frames, lucky-image them
    (frame selection + alignment + stacking) and optionally deconvolve,
    writing results under ``redrive``.

    Parameters
    ----------
    event : pyinotify event; ``event.pathname`` is the newly created folder.
        The last 6 characters of the folder name are assumed to be a
        time stamp — TODO confirm against the watcher setup.
    """
    # ---- load processing parameters --------------------------------------
    with open(r"/home/wangxinhua/level1/Level1rev04/json.txt", 'r') as f:
        para = json.load(f)
    with open(r'/home/wangxinhua/flag.txt', 'r') as f:
        path = f.readline()
    path = path + '/HA'          # e.g. "/home/wangxinhua/20190518/HA"
    redrive = para['redrive']    # output drive, e.g. "/home/wangxinhua/nvst"
    darked_path = para['darked_path']
    rcxsize = int(para['rcxsize'])
    rcysize = int(para['rcysize'])
    corstart = [int(v) for v in re.findall(r'\d+', para['corstart'])]
    corsize = [int(v) for v in re.findall(r'\d+', para['corsize'])]
    sobel = int(para['sobel'])
    only_align_no_luckyimage = int(para['only_align_no_luckyimage'])
    only_align_no_luckyimage_path = para['only_align_no_luckyimage_path']
    pfstart = [int(v) for v in re.findall(r'\d+', para['pfstart'])]
    pfsize = [int(v) for v in re.findall(r'\d+', para['pfsize'])]
    lucky_align_path = para['lucky_align_path']
    # apodizing window used on the power-spectrum patch
    win = xyy.win_gpu(int(pfsize[0]), int(pfsize[1]), 0.5, winsty='hann')
    diameter = float(para['diameter'])
    wavelen = float(para['wavelen'])
    pixsca = float(para['pixsca'])
    fsp = float(para['fsp'])             # fraction of best frames to keep
    srstx = int(para['srstx'])
    srsty = int(para['srsty'])
    srxsize = int(para['srxsize'])
    srysize = int(para['srysize'])
    postprocess_flag = int(para['postprocess_flag'])
    srsize = int(para['srsize'])
    winsr = xyy.win_gpu(srsize, srsize, 0.5, winsty='hann')
    diaratio = float(para['diaratio'])
    start_r0 = float(para['start_r0'])
    step_r0 = float(para['step_r0'])
    # cutoff frequency in arcsec^-1 (wavelen is in Angstrom: 1e-10 m)
    maxfre = wavelen * 10.0**(-10.0) / (2.0 * diameter * pixsca) * (180.0 * 3600.0 / np.pi)
    filename = para['filename']
    sitfdata = cp.array(fits.getdata(filename), '<f4')
    gussf = xyy.gaussf2d_gpu(rcxsize, rcysize, 1.5)
    infrq = (pfsize[0] // 2) * 0.05 / maxfre   # inner radius of selection ring
    otfrq = (pfsize[0] // 2) * 0.10 / maxfre   # outer radius of selection ring
    datapath, flatpath, darkpath = xyy.path_paser(path)
    new_path = event.pathname
    if_has_next_folder = new_path[:-6]   # parent dir of the time-stamped folders
    a = 1
    while a:
        # BUGFIX: original listcomp had a stray '}' before the closing ']'.
        time_new = [int(v) for v in os.listdir(if_has_next_folder)]
        # BUGFIX: was `if np.where(cond, 1, 0):` which raises
        # "truth value of an array is ambiguous". The intent: a folder with a
        # later time stamp exists, so this folder is full and can be processed.
        if any(int(new_path[-6:]) < v for v in time_new):
            # NOTE(review): dark/flat are assigned the same *file list* as the
            # data frames here, so (data - dark)/(flat - dark) below cannot
            # work as written — the real dark/flat arrays must be loaded.
            dark = flat = datafits = os.listdir(new_path)
            a = 0
            numb = len(datafits)
            # BUGFIX: buffer was allocated as `cube` but used as `cubedata`
            # everywhere below (NameError); unified to `cubedata`.
            cubedata = cp.empty([numb, rcxsize, rcysize], dtype='float32')
            t = 0
            for i in datafits:
                data = xyy.readfits(os.path.join(new_path, i))[0]
                # dark/flat correction, scaled by the flat's dynamic range
                cubedata[t, :, :] = cp.array(
                    (data - dark) / (flat - dark) * np.max(flat - dark),
                    dtype='<f4')[0:rcxsize, 0:rcysize]
                t += 1
            ini = cubedata[0, :, :]
            initmp = ini[corstart[0]:corstart[0] + corsize[0],
                         corstart[1]:corstart[1] + corsize[1]]
            # NOTE(review): data_dir_fitstmp is not defined in this scope —
            # presumably meant to be the base file name; confirm upstream.
            print('basefile:' + data_dir_fitstmp)
            if sobel == 1:
                initmp = filters.sobel(filters.gaussian(initmp, 5.0))
            t = 0
            # ---- align every frame to the first one --------------------------
            # NOTE(review): data_path_fits is also undefined here; the header
            # presumably should come from the first data file — confirm.
            head = fits.getheader(os.path.join(i, data_path_fits[0]))
            for j in range(1, numb):
                data = cubedata[j, :, :]
                datatmp = data[corstart[0]:corstart[0] + corsize[0],
                               corstart[1]:corstart[1] + corsize[1]]
                if sobel == 1:
                    datatmp = filters.sobel(filters.gaussian(datatmp, 5.0))
                cc, corr = xyy.corrmaxloc_gpu(initmp, datatmp)
                tmp = xyy.imgshift_gpu(data, [-cc[0], -cc[1]])  # aligned frame
                if only_align_no_luckyimage == 1:
                    # align-only mode: accumulate directly, no frame selection
                    print('不选帧对齐模式')
                    ini += tmp
                else:
                    # lucky-imaging mode: keep the aligned frame for selection
                    cubedata[j, :, :] = tmp[0:rcxsize, 0:rcysize]
            # ---- frame selection: 1) power spectrum 2) ring integral 3) sort -
            cubepf = cubedata[:, pfstart[0]:pfstart[0] + pfsize[0],
                              pfstart[1]:pfstart[1] + pfsize[1]]
            cubemean = cp.mean(cubepf, axis=0)
            psdcube = cp.empty([numb, pfsize[0], pfsize[1]], dtype=cp.float32)
            for nn in range(numb):
                tmp = cubepf[nn, :, :].copy()
                meantmp = cp.mean(tmp)
                tmp = (tmp - meantmp) * win + meantmp  # apodize about the mean
                psd = cp.abs(cp.fft.fftshift(cp.fft.fft2(tmp)))**2
                # normalise by the DC component (centre after fftshift)
                psd = (psd / psd[pfsize[0] // 2, pfsize[1] // 2]).astype(cp.float32)
                psdcube[nn, :, :] = psd
            psdmean = cp.mean(psdcube, axis=0)
            psdcube = psdcube / psdmean
            [Y, X] = cp.meshgrid(cp.arange(pfsize[1]), cp.arange(pfsize[0]))
            dist = ((X - pfsize[0] // 2)**2.0 + (Y - pfsize[1] // 2)**2.0)**0.5
            ring = cp.where((dist >= infrq) & (dist <= otfrq), 1.0, 0.0).astype(cp.float32)
            psdcube = psdcube * ring
            ringcube = cp.mean(cp.mean(psdcube, axis=1), axis=1)  # ring energy/frame
            index0 = cp.argsort(ringcube)[::-1]                   # best frames first
            # ---- keep the best fsp*numb frames, re-align, stack --------------
            cubesort0 = cubedata.copy()[index0][0:int(fsp * numb), :, :]
            ini = cp.mean(cubesort0, axis=0).astype(cp.float32)
            initmp = ini[corstart[0]:corstart[0] + corsize[0],
                         corstart[1]:corstart[1] + corsize[1]]
            if sobel == 1:
                initmp = filters.sobel(filters.gaussian(cp.asnumpy(initmp), 5.0))
            for nn in range(cubesort0.shape[0]):
                data = cubesort0[nn, :, :].copy()
                datatmp = data[corstart[0]:corstart[0] + corsize[0],
                               corstart[1]:corstart[1] + corsize[1]]
                if sobel == 1:
                    datatmp = filters.sobel(filters.gaussian(cp.asnumpy(datatmp), 5.0))
                cc, corr = xyy.corrmaxloc_gpu(initmp, datatmp)
                cubesort0[nn, :, :] = xyy.imgshift_gpu(data, [-cc[0], -cc[1]])
            averg = cp.mean(cubesort0, axis=0).astype(cp.float32)  # stacked image
            if only_align_no_luckyimage == 1:
                averg = ini / t
            # ---- write the aligned/stacked result ----------------------------
            aligned_path = ('/home/wangxinhua/Desktop/align'
                            + '/'.join(path.split('/')[path.split('/').index('Desktop') + 1:])
                            + '/aligned')
            try:
                print('location of aligned:' + path + os.path.splitdrive(aligned_path)[1])
            except Exception as e:
                print('location of aligned:' + aligned_path)
            if only_align_no_luckyimage == 1:
                try:
                    os.mkdir(path + os.path.splitdrive(aligned_path)[1])
                except Exception as e:
                    print('warning:' + aligned_path + 'existed')
                xyy.writefits(path + os.path.splitdrive(aligned_path)[1] + '/' + 'aligned.fits',
                              cp.asnumpy(initmp / len(data_path_fits)))
            else:
                try:
                    os.mkdir(path + os.path.splitdrive(aligned_path)[1])
                except Exception as e:
                    xyy.mkdir(aligned_path)
                xyy.writefits(aligned_path + '/' + 'aligned.fits', cp.asnumpy(averg))
            # ---- optional deconvolution (speckle post-processing) ------------
            if postprocess_flag == 1:
                cubesr = cubedata[:, srstx:srstx + srxsize, srsty:srsty + srysize]
                try:
                    r0, index = xyy.cubesrdevr0_gpu(cubesr, srsize, winsr, sitfdata,
                                                    diameter, diaratio, maxfre,
                                                    0.00, 0.06, start_r0, step_r0)
                except Exception as e:
                    print(cubesr)
                    sys.exit()
                sitf = xyy.GetSitf_gpu(sitfdata, maxfre, rcxsize, index)
                img = xyy.ImgPSDdeconv_gpu(averg, sitf)
                head['CODE2'] = r0    # record the estimated r0 in the header
                result = xyy.ImgFilted_gpu(img, gussf)
                # rescale so the median matches the stacked image
                result = result / np.median(cp.asnumpy(result)) * np.median(cp.asnumpy(averg))
                try:
                    fitsname = redrive + os.path.splitdrive(aligned_path)[1] + '/' + 'post_aligned.fits'
                    xyy.mkdir(redrive + os.path.splitdrive(aligned_path)[1])
                except Exception as e:
                    xyy.mkdir(os.path.join(redrive, i, 'aligned'))
                    fitsname = os.path.join(redrive, i, 'aligned', 'post_aligned.fits')
                xyy.writefits(fitsname, cp.asnumpy(result).astype(np.float32), head)
            print('align is over')
        else:
            a = 1   # folder not complete yet: keep polling
def online_dark(path):
    """Compute (or reuse) the mean dark frame for an observation day.

    Polls ``path`` for a dark folder; once one appears the averaged dark is
    written to ``<redrive>/<path>/Dark/dark.fits``, archived into
    ``archivedarkdir`` and logged.  If no dark folder appears within ~300
    one-second polls, the most recent archived dark is reused instead.

    Parameters
    ----------
    path : str
        Observation root, e.g. "/home/wangxinhua/20190518/HA".
    """
    with open(r"/home/wangxinhua/level1/Level1rev06/json.txt", 'r') as f:
        para = json.load(f)
    archivedarkdir = para['archivedarkdir']  # archive of all computed darks
    redrive = para['redrive']                # e.g. "/home/wangxinhua/nvst"
    dark_flag = int(para['dark_flag'])
    darked_path = para['darked_path']
    datapath, flatpath, darkpath = xyy.path_paser(path)  # folders of one group of fits
    flag = 1
    # BUGFIX: exist_ok replaces the old try/except-print around makedirs.
    os.makedirs(archivedarkdir, exist_ok=True)
    today_time = time.strftime("%Y%m%d", time.localtime())
    workdir = path
    use_other_dark = 0
    t0 = 0  # poll counter (~seconds waited)
    while flag:
        if use_other_dark == 0 and t0 < 300:
            if darkpath:  # BUGFIX: was `!= None`; also handles an empty list
                print('Using ' + darkpath[0])
                flag = 0
                # strip drive letter / leading separator so path joins cleanly
                operating_sys = platform.system()
                if operating_sys == 'Linux':
                    try:
                        path = path[path.index('\\') + 1:]
                    except ValueError:
                        path = path[path.index('/') + 1:]
                elif operating_sys == 'Windows':
                    path = path.split(':')[1]
                    try:
                        path = path[path.index('\\') + 1:]
                    except ValueError:
                        path = path[path.index('/') + 1:]
                darkfits = os.path.join(redrive, path, 'Dark', 'dark.fits')
                print(darkfits)
                # decide whether the dark still needs to be computed
                if os.path.exists(darkfits):
                    print('dark have been calculated,pass')
                else:
                    for i in darkpath:
                        darkeddata = xyy.online_mean(i)  # mean of all dark frames
                        xyy.mkdir(os.path.join(redrive, path, 'Dark'))
                        xyy.writefits(darkfits, darkeddata)
                # archive today's dark for later fallback use
                copyfile(darkfits, os.path.join(archivedarkdir, today_time + 'dark.fits'))
                # record which dark file was used
                with open(r'/home/wangxinhua/Observation_log/' + today_time + '.log',
                          'a+') as dark_log:
                    dark_log.writelines('\nused Dark:' + darkfits)
            else:
                # no dark folder yet: rescan and wait one second
                datapath, flatpath, darkpath = xyy.path_paser(path)
                flag = 1
                time.sleep(1)
                t0 += 1
        else:
            # ~5 min with no new dark: fall back to the latest archived dark.
            # BUGFIX: sorted() so [-1] really is the newest (names start with
            # YYYYMMDD, so lexicographic order is chronological).
            # BUGFIX: os.path.join instead of a hard-coded '\\' separator,
            # which produced broken paths on Linux.
            latestdarkpath = sorted(os.listdir(archivedarkdir))[-1]
            latestdarkfile = os.path.join(archivedarkdir, latestdarkpath)
            with open(r'/home/wangxinhua/Observation_log/' + today_time + '.log',
                      'w') as dark_log:
                dark_log.writelines('used Dark:' + latestdarkfile)
            flag = 0  # BUGFIX: without this the fallback branch looped forever
    print('Dark is over')
def _flat_correct_dir(srcdir, dark, addmean, norm, redrive, flatfile):
    """Dark/flat-correct every FITS file in *srcdir*.

    For each file writes ``(data - dark) / (addmean - dark) * norm`` into the
    mirrored directory under *redrive*; already-existing outputs are skipped.

    Parameters
    ----------
    srcdir : str   directory of raw data FITS files.
    dark : ndarray mean dark frame.
    addmean : ndarray mean flat frame for this band.
    norm : float   normalisation constant applied after the flat division.
    redrive : str  output drive root.
    flatfile : str directory holding the flat used (for logging only).
    """
    destdir = os.path.join(redrive, os.path.splitdrive(srcdir)[1])
    for name in os.listdir(srcdir):
        dest = destdir + '\\' + name
        if not os.path.exists(dest):
            xyy.mkdir(destdir)
            datatmp = np.array(xyy.readfits(os.path.join(srcdir, name))[0],
                               dtype=np.float32)
            xyy.writefits(dest,
                          np.array((datatmp - dark) / (addmean - dark) * norm,
                                   dtype=np.float32))
            print('在处理的数据:' + os.path.join(srcdir, name))
            print('使用的flat:' + flatfile + '\\' + 'flat.fits')
            print(dest)


def flat():
    """Offline flat-field pipeline (Windows-path flavour).

    Reads parameters from json.txt, classifies the sub-folders of ``path``
    into flat/dark/data sets, averages the flat frames per flat folder, then
    applies the dark/flat correction to every matching data folder
    (B050 / CENT / R050 bands), mirroring results under ``redrive``.
    """
    with open(r"/home/wangxinhua/level1/Level1/json.txt", 'r') as f:
        para = json.load(f)
    path = para['path']
    flated_path = para['flated_path']
    redrive = para['redrive']
    # classify sub-folders by name: F* -> flat, D* -> dark, rest -> data
    flatpath, darkpath, datapath = [], [], []
    for name in os.listdir(path):
        subpath = os.path.join(path, name)
        if 'F' in name or 'f' in name:
            flatpath.append(subpath)
        elif 'D' in name or 'd' in name:
            darkpath.append(subpath)
        else:
            datapath.append(subpath)
    # ---------------- observation data
    if not datapath:
        print('没有观测数据,停止数据处理!')
    print('观测数据文件夹:', datapath)
    # ---------------------- flat frames
    if not flatpath:
        print('没有平场数据!请输入邻近观测日的平场数据路径!')
        flatpath = input('请输入路径(格式例如:H:\20190112\HA\FLAT00):')
    print('平场数据文件夹:', flatpath)
    # ------------------- dark frames
    if not darkpath:
        print('没有暗场数据!')
        darkpath = input('请输入暗场的路径(格式例如:E:\dark20180312):')
    print('暗场数据文件夹:', darkpath)
    print()
    # ========================================================================
    redarkpath = os.path.join(redrive, os.path.splitdrive(darkpath[0])[1])
    xyy.mkdir(redarkpath)
    darkfile = os.path.join(redarkpath, 'dark.fits')
    dark = xyy.readfits(darkfile)[0]
    print('开始计算平场!')
    dirs = xyy.nvst_dirsandfiles_path(path)
    roots = dirs[0]
    fitsfile = dirs[1]
    # locate the flat directory tree and the data directory tree
    for t, root in enumerate(roots):
        if 'f' in root or 'F' in root:
            flat_root = root
            flat_fits = dirs[1][t]
    for t, root in enumerate(roots):
        if ('f' not in root and 'd' not in root
                and 'F' not in root and 'D' not in root):
            data_root = root
            data_fits = dirs[1][t]
    for i in flat_fits:
        flatfile = os.path.join(redrive, os.path.splitdrive(i)[1])
        xyy.mkdir(flatfile)
        if not os.path.exists(flatfile + '\\' + 'flat.fits'):  # was `!= True`
            addmean = np.array(xyy.dirfitsaddmean(i), dtype=np.float32)  # mean flat
            xyy.writefits(flatfile + '\\' + 'flat.fits', addmean)
        else:
            print('平场已经计算过')
            addmean = xyy.readfits(flatfile + '\\' + 'flat.fits')[0]
        for j in data_fits:
            # apply the flat only to the data folder of the matching band.
            # NOTE(review): B050 normalises by median(addmean - dark) while
            # CENT/R050 use median(addmean) — kept exactly as in the original;
            # confirm whether this asymmetry is intentional.
            if 'B050' in j and 'B050' in i:
                _flat_correct_dir(j, dark, addmean,
                                  np.median(addmean - dark), redrive, flatfile)
            elif 'CENT' in j and 'CENT' in i:
                # normalised, flat-corrected data
                _flat_correct_dir(j, dark, addmean,
                                  np.median(addmean), redrive, flatfile)
            elif 'R050' in j and 'R050' in i:
                _flat_correct_dir(j, dark, addmean,
                                  np.median(addmean), redrive, flatfile)