def reduce(master_bias, master_dark, master_flat):
    """Reduce every frame in the global ``observation_files`` list.

    Each science frame is corrected as ``(data - bias - exptime * dark) / flat``,
    statistics (mean/std) are computed and written into the header, and the
    corrected frame is saved as a new FITS file in ``reduction_directory``.

    Parameters
    ----------
    master_bias, master_dark, master_flat : array-like
        Master calibration frames, broadcastable against each science frame.
        The dark is scaled by the frame's exposure time before subtraction.

    Returns
    -------
    (testx, testy, testz) : (list, list, list)
        Julian dates, mean counts per second, and standard deviations, one
        entry per reduced frame — or ``(None, None, None)`` if the user
        aborted via ``show_progress.exit``.

    Notes
    -----
    Relies on many module-level names (``observation_files``, ``show_progress``,
    ``label_4``, ``bin_fits``, ``bin_to``, header-key constants, plotting
    objects, ...) defined elsewhere in this module.
    """
    percent = 0
    lt0 = time.time()

    testx = []
    testy = []
    testz = []

    for counter, science_file in enumerate(observation_files):

        # user pressed the exit button in the progress window
        if show_progress.exit:
            return None, None, None

        label_4.configure(text='Reducing data and calculating statistics: {0}'.format(
            os.path.split(science_file)[1]))
        label_4.update()

        # correct it with master bias_files, master dark_files and master flat_files
        fits_file = pf.open(science_file, memmap=False)
        try:
            # prefer the 'SCI' extension; otherwise fall back to the first HDU
            # that holds non-trivial data
            try:
                fits = [fits_file['SCI']]
            except KeyError:
                sci_id = 0
                for sci_id in range(len(fits_file)):
                    # a header-only HDU has data == None and raises here;
                    # keep scanning in that case
                    try:
                        if (fits_file[sci_id].data).all():
                            break
                    except Exception:
                        pass
                fits = [fits_file[sci_id]]
        finally:
            # always release the file handle, even if HDU selection fails
            # (important on Windows, where open handles block renames/deletes)
            fits_file.close()

        # copy the data so the original HDU array is not modified in place
        data_frame = np.ones_like(fits[0].data) * fits[0].data
        data_frame = (data_frame - master_bias
                      - fits[0].header[exposure_time_key] * master_dark) / master_flat
        data_frame[np.where(np.isnan(data_frame))] = 0

        if bin_fits > 1:
            data_frame = plc.bin_frame(data_frame, bin_fits)

        # estimate sky statistics from a Gaussian fit on a subsample of pixels;
        # fall back to median/MAD if the fit fails
        try:
            distribution = plc.one_d_distribution(
                data_frame.flatten()[::int(200000.0 / bin_to)],
                gaussian_fit=True, mad_filter=5.0)
            mean = distribution[2]
            std = distribution[3]
        except Exception:
            mean = np.median(data_frame)
            std = plc.mad(data_frame) * 1.5

        # build a single 'date time' string whether the header stores the
        # timestamp in one combined keyword or in two separate ones
        if observation_date_key == observation_time_key:
            observation_time = ' '.join(fits[0].header[observation_date_key].split('T'))
        else:
            observation_time = ' '.join([fits[0].header[observation_date_key].split('T')[0],
                                         fits[0].header[observation_time_key]])
        observation_time = plc.UTC(observation_time)

        testx.append(observation_time.jd)
        testy.append(mean / fits[0].header[exposure_time_key])
        testz.append(std)

        fits[0].header.set(mean_key, mean)
        fits[0].header.set(std_key, std)

        # write the new fits file
        # important to keep it like this for windows!
        time_in_file = observation_time.utc.isoformat()
        time_in_file = time_in_file.split('.')[0]
        time_in_file = time_in_file.replace('-', '_').replace(':', '_').replace('T', '_')

        hdu = pf.ImageHDU(header=fits[0].header, data=np.array(data_frame, dtype=np.float32))

        plc.save_fits(pf.HDUList([pf.PrimaryHDU(), hdu]), '{0}{1}{2}{3}_{4}'.format(
            reduction_directory, os.sep, reduction_prefix, time_in_file,
            science_file.split(os.sep)[-1]))

        # show the first reduced frame in the preview canvas
        if counter == 0:
            ax.cla()
            ax.imshow(data_frame[::2, ::2], origin='lower', cmap=cm.Greys_r,
                      vmin=fits[0].header[mean_key] + frame_low_std * fits[0].header[std_key],
                      vmax=fits[0].header[mean_key] + frame_upper_std * fits[0].header[std_key])
            ax.axis('off')
            canvas.draw()

        # counter: update progress bar and remaining-time estimate
        new_percent = round(100 * (counter + 1) / float(len(observation_files)), 1)
        if new_percent != percent:
            lt1 = time.time()
            rm_time = (100 - new_percent) * (lt1 - lt0) / new_percent
            hours = rm_time / 3600.0
            minutes = (hours - int(hours)) * 60
            seconds = (minutes - int(minutes)) * 60
            progress_bar_4['value'] = new_percent
            percent_label_4.configure(text='{0} % ({1}h {2}m {3}s left)'.format(
                new_percent, int(hours), int(minutes), int(seconds)))
            percent = new_percent

        show_progress.update()

    return testx, testy, testz
def avc_plot(self, latitude, longitude, tmzn, horizon, target_ra, target_dec, year_mont_string,
             ax, name, observatory_name):
    """Draw an altitude/visibility chart for one target and one month.

    For every day of the requested month, a horizontal bar is drawn per
    time interval between consecutive "events" (mid-day splits, target
    rise/set, sun rise/set, astronomical-twilight crossings), coloured by
    sky state and moon phase.  Target rise/set times are annotated.

    Parameters
    ----------
    latitude, longitude : site coordinates in degrees.
    tmzn : time-zone offset in hours (used for the x-axis label and for
        shifting annotated times).
    horizon : observer horizon, passed to plc.Observatory.
    target_ra, target_dec : target coordinates (hours / degrees).
    year_mont_string : str, "YYYY MM" (space-separated).
    ax : matplotlib Axes to draw on (cleared first).
    name, observatory_name : labels for the title.
    """
    ax.cla()

    target = plc.Target(plc.Hours(target_ra), plc.Degrees(target_dec))
    observatory = plc.Observatory(plc.Degrees(latitude), plc.Degrees(longitude), tmzn, horizon)
    observation = plc.Observation(target, observatory)

    year = int(year_mont_string.split()[0])
    month = int(year_mont_string.split()[1])

    # index 0 is a placeholder so months[month] works with 1-based months
    months = ['xx', 'January', 'February', 'March', 'April', 'May', 'June', 'July', 'August',
              'September', 'October', 'November', 'December']

    # NOTE(review): divisible-by-4 test only — ignores the century rule
    # (e.g. 2100 would be treated as a leap year); confirm intended range.
    if (year - 2000) / 4.0 - int((year - 2000) / 4.0) == 0:
        days = [0, 31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
    else:
        days = [0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]

    # local noon on the 1st of the month; the chart runs noon-to-noon per row
    time_0 = plc.LT('{0}-{1}-1 12:00:00'.format(year, month), observatory)
    jd_0 = time_0.jd
    time_1 = plc.JD(jd_0 + days[month])

    events = []

    # mid-day splits
    for jj in range(days[month] + 1):
        events.append([plc.JD(time_0.jd + jj), 'mid-day'])

    # target rise/set events
    events += observation.rise_set_events(time_0, time_1)

    # sun rise/set events
    for jj in range(days[month]):

        check_time = plc.JD(time_0.jd + jj)
        check_st = check_time.lst(observatory).hours

        sun = check_time.get_sun()

        # sun rise/set — only when the sun is neither circumpolar nor never-rising
        if -(90 - observatory.latitude.deg_pm) < sun.dec.deg_pm < 90 - observatory.latitude.deg_pm:

            # hour angle of rise, converted from radians to hours (x 12/pi)
            rise_ha = np.arccos(-sun.dec.tan * observatory.latitude.tan) * 12 / np.pi
            if rise_ha < 12:
                set_ha = rise_ha
                rise_ha = 24 - rise_ha
            else:
                set_ha = 24 - rise_ha

            # sidereal times of rise/set, wrapped into [0, 24)
            rise_st = rise_ha + sun.ra.hours
            if rise_st > 24:
                rise_st -= 24

            set_st = set_ha + sun.ra.hours
            if set_st > 24:
                set_st -= 24

            # sidereal hours until the next rise/set after check_st
            if rise_st < check_st:
                next_rise_in_st_hours = 24 + rise_st - check_st
            else:
                next_rise_in_st_hours = rise_st - check_st

            if set_st < check_st:
                next_set_in_st_hours = 24 + set_st - check_st
            else:
                next_set_in_st_hours = set_st - check_st

            # 23.9344696/24 converts sidereal hours to solar (UT) hours
            dt = next_rise_in_st_hours * (23.9344696 / 24)
            if dt < 24:
                events.append([plc.JD(check_time.jd + dt / 24), 'sun_rise'])

            dt = next_set_in_st_hours * (23.9344696 / 24)
            if dt < 24:
                events.append([plc.JD(check_time.jd + dt / 24), 'sun_set'])

        # sun -18 rise/set (astronomical twilight crossings)
        if -(90 - observatory.latitude.deg_pm + 18.0) < sun.dec.deg_pm < 90 - (observatory.latitude.deg_pm + 18):

            # hour angle at which the sun crosses altitude -18 degrees
            rise_ha = np.arccos(np.sin((-18.0) * np.pi / 180) / sun.dec.cos / observatory.latitude.cos
                                - sun.dec.tan * observatory.latitude.tan) * 12 / np.pi
            if rise_ha < 12:
                set_ha = rise_ha
                rise_ha = 24 - rise_ha
            else:
                set_ha = 24 - rise_ha

            rise_st = rise_ha + sun.ra.hours
            if rise_st > 24:
                rise_st -= 24

            set_st = set_ha + sun.ra.hours
            if set_st > 24:
                set_st -= 24

            if rise_st < check_st:
                next_rise_in_st_hours = 24 + rise_st - check_st
            else:
                next_rise_in_st_hours = rise_st - check_st

            if set_st < check_st:
                next_set_in_st_hours = 24 + set_st - check_st
            else:
                next_set_in_st_hours = set_st - check_st

            dt = next_rise_in_st_hours * (23.9344696 / 24)
            if dt < 24:
                events.append([plc.JD(check_time.jd + dt / 24), 'sun_rise_18'])

            dt = next_set_in_st_hours * (23.9344696 / 24)
            if dt < 24:
                events.append([plc.JD(check_time.jd + dt / 24), 'sun_set_18'])

    # sort all events chronologically as [jd, time-object, label]
    events2 = [[ff[0].jd, ff[0], ff[1]] for ff in events]
    events2.sort(key=lambda ff: ff[0])

    # maximum altitude the target reaches from this site, for the title
    maxalt = str(round(observation.max_altitude.deg_pm, 1))

    ax.xaxis.tick_top()

    ax.set_title(observatory_name + '\n' + name + ' ' + months[month] + ' ' + str(year)
                 + ' max. alt. = ' + maxalt + ' degrees')
    # NOTE(review): x data below spans 0-24 hours and ticks run 0-24, but the
    # limit here is (0, 1) — looks inconsistent; confirm against upstream.
    ax.set_xlim((0, 1))
    ax.set_xlabel('HOUR (UTC{0:+.1f})'.format(tmzn))
    ax.set_xticks(np.arange(0, 24.5, 1))
    # axis starts at local noon (12), wraps through midnight back to noon
    ax.set_xticklabels(('12', '13', '14', '15', '16', '17', '18', '19', '20', '21', '22', '23',
                        '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12'))
    ax.set_ylim((0, days[month] + 1))
    ax.set_ylabel('DAY')
    ax.set_yticks(np.arange(1, days[month] + 0.5, 1))
    ax.tick_params(bottom=True, top=True, left=True, right=True, labelbottom=True,
                   labeltop=True, labelright=False, labelleft=True)
    ax.grid(True, axis='y', linestyle='--')

    # reference full moon used to estimate the moon phase (synodic period 29.530589 d)
    check_full_moon = plc.UTC('2000-1-21 04:41:00')

    for jj, ii in enumerate(events2[:-1]):

        # 0 at new moon, 1 at full moon (squared sine of half-cycle angle)
        moonphase = (np.sin((float(ii[0] + 0.5) - float(check_full_moon.jd)) * np.pi / 29.530589)) ** 2

        # midpoint and half-width (in days) of this interval
        test_jd = 0.5 * (ii[0] + events2[jj + 1][0])
        dt_jd = 0.5 * (events2[jj + 1][0] - ii[0])
        day = 1 + int(test_jd - jd_0)

        # interval start/end as hours since the row's local noon
        time_range = [(ii[0] - jd_0 - int(ii[0] - jd_0)) * 24,
                      (events2[jj + 1][0] - jd_0 - int(events2[jj + 1][0] - jd_0)) * 24]
        if time_range[1] == 0:
            time_range[1] = 24

        # colour of the bar: invisible target -> transparent; daylight -> yellow;
        # twilight -> red; dark sky -> grey scaled by moon phase
        alpha = 1
        if not observation.is_target_visible(plc.JD(ii[0] + dt_jd)):
            color = 'w'
            alpha = 0
        else:
            sun_az, sun_alt = observation.sun_azimuth_altitude(plc.JD(ii[0] + dt_jd))
            if sun_alt.deg_pm > 0:
                color = 'y'
            elif sun_alt.deg_pm > -18:
                color = 'r'
            else:
                color = str(0.8 * (1 - moonphase))

        ax.plot(time_range, [day, day], linewidth=2.5, color=color, alpha=alpha)

        # horizontal offset of the rise/set labels, depending on alignment
        shift = {'left': +0.3, 'right': -0.3}

        if ii[2] == 'target_set':
            ax.plot(time_range[0], day, 'k*', mec='k', markersize=8)
            # flip alignment near the right edge so the label stays inside
            if time_range[0] > 20.5:
                align = 'right'
            else:
                align = 'left'
            ax.text(time_range[0] + shift[align], day + 0.4,
                    'set: ' + (ii[1].utc + datetime.timedelta(
                        days=tmzn / 24)).isoformat().split('T')[1][:5],
                    va='center', ha=align, fontsize=9)

        if ii[2] == 'target_rise':
            ax.plot(time_range[0], day, 'w*', mec='k', markersize=8, markeredgewidth=0.5)
            # flip alignment near the left edge so the label stays inside
            if time_range[0] < 3.5:
                align = 'left'
            else:
                align = 'right'
            ax.text(time_range[0] + shift[align], day + 0.4,
                    'rise: ' + (ii[1].utc + datetime.timedelta(
                        days=tmzn / 24)).isoformat().split('T')[1][:5],
                    va='center', ha=align, fontsize=9)
def reduce_science(self):
    """Reduce the next science frame in ``self.science_files`` (one per call).

    Applies bias/dark/flat correction, computes frame statistics and a PSF
    estimate, stamps the header with mean/std/psf/time keys, saves the
    reduced frame, records its metadata in ``self.all_frames``, and
    re-schedules itself via ``self.after`` until all frames are done (then
    schedules ``self.save``).

    Side effects: advances ``self.science_counter``, may update ``self.psf``,
    writes a FITS file under ``self.log.reduction_directory``, updates the
    progress widgets.

    Raises
    ------
    RuntimeError
        If ``self.log.get_param('time_stamp')`` is not one of
        'exposure start', 'mid-exposure', 'exposure end'.
    """
    # correct each observation_files file

    if self.exit:
        # user aborted: skip straight to saving
        self.after(self.save)

    else:

        if self.science_counter == 0:
            self.progress_science.initiate(len(self.science_files))

        science_file = self.science_files[self.science_counter]

        # correct it with master bias_files, master dark_files and master flat_files
        fits = get_fits_data(science_file)

        exp_time = fits[0].header[self.log.get_param('exposure_time_key')]
        # copy the data so the original HDU array is not modified in place
        data_frame = np.ones_like(fits[0].data) * fits[0].data
        data_frame = (data_frame - self.master_bias - exp_time * self.master_dark) / self.master_flat
        data_frame[np.where(np.isnan(data_frame))] = 0

        if self.log.get_param('bin_fits') > 1:
            data_frame = plc.bin_frame(data_frame, self.log.get_param('bin_fits'))

        # estimate sky statistics from a Gaussian fit on a pixel subsample;
        # fall back to median/MAD if the fit fails
        try:
            distribution = plc.one_d_distribution(
                data_frame.flatten()[::int(200000.0 / self.log.bin_to)],
                gaussian_fit=True, mad_filter=5.0)
            mean = distribution[2]
            std = distribution[3]
        except Exception:
            mean = np.median(data_frame)
            std = plc.mad(data_frame) * 1.5

        with warnings.catch_warnings():
            warnings.filterwarnings("ignore")
            psf = plc.fast_psf_find(data_frame, mean, std, 0.95 * self.log.get_param('burn_limit'))

        # PSF detection failed: flag the frame to be skipped and keep a
        # deliberately inflated value; otherwise remember the new PSF
        if np.isnan(psf):
            psf = self.psf + 10
            skip = True
        else:
            self.psf = psf
            skip = False

        # build a single 'date time' string whether the header stores the
        # timestamp in one combined keyword or in two separate ones
        if self.log.get_param('observation_date_key') == self.log.get_param('observation_time_key'):
            observation_time = ' '.join(
                fits[0].header[self.log.get_param('observation_date_key')].split('T'))
        else:
            observation_time = ' '.join(
                [fits[0].header[self.log.get_param('observation_date_key')].split('T')[0],
                 fits[0].header[self.log.get_param('observation_time_key')]])

        observation_time = plc.UTC(observation_time)

        # normalise the recorded timestamp according to its declared meaning
        # (exp_time is in seconds, hence the /60/60/24 conversion to days)
        if self.log.get_param('time_stamp') == 'exposure start':
            julian_date = observation_time.jd
        elif self.log.get_param('time_stamp') == 'mid-exposure':
            julian_date = observation_time.jd - 0.5 * exp_time / 60.0 / 60.0 / 24.0
        elif self.log.get_param('time_stamp') == 'exposure end':
            julian_date = observation_time.jd - exp_time / 60.0 / 60.0 / 24.0
        else:
            raise RuntimeError('Not acceptable time stamp.')

        fits[0].header.set(self.log.mean_key, mean)
        fits[0].header.set(self.log.std_key, std)
        fits[0].header.set(self.log.psf_key, psf)
        fits[0].header.set(self.log.time_key, julian_date)

        # write the new fits file
        # important to keep it like this for windows!
        time_in_file = observation_time.utc.isoformat()
        time_in_file = time_in_file.split('.')[0]
        time_in_file = time_in_file.replace('-', '_').replace(':', '_').replace('T', '_')

        new_name = '{0}{1}_{2}'.format(self.log.reduction_prefix, time_in_file,
                                       science_file.split(os.sep)[-1])

        hdu = pf.ImageHDU(header=fits[0].header, data=np.array(data_frame, dtype=np.float32))

        plc.save_fits(pf.HDUList([pf.PrimaryHDU(), hdu]),
                      os.path.join(self.log.reduction_directory, new_name))

        self.all_frames[new_name] = {
            self.log.mean_key: mean,
            self.log.std_key: std,
            self.log.psf_key: psf,
            self.log.time_key: julian_date,
            self.log.get_param('exposure_time_key'):
                fits[0].header[self.log.get_param('exposure_time_key')],
            self.log.skip_key: skip,
            self.log.align_x0_key: False,
            self.log.align_y0_key: False,
            self.log.align_u0_key: False,
        }

        # always show the first frame; later frames only if live preview is on
        if self.progress_science_loop.get() or self.science_counter == 0:
            self.progress_figure.load_fits(hdu, new_name)

        # counter: advance and re-schedule
        self.progress_science.update()
        self.science_counter += 1

        if self.science_counter >= len(self.science_files):
            self.after(self.save)
        else:
            self.after(self.reduce_science)