def main():
    """Remove peaks and subtract the continuum from spectra in matching files.

    Command line: ``[path] pattern``.  With two arguments the first is the
    directory to scan; with one, the current directory is scanned.  For each
    file matching ``pattern`` the spectrum is read as an ASCII table, known
    peaks are found and masked, the continuum is fit in two pieces, tamped
    down, and the results are written out via ``save_data``.
    """
    path = "."
    pattern = ""
    if len(sys.argv) == 3:
        path = sys.argv[1]
        pattern = sys.argv[2]
    else:
        pattern = sys.argv[1]

    global ts
    ts = mark_time("start loop", ts)
    for fname in os.listdir(path):
        if not fnmatch.fnmatch(fname, pattern):
            continue
        data = Table(Table.read(os.path.join(path, fname), format="ascii"), masked=True)
        # Pixels with zero inverse variance carry no information; mask them
        # in every column.  Keep the original mask so it can be restored
        # after peak finding installs its own mask.
        orig_mask = (data['ivar'] == 0)
        data.mask = [orig_mask]*len(data.columns)
        idstr = fname[:fname.rfind('.')]

        ts = mark_time("getting match", ts)
        peaks = measure_peaks.find_and_measure_peaks(data, use_flux_con=False, ignore_defects=False)
        ts = mark_time("measure_peaks.find_and_measure_peaks", ts)
        peaks_mask = measure_peaks.mask_known_peaks(data, peaks)
        ts = mark_time("measure_peaks.mask_known_peaks", ts)

        # Restore the original (ivar == 0) mask before continuum fitting.
        data.mask = [orig_mask]*len(data.columns)
        # Fit the spectrum in two pieces: up to split_noisy_app, and the
        # noisier remainder, which gets a larger smoothing multiplier.
        start_continuum, start_wo_continuum = split_spectrum(
            data['wavelength'][:split_noisy_app], data['flux'][:split_noisy_app],
            peaks_mask[:split_noisy_app], orig_mask[:split_noisy_app],
            idstr=idstr, block_sizes=block_sizes)
        end_continuum, end_wo_continuum = split_spectrum(
            data['wavelength'][split_noisy_app:], data['flux'][split_noisy_app:],
            peaks_mask[split_noisy_app:], orig_mask[split_noisy_app:],
            idstr=idstr, block_sizes=block_sizes, mult=6)
        ts = mark_time("smoothing", ts)

        wo_continuum = np.ma.concatenate([start_wo_continuum, end_wo_continuum])
        continuum = np.ma.concatenate([start_continuum, end_continuum])
        # Successively smaller spans tamp down residual spikes in the
        # continuum estimate.
        continuum, wo_continuum = tamp_down(continuum, wo_continuum)
        continuum, wo_continuum = tamp_down(continuum, wo_continuum, span=31)
        continuum, wo_continuum = tamp_down(continuum, wo_continuum, span=21)

        save_data(data['wavelength'], wo_continuum, continuum, data['ivar'], orig_mask, idstr)
        ts = mark_time("save_data", ts)
def main(): path = "." pattern = "" if len(sys.argv) == 3: path = sys.argv[1] pattern = sys.argv[2] else: pattern = sys.argv[1] for file in os.listdir(path): if fnmatch.fnmatch(file, pattern): data = Table(Table.read(os.path.join(path, file), format="ascii"), masked=True) data.mask = [(data['ivar'] == 0)]*len(data.columns) if np.count_nonzero(data['con_flux'].data < 0): print file, "BAD: continuum has zub-zero con_flux!!!!" print data['con_flux'][data['con_flux'].data < 0] total = data['con_flux']+data['flux'] if np.count_nonzero(total < 0): print file, "WORSE: total has zub-zero con_flux!!!!" print total[total < 0] ivar_cutoff = 0.005 ivar_cutoff_mask = (data['ivar'].data < ivar_cutoff) & (data['ivar'].data > 0) if np.any(ivar_cutoff_mask): print file, "QUESTIONABLE: ivar less than", ivar_cutoff print data["ivar"][ivar_cutoff_mask]
def main():
    """Measure peak fluxes for every spectrum file matching the pattern.

    Command line: ``[path] pattern``; ``path`` defaults to the current
    directory.  Each matching ASCII table is masked where ivar is zero
    (wavelengths stay unmasked), peaks are measured, and the result is
    saved under the file's basename.
    """
    if len(sys.argv) == 3:
        path, pattern = sys.argv[1], sys.argv[2]
    else:
        path, pattern = ".", sys.argv[1]
    for file in os.listdir(path):
        if not fnmatch.fnmatch(file, pattern):
            continue
        data = Table(Table.read(os.path.join(path, file), format="ascii"), masked=True)
        data.mask = [(data['ivar'] == 0)]*len(data.columns)
        # Wavelength values are valid even where ivar is zero.
        data['wavelength'].mask = False
        idstr = file[:file.rfind('.')]
        peak_flux_list = []
        peak_flux = find_and_measure_peaks(data, peak_flux_list)
        # NOTE: integrating flux over fixed wavelength spans (wlen_spans)
        # was dropped here for now; spans may be worth revisiting.
        save_data(peak_flux, idstr)
def main():
    """Estimate and subtract the continuum for spectra in matching files.

    Command line: ``[path] pattern``; ``path`` defaults to the current
    directory.  A working copy of each spectrum is minimized and twice
    filtered (taking the pointwise minimum with the filtered result each
    time) before the continuum is extracted and subtracted from the
    original flux; results are written via ``save_data``.
    """
    if len(sys.argv) == 3:
        path, pattern = sys.argv[1], sys.argv[2]
    else:
        path, pattern = ".", sys.argv[1]
    for file in os.listdir(path):
        if not fnmatch.fnmatch(file, pattern):
            continue
        data = Table(Table.read(os.path.join(path, file), format="ascii"), masked=True)
        orig_mask = (data['ivar'] == 0)
        data.mask = [(data['ivar'] == 0)]*len(data.columns)
        idstr = file[:file.rfind('.')]

        # Work on a copy so the original flux survives for the final
        # subtraction below.
        test_data = data.copy()
        test_data['flux'] = minimize(test_data['wavelength'], test_data['flux'],
                                     [100, 200, 300], 0, start_ind=split_noisy_app)
        # Two filter passes; after each, keep the lower envelope of the
        # current flux and the filtered version.
        for width, order in ((201, 24), (161, 18)):
            filtered = filter_and_subtract(test_data['flux'], test_data['wavelength'], width, order)
            test_data['flux'] = np.ma.min(np.ma.vstack([test_data['flux'], filtered]), axis=0)

        continuum = split_spectrum(test_data['wavelength'], test_data['flux'])
        wo_continuum = data['flux'] - continuum
        save_data(data['wavelength'], wo_continuum, continuum, data['ivar'], orig_mask, idstr)
def plot_it(data, key, peaks_table=None, no_con_flux=False, unmask=False, data_col=None, title=None, stack=False):
    """Plot a spectrum column against `key` (typically wavelength).

    Parameters
    ----------
    data : table-like
        Converted to a masked astropy Table; expected columns vary
        (`flux`, and optionally `ivar`, `sky`, `con_flux`).
    key : str
        Column name used for the x axis.
    peaks_table : table-like, optional
        Rows with 'total_type' and wavelength bound columns; 'line' rows
        with a nonzero lower bound are shaded gray.
    no_con_flux : bool
        If True, do not add/plot the 'con_flux' column even if present.
    unmask : bool
        If True, skip the automatic masking of bad pixels.
    data_col : str, optional
        Column to plot on the y axis; defaults to 'flux'.
    title : str, optional
        Figure suptitle.
    stack : bool
        If True, overlay onto the current figure and do not call show().
    """
    data = Table(data, masked=True)
    if data_col is None:
        data_col = 'flux'
    if not unmask:
        # Mask pixels with zero or near-zero ivar (no information);
        # without an ivar column, fall back to masking zero data values.
        if 'ivar' in data.colnames:
            data.mask = [(data['ivar'] == 0) | (np.abs(data['ivar']) <= 0.0001)]*len(data.columns)
        else:
            data.mask = [(data[data_col] == 0)]*len(data.columns)
    val = data[data_col]
    # Add sky back in when present so the plotted value is the observed one.
    if 'sky' in data.colnames:
        val += data['sky']
    # 1-sigma uncertainty from inverse variance; 0 disables the error band.
    if 'ivar' in data.colnames:
        sigma = np.power(data['ivar'], -0.5)
    else:
        sigma = 0
    if 'con_flux' in data.colnames and not no_con_flux:
        # Plot continuum (green) and continuum-restored flux (orange)
        # over a zero baseline (red).
        val += data['con_flux']
        plt.plot(data[key], [0]*len(data[key]), color='red')
        plt.plot(data[key], data['con_flux'], color='green')
        plt.plot(data[key], val, color='orange', alpha=0.7)
    else:
        if stack:
            # Stacked overlays use matplotlib's default color cycling.
            plt.plot(data[key], val, alpha=0.7)
        else:
            plt.plot(data[key], [0]*len(data[key]), color='red')
            plt.plot(data[key], val, color='orange', alpha=0.7)
    # Error band only for single plots without a continuum column.
    if not stack and np.any(sigma > 0) and 'con_flux' not in data.colnames:
        plt.fill_between(data[key], val-sigma, val+sigma, color='red')
    if peaks_table is not None:
        # Shade the wavelength range of each detected line.
        for row in peaks_table:
            if row['total_type'] == 'line' and row['wavelength_lower_bound'] != 0:
                plt.fill_between([row['wavelength_lower_bound'], row['wavelength_upper_bound']], [0, np.max(val)], color='gray')
    if title is not None:
        plt.suptitle(title)
    plt.tight_layout()
    #plt.ylim((np.percentile(val-sigma,0.1),np.percentile(val+sigma,99.9)))
    if not stack:
        plt.show()
    plt.close()