def minimum_photouncert(catalog, columns):
    """
    Imposes sensible bounds on the photometric uncertainties of a catalogue:
    errors below 0.01 mag are reset to 0.03 mag and errors above 1.0 mag are
    capped at 1.0 mag.  The bounded errors overwrite the original error
    columns and the result is saved as '<catalog minus ext>ecor.cat'.
    """
    data = C.loaddata(catalog)    # Full numerical content of the catalogue.
    head = C.loadheader(catalog)  # Original header, preserved in the output.
    mm = A.get_magnitudes(catalog, columns)
    em = A.get_errmagnitudes(catalog, columns)
    n_bands = len(mm[0, :])  # Number of filters in the catalogue.
    # Element-wise bounds: too-small errors -> 0.03, too-large -> 1.0,
    # everything in between is kept untouched.
    errmag = N.where(em < 0.01, 0.03, N.where(em > 1.0, 1.0, em))
    # Overwrite the error columns with the bounded values.
    usecols, errcols, posref, zpe, zpo = A.get_usefulcolumns(columns)
    data[:, errcols] = errmag[:, N.arange(n_bands)]
    finalcatalog = catalog[:-3] + 'ecor.cat'
    C.savedata(data, finalcatalog, dir="", header=head)  # New catalogue on disk.
def replace_photo_uncert(catalog, columns): """ """ data = C.loaddata(catalog) # Loading the whole catalog content. head = C.loadheader(catalog) # Loading the original header. mm = A.get_magnitudes(catalog, columns) em = A.get_errmagnitudes(catalog, columns) filters = B.get_filter_list(columns) nl = len(mm[:, 0]) # nl is the number of detections inside every single band. nf = len(mm[0, :]) # nf is the number of bands inside the catalog. errmag = U.zeros((nl, nf), float) # Where the new photo errors will be saved. for jj in range(nf): maglim = B.get_limitingmagnitude(mm[:, jj], em[:, jj], 1., 0.25) print 'Limiting Magnitude for filter %s: %.3f' % (filters[jj], maglim) for ii in range(nl): if mm[ii, jj] == -99.: errmag[ii, jj] = 0.00 elif mm[ii, jj] == 99.: errmag[ii, jj] = maglim else: errmag[ii, jj] = em[ii, jj] # New values of mags error overwrites now the original data. vars, evars, posref, zpe, zpo = get_usefulcolumns(columns) data[:, evars] = errmag[:, U.arange(nf)] finalcatalog = catalog[:-3] + 'upp.cat' C.savedata(data, finalcatalog, dir="", header=head) # Saving & creating a new catalog.
def replace_kerttu_errmags(catalog, columns, finalcatalog):
    """
    Zeroes the photometric uncertainty of non-observed sources (m == -99.)
    while keeping every other error untouched, then writes the updated
    catalogue to 'finalcatalog'.

    Example::
      import alhambra_kerttu_fixerrmags as AFM
      catalog = '/Users/albertomolino/doctorado/articulos/ALHAMBRA/kerttu/test_photoz/kerttu.cat'
      columns = '/Users/albertomolino/doctorado/articulos/ALHAMBRA/kerttu/test_photoz/kerttu.columns'
      finalcatalog = '/Users/albertomolino/doctorado/articulos/ALHAMBRA/kerttu/test_photoz/kerttu3.cat'
      AFM.replace_kerttu_errmag(catalog,columns,finalcatalog)
    ------
    """
    data = C.loaddata(catalog)    # Whole catalogue content.
    head = C.loadheader(catalog)  # Original header, reused on output.
    mm = A.get_magnitudes(catalog, columns)
    em = A.get_errmagnitudes(catalog, columns)
    filters = B.get_filter_list(columns)
    n_rows = len(mm[:, 0])   # Detections per band.
    n_bands = len(mm[0, :])  # Bands in the catalogue.
    errmag = U.zeros((n_rows, n_bands), float)  # Corrected uncertainties.
    for band in range(n_bands):
        for row in range(n_rows):
            # Non-observed sources carry no meaningful uncertainty.
            if mm[row, band] == -99.:
                errmag[row, band] = 0.00
            else:
                errmag[row, band] = em[row, band]
    # The corrected errors overwrite the original error columns.
    vars, evars, posref, zpe, zpo = A.get_usefulcolumns(columns)
    data[:, evars] = errmag[:, U.arange(n_bands)]
    C.savedata(data, finalcatalog, dir="", header=head)
def compress_bpz_catalogues(catalogue, sample, outname):
    """
    Extracts the rows selected by 'sample' from an input catalogue and
    writes them, together with the original header, to 'outname'.

    :param catalogue: input catalogue file name.
    :param sample: row selector (index array or boolean mask).
    :param outname: output catalogue file name.
    :return: None (output is written to disk).
    """
    header = C.loadheader(catalogue)
    content = C.loaddata(catalogue)
    subset = content[sample, :]  # Row-wise selection of the subsample.
    C.savedata(subset, outname, dir="", header=header)
def appendcatalogs(catalog1, catalog2, catalogOUT): """ The task appends catalogs using only the catalog1's header. Catalog1(withheader)+catalog2(woheader) The final (composed) catalog is saved as catalogOUT. NEEDLESS TO SAY BOTH CATALOGS HAVE TO HAVE THE SAME FORMAT (ROWS&COLUMNS) !!! ----- """ print 'Reading file1: ', catalog1 data1 = C.loaddata(catalog1) # Loading the whole catalog1 content. head1 = C.loadheader(catalog1) # Loading the original header1. print 'Reading file2: ', catalog2 data2 = C.loaddata(catalog2) # Loading the whole catalog2 content. head2 = C.loadheader(catalog2) # Loading the original header2. outcat = catalogOUT print outcat try: nf1 = N.shape(data1)[0] nc1 = N.shape(data1)[1] except: nf1 = 1 nc1 = N.shape(data1)[0] try: nf2 = N.shape(data2)[0] nc2 = N.shape(data2)[1] except: nf2 = 1 nc2 = N.shape(data2)[0] print 'Dimensions catalogue_1: ncols: %i, nraws: %i' % (nf1, nc1) print 'Dimensions catalogue_2: ncols: %i, nraws: %i' % (nf2, nc2) if nc1 == nc2: nf = nf1 + nf2 nc = nc1 newdata = N.zeros((nf, nc), float) for ii in range(nf1): if nf1 < 2: newdata[ii, :] = data1[:] else: newdata[ii, :] = data1[ii, :] for ii in range(nf2): if nf2 < 2: newdata[ii + nf1, :] = data2[:] else: newdata[ii + nf1, :] = data2[ii, :] C.savedata(newdata, outcat, dir="", header=head1) # Saving and creating the new catalog. else: print 'Different number of rows between catalogs. Impossible to append catalogs !!'
def replacing_nans_catalogs(catalog, newname):
    """
    Cleans a photometric catalogue:
      * magnitudes with |m| > 60 (NaN-like sentinels) are replaced by the
        non-detection values m = -99, em = 0, S/N = -1;
      * negligible signal-to-noise values (< 1e-5) are reset to 0 while the
        measured magnitude and error are kept;
      * everything else is copied through unchanged.
    The cleaned catalogue is written to 'newname' with the original header.
    """
    # Hard-coded column layout: magnitude columns; each is followed by its
    # error (+1) and signal-to-noise (+2) companion columns.
    # TODO confirm this layout against the matching .columns file.
    vars = N.array([
        15, 18, 21, 24, 27, 30, 33, 36, 39, 42, 45, 48, 51, 54, 57, 60, 63,
        66, 69, 72, 75, 78, 81, 84, 87, 90, 93, 96, 99, 102, 105, 108, 111
    ])
    evars = vars[:] + 1
    s2n_vars = vars[:] + 2
    data = C.loaddata(catalog)  # Loading the whole catalog content.
    head = C.loadheader(catalog)  # Loading the original header.
    mm = data[:, vars]
    em = data[:, evars]
    s2n = data[:, s2n_vars]
    nl = len(mm[:, 0])  # Number of detections in every single band.
    nf = len(mm[0, :])  # Number of bands inside the catalog.
    newmag = U.zeros((nl, nf), float)   # Cleaned magnitudes.
    errmag = U.zeros((nl, nf), float)   # Cleaned uncertainties.
    new_s2n = U.zeros((nl, nf), float)  # Cleaned signal-to-noise.
    for jj in range(len(vars)):
        for ii in range(nl):
            if abs(mm[ii, jj]) > 60.:
                # NaN-like sentinel: flag as non-detection.
                newmag[ii, jj] = -99.0
                errmag[ii, jj] = 0.00
                new_s2n[ii, jj] = -1
            elif s2n[ii, jj] < 0.00001:
                # BUGFIX: the magnitude and its error were previously left
                # at their 0.0 initialisation here, silently zeroing valid
                # photometry; keep the measurements and only reset the S/N.
                newmag[ii, jj] = mm[ii, jj]
                errmag[ii, jj] = em[ii, jj]
                new_s2n[ii, jj] = 0.
            else:
                newmag[ii, jj] = mm[ii, jj]
                errmag[ii, jj] = em[ii, jj]
                new_s2n[ii, jj] = s2n[ii, jj]
    # New values overwrite now the original data.
    data[:, vars] = newmag[:, U.arange(nf)]
    data[:, evars] = errmag[:, U.arange(nf)]
    data[:, s2n_vars] = new_s2n[:, U.arange(nf)]
    C.savedata(data, newname, dir="", header=head)  # Saving the new catalog.
def runZPcal_catalogue(reference, frame, final):
    """
    Photometric zero-point calibration of a frame catalogue against a
    reference catalogue.  The offset is the robust mean of (frame - ref)
    magnitudes (column 12) over sources with 16 <= m_ref <= 19.  Writes:
      * '<final minus ext>png' : diagnostic residuals plot,
      * '<final minus ext>txt' : ASCII file with the derived offset,
      * 'final'                : calibrated catalogue (offset applied only
                                 to magnitudes with |m| < 99).
    ----
    filter_ref_cat, alig_frame_cat, alig_cal_frame_cat
    """
    plots = 1  # Diagnostic figure enabled by default.
    data2 = C.loaddata(frame)  # Loading the whole catalog2 content.
    head2 = C.loadheader(frame)  # Loading the original header2.
    pos_mags = 12  # Column holding the magnitude to calibrate.  ([12,20,21,22])
    mag_r = U.get_data(reference, 12)  # Reference magnitudes.
    mag_f = U.get_data(frame, 12)      # Frame magnitudes.
    # good_sample = U.greater_equal(mag_r,16.) * U.less_equal(mag_r,21.5)
    # Calibration sample: bright but unsaturated reference sources.
    good_sample = U.greater_equal(mag_r, 16.) * U.less_equal(mag_r, 19.)
    mag_r2, mag_f2 = U.multicompress(good_sample, (mag_r, mag_f))
    offset = U.mean_robust(mag_f2 - mag_r2)  # Outlier-resistant mean offset.
    if plots:
        plt.figure(11, figsize=(12, 9), dpi=80, facecolor='w', edgecolor='k')
        plt.clf()
        # Residuals after removing the derived offset (should scatter about 0).
        plt.plot(mag_r, (mag_f - mag_r - offset), 'ko', ms=10, alpha=0.1)
        plt.xlim(16, 25)
        plt.ylim(-5, 5.)
        plt.xlabel('AB', size=25)
        plt.ylabel('Mf-Mr', size=25)
        plt.xticks(fontsize=25)
        plt.yticks(fontsize=25)
        plt.legend(['Offset: %.4f' % (offset)], loc='upper right', numpoints=1)
        plt.title(A.getfilename(frame), size=15)
        plt.grid()
        figurename = final[:-3] + 'png'
        print 'figurename: ', figurename
        plt.savefig(figurename, dpi=100)
        plt.close()
    # Here it saves the offset in an ASCII file.
    fileout = open(final[:-3] + 'txt', 'w')
    linea = '%s %.5f \n' % (final, offset)
    fileout.write(linea)
    fileout.close()
    # The offset is only applied to m != 99. magnitudes (sentinels untouched).
    new_mags = U.where(abs(mag_f) < 99, mag_f - offset, mag_f)
    data2[:, pos_mags] = new_mags
    C.savedata(data2, final, dir="", header=head2)
    print ' '
def flagging_dobledetections_mergecolumns(catalog): """ This serves to append an extra column (each to both inputted catalogs) indicating either a detection was repeated and with the lowest S/N of the two. Sources flagged as 1 are those detections to be excluded when combining both catalogs into a single one. -------- import alhambra_overlap as alhov cat2 = '/Volumes/amb22/catalogos/reduction_v4e/f02/f02p01_colorproext_2_ISO.cat' alhov.flagging_dobledetections_mergecolumns(cat2) """ data = coeio.loaddata(catalog) # Loading the whole catalog content. head = coeio.loadheader(catalog) # Loading the original header. nc = len(data.T) # Number columns dim = len(data[:,0]) # Number elements print 'nc,dim',nc,dim var1 = head[-3].split()[-1] var2 = head[-2].split()[-1] if var1 == var2: print 'Duplicated columns. Merging information...' uno = data[:,72] dos = data[:,73] tres = uno+dos newdata = U.zeros((dim,nc-1),float) for ii in range(nc-1): for jj in range(dim): if ii == nc-1: print 'pepe' newdata[jj,ii] = tres[jj] else: newdata[jj,ii] = data[jj,ii] head2 = head[:-1] head2[-1]='#' outcat = catalog[:-4]+'.mergedcolumns.cat' coeio.savedata(newdata,outcat, dir="",header=head2) # Saving and creating the new catalog. # Renaming files ap.renamefile(catalog,catalog+'.oldold.cat') if not os.path.exists(catalog): ap.renamefile(outcat,catalog)
def remove_detections_bysegmmaps(field, pointing, ccd):
    """
    It uses the segmentation-maps to remove fake detections
    when masking out saturated stars.
    ----
    import alhambra_fakedets as AF
    AF.remove_detections_bysegmmaps(2,1,1)

    NOTE(review): this routine looks unfinished/broken -- see the inline
    notes below: 'datos', 'size' and 'perc' are never defined, the loaded
    segmentation maps are never used, and 'valor' is never updated, so the
    final selection would keep no sources at all.
    """
    root = '/Volumes/amb22/catalogos/reduction_v4f/f0%i/' % (field)
    root2images = '/Volumes/amb22/imagenes/f0%i/' % (field)
    catalog = root + 'f0%ip0%i_colorproext_%i_ISO.irmsF814W.free.cat' % (
        field, pointing, ccd)
    ids, x, y, area = U.get_data(catalog, (0, 3, 4, 5))
    dim = len(ids)
    valor = U.zeros(dim)
    ima1 = root2images + 'f0%ip0%i_F814W_%i.swp.seg.fits' % (field, pointing,
                                                             ccd)
    ima2 = root2images + 'f0%ip0%i_F814W_%i.swp.segnomask.fits' % (
        field, pointing, ccd)
    segm1 = pyfits.open(ima1)[0].data  # NOTE(review): loaded but never used.
    segm2 = pyfits.open(ima2)[0].data  # NOTE(review): loaded but never used.
    for ii in range(dim):
        xo = x[ii]
        yo = y[ii]
        # NOTE(review): 'datos', 'size' and 'perc' are undefined here
        # (NameError at runtime); this loop body appears to be copied from
        # remove_fakeabsorptions_F814W and was never adapted to use the
        # segmentation maps.
        dimx = U.shape(datos[yo - size:yo + size, xo - size:xo + size])[1]
        dimy = U.shape(datos[yo - size:yo + size, xo - size:xo + size])[0]
        perc[ii] = (datos[yo - size:yo + size, xo - size:xo + size].sum() /
                    (dimx * dimy * 1.))
    # Defining the sample to be keep.
    # NOTE(review): 'valor' is never modified after its zeros
    # initialisation, so 'good' is all-False and every detection would be
    # excluded.
    good = U.greater(valor, 0)
    idr = U.compress(good, ids)
    dim2 = len(idr)
    print 'Dimensions: Original: %i, Final: %i, Excluded: %i detections. ' % (
        dim, dim2, dim - dim2)
    # NOTE(review): 'finalcat' is the same file name as 'catalog' -- the
    # input catalogue gets overwritten in place; confirm this is intended.
    finalcat = root + 'f0%ip0%i_colorproext_%i_ISO.irmsF814W.free.cat' % (
        field, pointing, ccd)
    data1 = coeio.loaddata(catalog)  # Loading the whole catalog content.
    head = coeio.loadheader(catalog)
    data2 = data1[good, :]
    coeio.savedata(data2, finalcat, dir="", header=head)  # Saving new catalog.
def remove_fakeabsorptions_F814W(field, pointing, ccd):
    """
    Using the rmsweight images, it gets rid of detections
    with imrs_F814W < 0.5.
    -------------------------------------------
    import alhambra_fakedets as AF
    AF.remove_fakeabsorptions_F814W(2,1,1)
    """
    root = '/Volumes/amb22/catalogos/reduction_v4f/f0%i/' % (field)
    catalog = root + 'f0%ip0%i_colorproext_%i_ISO.cat' % (field, pointing,
                                                          ccd)
    ids, x, y, area = U.get_data(catalog, (0, 3, 4, 5))
    dim = len(ids)
    perc = U.zeros(dim)  # Mean inverse-rms weight around each detection.
    # Opening F814W Weight image (last entry of the inverse-rms image list).
    ima = alh.alhambra_invrmsimagelist(field, pointing, ccd)[-1]
    datos = pyfits.open(ima)[0].data
    for ii in range(dim):
        if area[ii] > 1:
            # Box half-size scales with the isophotal area of the source.
            size = int(round(U.sqrt(area[ii]) / 2.))
            xo = x[ii]
            yo = y[ii]
            # Actual box dimensions (may be clipped at the image borders).
            dimx = U.shape(datos[yo - size:yo + size, xo - size:xo + size])[1]
            dimy = U.shape(datos[yo - size:yo + size, xo - size:xo + size])[0]
            # Mean weight inside the box around the detection.
            perc[ii] = (datos[yo - size:yo + size, xo - size:xo + size].sum() /
                        (dimx * dimy * 1.))
    # Defining the sample to be keep.
    # NOTE(review): sources with area <= 1 keep perc == 0 and are therefore
    # always excluded below -- confirm this is intended.
    good = U.greater(perc, 0.5)  # Keep detections on > 0.5 mean weight.
    idr = U.compress(good, ids)
    dim2 = len(idr)
    print 'Dimensions: Original: %i, Final: %i, Excluded: %i detections. ' % (
        dim, dim2, dim - dim2)
    finalcat = root + 'f0%ip0%i_colorproext_%i_ISO.irmsF814W.free.cat' % (
        field, pointing, ccd)
    data1 = coeio.loaddata(catalog)  # Loading the whole catalog content.
    head = coeio.loadheader(catalog)
    data2 = data1[good, :]  # Row-wise selection of the surviving detections.
    coeio.savedata(data2, finalcat, dir="", header=head)  # Saving new catalog.
if cat_models[new_stars[jjj]] == models[sss]: modelo = sss+1 if verbose: print 'model_cat: ',cat_models[new_stars[jjj]] print 'model_lib ', models[sss] raw_input('paused') mr_orig_sim = datos[new_stars[jjj],ref_column] dm = (mr_reference-mr_orig_sim) final_mat[kkk,:] = datos[new_stars[jjj],:] final_mat[kkk,nadd:nadd+n_filters] += dm final_mat[kkk,0] = modelo if verbose: print 'new_star: ',new_stars[jjj] print 'mr_reference',mr_reference print 'mr_orig_sim',mr_orig_sim print 'dm: ',dm print 'before' print datos[new_stars[jjj],:] print 'after' print final_mat[kkk,:] kkk += 1 # Saving new file C.savedata(final_mat,new_catalog, dir="",header=head)
nt = len(tiles_name) tiles = N.zeros(nt) for ss in range(nt): tiles[ss] = int(tiles_name[ss].split('-')[-1]) """ tiles = U.get_data(mastercat, 0) # use get_str with original cat! single_tiles = N.unique(tiles).astype(int) n_single_tiles = len(single_tiles) # Reading entire catalogue. header_mastercat = C.loadheader(mastercat) data_mastercat = C.loaddata(mastercat) for ii in range(n_single_tiles): tile_cat = root2cat + 'tile%i_S82.cat' % (single_tiles[ii]) good = N.equal(tiles, single_tiles[ii]) n_gals = len(data_mastercat[good, 1]) if not os.path.exists(tile_cat): """ fwhm = data_mastercat[good,9] magr = data_mastercat[good,82] # Petro seeing,stars = sct.get_seeing_from_data_pro(fwhm,magr) fwhm_norm = fwhm /(1.*seeing) data_mastercat[good,9] = fwhm_norm data_mastercat[good,0] = N.ones(n_gals) * int(single_tiles[ii]) """ C.savedata(data_mastercat[good, :], tile_cat, dir="", header=header_mastercat)
def replace_fakeabsorptions_pro(field, pointing, ccd):
    """
    Updated version to get rid of detections with imrs_* < 0.5 setting
    their magnitudes to m=-99, em=0.0.
    Additionally, it removes detections with imrs_F814W == 0.0
    ----------------------------------------------------------------------------
    It replaces failed magnitudes in the ALHAMBRA catalogues (artificial
    absorptions, non-observed sources assigned as non-detected with upper
    limits) by m=-99, em=99.
    It might decrease the amount of low Odds at bright magnitudes.
    ----
    import alhambra_photools as A
    A.replace_fakeabsorptions_pro(2,1,2)
    """
    plots = 1  # NOTE(review): set but never used in this routine.
    root = '/Volumes/amb22/catalogos/reduction_v4f/f0%i/' % (field)
    catalog = root + 'f0%ip0%i_colorproext_%i_ISO.irmsF814W.free.cat' % (
        field, pointing, ccd)
    catweight = root + 'f0%ip0%i_ColorProBPZ_%i_ISO.rmsweights.dat' % (
        field, pointing, ccd)
    dataweight = coeio.loaddata(catweight)  # Per-source, per-filter weights.
    # print U.shape(dataweight)
    # ids,x,y,area = U.get_data(catalog,(0,3,4,5))
    cols1 = root + 'f0%ip0%i_%i_tot_ISO_eB10.columns' % (field, pointing, ccd)
    cols2 = root + 'f0%ip0%i_colorproext_%i_ISO_phz_eB10.columns' % (
        field, pointing, ccd)
    # Prefer the 'tot' columns file when it exists; fall back otherwise.
    if os.path.exists(cols1):
        columns = cols1
    else:
        columns = cols2
    data = coeio.loaddata(catalog)  # Loading the whole catalog content.
    head = coeio.loadheader(catalog)  # Loading the original header.
    vars, evars, posref, zpe, zpo = alh.get_usefulcolumns(columns)
    # print 'dim vars', len(vars)
    mags = data[:, vars]  # NOTE(review): extracted but not used below.
    nl = U.shape(data)[
        0]  # nl is the number of detections inside every single band.
    nf = len(vars)  # nf is the number of bands inside the catalog.
    # print 'nl,nf: ',nl,nf
    kk = 0  # Total number of replaced magnitudes.
    for jj in range(nl):
        filtoff = 0  # Number of filters switched off for this source.
        for ii in range(nf):
            pos_mag = vars[ii]    # Magnitude column for this filter.
            pos_emag = evars[ii]  # Matching error column.
            if dataweight[jj, ii] < 0.5:
                # Low-weight photometry: flag as non-observed (m=-99, em=0).
                # print data[jj,pos_mag],pos_mag,pos_emag
                data[jj, pos_mag] = -99.0000
                data[jj, pos_emag] = 0.0000
                # Column 67 is decremented per switched-off filter --
                # presumably an observed-filters counter; TODO confirm.
                data[jj, 67] -= 1
                kk += 1
                filtoff += 1
                # print data[jj,0]
                # print data[jj,pos_mag]
        # if filtoff > 0:
        #    print '%i excluded for detection %i: '%(filtoff,data[jj,0])
        #    # pausa = raw_input('paused')
    print 'Replaced %i magnitudes. ' % (kk)
    # New values of mags error overwrites now the original data.
    finalcatalog = root + 'f0%ip0%i_colorproext_%i_ISO.test.cat' % (
        field, pointing, ccd)
    coeio.savedata(data, finalcatalog, dir="",
                   header=head)  # Saving & creating a new catalog.
#n_single_tiles = 1 for ii in range(n_single_tiles): tile_jplus_spz_upl_cat = root2cats + 'tile_%i_jplus2sdssdr12_z05.ecor.upp.cat' % ( single_tiles[ii]) good = N.equal(tiles, single_tiles[ii]) n_gals = len(data_jplus[good, 0]) if n_gals > 50: if not os.path.exists(tile_jplus_spz_upl_cat): tile_jplus_spz_cat = root2cats + 'tile_%i_jplus2sdssdr12_z05.cat' % ( single_tiles[ii]) # Individual (tile) catalogue. if not os.path.exists(tile_jplus_spz_cat): good = N.equal(tiles, single_tiles[ii]) C.savedata(data_jplus[good, :], tile_jplus_spz_cat, dir="", header=header_jplus) tile_jplus_spz_minerr_cat = root2cats + 'tile_%i_jplus2sdssdr12_z05.ecor.cat' % ( single_tiles[ii]) if not os.path.exists(tile_jplus_spz_minerr_cat): ct.minimum_photouncert(tile_jplus_spz_cat, jplus_spz_columns) # Including upper-limits if necessary. ct.replace_photo_uncert(tile_jplus_spz_minerr_cat, jplus_spz_columns) ### ZP-recalibration cali_columns = tile_jplus_spz_upl_cat[:-3] + 'cali.columns' if not os.path.exists(cali_columns):
datos_redu = datos_main[good_color, :] #print N.shape(datos_redu) #pausa = raw_input('paused') # Selecting 'n_new_stars' random models. new_stars = N.random.random_integers(0, new_dim_gr_models - 1, int(final_density_gr[sss])) n_new_stars = int(final_density_gr[sss]) for jjj in range(n_new_stars): mr_orig_sim = datos_redu[new_stars[jjj], ref_column] dm = (mr_reference - mr_orig_sim) final_mat[kkk, :] = datos_redu[new_stars[jjj], :] final_mat[kkk, nadd:nadd + n_filters] += dm final_mat[kkk, 0] = datos_redu[new_stars[jjj], 0] if verbose: print 'new_star: ', new_stars[jjj] print 'mr_reference', mr_reference print 'mr_orig_sim', mr_orig_sim print 'dm: ', dm print 'before' print datos_main[new_stars[jjj], :] print 'after' print final_mat[kkk, :] kkk += 1 # Saving new file C.savedata(final_mat[0:kkk, :], new_catalog, dir="", header=head_main)